/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple-expr.h"
#include "cfghooks.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "rtl-error.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "optabs-tree.h"
#include "output.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "gimplify.h"
#include "tree-pass.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "cfgexpand.h"
#include "shrink-wrap.h"
#include "toplev.h"
#include "rtl-iter.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "stringpool.h"
#include "attribs.h"
#include "gimple.h"
#include "options.h"

/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
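
/* Illustrative examples (not part of the original source): with a
   power-of-two ALIGN such as 8, FLOOR_ROUND (13, 8) == 8 and
   CEIL_ROUND (13, 8) == 16.  The masking form also behaves correctly
   for negative values in two's complement, e.g. FLOOR_ROUND (-13, 8)
   == -16, whereas plain division would truncate toward zero and
   yield -8.  */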

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These hashes record the prologue and epilogue insns.  */

struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
  static bool equal (rtx a, rtx b) { return a == b; }
};

static GTY((cache))
  hash_table<insn_cache_hasher> *prologue_insn_hash;
static GTY((cache))
  hash_table<insn_cache_hasher> *epilogue_insn_hash;
\f

hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
vec<tree, va_gc> *types_used_by_cur_var_decl;

/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, machine_mode, tree);
static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
     ATTRIBUTE_UNUSED;
static bool contains (const rtx_insn *, hash_table<insn_cache_hasher> *);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);

\f
/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

static vec<function *> function_context_stack;

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  function_context_stack.safe_push (cfun);
  set_cfun (NULL);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = function_context_stack.pop ();
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;
  f->curr_properties &= ~PROP_cfg;

  regno_reg_rtx = NULL;
}
\f
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

poly_int64
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}

/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetics for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (poly_int64 offset, tree func)
{
  poly_uint64 size = FRAME_GROWS_DOWNWARD ? -offset : offset;
  unsigned HOST_WIDE_INT limit
    = ((HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
       /* Leave room for the fixed part of the frame.  */
       - 64 * UNITS_PER_WORD);

  if (!coeffs_in_range_p (size, 0U, limit))
    {
      unsigned HOST_WIDE_INT hwisize;
      if (size.is_constant (&hwisize))
        error_at (DECL_SOURCE_LOCATION (func),
                  "total size of local objects %wu exceeds maximum %wu",
                  hwisize, limit);
      else
        error_at (DECL_SOURCE_LOCATION (func),
                  "total size of local objects exceeds maximum %wu",
                  limit);
      return true;
    }

  return false;
}
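
/* Worked example (illustrative, not from the original source): on a
   64-bit target where Pmode is a 64-bit mode and UNITS_PER_WORD is 8,
   the limit above is 2^63 - 512 bytes; a frame whose locals exceed
   that triggers the "total size of local objects" diagnostic instead
   of silently wrapping the signed frame offset.  */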

/* Return the minimum spill slot alignment for a register of mode MODE.  */

unsigned int
spill_slot_alignment (machine_mode mode ATTRIBUTE_UNUSED)
{
  return STACK_SLOT_ALIGNMENT (NULL_TREE, mode, GET_MODE_ALIGNMENT (mode));
}

/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front-end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}

/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (poly_int64 start, poly_int64 length,
                     poly_int64 size, unsigned int alignment,
                     poly_int64_pod *poffset)
{
  poly_int64 this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = targetm.starting_frame_offset () % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (aligned_lower_bound (start + length - size - frame_phase, alignment)
         + frame_phase);
  else
    this_frame_offset
      = aligned_upper_bound (start - frame_phase, alignment) + frame_phase;

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (maybe_lt (this_frame_offset, start))
    {
      if (known_eq (frame_offset, start))
        frame_offset = this_frame_offset;
      else
        return false;
    }
  else if (maybe_gt (this_frame_offset + size, start + length))
    {
      if (known_eq (frame_offset, start + length))
        frame_offset = this_frame_offset + size;
      else
        return false;
    }

  *poffset = this_frame_offset;
  return true;
}
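
/* Numeric sketch (illustrative, not from the original source): with
   FRAME_GROWS_DOWNWARD and a frame_phase of 0, fitting a 4-byte slot
   with 8-byte alignment into the free area starting at -24 with
   length 12 rounds start + length - size = -16 down to -16, so
   *POFFSET becomes -16; the slot occupies [-16, -12) and the caller
   can record the bytes at [-24, -16) as remaining free space.  */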

/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (poly_int64 start, poly_int64 end)
{
  struct frame_space *space = ggc_alloc<frame_space> ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local_1 (machine_mode mode, poly_int64 size,
                      int align, int kind)
{
  rtx x, addr;
  poly_int64 bigend_correction = 0;
  poly_int64 slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = aligned_upper_bound (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = MAX_SUPPORTED_STACK_ALIGNMENT / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
        {
          if (!crtl->stack_realign_processed)
            crtl->stack_alignment_estimated = alignment_in_bits;
          else
            {
              /* If stack is realigned and stack alignment value
                 hasn't been finalized, it is OK not to increase
                 stack_alignment_estimated.  The bigger alignment
                 requirement is recorded in stack_alignment_needed
                 below.  */
              gcc_assert (!crtl->stack_realign_finalized);
              if (!crtl->stack_realign_needed)
                {
                  /* It is OK to reduce the alignment as long as the
                     requested size is 0 or the estimated stack
                     alignment >= mode alignment.  */
                  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
                              || known_eq (size, 0)
                              || (crtl->stack_alignment_estimated
                                  >= GET_MODE_ALIGNMENT (mode)));
                  alignment_in_bits = crtl->stack_alignment_estimated;
                  alignment = alignment_in_bits / BITS_PER_UNIT;
                }
            }
        }
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || maybe_ne (size, 0))
    {
      if (kind & ASLK_RECORD_PAD)
        {
          struct frame_space **psp;

          for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
            {
              struct frame_space *space = *psp;
              if (!try_fit_stack_local (space->start, space->length, size,
                                        alignment, &slot_offset))
                continue;
              *psp = space->next;
              if (known_gt (slot_offset, space->start))
                add_frame_space (space->start, slot_offset);
              if (known_lt (slot_offset + size, space->start + space->length))
                add_frame_space (slot_offset + size,
                                 space->start + space->length);
              goto found_space;
            }
        }
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (known_gt (slot_offset, frame_offset))
            add_frame_space (frame_offset, slot_offset);
          if (known_lt (slot_offset + size, old_frame_offset))
            add_frame_space (slot_offset + size, old_frame_offset);
        }
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (known_gt (slot_offset, old_frame_offset))
            add_frame_space (old_frame_offset, slot_offset);
          if (known_lt (slot_offset + size, frame_offset))
            add_frame_space (slot_offset + size, frame_offset);
        }
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (mode != BLKmode)
    {
      /* The slot size can sometimes be smaller than the mode size;
         e.g. the rs6000 port allocates slots with a vector mode
         that have the size of only one element.  However, the slot
         size must always be ordered wrt to the mode size, in the
         same way as for a subreg.  */
      gcc_checking_assert (ordered_p (GET_MODE_SIZE (mode), size));
      if (BYTES_BIG_ENDIAN && maybe_lt (GET_MODE_SIZE (mode), size))
        bigend_correction = size - GET_MODE_SIZE (mode);
    }

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (Pmode, frame_pointer_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction
                           + targetm.starting_frame_offset (), Pmode));
  else
    addr = plus_constant (Pmode, virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction,
                           Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  vec_safe_push (stack_slot_list, x);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}

/* Wrapper around assign_stack_local_1 that always records alignment
   padding by passing ASLK_RECORD_PAD for KIND.  */

rtx
assign_stack_local (machine_mode mode, poly_int64 size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
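
/* Usage sketch (illustrative, not from the original source): a pass
   that needs a word-sized stack slot with the mode's natural
   alignment might write

     rtx slot = assign_stack_local (word_mode,
                                    GET_MODE_SIZE (word_mode), 0);

   and receive a MEM rtx whose address is based on
   virtual_stack_vars_rtx until virtual registers are instantiated.  */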
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.  */

struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  poly_int64 size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  poly_int64 base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  poly_int64 full_size;
};

/* Entry for the below hash table.  */
struct GTY((for_user)) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};

struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
{
  static hashval_t hash (temp_slot_address_entry *);
  static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
};

/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
static size_t n_temp_slots_in_use;

/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) vec_safe_length (used_temp_slots))
    vec_safe_grow_cleared (used_temp_slots, level + 1);

  return &(*used_temp_slots)[level];
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return used_temp_slots->length () - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
  n_temp_slots_in_use--;
}

/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
                   &do_not_record, NULL, false);
}

/* Return the hash value for an address -> temp slot mapping.  */
hashval_t
temp_address_hasher::hash (temp_slot_address_entry *t)
{
  return t->hash;
}

/* Compare two address -> temp slot mapping entries.  */
bool
temp_address_hasher::equal (temp_slot_address_entry *t1,
                            temp_slot_address_entry *t2)
{
  return exp_equiv_p (t1->address, t2->address, 0, true);
}

/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
}

/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
int
remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
{
  const struct temp_slot_address_entry *t = *slot;
  if (! t->temp_slot->in_use)
    temp_slot_address_table->clear_slot (slot);
  return 1;
}

/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  /* Use quicker clearing if there aren't any active temp slots.  */
  if (n_temp_slots_in_use)
    temp_slot_address_table->traverse
      <void *, remove_unused_temp_slot_addresses_1> (NULL);
  else
    temp_slot_address_table->empty ();
}

/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  poly_int64 offset;
  if (strip_offset (x, &offset) == virtual_stack_vars_rtx)
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
        for (p = *temp_slots_at_level (i); p; p = p->next)
          if (known_in_range_p (offset, p->base_offset, p->full_size))
            return p;
    }

  return NULL;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (machine_mode mode, poly_int64 size, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  gcc_assert (known_size_p (size));

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
        {
          if (p->align >= align
              && known_ge (p->size, size)
              && GET_MODE (p->slot) == mode
              && objects_must_conflict_p (p->type, type)
              && (best_p == 0
                  || (known_eq (best_p->size, p->size)
                      ? best_p->align > p->align
                      : known_ge (best_p->size, p->size))))
            {
              if (p->align == align && known_eq (p->size, size))
                {
                  selected = p;
                  cut_slot_from_list (selected, &avail_temp_slots);
                  best_p = 0;
                  break;
                }
              best_p = p;
            }
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          poly_int64 rounded_size = aligned_upper_bound (size, alignment);

          if (known_ge (best_p->size - rounded_size, alignment))
            {
              p = ggc_alloc<temp_slot> ();
              p->in_use = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
              p->align = best_p->align;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              vec_safe_push (stack_slot_list, p->slot);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      poly_int64 frame_offset_old = frame_offset;

      p = ggc_alloc<temp_slot> ();

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
                                      (mode == BLKmode
                                       ? aligned_upper_bound (size,
                                                              (int) align
                                                              / BITS_PER_UNIT)
                                       : size),
                                      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
        p->size = frame_offset_old - frame_offset;
      else
        p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
        {
          p->base_offset = frame_offset;
          p->full_size = frame_offset_old - frame_offset;
        }
      else
        {
          p->base_offset = frame_offset_old;
          p->full_size = frame_offset - frame_offset_old;
        }

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->type = type;
  p->level = temp_slot_level;
  n_temp_slots_in_use++;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  vec_safe_push (stack_slot_list, slot);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First two arguments are same as in preceding function.  */

rtx
assign_stack_temp (machine_mode mode, poly_int64 size)
{
  return assign_stack_temp_for_type (mode, size, NULL_TREE);
}
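
/* Usage sketch (illustrative, not from the original source): expansion
   code that needs scratch stack memory for an SImode value can request

     rtx t = assign_stack_temp (SImode, GET_MODE_SIZE (SImode));

   and, once the enclosing statement has been expanded, free_temp_slots
   returns the slot to the free list, where a later request of
   compatible mode, size and alignment may reuse it.  */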
\f
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front
     end.  See also create_tmp_var for the gimplification-time check.  */
  gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));

  if (mode == BLKmode || memory_required)
    {
      poly_int64 size;
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we can find a fixed upper limit on
         the size, so try that instead.  */
      if (!poly_int_tree_p (TYPE_SIZE_UNIT (type), &size))
        size = max_int_size_in_bytes (type);

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (known_eq (size, 0))
        size = 1;

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl
          && !known_size_p (size)
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("size of variable %q+D is too large", decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}
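
/* Usage sketch (illustrative, not from the original source): to
   materialize the value of a tree TYPE in a scratch location, a caller
   can write

     rtx target = assign_temp (type, /-memory_required-/ 1, 0);

   (slashes doubled here only to keep this comment well-formed), which
   yields a MEM even for a scalar type; with MEMORY_REQUIRED of 0 a
   fresh pseudo register would be returned instead, subject to
   PROMOTE_MODE widening.  */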
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (known_eq (p->base_offset + p->full_size, q->base_offset))
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (known_eq (q->base_offset + q->full_size, p->base_offset))
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
\f
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
     in common between them; if so, try a recursive call on those
     values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
        return;

      if (REG_P (new_rtx))
        {
          update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
          update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
          return;
        }
      else if (GET_CODE (new_rtx) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}
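
/* Example (illustrative, not from the original source): if a temp slot
   was recorded under the address (plus virtual_stack_vars_rtx -16) and
   a later transformation rewrites that address into a new pseudo REG,
   the lookup above finds the slot from the old address and
   insert_temp_slot_address records the REG as an alias, so
   find_temp_slot_from_address keeps resolving references to the slot.  */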

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  if (x == 0)
    return;

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    return;

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      if (p->level == temp_slot_level)
        move_slot_to_level (p, temp_slot_level - 1);
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      move_slot_to_level (p, temp_slot_level - 1);
    }
}

/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  free_temp_slots ();
  temp_slot_level--;
}
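
/* Nesting sketch (illustrative, not from the original source): callers
   typically bracket the expansion of one statement as

     push_temp_slots ();
     ... expand the statement, allocating temporaries ...
     preserve_temp_slots (result);   [only if RESULT must outlive the level]
     pop_temp_slots ();

   so temporaries are freed with their level unless explicitly moved up
   by preserve_temp_slots.  */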

/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  vec_alloc (used_temp_slots, 0);
  temp_slot_level = 0;
  n_temp_slots_in_use = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
  else
    temp_slot_address_table->empty ();
}
\f
/* Functions and data structures to keep track of the values hard regs
   had at the start of the function.  */

/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
   and has_hard_reg_initial_val.  */
struct GTY(()) initial_value_pair {
  rtx hard_reg;
  rtx pseudo;
};
/* ???  This could be a VEC but there is currently no way to define an
   opaque VEC type.  This could be worked around by defining struct
   initial_value_pair in function.h.  */
struct GTY(()) initial_value_struct {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
};

/* If a pseudo represents an initial hard reg (or expression), return
   it, else return NULL_RTX.  */

rtx
get_hard_reg_initial_reg (rtx reg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}

/* Make sure that there's a pseudo register of mode MODE that stores the
   initial value of hard register REGNO.  Return an rtx for such a pseudo.  */

rtx
get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  rtx rv;

  rv = has_hard_reg_initial_val (mode, regno);
  if (rv)
    return rv;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs == 0)
    {
      ivs = ggc_alloc<initial_value_struct> ();
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
      crtl->hard_reg_initial_vals = ivs;
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
                                    ivs->max_entries);
    }

  ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);

  return ivs->entries[ivs->num_entries++].pseudo;
}
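
/* Usage sketch (illustrative, not from the original source): a target
   that wants the value the link register had on entry, e.g. to
   implement __builtin_return_address, can ask for

     rtx lr = get_hard_reg_initial_val (Pmode, LR_REGNUM);

   where LR_REGNUM is a stand-in for the target's register number; the
   copy from the hard reg into the returned pseudo is emitted at
   function entry by emit_initial_value_sets below.  */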

/* See if get_hard_reg_initial_val has been used to create a pseudo
   for the initial value of hard register REGNO in mode MODE.  Return
   the associated pseudo if so, otherwise return NULL.  */

rtx
has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  int i;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs != 0)
    for (i = 0; i < ivs->num_entries; i++)
      if (GET_MODE (ivs->entries[i].hard_reg) == mode
          && REGNO (ivs->entries[i].hard_reg) == regno)
        return ivs->entries[i].pseudo;

  return NULL_RTX;
}

unsigned int
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;
  rtx_insn *seq;

  if (ivs == 0)
    return 0;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_at_entry (seq);
  return 0;
}

/* Return the hardreg-pseudoreg initial values pair entry I and
   TRUE if I is a valid entry, or FALSE if I is not a valid entry.  */
bool
initial_value_entry (int i, rtx *hreg, rtx *preg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  if (!ivs || i >= ivs->num_entries)
    return false;

  *hreg = ivs->entries[i].hard_reg;
  *preg = ivs->entries[i].pseudo;
  return true;
}
\f
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static poly_int64 in_arg_offset;
static poly_int64 var_offset;
static poly_int64 dynamic_offset;
static poly_int64 out_arg_offset;
static poly_int64 cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

#if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
#define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#ifdef INCOMING_REG_PARM_STACK_SPACE
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS \
  ? (crtl->outgoing_args_size \
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
        : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
  ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : poly_int64 (0)) \
   + (STACK_POINTER_OFFSET))
#endif
#endif

\f
/* Given a piece of RTX and a pointer to a poly_int64, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, poly_int64_pod *poffset)
{
  rtx new_rtx;
  poly_int64 offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
        {
          /* Replace virtual_incoming_args_rtx with internal arg
             pointer if DRAP is used to realign stack.  */
          new_rtx = crtl->args.internal_arg_pointer;
          offset = 0;
        }
      else
        new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else if (x == virtual_preferred_stack_boundary_rtx)
    {
      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
      offset = 0;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}
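
/* Example (illustrative, not from the original source): once the
   offsets are computed, a reference such as
   (plus virtual_stack_vars_rtx -8) is rewritten by the code below to
   (plus frame_pointer_rtx (var_offset - 8)), i.e. the virtual register
   is replaced by its hard register plus the recorded displacement.  */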

/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of *LOC.  The expression is simplified,
   as much as possible, but is not to be considered "valid" in any sense
   implied by the target.  Return true if any change is made.  */

static bool
instantiate_virtual_regs_in_rtx (rtx *loc)
{
  if (!*loc)
    return false;
  bool changed = false;
  subrtx_ptr_iterator::array_type array;
  FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
    {
      rtx *loc = *iter;
      if (rtx x = *loc)
        {
          rtx new_rtx;
          poly_int64 offset;
          switch (GET_CODE (x))
            {
            case REG:
              new_rtx = instantiate_new_reg (x, &offset);
              if (new_rtx)
                {
                  *loc = plus_constant (GET_MODE (x), new_rtx, offset);
                  changed = true;
                }
              iter.skip_subrtxes ();
              break;

            case PLUS:
              new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
              if (new_rtx)
                {
                  XEXP (x, 0) = new_rtx;
                  *loc = plus_constant (GET_MODE (x), x, offset, true);
                  changed = true;
                  iter.skip_subrtxes ();
                  break;
                }

              /* FIXME -- from old code */
              /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
                 we can commute the PLUS and SUBREG because pointers into the
                 frame are well-behaved.  */
              break;

            default:
              break;
            }
        }
    }
  return changed;
}

/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static int
safe_insn_predicate (int code, int operand, rtx x)
{
  return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
}

/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx_insn *insn)
{
  poly_int64 offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new_rtx, x;
  rtx_insn *seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
         to mean that the underlying register gets assigned the inverse
         transformation.  This is used, for example, in the handling of
         non-local gotos.  */
      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
      if (new_rtx)
        {
          start_sequence ();

          instantiate_virtual_regs_in_rtx (&SET_SRC (set));
          x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
                                   gen_int_mode (-offset, GET_MODE (new_rtx)));
          x = force_operand (x, new_rtx);
          if (x != new_rtx)
            emit_move_insn (new_rtx, x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      /* Handle a straight copy from a virtual register by generating a
         new add insn.  The difference between this and falling through
         to the generic case is avoiding a new pseudo and eliminating a
         move insn in the initial rtl stream.  */
      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
      if (new_rtx
          && maybe_ne (offset, 0)
          && REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
        {
          start_sequence ();

          x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
                                   gen_int_mode (offset,
                                                 GET_MODE (SET_DEST (set))),
                                   SET_DEST (set), 1, OPTAB_LIB_WIDEN);
          if (x != SET_DEST (set))
            emit_move_insn (SET_DEST (set), x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
         operands remain valid if they're modified in place.  */
      poly_int64 delta;
      if (GET_CODE (SET_SRC (set)) == PLUS
          && recog_data.n_operands >= 3
          && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
          && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
          && poly_int_rtx_p (recog_data.operand[2], &delta)
          && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
        {
          offset += delta;

          /* If the sum is zero, then replace with a plain move.  */
          if (known_eq (offset, 0)
              && REG_P (SET_DEST (set))
              && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
            {
              start_sequence ();
              emit_move_insn (SET_DEST (set), new_rtx);
              seq = get_insns ();
              end_sequence ();

              emit_insn_before (seq, insn);
              delete_insn (insn);
              return;
            }

          x = gen_int_mode (offset, recog_data.operand_mode[2]);

          /* Using validate_change and apply_change_group here leaves
             recog_data in an invalid state.  Since we know exactly what
             we want to check, do those two by hand.  */
          if (safe_insn_predicate (insn_code, 1, new_rtx)
              && safe_insn_predicate (insn_code, 2, x))
            {
              *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
              *recog_data.operand_loc[2] = recog_data.operand[2] = x;
              any_change = true;

              /* Fall through into the regular operand fixup loop in
                 order to take care of operands other than 1 and 2.  */
            }
        }
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
        {
        case MEM:
          {
            rtx addr = XEXP (x, 0);

            if (!instantiate_virtual_regs_in_rtx (&addr))
              continue;

            start_sequence ();
            x = replace_equiv_address (x, addr, true);
            /* It may happen that the address with the virtual reg
               was valid (e.g. based on the virtual stack reg, which might
               be acceptable to the predicates with all offsets), whereas
               the address now isn't anymore, for instance when the address
               is still offsetted, but the base reg isn't virtual-stack-reg
               anymore.  Below we would do a force_reg on the whole operand,
               but this insn might actually only accept memory.  Hence,
               before doing that last resort, try to reload the address into
               a register, so this operand stays a MEM.  */
            if (!safe_insn_predicate (insn_code, i, x))
              {
                addr = force_reg (GET_MODE (addr), addr);
                x = replace_equiv_address (x, addr, true);
              }
            seq = get_insns ();
            end_sequence ();
            if (seq)
              emit_insn_before (seq, insn);
          }
          break;

        case REG:
          new_rtx = instantiate_new_reg (x, &offset);
          if (new_rtx == NULL)
            continue;
          if (known_eq (offset, 0))
            x = new_rtx;
          else
            {
              start_sequence ();

              /* Careful, special mode predicates may have stuff in
                 insn_data[insn_code].operand[i].mode that isn't useful
                 to us for computing a new value.  */
              /* ??? Recognize address_operand and/or "p" constraints
                 to see if (plus new offset) is valid before we put
                 this through expand_simple_binop.  */
1711 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1712 gen_int_mode (offset, GET_MODE (x)),
1713 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1714 seq = get_insns ();
1715 end_sequence ();
1716 emit_insn_before (seq, insn);
1717 }
1718 break;
1719
1720 case SUBREG:
1721 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1722 if (new_rtx == NULL)
1723 continue;
1724 if (maybe_ne (offset, 0))
1725 {
1726 start_sequence ();
1727 new_rtx = expand_simple_binop
1728 (GET_MODE (new_rtx), PLUS, new_rtx,
1729 gen_int_mode (offset, GET_MODE (new_rtx)),
1730 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1731 seq = get_insns ();
1732 end_sequence ();
1733 emit_insn_before (seq, insn);
1734 }
1735 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1736 GET_MODE (new_rtx), SUBREG_BYTE (x));
1737 gcc_assert (x);
1738 break;
1739
1740 default:
1741 continue;
1742 }
1743
1744 /* At this point, X contains the new value for the operand.
1745 Validate the new value vs the insn predicate. Note that
1746 asm insns will have insn_code -1 here. */
1747 if (!safe_insn_predicate (insn_code, i, x))
1748 {
1749 start_sequence ();
1750 if (REG_P (x))
1751 {
1752 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1753 x = copy_to_reg (x);
1754 }
1755 else
1756 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1757 seq = get_insns ();
1758 end_sequence ();
1759 if (seq)
1760 emit_insn_before (seq, insn);
1761 }
1762
1763 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1764 any_change = true;
1765 }
1766
1767 if (any_change)
1768 {
1769 /* Propagate operand changes into the duplicates. */
1770 for (i = 0; i < recog_data.n_dups; ++i)
1771 *recog_data.dup_loc[i]
1772 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1773
1774 /* Force re-recognition of the instruction for validation. */
1775 INSN_CODE (insn) = -1;
1776 }
1777
1778 if (asm_noperands (PATTERN (insn)) >= 0)
1779 {
1780 if (!check_asm_operands (PATTERN (insn)))
1781 {
1782 error_for_asm (insn, "impossible constraint in %<asm%>");
1783 /* For asm goto, instead of fixing up all the edges
1784 just clear the template and clear input operands
1785 (asm goto doesn't have any output operands). */
1786 if (JUMP_P (insn))
1787 {
1788 rtx asm_op = extract_asm_operands (PATTERN (insn));
1789 ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1790 ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1791 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1792 }
1793 else
1794 delete_insn (insn);
1795 }
1796 }
1797 else
1798 {
1799 if (recog_memoized (insn) < 0)
1800 fatal_insn_not_found (insn);
1801 }
1802 }
1803
1804 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1805 do any instantiation required. */
1806
1807 void
1808 instantiate_decl_rtl (rtx x)
1809 {
1810 rtx addr;
1811
1812 if (x == 0)
1813 return;
1814
1815 /* If this is a CONCAT, recurse for the pieces. */
1816 if (GET_CODE (x) == CONCAT)
1817 {
1818 instantiate_decl_rtl (XEXP (x, 0));
1819 instantiate_decl_rtl (XEXP (x, 1));
1820 return;
1821 }
1822
1823 /* If this is not a MEM, no need to do anything. Similarly if the
1824 address is a constant or a register that is not a virtual register. */
1825 if (!MEM_P (x))
1826 return;
1827
1828 addr = XEXP (x, 0);
1829 if (CONSTANT_P (addr)
1830 || (REG_P (addr)
1831 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1832 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1833 return;
1834
1835 instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
1836 }
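/* As an illustrative sketch (offsets are target-dependent): a local
   variable whose DECL_RTL is

       (mem:SI (plus:DI (reg:DI virtual-stack-vars) (const_int 8)))

   has its address rewritten by instantiate_virtual_regs_in_rtx into

       (mem:SI (plus:DI (reg:DI frame) (const_int 8+N)))

   where N is the var_offset derived from
   targetm.starting_frame_offset () in instantiate_virtual_regs
   below.  */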
1837
1838 /* Helper for instantiate_decls called via walk_tree: Process all decls
1839 in the given DECL_VALUE_EXPR. */
1840
1841 static tree
1842 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1843 {
1844 tree t = *tp;
1845 if (! EXPR_P (t))
1846 {
1847 *walk_subtrees = 0;
1848 if (DECL_P (t))
1849 {
1850 if (DECL_RTL_SET_P (t))
1851 instantiate_decl_rtl (DECL_RTL (t));
1852 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1853 && DECL_INCOMING_RTL (t))
1854 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1855 if ((VAR_P (t) || TREE_CODE (t) == RESULT_DECL)
1856 && DECL_HAS_VALUE_EXPR_P (t))
1857 {
1858 tree v = DECL_VALUE_EXPR (t);
1859 walk_tree (&v, instantiate_expr, NULL, NULL);
1860 }
1861 }
1862 }
1863 return NULL;
1864 }
1865
1866 /* Subroutine of instantiate_decls: Process all decls in the given
1867 BLOCK node and all its subblocks. */
1868
1869 static void
1870 instantiate_decls_1 (tree let)
1871 {
1872 tree t;
1873
1874 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1875 {
1876 if (DECL_RTL_SET_P (t))
1877 instantiate_decl_rtl (DECL_RTL (t));
1878 if (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t))
1879 {
1880 tree v = DECL_VALUE_EXPR (t);
1881 walk_tree (&v, instantiate_expr, NULL, NULL);
1882 }
1883 }
1884
1885 /* Process all subblocks. */
1886 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1887 instantiate_decls_1 (t);
1888 }
1889
1890 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1891 all virtual registers in their DECL_RTL's. */
1892
1893 static void
1894 instantiate_decls (tree fndecl)
1895 {
1896 tree decl;
1897 unsigned ix;
1898
1899 /* Process all parameters of the function. */
1900 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1901 {
1902 instantiate_decl_rtl (DECL_RTL (decl));
1903 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1904 if (DECL_HAS_VALUE_EXPR_P (decl))
1905 {
1906 tree v = DECL_VALUE_EXPR (decl);
1907 walk_tree (&v, instantiate_expr, NULL, NULL);
1908 }
1909 }
1910
1911 if ((decl = DECL_RESULT (fndecl))
1912 && TREE_CODE (decl) == RESULT_DECL)
1913 {
1914 if (DECL_RTL_SET_P (decl))
1915 instantiate_decl_rtl (DECL_RTL (decl));
1916 if (DECL_HAS_VALUE_EXPR_P (decl))
1917 {
1918 tree v = DECL_VALUE_EXPR (decl);
1919 walk_tree (&v, instantiate_expr, NULL, NULL);
1920 }
1921 }
1922
1923 /* Process the saved static chain if it exists. */
1924 decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1925 if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1926 instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1927
1928 /* Now process all variables defined in the function or its subblocks. */
1929 if (DECL_INITIAL (fndecl))
1930 instantiate_decls_1 (DECL_INITIAL (fndecl));
1931
1932 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1933 if (DECL_RTL_SET_P (decl))
1934 instantiate_decl_rtl (DECL_RTL (decl));
1935 vec_free (cfun->local_decls);
1936 }
1937
1938 /* Pass through the INSNS of function FNDECL and convert virtual register
1939 references to hard register references. */
1940
1941 static unsigned int
1942 instantiate_virtual_regs (void)
1943 {
1944 rtx_insn *insn;
1945
1946 /* Compute the offsets to use for this function. */
1947 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1948 var_offset = targetm.starting_frame_offset ();
1949 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1950 out_arg_offset = STACK_POINTER_OFFSET;
1951 #ifdef FRAME_POINTER_CFA_OFFSET
1952 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1953 #else
1954 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1955 #endif
1956
1957 /* Initialize recognition, indicating that volatile is OK. */
1958 init_recog ();
1959
1960 /* Scan through all the insns, instantiating every virtual register still
1961 present. */
1962 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1963 if (INSN_P (insn))
1964 {
1965 /* These patterns in the instruction stream can never be recognized.
1966 Fortunately, they shouldn't contain virtual registers either. */
1967 if (GET_CODE (PATTERN (insn)) == USE
1968 || GET_CODE (PATTERN (insn)) == CLOBBER
1969 || GET_CODE (PATTERN (insn)) == ASM_INPUT
1970 || DEBUG_MARKER_INSN_P (insn))
1971 continue;
1972 else if (DEBUG_BIND_INSN_P (insn))
1973 instantiate_virtual_regs_in_rtx (INSN_VAR_LOCATION_PTR (insn));
1974 else
1975 instantiate_virtual_regs_in_insn (insn);
1976
1977 if (insn->deleted ())
1978 continue;
1979
1980 instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
1981
1982 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1983 if (CALL_P (insn))
1984 instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
1985 }
1986
1987 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1988 instantiate_decls (current_function_decl);
1989
1990 targetm.instantiate_decls ();
1991
1992 /* Indicate that, from now on, assign_stack_local should use
1993 frame_pointer_rtx. */
1994 virtuals_instantiated = 1;
1995
1996 return 0;
1997 }
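/* For reference, the replacements performed by this pass are,
   schematically:

     virtual-incoming-args  -> arg pointer     + in_arg_offset
     virtual-stack-vars     -> frame pointer   + var_offset
     virtual-stack-dynamic  -> stack pointer   + dynamic_offset
     virtual-outgoing-args  -> stack pointer   + out_arg_offset
     virtual-cfa            -> frame or arg pointer + cfa_offset

   using the offsets computed at the top of instantiate_virtual_regs.  */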
1998
1999 namespace {
2000
2001 const pass_data pass_data_instantiate_virtual_regs =
2002 {
2003 RTL_PASS, /* type */
2004 "vregs", /* name */
2005 OPTGROUP_NONE, /* optinfo_flags */
2006 TV_NONE, /* tv_id */
2007 0, /* properties_required */
2008 0, /* properties_provided */
2009 0, /* properties_destroyed */
2010 0, /* todo_flags_start */
2011 0, /* todo_flags_finish */
2012 };
2013
2014 class pass_instantiate_virtual_regs : public rtl_opt_pass
2015 {
2016 public:
2017 pass_instantiate_virtual_regs (gcc::context *ctxt)
2018 : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
2019 {}
2020
2021 /* opt_pass methods: */
2022 virtual unsigned int execute (function *)
2023 {
2024 return instantiate_virtual_regs ();
2025 }
2026
2027 }; // class pass_instantiate_virtual_regs
2028
2029 } // anon namespace
2030
2031 rtl_opt_pass *
2032 make_pass_instantiate_virtual_regs (gcc::context *ctxt)
2033 {
2034 return new pass_instantiate_virtual_regs (ctxt);
2035 }
2036
2037 \f
2038 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2039 This means a type for which function calls must pass an address to the
2040 function or get an address back from the function.
2041 EXP may be a type node or an expression (whose type is tested). */
2042
2043 int
2044 aggregate_value_p (const_tree exp, const_tree fntype)
2045 {
2046 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
2047 int i, regno, nregs;
2048 rtx reg;
2049
2050 if (fntype)
2051 switch (TREE_CODE (fntype))
2052 {
2053 case CALL_EXPR:
2054 {
2055 tree fndecl = get_callee_fndecl (fntype);
2056 if (fndecl)
2057 fntype = TREE_TYPE (fndecl);
2058 else if (CALL_EXPR_FN (fntype))
2059 fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
2060 else
2061 /* For internal functions, assume nothing needs to be
2062 returned in memory. */
2063 return 0;
2064 }
2065 break;
2066 case FUNCTION_DECL:
2067 fntype = TREE_TYPE (fntype);
2068 break;
2069 case FUNCTION_TYPE:
2070 case METHOD_TYPE:
2071 break;
2072 case IDENTIFIER_NODE:
2073 fntype = NULL_TREE;
2074 break;
2075 default:
2076 /* We don't expect other tree types here. */
2077 gcc_unreachable ();
2078 }
2079
2080 if (VOID_TYPE_P (type))
2081 return 0;
2082
2083 /* If a record should be passed the same as its first (and only)
2084 member, don't pass it as an aggregate. */
2085 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2086 return aggregate_value_p (first_field (type), fntype);
2087
2088 /* If the front end has decided that this needs to be passed by
2089 reference, do so. */
2090 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2091 && DECL_BY_REFERENCE (exp))
2092 return 1;
2093
2094 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2095 if (fntype && TREE_ADDRESSABLE (fntype))
2096 return 1;
2097
2098 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2099 and thus can't be returned in registers. */
2100 if (TREE_ADDRESSABLE (type))
2101 return 1;
2102
2103 if (TYPE_EMPTY_P (type))
2104 return 0;
2105
2106 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2107 return 1;
2108
2109 if (targetm.calls.return_in_memory (type, fntype))
2110 return 1;
2111
2112 /* Make sure we have suitable call-clobbered regs to return
2113 the value in; if not, we must return it in memory. */
2114 reg = hard_function_value (type, 0, fntype, 0);
2115
2116 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2117 it is OK. */
2118 if (!REG_P (reg))
2119 return 0;
2120
2121 regno = REGNO (reg);
2122 nregs = hard_regno_nregs (regno, TYPE_MODE (type));
2123 for (i = 0; i < nregs; i++)
2124 if (! call_used_regs[regno + i])
2125 return 1;
2126
2127 return 0;
2128 }
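/* Two illustrative cases, assuming a typical 64-bit target: for a
   function returning

       struct big { char buf[64]; };

   targetm.calls.return_in_memory is normally true, so the result is 1;
   for a plain 'int' return the value fits in a call-clobbered register
   and the result is 0.  */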
2129 \f
2130 /* Return true if we should assign DECL a pseudo register; false if it
2131 should live on the local stack. */
2132
2133 bool
2134 use_register_for_decl (const_tree decl)
2135 {
2136 if (TREE_CODE (decl) == SSA_NAME)
2137 {
2138 /* We often try to use the SSA_NAME, instead of its underlying
2139 decl, to get type information and guide decisions, to avoid
2140 differences of behavior between anonymous and named
2141 variables, but in this one case we have to go for the actual
2142 variable if there is one. The main reason is that, at least
2143 at -O0, we want to place user variables on the stack, but we
2144 don't mind using pseudos for anonymous or ignored temps.
2145 Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
2146 should go in pseudos, whereas their corresponding variables
2147 might have to go on the stack. So, disregarding the decl
2148 here would negatively impact debug info at -O0, enable
2149 coalescing between SSA_NAMEs that ought to get different
2150 stack/pseudo assignments, and get the incoming argument
2151 processing thoroughly confused by PARM_DECLs expected to live
2152 in stack slots but assigned to pseudos. */
2153 if (!SSA_NAME_VAR (decl))
2154 return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
2155 && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));
2156
2157 decl = SSA_NAME_VAR (decl);
2158 }
2159
2160 /* Honor volatile. */
2161 if (TREE_SIDE_EFFECTS (decl))
2162 return false;
2163
2164 /* Honor addressability. */
2165 if (TREE_ADDRESSABLE (decl))
2166 return false;
2167
2168 /* RESULT_DECLs are a bit special in that they're assigned without
2169 regard to use_register_for_decl, but we generally only store in
2170 them. If we coalesce their SSA NAMEs, we'd better return a
2171 result that matches the assignment in expand_function_start. */
2172 if (TREE_CODE (decl) == RESULT_DECL)
2173 {
2174 /* If it's not an aggregate, we're going to use a REG or a
2175 PARALLEL containing a REG. */
2176 if (!aggregate_value_p (decl, current_function_decl))
2177 return true;
2178
2179 /* If expand_function_start determines the return value, we'll
2180 use MEM if it's not by reference. */
2181 if (cfun->returns_pcc_struct
2182 || (targetm.calls.struct_value_rtx
2183 (TREE_TYPE (current_function_decl), 1)))
2184 return DECL_BY_REFERENCE (decl);
2185
2186 /* Otherwise, we're taking an extra all.function_result_decl
2187 argument. It's set up in assign_parms_augmented_arg_list,
2188 under the (negated) conditions above, and then it's used to
2189 set up the RESULT_DECL rtl in assign_parms, after looping
2190 over all parameters. Now, if the RESULT_DECL is not by
2191 reference, we'll use a MEM either way. */
2192 if (!DECL_BY_REFERENCE (decl))
2193 return false;
2194
2195 /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take
2196 the function_result_decl's assignment. Since it's a pointer,
2197 we can short-circuit a number of the tests below, and we must
2198 duplicate them because we don't have the
2199 function_result_decl to test. */
2200 if (!targetm.calls.allocate_stack_slots_for_args ())
2201 return true;
2202 /* We don't set DECL_IGNORED_P for the function_result_decl. */
2203 if (optimize)
2204 return true;
2205 /* We don't set DECL_REGISTER for the function_result_decl. */
2206 return false;
2207 }
2208
2209 /* Only register-like things go in registers. */
2210 if (DECL_MODE (decl) == BLKmode)
2211 return false;
2212
2213 /* If -ffloat-store specified, don't put explicit float variables
2214 into registers. */
2215 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2216 propagates values across these stores, and it probably shouldn't. */
2217 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2218 return false;
2219
2220 if (!targetm.calls.allocate_stack_slots_for_args ())
2221 return true;
2222
2223 /* If we're not interested in tracking debugging information for
2224 this decl, then we can certainly put it in a register. */
2225 if (DECL_IGNORED_P (decl))
2226 return true;
2227
2228 if (optimize)
2229 return true;
2230
2231 if (!DECL_REGISTER (decl))
2232 return false;
2233
2234 /* When not optimizing, disregard register keyword for types that
2235 could have methods, otherwise the methods won't be callable from
2236 the debugger. */
2237 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl)))
2238 return false;
2239
2240 return true;
2241 }
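/* Illustrative cases, assuming the default target hooks: at -O0 a
   named user variable such as 'int i;' reaches the DECL_REGISTER test
   and lives on the stack unless declared 'register', whereas a
   compiler-generated temporary is DECL_IGNORED_P and is given a
   pseudo even at -O0.  */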
2242
2243 /* Structures to communicate between the subroutines of assign_parms.
2244 The first holds data persistent across all parameters, the second
2245 is cleared out for each parameter. */
2246
2247 struct assign_parm_data_all
2248 {
2249 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2250 should become a job of the target, or otherwise be encapsulated. */
2251 CUMULATIVE_ARGS args_so_far_v;
2252 cumulative_args_t args_so_far;
2253 struct args_size stack_args_size;
2254 tree function_result_decl;
2255 tree orig_fnargs;
2256 rtx_insn *first_conversion_insn;
2257 rtx_insn *last_conversion_insn;
2258 HOST_WIDE_INT pretend_args_size;
2259 HOST_WIDE_INT extra_pretend_bytes;
2260 int reg_parm_stack_space;
2261 };
2262
2263 struct assign_parm_data_one
2264 {
2265 tree nominal_type;
2266 tree passed_type;
2267 rtx entry_parm;
2268 rtx stack_parm;
2269 machine_mode nominal_mode;
2270 machine_mode passed_mode;
2271 machine_mode promoted_mode;
2272 struct locate_and_pad_arg_data locate;
2273 int partial;
2274 BOOL_BITFIELD named_arg : 1;
2275 BOOL_BITFIELD passed_pointer : 1;
2276 BOOL_BITFIELD on_stack : 1;
2277 BOOL_BITFIELD loaded_in_reg : 1;
2278 };
2279
2280 /* A subroutine of assign_parms. Initialize ALL. */
2281
2282 static void
2283 assign_parms_initialize_all (struct assign_parm_data_all *all)
2284 {
2285 tree fntype ATTRIBUTE_UNUSED;
2286
2287 memset (all, 0, sizeof (*all));
2288
2289 fntype = TREE_TYPE (current_function_decl);
2290
2291 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2292 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2293 #else
2294 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2295 current_function_decl, -1);
2296 #endif
2297 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2298
2299 #ifdef INCOMING_REG_PARM_STACK_SPACE
2300 all->reg_parm_stack_space
2301 = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
2302 #endif
2303 }
2304
2305 /* If ARGS contains entries with complex types, split each such
2306 entry into two entries of the component type. ARGS is updated
2307 in place, so nothing is returned. */
2308
2309 static void
2310 split_complex_args (vec<tree> *args)
2311 {
2312 unsigned i;
2313 tree p;
2314
2315 FOR_EACH_VEC_ELT (*args, i, p)
2316 {
2317 tree type = TREE_TYPE (p);
2318 if (TREE_CODE (type) == COMPLEX_TYPE
2319 && targetm.calls.split_complex_arg (type))
2320 {
2321 tree decl;
2322 tree subtype = TREE_TYPE (type);
2323 bool addressable = TREE_ADDRESSABLE (p);
2324
2325 /* Rewrite the PARM_DECL's type with its component. */
2326 p = copy_node (p);
2327 TREE_TYPE (p) = subtype;
2328 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2329 SET_DECL_MODE (p, VOIDmode);
2330 DECL_SIZE (p) = NULL;
2331 DECL_SIZE_UNIT (p) = NULL;
2332 /* If this arg must go in memory, put it in a pseudo here.
2333 We can't allow it to go in memory as per normal parms,
2334 because the usual place might not have the imag part
2335 adjacent to the real part. */
2336 DECL_ARTIFICIAL (p) = addressable;
2337 DECL_IGNORED_P (p) = addressable;
2338 TREE_ADDRESSABLE (p) = 0;
2339 layout_decl (p, 0);
2340 (*args)[i] = p;
2341
2342 /* Build a second synthetic decl. */
2343 decl = build_decl (EXPR_LOCATION (p),
2344 PARM_DECL, NULL_TREE, subtype);
2345 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2346 DECL_ARTIFICIAL (decl) = addressable;
2347 DECL_IGNORED_P (decl) = addressable;
2348 layout_decl (decl, 0);
2349 args->safe_insert (++i, decl);
2350 }
2351 }
2352 }
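/* For instance, on a target whose split_complex_arg hook accepts
   COMPLEX_TYPE, a parameter declared

       _Complex double z

   is rewritten here into two DFmode PARM_DECLs: the original node,
   narrowed to the component type for the real part, followed by a
   synthetic decl for the imaginary part.  */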
2353
2354 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2355 the hidden struct return argument, and (abi willing) complex args.
2356 Return the new parameter list. */
2357
2358 static vec<tree>
2359 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2360 {
2361 tree fndecl = current_function_decl;
2362 tree fntype = TREE_TYPE (fndecl);
2363 vec<tree> fnargs = vNULL;
2364 tree arg;
2365
2366 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2367 fnargs.safe_push (arg);
2368
2369 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2370
2371 /* If struct value address is treated as the first argument, make it so. */
2372 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2373 && ! cfun->returns_pcc_struct
2374 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2375 {
2376 tree type = build_pointer_type (TREE_TYPE (fntype));
2377 tree decl;
2378
2379 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2380 PARM_DECL, get_identifier (".result_ptr"), type);
2381 DECL_ARG_TYPE (decl) = type;
2382 DECL_ARTIFICIAL (decl) = 1;
2383 DECL_NAMELESS (decl) = 1;
2384 TREE_CONSTANT (decl) = 1;
2385 /* We don't set DECL_IGNORED_P or DECL_REGISTER here. If this
2386 changes, the end of the RESULT_DECL handling block in
2387 use_register_for_decl must be adjusted to match. */
2388
2389 DECL_CHAIN (decl) = all->orig_fnargs;
2390 all->orig_fnargs = decl;
2391 fnargs.safe_insert (0, decl);
2392
2393 all->function_result_decl = decl;
2394 }
2395
2396 /* If the target wants to split complex arguments into scalars, do so. */
2397 if (targetm.calls.split_complex_arg)
2398 split_complex_args (&fnargs);
2399
2400 return fnargs;
2401 }
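/* Sketch of the effect, for a target whose struct_value_rtx hook
   returns 0: given

       struct big f (int x);

   where 'struct big' is returned in memory, the augmented list
   becomes (".result_ptr", x), and all->function_result_decl points
   at the hidden pointer parameter.  */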
2402
2403 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2404 data for the parameter. Incorporate ABI specifics such as pass-by-
2405 reference and type promotion. */
2406
2407 static void
2408 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2409 struct assign_parm_data_one *data)
2410 {
2411 tree nominal_type, passed_type;
2412 machine_mode nominal_mode, passed_mode, promoted_mode;
2413 int unsignedp;
2414
2415 memset (data, 0, sizeof (*data));
2416
2417 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2418 if (!cfun->stdarg)
2419 data->named_arg = 1; /* No variadic parms. */
2420 else if (DECL_CHAIN (parm))
2421 data->named_arg = 1; /* Not the last non-variadic parm. */
2422 else if (targetm.calls.strict_argument_naming (all->args_so_far))
2423 data->named_arg = 1; /* Only variadic ones are unnamed. */
2424 else
2425 data->named_arg = 0; /* Treat as variadic. */
2426
2427 nominal_type = TREE_TYPE (parm);
2428 passed_type = DECL_ARG_TYPE (parm);
2429
2430 /* Look out for errors propagating this far. Also, if the parameter's
2431 type is void then its value doesn't matter. */
2432 if (TREE_TYPE (parm) == error_mark_node
2433 /* This can happen after weird syntax errors
2434 or if an enum type is defined among the parms. */
2435 || TREE_CODE (parm) != PARM_DECL
2436 || passed_type == NULL
2437 || VOID_TYPE_P (nominal_type))
2438 {
2439 nominal_type = passed_type = void_type_node;
2440 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2441 goto egress;
2442 }
2443
2444 /* Find mode of arg as it is passed, and mode of arg as it should be
2445 during execution of this function. */
2446 passed_mode = TYPE_MODE (passed_type);
2447 nominal_mode = TYPE_MODE (nominal_type);
2448
2449 /* If the parm is to be passed as a transparent union or record, use the
2450 type of the first field for the tests below. We have already verified
2451 that the modes are the same. */
2452 if ((TREE_CODE (passed_type) == UNION_TYPE
2453 || TREE_CODE (passed_type) == RECORD_TYPE)
2454 && TYPE_TRANSPARENT_AGGR (passed_type))
2455 passed_type = TREE_TYPE (first_field (passed_type));
2456
2457 /* See if this arg was passed by invisible reference. */
2458 if (pass_by_reference (&all->args_so_far_v, passed_mode,
2459 passed_type, data->named_arg))
2460 {
2461 passed_type = nominal_type = build_pointer_type (passed_type);
2462 data->passed_pointer = true;
2463 passed_mode = nominal_mode = TYPE_MODE (nominal_type);
2464 }
2465
2466 /* Find mode as it is passed by the ABI. */
2467 unsignedp = TYPE_UNSIGNED (passed_type);
2468 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2469 TREE_TYPE (current_function_decl), 0);
2470
2471 egress:
2472 data->nominal_type = nominal_type;
2473 data->passed_type = passed_type;
2474 data->nominal_mode = nominal_mode;
2475 data->passed_mode = passed_mode;
2476 data->promoted_mode = promoted_mode;
2477 }
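/* An illustration of the three modes, assuming a target that widens
   small integers in promote_function_mode: for a parameter declared
   'short s', nominal_mode and passed_mode are both HImode while
   promoted_mode is SImode; for a large aggregate passed by invisible
   reference, passed_type is rewritten to a pointer type, the modes
   become the pointer mode, and passed_pointer is set.  */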
2478
2479 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2480
2481 static void
2482 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2483 struct assign_parm_data_one *data, bool no_rtl)
2484 {
2485 int varargs_pretend_bytes = 0;
2486
2487 targetm.calls.setup_incoming_varargs (all->args_so_far,
2488 data->promoted_mode,
2489 data->passed_type,
2490 &varargs_pretend_bytes, no_rtl);
2491
2492 /* If the back-end has requested extra stack space, record how much is
2493 needed. Do not change pretend_args_size otherwise since it may be
2494 nonzero from an earlier partial argument. */
2495 if (varargs_pretend_bytes > 0)
2496 all->pretend_args_size = varargs_pretend_bytes;
2497 }
2498
2499 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2500 the incoming location of the current parameter. */
2501
2502 static void
2503 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2504 struct assign_parm_data_one *data)
2505 {
2506 HOST_WIDE_INT pretend_bytes = 0;
2507 rtx entry_parm;
2508 bool in_regs;
2509
2510 if (data->promoted_mode == VOIDmode)
2511 {
2512 data->entry_parm = data->stack_parm = const0_rtx;
2513 return;
2514 }
2515
2516 targetm.calls.warn_parameter_passing_abi (all->args_so_far,
2517 data->passed_type);
2518
2519 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2520 data->promoted_mode,
2521 data->passed_type,
2522 data->named_arg);
2523
2524 if (entry_parm == 0)
2525 data->promoted_mode = data->passed_mode;
2526
2527 /* Determine parm's home in the stack, in case it arrives in the stack
2528 or we should pretend it did. Compute the stack position and rtx where
2529 the argument arrives and its size.
2530
2531 There is one complexity here: If this was a parameter that would
2532 have been passed in registers, but wasn't only because it is
2533 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2534 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2535 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2536 as it was the previous time. */
2537 in_regs = (entry_parm != 0);
2538 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2539 in_regs = true;
2540 #endif
2541 if (!in_regs && !data->named_arg)
2542 {
2543 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2544 {
2545 rtx tem;
2546 tem = targetm.calls.function_incoming_arg (all->args_so_far,
2547 data->promoted_mode,
2548 data->passed_type, true);
2549 in_regs = tem != NULL;
2550 }
2551 }
2552
2553 /* If this parameter was passed both in registers and in the stack, use
2554 the copy on the stack. */
2555 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2556 data->passed_type))
2557 entry_parm = 0;
2558
2559 if (entry_parm)
2560 {
2561 int partial;
2562
2563 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
2564 data->promoted_mode,
2565 data->passed_type,
2566 data->named_arg);
2567 data->partial = partial;
2568
2569 /* The caller might already have allocated stack space for the
2570 register parameters. */
2571 if (partial != 0 && all->reg_parm_stack_space == 0)
2572 {
2573 /* Part of this argument is passed in registers and part
2574 is passed on the stack. Ask the prologue code to extend
2575 the stack part so that we can recreate the full value.
2576
2577 PRETEND_BYTES is the size of the registers we need to store.
2578 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2579 stack space that the prologue should allocate.
2580
2581 Internally, gcc assumes that the argument pointer is aligned
2582 to STACK_BOUNDARY bits. This is used both for alignment
2583 optimizations (see init_emit) and to locate arguments that are
2584 aligned to more than PARM_BOUNDARY bits. We must preserve this
2585 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2586 a stack boundary. */
2587
2588 /* We assume at most one partial arg, and it must be the first
2589 argument on the stack. */
2590 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2591
2592 pretend_bytes = partial;
2593 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2594
2595 /* We want to align relative to the actual stack pointer, so
2596 don't include this in the stack size until later. */
2597 all->extra_pretend_bytes = all->pretend_args_size;
2598 }
2599 }
2600
2601 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2602 all->reg_parm_stack_space,
2603 entry_parm ? data->partial : 0, current_function_decl,
2604 &all->stack_args_size, &data->locate);
2605
2606 /* Update parm_stack_boundary if this parameter is passed in the
2607 stack. */
2608 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2609 crtl->parm_stack_boundary = data->locate.boundary;
2610
2611 /* Adjust offsets to include the pretend args. */
2612 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2613 data->locate.slot_offset.constant += pretend_bytes;
2614 data->locate.offset.constant += pretend_bytes;
2615
2616 data->entry_parm = entry_parm;
2617 }
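/* A worked example of the rounding above: if the first stack argument
   has partial == 8 (eight bytes arrive in registers) and STACK_BYTES
   is 16, then pretend_args_size = CEIL_ROUND (8, 16) == 16, so the
   argument pointer keeps its STACK_BOUNDARY alignment.  */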
2618
2619 /* A subroutine of assign_parms. If there is actually space on the stack
2620 for this parm, count it in stack_args_size and return true. */
2621
2622 static bool
2623 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2624 struct assign_parm_data_one *data)
2625 {
2626 /* Trivially true if we've no incoming register. */
2627 if (data->entry_parm == NULL)
2628 ;
2629 /* Also true if we're partially in registers and partially not,
2630 since we've arranged to drop the entire argument on the stack. */
2631 else if (data->partial != 0)
2632 ;
2633 /* Also true if the target says that it's passed in both registers
2634 and on the stack. */
2635 else if (GET_CODE (data->entry_parm) == PARALLEL
2636 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2637 ;
2638 /* Also true if the target says that there's stack allocated for
2639 all register parameters. */
2640 else if (all->reg_parm_stack_space > 0)
2641 ;
2642 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2643 else
2644 return false;
2645
2646 all->stack_args_size.constant += data->locate.size.constant;
2647 if (data->locate.size.var)
2648 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2649
2650 return true;
2651 }
2652
2653 /* A subroutine of assign_parms. Given that this parameter is allocated
2654 stack space by the ABI, find it. */
2655
2656 static void
2657 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2658 {
2659 rtx offset_rtx, stack_parm;
2660 unsigned int align, boundary;
2661
2662 /* If we're passing this arg using a reg, make its stack home the
2663 aligned stack slot. */
2664 if (data->entry_parm)
2665 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2666 else
2667 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2668
2669 stack_parm = crtl->args.internal_arg_pointer;
2670 if (offset_rtx != const0_rtx)
2671 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2672 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2673
2674 if (!data->passed_pointer)
2675 {
2676 set_mem_attributes (stack_parm, parm, 1);
2677 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2678 while promoted mode's size is needed. */
2679 if (data->promoted_mode != BLKmode
2680 && data->promoted_mode != DECL_MODE (parm))
2681 {
2682 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
2683 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2684 {
2685 poly_int64 offset = subreg_lowpart_offset (DECL_MODE (parm),
2686 data->promoted_mode);
2687 if (maybe_ne (offset, 0))
2688 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2689 }
2690 }
2691 }
2692
2693 boundary = data->locate.boundary;
2694 align = BITS_PER_UNIT;
2695
2696 /* If we're padding upward, we know that the alignment of the slot
2697 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2698 intentionally forcing upward padding. Otherwise we have to come
2699 up with a guess at the alignment based on OFFSET_RTX. */
2700 poly_int64 offset;
2701 if (data->locate.where_pad != PAD_DOWNWARD || data->entry_parm)
2702 align = boundary;
2703 else if (poly_int_rtx_p (offset_rtx, &offset))
2704 {
2705 align = least_bit_hwi (boundary);
2706 unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
2707 if (offset_align != 0)
2708 align = MIN (align, offset_align);
2709 }
2710 set_mem_align (stack_parm, align);
2711
2712 if (data->entry_parm)
2713 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2714
2715 data->stack_parm = stack_parm;
2716 }
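/* A worked instance of the alignment guess above: for a
   downward-padded slot with boundary == 64 and offset_rtx ==
   (const_int 4), least_bit_hwi (64) is 64 and the offset alignment
   is 4 * BITS_PER_UNIT == 32, so the MEM is marked as (only)
   32-bit aligned.  */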
2717
2718 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2719 always valid and contiguous. */
2720
2721 static void
2722 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2723 {
2724 rtx entry_parm = data->entry_parm;
2725 rtx stack_parm = data->stack_parm;
2726
2727 /* If this parm was passed part in regs and part in memory, pretend it
2728 arrived entirely in memory by pushing the register-part onto the stack.
2729 In the special case of a DImode or DFmode that is split, we could put
2730 it together in a pseudoreg directly, but for now that's not worth
2731 bothering with. */
2732 if (data->partial != 0)
2733 {
2734 /* Handle calls that pass values in multiple non-contiguous
2735 locations. The Irix 6 ABI has examples of this. */
2736 if (GET_CODE (entry_parm) == PARALLEL)
2737 emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
2738 data->passed_type,
2739 int_size_in_bytes (data->passed_type));
2740 else
2741 {
2742 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2743 move_block_from_reg (REGNO (entry_parm),
2744 validize_mem (copy_rtx (stack_parm)),
2745 data->partial / UNITS_PER_WORD);
2746 }
2747
2748 entry_parm = stack_parm;
2749 }
2750
2751 /* If we didn't decide this parm came in a register, by default it came
2752 on the stack. */
2753 else if (entry_parm == NULL)
2754 entry_parm = stack_parm;
2755
2756 /* When an argument is passed in multiple locations, we can't make use
2757 of this information, but we can save some copying if the whole argument
2758 is passed in a single register. */
2759 else if (GET_CODE (entry_parm) == PARALLEL
2760 && data->nominal_mode != BLKmode
2761 && data->passed_mode != BLKmode)
2762 {
2763 size_t i, len = XVECLEN (entry_parm, 0);
2764
2765 for (i = 0; i < len; i++)
2766 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2767 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2768 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2769 == data->passed_mode)
2770 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2771 {
2772 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2773 break;
2774 }
2775 }
2776
2777 data->entry_parm = entry_parm;
2778 }
2779
2780 /* A subroutine of assign_parms. Reconstitute any values which were
2781 passed in multiple registers and would fit in a single register. */
2782
2783 static void
2784 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2785 {
2786 rtx entry_parm = data->entry_parm;
2787
2788 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2789 This can be done with register operations rather than on the
2790 stack, even if we will store the reconstituted parameter on the
2791 stack later. */
2792 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2793 {
2794 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2795 emit_group_store (parmreg, entry_parm, data->passed_type,
2796 GET_MODE_SIZE (GET_MODE (entry_parm)));
2797 entry_parm = parmreg;
2798 }
2799
2800 data->entry_parm = entry_parm;
2801 }
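/* E.g. a DImode argument described by the ABI as

       (parallel:DI [(expr_list (reg:SI 3) (const_int 0))
                     (expr_list (reg:SI 4) (const_int 4))])

   is reassembled here into a single DImode pseudo with
   emit_group_store; the register numbers are illustrative only.  */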
2802
2803 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2804 always valid and properly aligned. */
2805
2806 static void
2807 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2808 {
2809 rtx stack_parm = data->stack_parm;
2810
2811 /* If we can't trust the parm stack slot to be aligned enough for its
2812 ultimate type, don't use that slot after entry. We'll make another
2813 stack slot, if we need one. */
2814 if (stack_parm
2815 && ((STRICT_ALIGNMENT
2816 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2817 || (data->nominal_type
2818 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2819 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2820 stack_parm = NULL;
2821
2822 /* If parm was passed in memory, and we need to convert it on entry,
2823 don't store it back in that same slot. */
2824 else if (data->entry_parm == stack_parm
2825 && data->nominal_mode != BLKmode
2826 && data->nominal_mode != data->passed_mode)
2827 stack_parm = NULL;
2828
2829 /* If stack protection is in effect for this function, don't leave any
2830 pointers in their passed stack slots. */
2831 else if (crtl->stack_protect_guard
2832 && (flag_stack_protect == 2
2833 || data->passed_pointer
2834 || POINTER_TYPE_P (data->nominal_type)))
2835 stack_parm = NULL;
2836
2837 data->stack_parm = stack_parm;
2838 }
2839
2840 /* A subroutine of assign_parms. Return true if the current parameter
2841 should be stored in BLKmode in the current frame. */
2842
2843 static bool
2844 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2845 {
2846 if (data->nominal_mode == BLKmode)
2847 return true;
2848 if (GET_MODE (data->entry_parm) == BLKmode)
2849 return true;
2850
2851 #ifdef BLOCK_REG_PADDING
2852 /* Only assign_parm_setup_block knows how to deal with register arguments
2853 that are padded at the least significant end. */
2854 if (REG_P (data->entry_parm)
2855 && known_lt (GET_MODE_SIZE (data->promoted_mode), UNITS_PER_WORD)
2856 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2857 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
2858 return true;
2859 #endif
2860
2861 return false;
2862 }
2863
2864 /* A subroutine of assign_parms. Arrange for the parameter to be
2865 present and valid in DATA->STACK_RTL. */
2866
2867 static void
2868 assign_parm_setup_block (struct assign_parm_data_all *all,
2869 tree parm, struct assign_parm_data_one *data)
2870 {
2871 rtx entry_parm = data->entry_parm;
2872 rtx stack_parm = data->stack_parm;
2873 rtx target_reg = NULL_RTX;
2874 bool in_conversion_seq = false;
2875 HOST_WIDE_INT size;
2876 HOST_WIDE_INT size_stored;
2877
2878 if (GET_CODE (entry_parm) == PARALLEL)
2879 entry_parm = emit_group_move_into_temps (entry_parm);
2880
2881 /* If we want the parameter in a pseudo, don't use a stack slot. */
2882 if (is_gimple_reg (parm) && use_register_for_decl (parm))
2883 {
2884 tree def = ssa_default_def (cfun, parm);
2885 gcc_assert (def);
2886 machine_mode mode = promote_ssa_mode (def, NULL);
2887 rtx reg = gen_reg_rtx (mode);
2888 if (GET_CODE (reg) != CONCAT)
2889 stack_parm = reg;
2890 else
2891 {
2892 target_reg = reg;
2893 /* Avoid allocating a stack slot, if there isn't one
2894 preallocated by the ABI. It might seem like we should
2895 always prefer a pseudo, but converting between
2896 floating-point and integer modes goes through the stack
2897 on various machines, so it's better to use the reserved
2898 stack slot than to risk wasting it and allocating more
2899 for the conversion. */
2900 if (stack_parm == NULL_RTX)
2901 {
2902 int save = generating_concat_p;
2903 generating_concat_p = 0;
2904 stack_parm = gen_reg_rtx (mode);
2905 generating_concat_p = save;
2906 }
2907 }
2908 data->stack_parm = NULL;
2909 }
2910
2911 size = int_size_in_bytes (data->passed_type);
2912 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2913 if (stack_parm == 0)
2914 {
2915 HOST_WIDE_INT parm_align
2916 = (STRICT_ALIGNMENT
2917 ? MAX (DECL_ALIGN (parm), BITS_PER_WORD) : DECL_ALIGN (parm));
2918
2919 SET_DECL_ALIGN (parm, parm_align);
2920 if (DECL_ALIGN (parm) > MAX_SUPPORTED_STACK_ALIGNMENT)
2921 {
2922 rtx allocsize = gen_int_mode (size_stored, Pmode);
2923 get_dynamic_stack_size (&allocsize, 0, DECL_ALIGN (parm), NULL);
2924 stack_parm = assign_stack_local (BLKmode, UINTVAL (allocsize),
2925 MAX_SUPPORTED_STACK_ALIGNMENT);
2926 rtx addr = align_dynamic_address (XEXP (stack_parm, 0),
2927 DECL_ALIGN (parm));
2928 mark_reg_pointer (addr, DECL_ALIGN (parm));
2929 stack_parm = gen_rtx_MEM (GET_MODE (stack_parm), addr);
2930 MEM_NOTRAP_P (stack_parm) = 1;
2931 }
2932 else
2933 stack_parm = assign_stack_local (BLKmode, size_stored,
2934 DECL_ALIGN (parm));
2935 if (known_eq (GET_MODE_SIZE (GET_MODE (entry_parm)), size))
2936 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2937 set_mem_attributes (stack_parm, parm, 1);
2938 }
2939
2940 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2941 calls that pass values in multiple non-contiguous locations. */
2942 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2943 {
2944 rtx mem;
2945
2946 /* Note that we will be storing an integral number of words.
2947 So we have to be careful to ensure that we allocate an
2948 integral number of words. We do this above when we call
2949 assign_stack_local if space was not allocated in the argument
2950 list. If it was, this will not work if PARM_BOUNDARY is not
2951 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2952 if it becomes a problem. Exception is when BLKmode arrives
2953 with arguments not conforming to word_mode. */
2954
2955 if (data->stack_parm == 0)
2956 ;
2957 else if (GET_CODE (entry_parm) == PARALLEL)
2958 ;
2959 else
2960 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2961
2962 mem = validize_mem (copy_rtx (stack_parm));
2963
2964 /* Handle values in multiple non-contiguous locations. */
2965 if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
2966 emit_group_store (mem, entry_parm, data->passed_type, size);
2967 else if (GET_CODE (entry_parm) == PARALLEL)
2968 {
2969 push_to_sequence2 (all->first_conversion_insn,
2970 all->last_conversion_insn);
2971 emit_group_store (mem, entry_parm, data->passed_type, size);
2972 all->first_conversion_insn = get_insns ();
2973 all->last_conversion_insn = get_last_insn ();
2974 end_sequence ();
2975 in_conversion_seq = true;
2976 }
2977
2978 else if (size == 0)
2979 ;
2980
2981 /* If SIZE is that of a mode no bigger than a word, just use
2982 that mode's store operation. */
2983 else if (size <= UNITS_PER_WORD)
2984 {
2985 unsigned int bits = size * BITS_PER_UNIT;
2986 machine_mode mode = int_mode_for_size (bits, 0).else_blk ();
2987
2988 if (mode != BLKmode
2989 #ifdef BLOCK_REG_PADDING
2990 && (size == UNITS_PER_WORD
2991 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2992 != (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
2993 #endif
2994 )
2995 {
2996 rtx reg;
2997
2998 /* We are really truncating a word_mode value containing
2999 SIZE bytes into a value of mode MODE. If such an
3000 operation requires no actual instructions, we can refer
3001 to the value directly in mode MODE, otherwise we must
3002 start with the register in word_mode and explicitly
3003 convert it. */
3004 if (targetm.truly_noop_truncation (size * BITS_PER_UNIT,
3005 BITS_PER_WORD))
3006 reg = gen_rtx_REG (mode, REGNO (entry_parm));
3007 else
3008 {
3009 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3010 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
3011 }
3012 emit_move_insn (change_address (mem, mode, 0), reg);
3013 }
3014
3015 #ifdef BLOCK_REG_PADDING
3016 /* Storing the register in memory as a full word, as
3017 move_block_from_reg below would do, and then using the
3018 MEM in a smaller mode, has the effect of shifting right
3019 if BYTES_BIG_ENDIAN. If we're bypassing memory, the
3020 shifting must be explicit. */
3021 else if (!MEM_P (mem))
3022 {
3023 rtx x;
3024
3025 /* If the assert below fails, we should have taken the
3026 mode != BLKmode path above, unless we have downward
3027 padding of smaller-than-word arguments on a machine
3028 with little-endian bytes, which would likely require
3029 additional changes to work correctly. */
3030 gcc_checking_assert (BYTES_BIG_ENDIAN
3031 && (BLOCK_REG_PADDING (mode,
3032 data->passed_type, 1)
3033 == PAD_UPWARD));
3034
3035 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3036
3037 x = gen_rtx_REG (word_mode, REGNO (entry_parm));
3038 x = expand_shift (RSHIFT_EXPR, word_mode, x, by,
3039 NULL_RTX, 1);
3040 x = force_reg (word_mode, x);
3041 x = gen_lowpart_SUBREG (GET_MODE (mem), x);
3042
3043 emit_move_insn (mem, x);
3044 }
3045 #endif
3046
3047 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
3048 machine must be aligned to the left before storing
3049 to memory. Note that the previous test doesn't
3050 handle all cases (e.g. SIZE == 3). */
3051 else if (size != UNITS_PER_WORD
3052 #ifdef BLOCK_REG_PADDING
3053 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
3054 == PAD_DOWNWARD)
3055 #else
3056 && BYTES_BIG_ENDIAN
3057 #endif
3058 )
3059 {
3060 rtx tem, x;
3061 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3062 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3063
3064 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
3065 tem = change_address (mem, word_mode, 0);
3066 emit_move_insn (tem, x);
3067 }
3068 else
3069 move_block_from_reg (REGNO (entry_parm), mem,
3070 size_stored / UNITS_PER_WORD);
3071 }
3072 else if (!MEM_P (mem))
3073 {
3074 gcc_checking_assert (size > UNITS_PER_WORD);
3075 #ifdef BLOCK_REG_PADDING
3076 gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
3077 data->passed_type, 0)
3078 == PAD_UPWARD);
3079 #endif
3080 emit_move_insn (mem, entry_parm);
3081 }
3082 else
3083 move_block_from_reg (REGNO (entry_parm), mem,
3084 size_stored / UNITS_PER_WORD);
3085 }
3086 else if (data->stack_parm == 0)
3087 {
3088 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3089 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
3090 BLOCK_OP_NORMAL);
3091 all->first_conversion_insn = get_insns ();
3092 all->last_conversion_insn = get_last_insn ();
3093 end_sequence ();
3094 in_conversion_seq = true;
3095 }
3096
3097 if (target_reg)
3098 {
3099 if (!in_conversion_seq)
3100 emit_move_insn (target_reg, stack_parm);
3101 else
3102 {
3103 push_to_sequence2 (all->first_conversion_insn,
3104 all->last_conversion_insn);
3105 emit_move_insn (target_reg, stack_parm);
3106 all->first_conversion_insn = get_insns ();
3107 all->last_conversion_insn = get_last_insn ();
3108 end_sequence ();
3109 }
3110 stack_parm = target_reg;
3111 }
3112
3113 data->stack_parm = stack_parm;
3114 set_parm_rtl (parm, stack_parm);
3115 }
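/* A worked instance of the big-endian padding logic above: a 3-byte
   aggregate arriving in a register with UNITS_PER_WORD == 8 and
   downward padding is shifted left by (8 - 3) * BITS_PER_UNIT == 40
   bits, so that its bytes sit at the start of the word that is then
   stored.  */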
3116
3117 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
3118 parameter. Get it there. Perform all ABI specified conversions. */
3119
3120 static void
3121 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
3122 struct assign_parm_data_one *data)
3123 {
3124 rtx parmreg, validated_mem;
3125 rtx equiv_stack_parm;
3126 machine_mode promoted_nominal_mode;
3127 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
3128 bool did_conversion = false;
3129 bool need_conversion, moved;
3130 rtx rtl;
3131
3132 /* Store the parm in a pseudoregister during the function, but we may
3133 need to do it in a wider mode. Using 2 here makes the result
3134 consistent with promote_decl_mode and thus expand_expr_real_1. */
3135 promoted_nominal_mode
3136 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
3137 TREE_TYPE (current_function_decl), 2);
3138
3139 parmreg = gen_reg_rtx (promoted_nominal_mode);
3140 if (!DECL_ARTIFICIAL (parm))
3141 mark_user_reg (parmreg);
3142
3143 /* If this was an item that we received a pointer to,
3144 set rtl appropriately. */
3145 if (data->passed_pointer)
3146 {
3147 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
3148 set_mem_attributes (rtl, parm, 1);
3149 }
3150 else
3151 rtl = parmreg;
3152
3153 assign_parm_remove_parallels (data);
3154
3155 /* Copy the value into the register, thus bridging between
3156 assign_parm_find_data_types and expand_expr_real_1. */
3157
3158 equiv_stack_parm = data->stack_parm;
3159 validated_mem = validize_mem (copy_rtx (data->entry_parm));
3160
3161 need_conversion = (data->nominal_mode != data->passed_mode
3162 || promoted_nominal_mode != data->promoted_mode);
3163 moved = false;
3164
3165 if (need_conversion
3166 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3167 && data->nominal_mode == data->passed_mode
3168 && data->nominal_mode == GET_MODE (data->entry_parm))
3169 {
3170 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3171 mode, by the caller. We now have to convert it to
3172 NOMINAL_MODE, if different. However, PARMREG may be in
3173 a different mode than NOMINAL_MODE if it is being stored
3174 promoted.
3175
3176 If ENTRY_PARM is a hard register, it might be in a register
3177 not valid for operating in its mode (e.g., an odd-numbered
3178 register for a DFmode). In that case, moves are the only
3179 thing valid, so we can't do a convert from there. This
3180 occurs when the calling sequence allow such misaligned
3181 usages.
3182
3183 In addition, the conversion may involve a call, which could
3184 clobber parameters which haven't been copied to pseudo
3185 registers yet.
3186
3187 First, we try to emit an insn which performs the necessary
3188 conversion. We verify that this insn does not clobber any
3189 hard registers. */
3190
3191 enum insn_code icode;
3192 rtx op0, op1;
3193
3194 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3195 unsignedp);
3196
3197 op0 = parmreg;
3198 op1 = validated_mem;
3199 if (icode != CODE_FOR_nothing
3200 && insn_operand_matches (icode, 0, op0)
3201 && insn_operand_matches (icode, 1, op1))
3202 {
3203 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3204 rtx_insn *insn, *insns;
3205 rtx t = op1;
3206 HARD_REG_SET hardregs;
3207
3208 start_sequence ();
3209 /* If op1 is a hard register that is likely spilled, first
3210 force it into a pseudo, otherwise combiner might extend
3211 its lifetime too much. */
3212 if (GET_CODE (t) == SUBREG)
3213 t = SUBREG_REG (t);
3214 if (REG_P (t)
3215 && HARD_REGISTER_P (t)
3216 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3217 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3218 {
3219 t = gen_reg_rtx (GET_MODE (op1));
3220 emit_move_insn (t, op1);
3221 }
3222 else
3223 t = op1;
3224 rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3225 data->passed_mode, unsignedp);
3226 emit_insn (pat);
3227 insns = get_insns ();
3228
3229 moved = true;
3230 CLEAR_HARD_REG_SET (hardregs);
3231 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3232 {
3233 if (INSN_P (insn))
3234 note_stores (PATTERN (insn), record_hard_reg_sets,
3235 &hardregs);
3236 if (!hard_reg_set_empty_p (hardregs))
3237 moved = false;
3238 }
3239
3240 end_sequence ();
3241
3242 if (moved)
3243 {
3244 emit_insn (insns);
3245 if (equiv_stack_parm != NULL_RTX)
3246 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3247 equiv_stack_parm);
3248 }
3249 }
3250 }
3251
3252 if (moved)
3253 /* Nothing to do. */
3254 ;
3255 else if (need_conversion)
3256 {
3257 /* We did not have an insn to convert directly, or the sequence
3258 generated appeared unsafe. We must first copy the parm to a
3259 pseudo reg, and save the conversion until after all
3260 parameters have been moved. */
3261
3262 int save_tree_used;
3263 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3264
3265 emit_move_insn (tempreg, validated_mem);
3266
3267 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3268 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3269
3270 if (partial_subreg_p (tempreg)
3271 && GET_MODE (tempreg) == data->nominal_mode
3272 && REG_P (SUBREG_REG (tempreg))
3273 && data->nominal_mode == data->passed_mode
3274 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm))
3275 {
3276 /* The argument is already sign/zero extended, so note it
3277 into the subreg. */
3278 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3279 SUBREG_PROMOTED_SET (tempreg, unsignedp);
3280 }
3281
3282 /* TREE_USED gets set erroneously during expand_assignment. */
3283 save_tree_used = TREE_USED (parm);
3284 SET_DECL_RTL (parm, rtl);
3285 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3286 SET_DECL_RTL (parm, NULL_RTX);
3287 TREE_USED (parm) = save_tree_used;
3288 all->first_conversion_insn = get_insns ();
3289 all->last_conversion_insn = get_last_insn ();
3290 end_sequence ();
3291
3292 did_conversion = true;
3293 }
3294 else
3295 emit_move_insn (parmreg, validated_mem);
3296
3297 /* If we were passed a pointer but the actual value can safely live
3298 in a register, retrieve it and use it directly. */
3299 if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
3300 {
3301 /* We can't use nominal_mode, because it will have been set to
3302 Pmode above. We must use the actual mode of the parm. */
3303 if (use_register_for_decl (parm))
3304 {
3305 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3306 mark_user_reg (parmreg);
3307 }
3308 else
3309 {
3310 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3311 TYPE_MODE (TREE_TYPE (parm)),
3312 TYPE_ALIGN (TREE_TYPE (parm)));
3313 parmreg
3314 = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3315 GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3316 align);
3317 set_mem_attributes (parmreg, parm, 1);
3318 }
3319
3320 /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for
3321 the debug info in case it is not legitimate. */
3322 if (GET_MODE (parmreg) != GET_MODE (rtl))
3323 {
3324 rtx tempreg = gen_reg_rtx (GET_MODE (rtl));
3325 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3326
3327 push_to_sequence2 (all->first_conversion_insn,
3328 all->last_conversion_insn);
3329 emit_move_insn (tempreg, rtl);
3330 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3331 emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg,
3332 tempreg);
3333 all->first_conversion_insn = get_insns ();
3334 all->last_conversion_insn = get_last_insn ();
3335 end_sequence ();
3336
3337 did_conversion = true;
3338 }
3339 else
3340 emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl);
3341
3342 rtl = parmreg;
3343
3344 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3345 now the parm. */
3346 data->stack_parm = NULL;
3347 }
3348
3349 set_parm_rtl (parm, rtl);
3350
3351 /* Mark the register as eliminable if we did no conversion and it was
3352 copied from memory at a fixed offset, and the arg pointer was not
3353 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3354 offset formed an invalid address, such memory-equivalences as we
3355 make here would screw up life analysis for it. */
3356 if (data->nominal_mode == data->passed_mode
3357 && !did_conversion
3358 && data->stack_parm != 0
3359 && MEM_P (data->stack_parm)
3360 && data->locate.offset.var == 0
3361 && reg_mentioned_p (virtual_incoming_args_rtx,
3362 XEXP (data->stack_parm, 0)))
3363 {
3364 rtx_insn *linsn = get_last_insn ();
3365 rtx_insn *sinsn;
3366 rtx set;
3367
3368 /* Mark complex types separately. */
3369 if (GET_CODE (parmreg) == CONCAT)
3370 {
3371 scalar_mode submode = GET_MODE_INNER (GET_MODE (parmreg));
3372 int regnor = REGNO (XEXP (parmreg, 0));
3373 int regnoi = REGNO (XEXP (parmreg, 1));
3374 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3375 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3376 GET_MODE_SIZE (submode));
3377
3378 /* Scan backwards for the set of the real and
3379 imaginary parts. */
3380 for (sinsn = linsn; sinsn != 0;
3381 sinsn = prev_nonnote_insn (sinsn))
3382 {
3383 set = single_set (sinsn);
3384 if (set == 0)
3385 continue;
3386
3387 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3388 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3389 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3390 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3391 }
3392 }
3393 else
3394 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3395 }
3396
3397 /* For pointer data type, suggest pointer register. */
3398 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3399 mark_reg_pointer (parmreg,
3400 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3401 }
3402
3403 /* A subroutine of assign_parms. Allocate stack space to hold the current
3404 parameter. Get it there. Perform all ABI specified conversions. */
3405
3406 static void
3407 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3408 struct assign_parm_data_one *data)
3409 {
3410 /* Value must be stored in the stack slot STACK_PARM during function
3411 execution. */
3412 bool to_conversion = false;
3413
3414 assign_parm_remove_parallels (data);
3415
3416 if (data->promoted_mode != data->nominal_mode)
3417 {
3418 /* Conversion is required. */
3419 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3420
3421 emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
3422
3423 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3424 to_conversion = true;
3425
3426 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3427 TYPE_UNSIGNED (TREE_TYPE (parm)));
3428
3429 if (data->stack_parm)
3430 {
3431 poly_int64 offset
3432 = subreg_lowpart_offset (data->nominal_mode,
3433 GET_MODE (data->stack_parm));
3434 /* ??? This may need a big-endian conversion on sparc64. */
3435 data->stack_parm
3436 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3437 if (maybe_ne (offset, 0) && MEM_OFFSET_KNOWN_P (data->stack_parm))
3438 set_mem_offset (data->stack_parm,
3439 MEM_OFFSET (data->stack_parm) + offset);
3440 }
3441 }
3442
3443 if (data->entry_parm != data->stack_parm)
3444 {
3445 rtx src, dest;
3446
3447 if (data->stack_parm == 0)
3448 {
3449 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3450 GET_MODE (data->entry_parm),
3451 TYPE_ALIGN (data->passed_type));
3452 data->stack_parm
3453 = assign_stack_local (GET_MODE (data->entry_parm),
3454 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3455 align);
3456 set_mem_attributes (data->stack_parm, parm, 1);
3457 }
3458
3459 dest = validize_mem (copy_rtx (data->stack_parm));
3460 src = validize_mem (copy_rtx (data->entry_parm));
3461
3462 if (MEM_P (src))
3463 {
3464 /* Use a block move to handle potentially misaligned entry_parm. */
3465 if (!to_conversion)
3466 push_to_sequence2 (all->first_conversion_insn,
3467 all->last_conversion_insn);
3468 to_conversion = true;
3469
3470 emit_block_move (dest, src,
3471 GEN_INT (int_size_in_bytes (data->passed_type)),
3472 BLOCK_OP_NORMAL);
3473 }
3474 else
3475 {
3476 if (!REG_P (src))
3477 src = force_reg (GET_MODE (src), src);
3478 emit_move_insn (dest, src);
3479 }
3480 }
3481
3482 if (to_conversion)
3483 {
3484 all->first_conversion_insn = get_insns ();
3485 all->last_conversion_insn = get_last_insn ();
3486 end_sequence ();
3487 }
3488
3489 set_parm_rtl (parm, data->stack_parm);
3490 }
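/* Note that the conversion insns generated above are not emitted in
   place: they accumulate on the sequence delimited by
   ALL->first_conversion_insn and ALL->last_conversion_insn, and
   assign_parms emits the whole batch only after every parameter has
   been copied out of its incoming hard register.  A minimal sketch of
   the idiom used throughout this file:

	push_to_sequence2 (all->first_conversion_insn,
			   all->last_conversion_insn);
	... emit conversion insns ...
	all->first_conversion_insn = get_insns ();
	all->last_conversion_insn = get_last_insn ();
	end_sequence ();

   Emitting the conversions eagerly could instead clobber hard argument
   registers that later parameters still need to read.  */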
3491
3492 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3493 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3494
3495 static void
3496 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3497 vec<tree> fnargs)
3498 {
3499 tree parm;
3500 tree orig_fnargs = all->orig_fnargs;
3501 unsigned i = 0;
3502
3503 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3504 {
3505 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3506 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3507 {
3508 rtx tmp, real, imag;
3509 scalar_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3510
3511 real = DECL_RTL (fnargs[i]);
3512 imag = DECL_RTL (fnargs[i + 1]);
3513 if (inner != GET_MODE (real))
3514 {
3515 real = gen_lowpart_SUBREG (inner, real);
3516 imag = gen_lowpart_SUBREG (inner, imag);
3517 }
3518
3519 if (TREE_ADDRESSABLE (parm))
3520 {
3521 rtx rmem, imem;
3522 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3523 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3524 DECL_MODE (parm),
3525 TYPE_ALIGN (TREE_TYPE (parm)));
3526
3527 /* split_complex_arg put the real and imag parts in
3528 pseudos. Move them to memory. */
3529 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3530 set_mem_attributes (tmp, parm, 1);
3531 rmem = adjust_address_nv (tmp, inner, 0);
3532 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3533 push_to_sequence2 (all->first_conversion_insn,
3534 all->last_conversion_insn);
3535 emit_move_insn (rmem, real);
3536 emit_move_insn (imem, imag);
3537 all->first_conversion_insn = get_insns ();
3538 all->last_conversion_insn = get_last_insn ();
3539 end_sequence ();
3540 }
3541 else
3542 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3543 set_parm_rtl (parm, tmp);
3544
3545 real = DECL_INCOMING_RTL (fnargs[i]);
3546 imag = DECL_INCOMING_RTL (fnargs[i + 1]);
3547 if (inner != GET_MODE (real))
3548 {
3549 real = gen_lowpart_SUBREG (inner, real);
3550 imag = gen_lowpart_SUBREG (inner, imag);
3551 }
3552 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3553 set_decl_incoming_rtl (parm, tmp, false);
3554 i++;
3555 }
3556 }
3557 }
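/* As an illustration: for a _Complex double parameter on a target
   whose split_complex_arg hook returns true, the augmented argument
   list carries two DFmode halves.  The loop above recombines them so
   that (in illustrative RTL)

	DECL_RTL (parm) == (concat:DC (reg:DF <real>) (reg:DF <imag>))

   or, when the parameter is addressable, a stack slot holding the two
   parts at offsets 0 and GET_MODE_SIZE (DFmode).  */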
3558
3559 /* Assign RTL expressions to the function's parameters. This may involve
3560 copying them into registers and using those registers as the DECL_RTL. */
3561
3562 static void
3563 assign_parms (tree fndecl)
3564 {
3565 struct assign_parm_data_all all;
3566 tree parm;
3567 vec<tree> fnargs;
3568 unsigned i;
3569
3570 crtl->args.internal_arg_pointer
3571 = targetm.calls.internal_arg_pointer ();
3572
3573 assign_parms_initialize_all (&all);
3574 fnargs = assign_parms_augmented_arg_list (&all);
3575
3576 FOR_EACH_VEC_ELT (fnargs, i, parm)
3577 {
3578 struct assign_parm_data_one data;
3579
3580 /* Extract the type of PARM; adjust it according to ABI. */
3581 assign_parm_find_data_types (&all, parm, &data);
3582
3583 /* Early out for errors and void parameters. */
3584 if (data.passed_mode == VOIDmode)
3585 {
3586 SET_DECL_RTL (parm, const0_rtx);
3587 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3588 continue;
3589 }
3590
3591 /* Estimate stack alignment from parameter alignment. */
3592 if (SUPPORTS_STACK_ALIGNMENT)
3593 {
3594 unsigned int align
3595 = targetm.calls.function_arg_boundary (data.promoted_mode,
3596 data.passed_type);
3597 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3598 align);
3599 if (TYPE_ALIGN (data.nominal_type) > align)
3600 align = MINIMUM_ALIGNMENT (data.nominal_type,
3601 TYPE_MODE (data.nominal_type),
3602 TYPE_ALIGN (data.nominal_type));
3603 if (crtl->stack_alignment_estimated < align)
3604 {
3605 gcc_assert (!crtl->stack_realign_processed);
3606 crtl->stack_alignment_estimated = align;
3607 }
3608 }
3609
3610 /* Find out where the parameter arrives in this function. */
3611 assign_parm_find_entry_rtl (&all, &data);
3612
3613 /* Find out where stack space for this parameter might be. */
3614 if (assign_parm_is_stack_parm (&all, &data))
3615 {
3616 assign_parm_find_stack_rtl (parm, &data);
3617 assign_parm_adjust_entry_rtl (&data);
3618 }
3619 /* Record permanently how this parm was passed. */
3620 if (data.passed_pointer)
3621 {
3622 rtx incoming_rtl
3623 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3624 data.entry_parm);
3625 set_decl_incoming_rtl (parm, incoming_rtl, true);
3626 }
3627 else
3628 set_decl_incoming_rtl (parm, data.entry_parm, false);
3629
3630 assign_parm_adjust_stack_rtl (&data);
3631
3632 if (assign_parm_setup_block_p (&data))
3633 assign_parm_setup_block (&all, parm, &data);
3634 else if (data.passed_pointer || use_register_for_decl (parm))
3635 assign_parm_setup_reg (&all, parm, &data);
3636 else
3637 assign_parm_setup_stack (&all, parm, &data);
3638
3639 if (cfun->stdarg && !DECL_CHAIN (parm))
3640 assign_parms_setup_varargs (&all, &data, false);
3641
3642 /* Update info on where next arg arrives in registers. */
3643 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3644 data.passed_type, data.named_arg);
3645 }
3646
3647 if (targetm.calls.split_complex_arg)
3648 assign_parms_unsplit_complex (&all, fnargs);
3649
3650 fnargs.release ();
3651
3652 /* Output all parameter conversion instructions (possibly including calls)
3653 now that all parameters have been copied out of hard registers. */
3654 emit_insn (all.first_conversion_insn);
3655
3656 /* Estimate reload stack alignment from scalar return mode. */
3657 if (SUPPORTS_STACK_ALIGNMENT)
3658 {
3659 if (DECL_RESULT (fndecl))
3660 {
3661 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3662 machine_mode mode = TYPE_MODE (type);
3663
3664 if (mode != BLKmode
3665 && mode != VOIDmode
3666 && !AGGREGATE_TYPE_P (type))
3667 {
3668 unsigned int align = GET_MODE_ALIGNMENT (mode);
3669 if (crtl->stack_alignment_estimated < align)
3670 {
3671 gcc_assert (!crtl->stack_realign_processed);
3672 crtl->stack_alignment_estimated = align;
3673 }
3674 }
3675 }
3676 }
3677
3678 /* If we are receiving a struct value address as the first argument, set up
3679 the RTL for the function result. As this might require code to convert
3680 the transmitted address to Pmode, we do this here to ensure that possible
3681 preliminary conversions of the address have been emitted already. */
3682 if (all.function_result_decl)
3683 {
3684 tree result = DECL_RESULT (current_function_decl);
3685 rtx addr = DECL_RTL (all.function_result_decl);
3686 rtx x;
3687
3688 if (DECL_BY_REFERENCE (result))
3689 {
3690 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3691 x = addr;
3692 }
3693 else
3694 {
3695 SET_DECL_VALUE_EXPR (result,
3696 build1 (INDIRECT_REF, TREE_TYPE (result),
3697 all.function_result_decl));
3698 addr = convert_memory_address (Pmode, addr);
3699 x = gen_rtx_MEM (DECL_MODE (result), addr);
3700 set_mem_attributes (x, result, 1);
3701 }
3702
3703 DECL_HAS_VALUE_EXPR_P (result) = 1;
3704
3705 set_parm_rtl (result, x);
3706 }
3707
3708 /* We have aligned all the args, so add space for the pretend args. */
3709 crtl->args.pretend_args_size = all.pretend_args_size;
3710 all.stack_args_size.constant += all.extra_pretend_bytes;
3711 crtl->args.size = all.stack_args_size.constant;
3712
3713 /* Adjust function incoming argument size for alignment and
3714 minimum length. */
3715
3716 crtl->args.size = upper_bound (crtl->args.size, all.reg_parm_stack_space);
3717 crtl->args.size = aligned_upper_bound (crtl->args.size,
3718 PARM_BOUNDARY / BITS_PER_UNIT);
3719
3720 if (ARGS_GROW_DOWNWARD)
3721 {
3722 crtl->args.arg_offset_rtx
3723 = (all.stack_args_size.var == 0
3724 ? gen_int_mode (-all.stack_args_size.constant, Pmode)
3725 : expand_expr (size_diffop (all.stack_args_size.var,
3726 size_int (-all.stack_args_size.constant)),
3727 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3728 }
3729 else
3730 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3731
3732 /* See how many bytes, if any, of its args a function should try to pop
3733 on return. */
3734
3735 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3736 TREE_TYPE (fndecl),
3737 crtl->args.size);
3738
3739 /* For a stdarg.h function, save info about the
3740 regs and stack space used by the named args. */
3741
3742 crtl->args.info = all.args_so_far_v;
3743
3744 /* Set the rtx used for the function return value. Put this in its
3745 own variable so any optimizers that need this information don't have
3746 to include tree.h. Do this here so it gets done when an inlined
3747 function gets output. */
3748
3749 crtl->return_rtx
3750 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3751 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3752
3753 /* If scalar return value was computed in a pseudo-reg, or was a named
3754 return value that got dumped to the stack, copy that to the hard
3755 return register. */
3756 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3757 {
3758 tree decl_result = DECL_RESULT (fndecl);
3759 rtx decl_rtl = DECL_RTL (decl_result);
3760
3761 if (REG_P (decl_rtl)
3762 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3763 : DECL_REGISTER (decl_result))
3764 {
3765 rtx real_decl_rtl;
3766
3767 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3768 fndecl, true);
3769 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3770 /* The delay slot scheduler assumes that crtl->return_rtx
3771 holds the hard register containing the return value, not a
3772 temporary pseudo. */
3773 crtl->return_rtx = real_decl_rtl;
3774 }
3775 }
3776 }
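/* In outline, the per-parameter work above proceeds in four steps:
   assign_parm_find_data_types computes the passed, nominal and
   promoted modes; assign_parm_find_entry_rtl locates where the caller
   left the value (registers, stack, or partially both); one of
   assign_parm_setup_{block,reg,stack} moves the value to its DECL_RTL
   home; and function_arg_advance steps the cumulative-args state to
   the next argument.  */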
3777
3778 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3779 For all seen types, gimplify their sizes. */
3780
3781 static tree
3782 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3783 {
3784 tree t = *tp;
3785
3786 *walk_subtrees = 0;
3787 if (TYPE_P (t))
3788 {
3789 if (POINTER_TYPE_P (t))
3790 *walk_subtrees = 1;
3791 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3792 && !TYPE_SIZES_GIMPLIFIED (t))
3793 {
3794 gimplify_type_sizes (t, (gimple_seq *) data);
3795 *walk_subtrees = 1;
3796 }
3797 }
3798
3799 return NULL;
3800 }
3801
3802 /* Gimplify the parameter list for current_function_decl. This involves
3803 evaluating SAVE_EXPRs of variable sized parameters and generating code
3804 to implement callee-copies reference parameters. Returns a sequence of
3805 statements to add to the beginning of the function. */
3806
3807 gimple_seq
3808 gimplify_parameters (gimple_seq *cleanup)
3809 {
3810 struct assign_parm_data_all all;
3811 tree parm;
3812 gimple_seq stmts = NULL;
3813 vec<tree> fnargs;
3814 unsigned i;
3815
3816 assign_parms_initialize_all (&all);
3817 fnargs = assign_parms_augmented_arg_list (&all);
3818
3819 FOR_EACH_VEC_ELT (fnargs, i, parm)
3820 {
3821 struct assign_parm_data_one data;
3822
3823 /* Extract the type of PARM; adjust it according to ABI. */
3824 assign_parm_find_data_types (&all, parm, &data);
3825
3826 /* Early out for errors and void parameters. */
3827 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3828 continue;
3829
3830 /* Update info on where next arg arrives in registers. */
3831 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3832 data.passed_type, data.named_arg);
3833
3834 /* ??? Once upon a time variable_size stuffed parameter list
3835 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3836 turned out to be less than manageable in the gimple world.
3837 Now we have to hunt them down ourselves. */
3838 walk_tree_without_duplicates (&data.passed_type,
3839 gimplify_parm_type, &stmts);
3840
3841 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3842 {
3843 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3844 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3845 }
3846
3847 if (data.passed_pointer)
3848 {
3849 tree type = TREE_TYPE (data.passed_type);
3850 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
3851 type, data.named_arg))
3852 {
3853 tree local, t;
3854
3855 /* For constant-sized objects, this is trivial; for
3856 variable-sized objects, we have to play games. */
3857 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3858 && !(flag_stack_check == GENERIC_STACK_CHECK
3859 && compare_tree_int (DECL_SIZE_UNIT (parm),
3860 STACK_CHECK_MAX_VAR_SIZE) > 0))
3861 {
3862 local = create_tmp_var (type, get_name (parm));
3863 DECL_IGNORED_P (local) = 0;
3864 /* If PARM was addressable, move that flag over
3865 to the local copy, as its address will be taken,
3866 not the PARM's. Keep the parm marked address-taken,
3867 as we'll query that flag during gimplification. */
3868 if (TREE_ADDRESSABLE (parm))
3869 TREE_ADDRESSABLE (local) = 1;
3870 else if (TREE_CODE (type) == COMPLEX_TYPE
3871 || TREE_CODE (type) == VECTOR_TYPE)
3872 DECL_GIMPLE_REG_P (local) = 1;
3873
3874 if (!is_gimple_reg (local)
3875 && flag_stack_reuse != SR_NONE)
3876 {
3877 tree clobber = build_constructor (type, NULL);
3878 gimple *clobber_stmt;
3879 TREE_THIS_VOLATILE (clobber) = 1;
3880 clobber_stmt = gimple_build_assign (local, clobber);
3881 gimple_seq_add_stmt (cleanup, clobber_stmt);
3882 }
3883 }
3884 else
3885 {
3886 tree ptr_type, addr;
3887
3888 ptr_type = build_pointer_type (type);
3889 addr = create_tmp_reg (ptr_type, get_name (parm));
3890 DECL_IGNORED_P (addr) = 0;
3891 local = build_fold_indirect_ref (addr);
3892
3893 t = build_alloca_call_expr (DECL_SIZE_UNIT (parm),
3894 DECL_ALIGN (parm),
3895 max_int_size_in_bytes (type));
3896 /* The call has been built for a variable-sized object. */
3897 CALL_ALLOCA_FOR_VAR_P (t) = 1;
3898 t = fold_convert (ptr_type, t);
3899 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3900 gimplify_and_add (t, &stmts);
3901 }
3902
3903 gimplify_assign (local, parm, &stmts);
3904
3905 SET_DECL_VALUE_EXPR (parm, local);
3906 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3907 }
3908 }
3909 }
3910
3911 fnargs.release ();
3912
3913 return stmts;
3914 }
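/* As a rough illustration (names are for exposition only): for a
   callee-copied reference parameter of variable size, the code above
   produces something like

	addr = alloca-style builtin (DECL_SIZE_UNIT (parm),
				     DECL_ALIGN (parm));
	*addr = parm;			(copy the incoming object)
	DECL_VALUE_EXPR (parm) = *addr;

   so that every later use of the parameter in the function body reads
   the freshly made local copy rather than the caller's object.  */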
3915 \f
3916 /* Compute the size and offset from the start of the stacked arguments for a
3917 parm passed in mode PASSED_MODE and with type TYPE.
3918
3919 INITIAL_OFFSET_PTR points to the current offset into the stacked
3920 arguments.
3921
3922 The starting offset and size for this parm are returned in
3923 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3924 nonzero, the offset is that of the stack slot, which is returned in
3925 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3926 padding required from the initial offset ptr to the stack slot.
3927
3928 IN_REGS is nonzero if the argument will be passed in registers. It will
3929 never be set if REG_PARM_STACK_SPACE is not defined.
3930
3931 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
3932 for arguments which are passed in registers.
3933
3934 FNDECL is the function in which the argument was defined.
3935
3936 There are two types of rounding that are done. The first, controlled by
3937 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3938 argument list to be aligned to the specific boundary (in bits). This
3939 rounding affects the initial and starting offsets, but not the argument
3940 size.
3941
3942 The second, controlled by TARGET_FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3943 optionally rounds the size of the parm to PARM_BOUNDARY. The
3944 initial offset is not affected by this rounding, while the size always
3945 is and the starting offset may be. */
3946
3947 /* LOCATE->OFFSET will be negative in the ARGS_GROW_DOWNWARD case;
3948 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3949 callers pass in the total size of args so far as
3950 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
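/* A worked example with hypothetical numbers: assume an
   args-grow-upward target with PARM_BOUNDARY == 32 and
   BOUNDARY == ROUND_BOUNDARY == 64.  A 12-byte BLKmode argument
   arriving at a running offset of 4 is first padded up to offset 8,
   so LOCATE->SLOT_OFFSET becomes 8 and LOCATE->ALIGNMENT_PAD 4; with
   PAD_UPWARD its 12 bytes then occupy offsets 8..19 while
   LOCATE->SIZE is rounded up to 16.  */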
3951
3952 void
3953 locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
3954 int reg_parm_stack_space, int partial,
3955 tree fndecl ATTRIBUTE_UNUSED,
3956 struct args_size *initial_offset_ptr,
3957 struct locate_and_pad_arg_data *locate)
3958 {
3959 tree sizetree;
3960 pad_direction where_pad;
3961 unsigned int boundary, round_boundary;
3962 int part_size_in_regs;
3963
3964 /* If we have found a stack parm before we reach the end of the
3965 area reserved for registers, skip that area. */
3966 if (! in_regs)
3967 {
3968 if (reg_parm_stack_space > 0)
3969 {
3970 if (initial_offset_ptr->var
3971 || !ordered_p (initial_offset_ptr->constant,
3972 reg_parm_stack_space))
3973 {
3974 initial_offset_ptr->var
3975 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3976 ssize_int (reg_parm_stack_space));
3977 initial_offset_ptr->constant = 0;
3978 }
3979 else
3980 initial_offset_ptr->constant
3981 = ordered_max (initial_offset_ptr->constant,
3982 reg_parm_stack_space);
3983 }
3984 }
3985
3986 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3987
3988 sizetree = (type
3989 ? arg_size_in_bytes (type)
3990 : size_int (GET_MODE_SIZE (passed_mode)));
3991 where_pad = targetm.calls.function_arg_padding (passed_mode, type);
3992 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
3993 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
3994 type);
3995 locate->where_pad = where_pad;
3996
3997 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3998 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3999 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4000
4001 locate->boundary = boundary;
4002
4003 if (SUPPORTS_STACK_ALIGNMENT)
4004 {
4005 /* stack_alignment_estimated can't change after stack has been
4006 realigned. */
4007 if (crtl->stack_alignment_estimated < boundary)
4008 {
4009 if (!crtl->stack_realign_processed)
4010 crtl->stack_alignment_estimated = boundary;
4011 else
4012 {
4013 /* If stack is realigned and stack alignment value
4014 hasn't been finalized, it is OK not to increase
4015 stack_alignment_estimated. The bigger alignment
4016 requirement is recorded in stack_alignment_needed
4017 below. */
4018 gcc_assert (!crtl->stack_realign_finalized
4019 && crtl->stack_realign_needed);
4020 }
4021 }
4022 }
4023
4024 if (ARGS_GROW_DOWNWARD)
4025 {
4026 locate->slot_offset.constant = -initial_offset_ptr->constant;
4027 if (initial_offset_ptr->var)
4028 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
4029 initial_offset_ptr->var);
4030
4031 {
4032 tree s2 = sizetree;
4033 if (where_pad != PAD_NONE
4034 && (!tree_fits_uhwi_p (sizetree)
4035 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4036 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
4037 SUB_PARM_SIZE (locate->slot_offset, s2);
4038 }
4039
4040 locate->slot_offset.constant += part_size_in_regs;
4041
4042 if (!in_regs || reg_parm_stack_space > 0)
4043 pad_to_arg_alignment (&locate->slot_offset, boundary,
4044 &locate->alignment_pad);
4045
4046 locate->size.constant = (-initial_offset_ptr->constant
4047 - locate->slot_offset.constant);
4048 if (initial_offset_ptr->var)
4049 locate->size.var = size_binop (MINUS_EXPR,
4050 size_binop (MINUS_EXPR,
4051 ssize_int (0),
4052 initial_offset_ptr->var),
4053 locate->slot_offset.var);
4054
4055 /* Pad_below needs the pre-rounded size to know how much to pad
4056 below. */
4057 locate->offset = locate->slot_offset;
4058 if (where_pad == PAD_DOWNWARD)
4059 pad_below (&locate->offset, passed_mode, sizetree);
4060
4061 }
4062 else
4063 {
4064 if (!in_regs || reg_parm_stack_space > 0)
4065 pad_to_arg_alignment (initial_offset_ptr, boundary,
4066 &locate->alignment_pad);
4067 locate->slot_offset = *initial_offset_ptr;
4068
4069 #ifdef PUSH_ROUNDING
4070 if (passed_mode != BLKmode)
4071 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4072 #endif
4073
4074 /* Pad_below needs the pre-rounded size to know how much to pad below
4075 so this must be done before rounding up. */
4076 locate->offset = locate->slot_offset;
4077 if (where_pad == PAD_DOWNWARD)
4078 pad_below (&locate->offset, passed_mode, sizetree);
4079
4080 if (where_pad != PAD_NONE
4081 && (!tree_fits_uhwi_p (sizetree)
4082 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4083 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
4084
4085 ADD_PARM_SIZE (locate->size, sizetree);
4086
4087 locate->size.constant -= part_size_in_regs;
4088 }
4089
4090 locate->offset.constant
4091 += targetm.calls.function_arg_offset (passed_mode, type);
4092 }
4093
4094 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4095 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4096
4097 static void
4098 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
4099 struct args_size *alignment_pad)
4100 {
4101 tree save_var = NULL_TREE;
4102 poly_int64 save_constant = 0;
4103 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4104 poly_int64 sp_offset = STACK_POINTER_OFFSET;
4105
4106 #ifdef SPARC_STACK_BOUNDARY_HACK
4107 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
4108 the real alignment of %sp. However, when it does this, the
4109 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
4110 if (SPARC_STACK_BOUNDARY_HACK)
4111 sp_offset = 0;
4112 #endif
4113
4114 if (boundary > PARM_BOUNDARY)
4115 {
4116 save_var = offset_ptr->var;
4117 save_constant = offset_ptr->constant;
4118 }
4119
4120 alignment_pad->var = NULL_TREE;
4121 alignment_pad->constant = 0;
4122
4123 if (boundary > BITS_PER_UNIT)
4124 {
4125 int misalign;
4126 if (offset_ptr->var
4127 || !known_misalignment (offset_ptr->constant + sp_offset,
4128 boundary_in_bytes, &misalign))
4129 {
4130 tree sp_offset_tree = ssize_int (sp_offset);
4131 tree offset = size_binop (PLUS_EXPR,
4132 ARGS_SIZE_TREE (*offset_ptr),
4133 sp_offset_tree);
4134 tree rounded;
4135 if (ARGS_GROW_DOWNWARD)
4136 rounded = round_down (offset, boundary / BITS_PER_UNIT);
4137 else
4138 rounded = round_up (offset, boundary / BITS_PER_UNIT);
4139
4140 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
4141 /* ARGS_SIZE_TREE includes constant term. */
4142 offset_ptr->constant = 0;
4143 if (boundary > PARM_BOUNDARY)
4144 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
4145 save_var);
4146 }
4147 else
4148 {
4149 if (ARGS_GROW_DOWNWARD)
4150 offset_ptr->constant -= misalign;
4151 else
4152 offset_ptr->constant += -misalign & (boundary_in_bytes - 1);
4153
4154 if (boundary > PARM_BOUNDARY)
4155 alignment_pad->constant = offset_ptr->constant - save_constant;
4156 }
4157 }
4158 }
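/* Example with hypothetical numbers: for BOUNDARY == 128 bits
   (16 bytes), SP_OFFSET == 0 and a compile-time offset of 20, the
   misalignment is 4, so 12 bytes of padding are added:
   OFFSET_PTR->constant becomes 32, and since 128 exceeds
   PARM_BOUNDARY on typical targets, ALIGNMENT_PAD->constant records
   the 12 bytes.  */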
4159
4160 static void
4161 pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
4162 {
4163 unsigned int align = PARM_BOUNDARY / BITS_PER_UNIT;
4164 int misalign;
4165 if (passed_mode != BLKmode
4166 && known_misalignment (GET_MODE_SIZE (passed_mode), align, &misalign))
4167 offset_ptr->constant += -misalign & (align - 1);
4168 else
4169 {
4170 if (TREE_CODE (sizetree) != INTEGER_CST
4171 || (TREE_INT_CST_LOW (sizetree) & (align - 1)) != 0)
4172 {
4173 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4174 tree s2 = round_up (sizetree, align);
4175 /* Add it in. */
4176 ADD_PARM_SIZE (*offset_ptr, s2);
4177 SUB_PARM_SIZE (*offset_ptr, sizetree);
4178 }
4179 }
4180 }
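/* Example: with PARM_BOUNDARY == 64 bits (ALIGN == 8 bytes), a 5-byte
   value padded downward has misalign == 5, so -5 & 7 == 3 bytes are
   added below it; the value itself then starts 3 bytes above its slot
   offset.  */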
4181 \f
4182
4183 /* True if register REGNO was alive at a place where `setjmp' was
4184 called and was set more than once or is an argument. Such regs may
4185 be clobbered by `longjmp'. */
4186
4187 static bool
4188 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
4189 {
4190 /* There appear to be cases where some local vars never reach the
4191 backend but have bogus regnos. */
4192 if (regno >= max_reg_num ())
4193 return false;
4194
4195 return ((REG_N_SETS (regno) > 1
4196 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4197 regno))
4198 && REGNO_REG_SET_P (setjmp_crosses, regno));
4199 }
4200
4201 /* Walk the tree of blocks describing the binding levels within a
4202 function and warn about variables that might be killed by setjmp or
4203 vfork. This is done after flow analysis and before register
4204 allocation, since register allocation maps the pseudo-regs to hard regs. */
4205 regs. */
4206
4207 static void
4208 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
4209 {
4210 tree decl, sub;
4211
4212 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
4213 {
4214 if (VAR_P (decl)
4215 && DECL_RTL_SET_P (decl)
4216 && REG_P (DECL_RTL (decl))
4217 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4218 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
4219 " %<longjmp%> or %<vfork%>", decl);
4220 }
4221
4222 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
4223 setjmp_vars_warning (setjmp_crosses, sub);
4224 }
4225
4226 /* Do the appropriate part of setjmp_vars_warning
4227 but for arguments instead of local variables. */
4228
4229 static void
4230 setjmp_args_warning (bitmap setjmp_crosses)
4231 {
4232 tree decl;
4233 for (decl = DECL_ARGUMENTS (current_function_decl);
4234 decl; decl = DECL_CHAIN (decl))
4235 if (DECL_RTL (decl) != 0
4236 && REG_P (DECL_RTL (decl))
4237 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4238 warning (OPT_Wclobbered,
4239 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4240 decl);
4241 }
4242
4243 /* Generate warning messages for variables live across setjmp. */
4244
4245 void
4246 generate_setjmp_warnings (void)
4247 {
4248 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4249
4250 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
4251 || bitmap_empty_p (setjmp_crosses))
4252 return;
4253
4254 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4255 setjmp_args_warning (setjmp_crosses);
4256 }
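/* These warnings flag the classic setjmp hazard: an automatic
   variable held in a register, modified between setjmp and longjmp,
   and read again afterwards.  An illustrative C example:

	jmp_buf env;
	int f (void)
	{
	  int n = 0;		   (may be cached in a register)
	  if (setjmp (env) == 0)
	    {
	      n = 1;
	      g ();		   (may longjmp back to the setjmp)
	    }
	  return n;		   (n is indeterminate after longjmp)
	}

   The C standard requires such variables to be declared volatile for
   their values to be reliable after longjmp.  */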
4257
4258 \f
4259 /* Reverse the order of elements in the fragment chain T of blocks,
4260 and return the new head of the chain (old last element).
4261 In addition, clear BLOCK_SAME_RANGE flags where needed
4262 and adjust BLOCK_SUPERCONTEXT from the super fragment to
4263 its super fragment origin. */
4264
4265 static tree
4266 block_fragments_nreverse (tree t)
4267 {
4268 tree prev = 0, block, next, prev_super = 0;
4269 tree super = BLOCK_SUPERCONTEXT (t);
4270 if (BLOCK_FRAGMENT_ORIGIN (super))
4271 super = BLOCK_FRAGMENT_ORIGIN (super);
4272 for (block = t; block; block = next)
4273 {
4274 next = BLOCK_FRAGMENT_CHAIN (block);
4275 BLOCK_FRAGMENT_CHAIN (block) = prev;
4276 if ((prev && !BLOCK_SAME_RANGE (prev))
4277 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4278 != prev_super))
4279 BLOCK_SAME_RANGE (block) = 0;
4280 prev_super = BLOCK_SUPERCONTEXT (block);
4281 BLOCK_SUPERCONTEXT (block) = super;
4282 prev = block;
4283 }
4284 t = BLOCK_FRAGMENT_ORIGIN (t);
4285 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4286 != prev_super)
4287 BLOCK_SAME_RANGE (t) = 0;
4288 BLOCK_SUPERCONTEXT (t) = super;
4289 return prev;
4290 }
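/* Block fragments arise when a lexical block ends up covering
   discontiguous address ranges, e.g. after basic-block reordering or
   hot/cold partitioning.  reorder_blocks_1 below creates each extra
   range as a copy_node of the original block chained through
   BLOCK_FRAGMENT_CHAIN, with BLOCK_FRAGMENT_ORIGIN pointing back at
   the original.  */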
4291
4292 /* Reverse the order of elements in the chain T of blocks,
4293 and return the new head of the chain (old last element).
4294 Also do the same on subblocks and reverse the order of elements
4295 in BLOCK_FRAGMENT_CHAIN as well. */
4296
4297 static tree
4298 blocks_nreverse_all (tree t)
4299 {
4300 tree prev = 0, block, next;
4301 for (block = t; block; block = next)
4302 {
4303 next = BLOCK_CHAIN (block);
4304 BLOCK_CHAIN (block) = prev;
4305 if (BLOCK_FRAGMENT_CHAIN (block)
4306 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4307 {
4308 BLOCK_FRAGMENT_CHAIN (block)
4309 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4310 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4311 BLOCK_SAME_RANGE (block) = 0;
4312 }
4313 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4314 prev = block;
4315 }
4316 return prev;
4317 }
4318
4319
4320 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4321 and create duplicate blocks. */
4322 /* ??? Need an option to either create block fragments or to create
4323 abstract origin duplicates of a source block. It really depends
4324 on what optimization has been performed. */
4325
4326 void
4327 reorder_blocks (void)
4328 {
4329 tree block = DECL_INITIAL (current_function_decl);
4330
4331 if (block == NULL_TREE)
4332 return;
4333
4334 auto_vec<tree, 10> block_stack;
4335
4336 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4337 clear_block_marks (block);
4338
4339 /* Prune the old trees away, so that they don't get in the way. */
4340 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4341 BLOCK_CHAIN (block) = NULL_TREE;
4342
4343 /* Recreate the block tree from the note nesting. */
4344 reorder_blocks_1 (get_insns (), block, &block_stack);
4345 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4346 }
4347
4348 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4349
4350 void
4351 clear_block_marks (tree block)
4352 {
4353 while (block)
4354 {
4355 TREE_ASM_WRITTEN (block) = 0;
4356 clear_block_marks (BLOCK_SUBBLOCKS (block));
4357 block = BLOCK_CHAIN (block);
4358 }
4359 }
4360
4361 static void
4362 reorder_blocks_1 (rtx_insn *insns, tree current_block,
4363 vec<tree> *p_block_stack)
4364 {
4365 rtx_insn *insn;
4366 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
4367
4368 for (insn = insns; insn; insn = NEXT_INSN (insn))
4369 {
4370 if (NOTE_P (insn))
4371 {
4372 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4373 {
4374 tree block = NOTE_BLOCK (insn);
4375 tree origin;
4376
4377 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4378 origin = block;
4379
4380 if (prev_end)
4381 BLOCK_SAME_RANGE (prev_end) = 0;
4382 prev_end = NULL_TREE;
4383
4384 /* If we have seen this block before, that means it now
4385 spans multiple address regions. Create a new fragment. */
4386 if (TREE_ASM_WRITTEN (block))
4387 {
4388 tree new_block = copy_node (block);
4389
4390 BLOCK_SAME_RANGE (new_block) = 0;
4391 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4392 BLOCK_FRAGMENT_CHAIN (new_block)
4393 = BLOCK_FRAGMENT_CHAIN (origin);
4394 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4395
4396 NOTE_BLOCK (insn) = new_block;
4397 block = new_block;
4398 }
4399
4400 if (prev_beg == current_block && prev_beg)
4401 BLOCK_SAME_RANGE (block) = 1;
4402
4403 prev_beg = origin;
4404
4405 BLOCK_SUBBLOCKS (block) = 0;
4406 TREE_ASM_WRITTEN (block) = 1;
4407 /* When there's only one block for the entire function,
4408 current_block == block and we mustn't do this, as it
4409 would cause infinite recursion. */
4410 if (block != current_block)
4411 {
4412 tree super;
4413 if (block != origin)
4414 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4415 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4416 (origin))
4417 == current_block);
4418 if (p_block_stack->is_empty ())
4419 super = current_block;
4420 else
4421 {
4422 super = p_block_stack->last ();
4423 gcc_assert (super == current_block
4424 || BLOCK_FRAGMENT_ORIGIN (super)
4425 == current_block);
4426 }
4427 BLOCK_SUPERCONTEXT (block) = super;
4428 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4429 BLOCK_SUBBLOCKS (current_block) = block;
4430 current_block = origin;
4431 }
4432 p_block_stack->safe_push (block);
4433 }
4434 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4435 {
4436 NOTE_BLOCK (insn) = p_block_stack->pop ();
4437 current_block = BLOCK_SUPERCONTEXT (current_block);
4438 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4439 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4440 prev_beg = NULL_TREE;
4441 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4442 ? NOTE_BLOCK (insn) : NULL_TREE;
4443 }
4444 }
4445 else
4446 {
4447 prev_beg = NULL_TREE;
4448 if (prev_end)
4449 BLOCK_SAME_RANGE (prev_end) = 0;
4450 prev_end = NULL_TREE;
4451 }
4452 }
4453 }
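/* For instance, a note stream of the shape

	NOTE_INSN_BLOCK_BEG (B1)
	  NOTE_INSN_BLOCK_BEG (B2)
	  NOTE_INSN_BLOCK_END (B2)
	NOTE_INSN_BLOCK_END (B1)
	NOTE_INSN_BLOCK_BEG (B2)	(B2 seen a second time)
	NOTE_INSN_BLOCK_END (B2)

   rebuilds B2 as a subblock of B1 and, at the second BEG note,
   creates a fragment copy of B2 because B2 now spans two separate
   address ranges.  */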
4454
4455 /* Reverse the order of elements in the chain T of blocks,
4456 and return the new head of the chain (old last element). */
4457
4458 tree
4459 blocks_nreverse (tree t)
4460 {
4461 tree prev = 0, block, next;
4462 for (block = t; block; block = next)
4463 {
4464 next = BLOCK_CHAIN (block);
4465 BLOCK_CHAIN (block) = prev;
4466 prev = block;
4467 }
4468 return prev;
4469 }
4470
4471 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4472 by modifying the last node in chain 1 to point to chain 2. */
4473
4474 tree
4475 block_chainon (tree op1, tree op2)
4476 {
4477 tree t1;
4478
4479 if (!op1)
4480 return op2;
4481 if (!op2)
4482 return op1;
4483
4484 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4485 continue;
4486 BLOCK_CHAIN (t1) = op2;
4487
4488 #ifdef ENABLE_TREE_CHECKING
4489 {
4490 tree t2;
4491 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4492 gcc_assert (t2 != t1);
4493 }
4494 #endif
4495
4496 return op1;
4497 }
4498
4499 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
4500 non-NULL, list them all into VECTOR, in a depth-first preorder
4501 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4502 blocks. */
4503
4504 static int
4505 all_blocks (tree block, tree *vector)
4506 {
4507 int n_blocks = 0;
4508
4509 while (block)
4510 {
4511 TREE_ASM_WRITTEN (block) = 0;
4512
4513 /* Record this block. */
4514 if (vector)
4515 vector[n_blocks] = block;
4516
4517 ++n_blocks;
4518
4519 /* Record the subblocks, and their subblocks... */
4520 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4521 vector ? vector + n_blocks : 0);
4522 block = BLOCK_CHAIN (block);
4523 }
4524
4525 return n_blocks;
4526 }
4527
4528 /* Return a vector containing all the blocks rooted at BLOCK. The
4529 number of elements in the vector is stored in N_BLOCKS_P. The
4530 vector is dynamically allocated; it is the caller's responsibility
4531 to call `free' on the pointer returned. */
4532
4533 static tree *
4534 get_block_vector (tree block, int *n_blocks_p)
4535 {
4536 tree *block_vector;
4537
4538 *n_blocks_p = all_blocks (block, NULL);
4539 block_vector = XNEWVEC (tree, *n_blocks_p);
4540 all_blocks (block, block_vector);
4541
4542 return block_vector;
4543 }
4544
4545 static GTY(()) int next_block_index = 2;
4546
4547 /* Set BLOCK_NUMBER for all the blocks in FN. */
4548
4549 void
4550 number_blocks (tree fn)
4551 {
4552 int i;
4553 int n_blocks;
4554 tree *block_vector;
4555
4556 /* For XCOFF debugging output, we start numbering the blocks
4557 from 1 within each function, rather than keeping a running
4558 count. */
4559 #if defined (XCOFF_DEBUGGING_INFO)
4560 if (write_symbols == XCOFF_DEBUG)
4561 next_block_index = 1;
4562 #endif
4563
4564 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4565
4566 /* The top-level BLOCK isn't numbered at all. */
4567 for (i = 1; i < n_blocks; ++i)
4568 /* We number the blocks from two. */
4569 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4570
4571 free (block_vector);
4572
4573 return;
4574 }
4575
4576 /* If VAR is present in a subblock of BLOCK, return the subblock. */
4577
4578 DEBUG_FUNCTION tree
4579 debug_find_var_in_block_tree (tree var, tree block)
4580 {
4581 tree t;
4582
4583 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4584 if (t == var)
4585 return block;
4586
4587 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4588 {
4589 tree ret = debug_find_var_in_block_tree (var, t);
4590 if (ret)
4591 return ret;
4592 }
4593
4594 return NULL_TREE;
4595 }
4596 \f
4597 /* Keep track of whether we're in a dummy function context. If we are,
4598 we don't want to invoke the set_current_function hook, because we'll
4599 get into trouble if the hook calls target_reinit () recursively or
4600 when the initial initialization is not yet complete. */
4601
4602 static bool in_dummy_function;
4603
4604 /* Invoke the target hook when setting cfun. Update the optimization options
4605 if the function uses different options than the default. */
4606
4607 static void
4608 invoke_set_current_function_hook (tree fndecl)
4609 {
4610 if (!in_dummy_function)
4611 {
4612 tree opts = ((fndecl)
4613 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4614 : optimization_default_node);
4615
4616 if (!opts)
4617 opts = optimization_default_node;
4618
4619 /* Change optimization options if needed. */
4620 if (optimization_current_node != opts)
4621 {
4622 optimization_current_node = opts;
4623 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4624 }
4625
4626 targetm.set_current_function (fndecl);
4627 this_fn_optabs = this_target_optabs;
4628
4629 /* Initialize global alignment variables after the options are applied. */
4630 parse_alignment_opts ();
4631
4632 if (opts != optimization_default_node)
4633 {
4634 init_tree_optimization_optabs (opts);
4635 if (TREE_OPTIMIZATION_OPTABS (opts))
4636 this_fn_optabs = (struct target_optabs *)
4637 TREE_OPTIMIZATION_OPTABS (opts);
4638 }
4639 }
4640 }
4641
4642 /* cfun should never be set directly; use this function. */
4643
4644 void
4645 set_cfun (struct function *new_cfun, bool force)
4646 {
4647 if (cfun != new_cfun || force)
4648 {
4649 cfun = new_cfun;
4650 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4651 redirect_edge_var_map_empty ();
4652 }
4653 }
4654
4655 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4656
4657 static vec<function *> cfun_stack;
4658
4659 /* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4660 current_function_decl accordingly. */
4661
4662 void
4663 push_cfun (struct function *new_cfun)
4664 {
4665 gcc_assert ((!cfun && !current_function_decl)
4666 || (cfun && current_function_decl == cfun->decl));
4667 cfun_stack.safe_push (cfun);
4668 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4669 set_cfun (new_cfun);
4670 }
4671
4672 /* Pop cfun from the stack. Also set current_function_decl accordingly. */
4673
4674 void
4675 pop_cfun (void)
4676 {
4677 struct function *new_cfun = cfun_stack.pop ();
4678 /* When in_dummy_function, we do have a cfun but current_function_decl is
4679 NULL. We also allow pushing NULL cfun and subsequently changing
4680 current_function_decl to something else and have both restored by
4681 pop_cfun. */
4682 gcc_checking_assert (in_dummy_function
4683 || !cfun
4684 || current_function_decl == cfun->decl);
4685 set_cfun (new_cfun);
4686 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4687 }
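/* The usual pattern for temporarily working in another function's
   context is

	push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	... inspect or modify that function ...
	pop_cfun ();

   which saves and restores both cfun and current_function_decl.  */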
4688
4689 /* Return the current value of funcdef_no and increment it. */
4690 int
4691 get_next_funcdef_no (void)
4692 {
4693 return funcdef_no++;
4694 }
4695
4696 /* Return the current value of funcdef_no. */
4697 int
4698 get_last_funcdef_no (void)
4699 {
4700 return funcdef_no;
4701 }
4702
4703 /* Allocate a function structure for FNDECL and set its contents
4704 to the defaults. Set cfun to the newly-allocated object.
4705 Some of the helper functions invoked during initialization assume
4706 that cfun has already been set. Therefore, assign the new object
4707 directly into cfun and invoke the back end hook explicitly at the
4708 very end, rather than initializing a temporary and calling set_cfun
4709 on it.
4710
4711 ABSTRACT_P is true if this is a function that will never be seen by
4712 the middle-end. Such functions are front-end concepts (like C++
4713 function templates) that do not correspond directly to functions
4714 placed in object files. */
4715
4716 void
4717 allocate_struct_function (tree fndecl, bool abstract_p)
4718 {
4719 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4720
4721 cfun = ggc_cleared_alloc<function> ();
4722
4723 init_eh_for_function ();
4724
4725 if (init_machine_status)
4726 cfun->machine = (*init_machine_status) ();
4727
4728 #ifdef OVERRIDE_ABI_FORMAT
4729 OVERRIDE_ABI_FORMAT (fndecl);
4730 #endif
4731
4732 if (fndecl != NULL_TREE)
4733 {
4734 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4735 cfun->decl = fndecl;
4736 current_function_funcdef_no = get_next_funcdef_no ();
4737 }
4738
4739 invoke_set_current_function_hook (fndecl);
4740
4741 if (fndecl != NULL_TREE)
4742 {
4743 tree result = DECL_RESULT (fndecl);
4744
4745 if (!abstract_p)
4746 {
4747 /* Now that we have activated any function-specific attributes
4748 that might affect layout, particularly vector modes, relayout
4749 each of the parameters and the result. */
4750 relayout_decl (result);
4751 for (tree parm = DECL_ARGUMENTS (fndecl); parm;
4752 parm = DECL_CHAIN (parm))
4753 relayout_decl (parm);
4754
4755 /* Similarly relayout the function decl. */
4756 targetm.target_option.relayout_function (fndecl);
4757 }
4758
4759 if (!abstract_p && aggregate_value_p (result, fndecl))
4760 {
4761 #ifdef PCC_STATIC_STRUCT_RETURN
4762 cfun->returns_pcc_struct = 1;
4763 #endif
4764 cfun->returns_struct = 1;
4765 }
4766
4767 cfun->stdarg = stdarg_p (fntype);
4768
4769 /* Assume all registers in stdarg functions need to be saved. */
4770 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4771 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4772
4773 /* ??? This could be set on a per-function basis by the front-end
4774 but is this worth the hassle? */
4775 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4776 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
4777
4778 if (!profile_flag && !flag_instrument_function_entry_exit)
4779 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
4780 }
4781
4782 /* Don't enable begin stmt markers if var-tracking at assignments is
4783 disabled. The markers make little sense without the variable
4784 binding annotations among them. */
4785 cfun->debug_nonbind_markers = lang_hooks.emits_begin_stmt
4786 && MAY_HAVE_DEBUG_MARKER_STMTS;
4787 }
4788
4789 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4790 instead of just setting it. */
4791
4792 void
4793 push_struct_function (tree fndecl)
4794 {
4795 /* When in_dummy_function we might be in the middle of a pop_cfun and
4796 current_function_decl and cfun may not match. */
4797 gcc_assert (in_dummy_function
4798 || (!cfun && !current_function_decl)
4799 || (cfun && current_function_decl == cfun->decl));
4800 cfun_stack.safe_push (cfun);
4801 current_function_decl = fndecl;
4802 allocate_struct_function (fndecl, false);
4803 }
4804
4805 /* Reset crtl and other non-struct-function variables to defaults as
4806 appropriate for emitting rtl at the start of a function. */
4807
4808 static void
4809 prepare_function_start (void)
4810 {
4811 gcc_assert (!get_last_insn ());
4812 init_temp_slots ();
4813 init_emit ();
4814 init_varasm_status ();
4815 init_expr ();
4816 default_rtl_profile ();
4817
4818 if (flag_stack_usage_info)
4819 {
4820 cfun->su = ggc_cleared_alloc<stack_usage> ();
4821 cfun->su->static_stack_size = -1;
4822 }
4823
4824 cse_not_expected = ! optimize;
4825
4826 /* Caller save not needed yet. */
4827 caller_save_needed = 0;
4828
4829 /* We haven't done register allocation yet. */
4830 reg_renumber = 0;
4831
4832 /* Indicate that we have not instantiated virtual registers yet. */
4833 virtuals_instantiated = 0;
4834
4835 /* Indicate that we want CONCATs now. */
4836 generating_concat_p = 1;
4837
4838 /* Indicate we have no need of a frame pointer yet. */
4839 frame_pointer_needed = 0;
4840 }
4841
4842 void
4843 push_dummy_function (bool with_decl)
4844 {
4845 tree fn_decl, fn_type, fn_result_decl;
4846
4847 gcc_assert (!in_dummy_function);
4848 in_dummy_function = true;
4849
4850 if (with_decl)
4851 {
4852 fn_type = build_function_type_list (void_type_node, NULL_TREE);
4853 fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
4854 fn_type);
4855 fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
4856 NULL_TREE, void_type_node);
4857 DECL_RESULT (fn_decl) = fn_result_decl;
4858 }
4859 else
4860 fn_decl = NULL_TREE;
4861
4862 push_struct_function (fn_decl);
4863 }
4864
4865 /* Initialize the rtl expansion mechanism so that we can do simple things
4866 like generate sequences. This is used to provide a context during global
4867 initialization of some passes. You must call expand_dummy_function_end
4868 to exit this context. */
4869
4870 void
4871 init_dummy_function_start (void)
4872 {
4873 push_dummy_function (false);
4874 prepare_function_start ();
4875 }
4876
4877 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4878 and initialize static variables for generating RTL for the statements
4879 of the function. */
4880
4881 void
4882 init_function_start (tree subr)
4883 {
4884 /* Initialize backend, if needed. */
4885 initialize_rtl ();
4886
4887 prepare_function_start ();
4888 decide_function_section (subr);
4889
4890 /* Warn if the return value is an aggregate type,
4891 regardless of which calling convention we are using for it. */
4892 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4893 warning (OPT_Waggregate_return, "function returns an aggregate");
4894 }
4895
4896 /* Expand code to verify the stack_protect_guard. This is invoked at
4897 the end of a function to be protected. */
4898
4899 void
4900 stack_protect_epilogue (void)
4901 {
4902 tree guard_decl = crtl->stack_protect_guard_decl;
4903 rtx_code_label *label = gen_label_rtx ();
4904 rtx x, y;
4905 rtx_insn *seq = NULL;
4906
4907 x = expand_normal (crtl->stack_protect_guard);
4908
4909 if (targetm.have_stack_protect_combined_test () && guard_decl)
4910 {
4911 gcc_assert (DECL_P (guard_decl));
4912 y = DECL_RTL (guard_decl);
4913 /* Allow the target to compute the address of Y and compare it with X
4914 without leaking Y into a register. This combined address + compare
4915 pattern allows the target to prevent spilling of any intermediate
4916 results by splitting it after register allocation. */
4917 seq = targetm.gen_stack_protect_combined_test (x, y, label);
4918 }
4919 else
4920 {
4921 if (guard_decl)
4922 y = expand_normal (guard_decl);
4923 else
4924 y = const0_rtx;
4925
4926 /* Allow the target to compare Y with X without leaking either into
4927 a register. */
4928 if (targetm.have_stack_protect_test ())
4929 seq = targetm.gen_stack_protect_test (x, y, label);
4930 }
4931
4932 if (seq)
4933 emit_insn (seq);
4934 else
4935 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4936
4937 /* The noreturn predictor has been moved to the tree level. The rtl-level
4938 predictors estimate this branch at about 20%, which isn't enough to get
4939 things moved out of line. Since this is the only extant case of adding
4940 a noreturn function at the rtl level, it doesn't seem worth doing anything
4941 except adding the prediction by hand. */
4942 rtx_insn *tmp = get_last_insn ();
4943 if (JUMP_P (tmp))
4944 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4945
4946 expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
4947 free_temp_slots ();
4948 emit_label (label);
4949 }
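/* The emitted check therefore has the shape (illustrative flow):

	x = <guard copy stored in this frame>;
	y = <reference guard value>;
	if (x == y) goto label;
	call targetm.stack_protect_fail ();  (typically __stack_chk_fail)
     label:

   so the only fall-through path to the function's return is the one
   that passes the comparison.  */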
4950 \f
4951 /* Start the RTL for a new function, and set variables used for
4952 emitting RTL.
4953 SUBR is the FUNCTION_DECL node. */
4956
4957 void
4958 expand_function_start (tree subr)
4959 {
4960 /* Make sure volatile mem refs aren't considered
4961 valid operands of arithmetic insns. */
4962 init_recog_no_volatile ();
4963
4964 crtl->profile
4965 = (profile_flag
4966 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4967
4968 crtl->limit_stack
4969 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4970
4971 /* Make the label for return statements to jump to. Do not special
4972 case machines with special return instructions -- they will be
4973 handled later during jump, ifcvt, or epilogue creation. */
4974 return_label = gen_label_rtx ();
4975
4976 /* Initialize rtx used to return the value. */
4977 /* Do this before assign_parms so that we copy the struct value address
4978 before any library calls that assign parms might generate. */
4979
4980 /* Decide whether to return the value in memory or in a register. */
4981 tree res = DECL_RESULT (subr);
4982 if (aggregate_value_p (res, subr))
4983 {
4984 /* Returning something that won't go in a register. */
4985 rtx value_address = 0;
4986
4987 #ifdef PCC_STATIC_STRUCT_RETURN
4988 if (cfun->returns_pcc_struct)
4989 {
4990 int size = int_size_in_bytes (TREE_TYPE (res));
4991 value_address = assemble_static_space (size);
4992 }
4993 else
4994 #endif
4995 {
4996 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4997 /* Expect to be passed the address of a place to store the value.
4998 If it is passed as an argument, assign_parms will take care of
4999 it. */
5000 if (sv)
5001 {
5002 value_address = gen_reg_rtx (Pmode);
5003 emit_move_insn (value_address, sv);
5004 }
5005 }
5006 if (value_address)
5007 {
5008 rtx x = value_address;
5009 if (!DECL_BY_REFERENCE (res))
5010 {
5011 x = gen_rtx_MEM (DECL_MODE (res), x);
5012 set_mem_attributes (x, res, 1);
5013 }
5014 set_parm_rtl (res, x);
5015 }
5016 }
5017 else if (DECL_MODE (res) == VOIDmode)
5018 /* If return mode is void, this decl rtl should not be used. */
5019 set_parm_rtl (res, NULL_RTX);
5020 else
5021 {
5022 /* Compute the return values into a pseudo reg, which we will copy
5023 into the true return register after the cleanups are done. */
5024 tree return_type = TREE_TYPE (res);
5025
5026 /* If we may coalesce this result, make sure it has the expected mode
5027 in case it was promoted. But we need not bother about BLKmode. */
5028 machine_mode promoted_mode
5029 = flag_tree_coalesce_vars && is_gimple_reg (res)
5030 ? promote_ssa_mode (ssa_default_def (cfun, res), NULL)
5031 : BLKmode;
5032
5033 if (promoted_mode != BLKmode)
5034 set_parm_rtl (res, gen_reg_rtx (promoted_mode));
5035 else if (TYPE_MODE (return_type) != BLKmode
5036 && targetm.calls.return_in_msb (return_type))
5037 /* expand_function_end will insert the appropriate padding in
5038 this case. Use the return value's natural (unpadded) mode
5039 within the function proper. */
5040 set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type)));
5041 else
5042 {
5043 /* In order to figure out what mode to use for the pseudo, we
5044 figure out what the mode of the eventual return register will
5045 actually be, and use that. */
5046 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
5047
5048 /* Structures that are returned in registers are not
5049 aggregate_value_p, so we may see a PARALLEL or a REG. */
5050 if (REG_P (hard_reg))
5051 set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg)));
5052 else
5053 {
5054 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
5055 set_parm_rtl (res, gen_group_rtx (hard_reg));
5056 }
5057 }
5058
5059 /* Set DECL_REGISTER flag so that expand_function_end will copy the
5060 result to the real return register(s). */
5061 DECL_REGISTER (res) = 1;
5062 }
5063
5064 /* Initialize rtx for parameters and local variables.
5065 In some cases this requires emitting insns. */
5066 assign_parms (subr);
5067
5068 /* If function gets a static chain arg, store it. */
5069 if (cfun->static_chain_decl)
5070 {
5071 tree parm = cfun->static_chain_decl;
5072 rtx local, chain;
5073 rtx_insn *insn;
5074 int unsignedp;
5075
5076 local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp));
5077 chain = targetm.calls.static_chain (current_function_decl, true);
5078
5079 set_decl_incoming_rtl (parm, chain, false);
5080 set_parm_rtl (parm, local);
5081 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5082
5083 if (GET_MODE (local) != GET_MODE (chain))
5084 {
5085 convert_move (local, chain, unsignedp);
5086 insn = get_last_insn ();
5087 }
5088 else
5089 insn = emit_move_insn (local, chain);
5090
5091 /* Mark the register as eliminable, similar to parameters. */
5092 if (MEM_P (chain)
5093 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
5094 set_dst_reg_note (insn, REG_EQUIV, chain, local);
5095
5096 /* If we aren't optimizing, save the static chain onto the stack. */
5097 if (!optimize)
5098 {
5099 tree saved_static_chain_decl
5100 = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
5101 DECL_NAME (parm), TREE_TYPE (parm));
5102 rtx saved_static_chain_rtx
5103 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5104 SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
5105 emit_move_insn (saved_static_chain_rtx, chain);
5106 SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
5107 DECL_HAS_VALUE_EXPR_P (parm) = 1;
5108 }
5109 }
5110
5111 /* The following was moved from init_function_start.
5112 The move was supposed to make sdb output more accurate. */
5113 /* Indicate the beginning of the function body,
5114 as opposed to parm setup. */
5115 emit_note (NOTE_INSN_FUNCTION_BEG);
5116
5117 gcc_assert (NOTE_P (get_last_insn ()));
5118
5119 parm_birth_insn = get_last_insn ();
5120
5121 /* If the function receives a non-local goto, then store the
5122 bits we need to restore the frame pointer. */
5123 if (cfun->nonlocal_goto_save_area)
5124 {
5125 tree t_save;
5126 rtx r_save;
5127
5128 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
5129 gcc_assert (DECL_RTL_SET_P (var));
5130
5131 t_save = build4 (ARRAY_REF,
5132 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
5133 cfun->nonlocal_goto_save_area,
5134 integer_zero_node, NULL_TREE, NULL_TREE);
5135 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
5136 gcc_assert (GET_MODE (r_save) == Pmode);
5137
5138 emit_move_insn (r_save, hard_frame_pointer_rtx);
5139 update_nonlocal_goto_save_area ();
5140 }
5141
5142 if (crtl->profile)
5143 {
5144 #ifdef PROFILE_HOOK
5145 PROFILE_HOOK (current_function_funcdef_no);
5146 #endif
5147 }
5148
5149 /* If we are doing generic stack checking, the probe should go here. */
5150 if (flag_stack_check == GENERIC_STACK_CHECK)
5151 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
5152 }
5153 \f
5154 void
5155 pop_dummy_function (void)
5156 {
5157 pop_cfun ();
5158 in_dummy_function = false;
5159 }
5160
5161 /* Undo the effects of init_dummy_function_start. */
5162 void
5163 expand_dummy_function_end (void)
5164 {
5165 gcc_assert (in_dummy_function);
5166
5167 /* End any sequences that failed to be closed due to syntax errors. */
5168 while (in_sequence_p ())
5169 end_sequence ();
5170
5171 /* Outside function body, can't compute type's actual size
5172 until next function's body starts. */
5173
5174 free_after_parsing (cfun);
5175 free_after_compilation (cfun);
5176 pop_dummy_function ();
5177 }
5178
5179 /* Helper for diddle_return_value. */
5180
5181 void
5182 diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
5183 {
5184 if (! outgoing)
5185 return;
5186
5187 if (REG_P (outgoing))
5188 (*doit) (outgoing, arg);
5189 else if (GET_CODE (outgoing) == PARALLEL)
5190 {
5191 int i;
5192
5193 for (i = 0; i < XVECLEN (outgoing, 0); i++)
5194 {
5195 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
5196
5197 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5198 (*doit) (x, arg);
5199 }
5200 }
5201 }
5202
5203 /* Call DOIT for each hard register used as a return value from
5204 the current function. */
5205
5206 void
5207 diddle_return_value (void (*doit) (rtx, void *), void *arg)
5208 {
5209 diddle_return_value_1 (doit, arg, crtl->return_rtx);
5210 }
5211
5212 static void
5213 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5214 {
5215 emit_clobber (reg);
5216 }
5217
5218 void
5219 clobber_return_register (void)
5220 {
5221 diddle_return_value (do_clobber_return_reg, NULL);
5222
5223 /* In case a pseudo is used to hold the return value, clobber it too. */
5224 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5225 {
5226 tree decl_result = DECL_RESULT (current_function_decl);
5227 rtx decl_rtl = DECL_RTL (decl_result);
5228 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
5229 {
5230 do_clobber_return_reg (decl_rtl, NULL);
5231 }
5232 }
5233 }
5234
5235 static void
5236 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5237 {
5238 emit_use (reg);
5239 }
5240
5241 static void
5242 use_return_register (void)
5243 {
5244 diddle_return_value (do_use_return_reg, NULL);
5245 }
5246
5247 /* Set the location of the insn chain starting at INSN to LOC. */
5248
5249 static void
5250 set_insn_locations (rtx_insn *insn, int loc)
5251 {
5252 while (insn != NULL)
5253 {
5254 if (INSN_P (insn))
5255 INSN_LOCATION (insn) = loc;
5256 insn = NEXT_INSN (insn);
5257 }
5258 }
5259
5260 /* Generate RTL for the end of the current function. */
5261
5262 void
5263 expand_function_end (void)
5264 {
5265 /* If arg_pointer_save_area was referenced only from a nested
5266 function, we will not have initialized it yet. Do that now. */
5267 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
5268 get_arg_pointer_save_area ();
5269
5270 /* If we are doing generic stack checking and this function makes calls,
5271 do a stack probe at the start of the function to ensure we have enough
5272 space for another stack frame. */
5273 if (flag_stack_check == GENERIC_STACK_CHECK)
5274 {
5275 rtx_insn *insn, *seq;
5276
5277 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5278 if (CALL_P (insn))
5279 {
5280 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
5281 start_sequence ();
5282 if (STACK_CHECK_MOVING_SP)
5283 anti_adjust_stack_and_probe (max_frame_size, true);
5284 else
5285 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
5286 seq = get_insns ();
5287 end_sequence ();
5288 set_insn_locations (seq, prologue_location);
5289 emit_insn_before (seq, stack_check_probe_note);
5290 break;
5291 }
5292 }
5293
5294 /* End any sequences that failed to be closed due to syntax errors. */
5295 while (in_sequence_p ())
5296 end_sequence ();
5297
5298 clear_pending_stack_adjust ();
5299 do_pending_stack_adjust ();
5300
5301 /* Output a line number for the end of the function.
5302 SDB depended on this. */
5303 set_curr_insn_location (input_location);
5304
5305 /* Before the return label (if any), clobber the return
5306 registers so that they are not propagated live to the rest of
5307 the function. This can only happen with functions that drop
5308 through; if there had been a return statement, there would
5309 have either been a return rtx, or a jump to the return label.
5310
5311 We delay actual code generation until after current_function_value_rtx
5312 is computed. */
5313 rtx_insn *clobber_after = get_last_insn ();
5314
5315 /* Output the label for the actual return from the function. */
5316 emit_label (return_label);
5317
5318 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
5319 {
5320 /* Let except.c know where it should emit the call to unregister
5321 the function context for sjlj exceptions. */
5322 if (flag_exceptions)
5323 sjlj_emit_function_exit_after (get_last_insn ());
5324 }
5325
5326 /* If this is an implementation of throw, do what's necessary to
5327 communicate between __builtin_eh_return and the epilogue. */
5328 expand_eh_return ();
5329
5330 /* If stack protection is enabled for this function, check the guard. */
5331 if (crtl->stack_protect_guard
5332 && targetm.stack_protect_runtime_enabled_p ()
5333 && naked_return_label == NULL_RTX)
5334 stack_protect_epilogue ();
5335
5336 /* If scalar return value was computed in a pseudo-reg, or was a named
5337 return value that got dumped to the stack, copy that to the hard
5338 return register. */
5339 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5340 {
5341 tree decl_result = DECL_RESULT (current_function_decl);
5342 rtx decl_rtl = DECL_RTL (decl_result);
5343
5344 if (REG_P (decl_rtl)
5345 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5346 : DECL_REGISTER (decl_result))
5347 {
5348 rtx real_decl_rtl = crtl->return_rtx;
5349 complex_mode cmode;
5350
5351 /* This should be set in assign_parms. */
5352 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5353
5354 /* If this is a BLKmode structure being returned in registers,
5355 then use the mode computed in expand_return. Note that if
5356 decl_rtl is memory, then its mode may have been changed,
5357 but that crtl->return_rtx has not. */
5358 if (GET_MODE (real_decl_rtl) == BLKmode)
5359 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5360
5361 /* If a non-BLKmode return value should be padded at the least
5362 significant end of the register, shift it left by the appropriate
5363 amount. BLKmode results are handled using the group load/store
5364 machinery. */
5365 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5366 && REG_P (real_decl_rtl)
5367 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5368 {
5369 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5370 REGNO (real_decl_rtl)),
5371 decl_rtl);
5372 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5373 }
5374 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5375 {
5376 /* If expand_function_start has created a PARALLEL for decl_rtl,
5377 move the result to the real return registers. Otherwise, do
5378 a group load from decl_rtl for a named return. */
5379 if (GET_CODE (decl_rtl) == PARALLEL)
5380 emit_group_move (real_decl_rtl, decl_rtl);
5381 else
5382 emit_group_load (real_decl_rtl, decl_rtl,
5383 TREE_TYPE (decl_result),
5384 int_size_in_bytes (TREE_TYPE (decl_result)));
5385 }
5386 /* In the case of complex integer modes smaller than a word, we'll
5387 need to generate some non-trivial bitfield insertions. Do that
5388 on a pseudo and not the hard register. */
5389 else if (GET_CODE (decl_rtl) == CONCAT
5390 && is_complex_int_mode (GET_MODE (decl_rtl), &cmode)
5391 && GET_MODE_BITSIZE (cmode) <= BITS_PER_WORD)
5392 {
5393 int old_generating_concat_p;
5394 rtx tmp;
5395
5396 old_generating_concat_p = generating_concat_p;
5397 generating_concat_p = 0;
5398 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5399 generating_concat_p = old_generating_concat_p;
5400
5401 emit_move_insn (tmp, decl_rtl);
5402 emit_move_insn (real_decl_rtl, tmp);
5403 }
5404 /* If a named return value dumped decl_result to memory, then
5405 we may need to re-do the PROMOTE_MODE signed/unsigned
5406 extension. */
5407 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5408 {
5409 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5410 promote_function_mode (TREE_TYPE (decl_result),
5411 GET_MODE (decl_rtl), &unsignedp,
5412 TREE_TYPE (current_function_decl), 1);
5413
5414 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5415 }
5416 else
5417 emit_move_insn (real_decl_rtl, decl_rtl);
5418 }
5419 }
5420
5421 /* If returning a structure, arrange to return the address of the value
5422 in a place where debuggers expect to find it.
5423
5424 If returning a structure PCC style, the caller also depends on
5425 this value, and cfun->returns_pcc_struct is not necessarily
5426 set. */
5427 if ((cfun->returns_struct || cfun->returns_pcc_struct)
5428 && !targetm.calls.omit_struct_return_reg)
5429 {
5430 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5431 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5432 rtx outgoing;
5433
5434 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5435 type = TREE_TYPE (type);
5436 else
5437 value_address = XEXP (value_address, 0);
5438
5439 outgoing = targetm.calls.function_value (build_pointer_type (type),
5440 current_function_decl, true);
5441
5442 /* Mark this as a function return value so integrate will delete the
5443 assignment and USE below when inlining this function. */
5444 REG_FUNCTION_VALUE_P (outgoing) = 1;
5445
5446 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5447 scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (outgoing));
5448 value_address = convert_memory_address (mode, value_address);
5449
5450 emit_move_insn (outgoing, value_address);
5451
5452 /* Show the return register used to hold the result (in this case
5453 the address of the result). */
5454 crtl->return_rtx = outgoing;
5455 }
5456
5457 /* Emit the actual code to clobber the return register. Don't emit
5458 it if clobber_after is a barrier; in that case the previous basic
5459 block certainly doesn't fall through into the exit block. */
5460 if (!BARRIER_P (clobber_after))
5461 {
5462 start_sequence ();
5463 clobber_return_register ();
5464 rtx_insn *seq = get_insns ();
5465 end_sequence ();
5466
5467 emit_insn_after (seq, clobber_after);
5468 }
5469
5470 /* Output the label for the naked return from the function. */
5471 if (naked_return_label)
5472 emit_label (naked_return_label);
5473
5474 /* @@@ This is a kludge. We want to ensure that instructions that
5475 may trap are not moved into the epilogue by scheduling, because
5476 we don't always emit unwind information for the epilogue. */
5477 if (cfun->can_throw_non_call_exceptions
5478 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5479 emit_insn (gen_blockage ());
5480
5481 /* If stack protection is enabled for this function, check the guard. */
5482 if (crtl->stack_protect_guard
5483 && targetm.stack_protect_runtime_enabled_p ()
5484 && naked_return_label)
5485 stack_protect_epilogue ();
5486
5487 /* If we had calls to alloca, and this machine needs
5488 an accurate stack pointer to exit the function,
5489 insert some code to save and restore the stack pointer. */
5490 if (! EXIT_IGNORE_STACK
5491 && cfun->calls_alloca)
5492 {
5493 rtx tem = 0;
5494
5495 start_sequence ();
5496 emit_stack_save (SAVE_FUNCTION, &tem);
5497 rtx_insn *seq = get_insns ();
5498 end_sequence ();
5499 emit_insn_before (seq, parm_birth_insn);
5500
5501 emit_stack_restore (SAVE_FUNCTION, tem);
5502 }
5503
5504 /* ??? This should no longer be necessary since the stupid register
5505 allocator is no longer with us, but some parts of the compiler
5506 (e.g. reload_combine, and sh mach_dep_reorg) still try to compute
5507 their own lifetime info instead of using the general framework. */
5508 use_return_register ();
5509 }
5510
5511 rtx
5512 get_arg_pointer_save_area (void)
5513 {
5514 rtx ret = arg_pointer_save_area;
5515
5516 if (! ret)
5517 {
5518 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5519 arg_pointer_save_area = ret;
5520 }
5521
5522 if (! crtl->arg_pointer_save_area_init)
5523 {
5524 /* Save the arg pointer at the beginning of the function. The
5525 generated stack slot may not be a valid memory address, so we
5526 have to check it and fix it if necessary. */
5527 start_sequence ();
5528 emit_move_insn (validize_mem (copy_rtx (ret)),
5529 crtl->args.internal_arg_pointer);
5530 rtx_insn *seq = get_insns ();
5531 end_sequence ();
5532
5533 push_topmost_sequence ();
5534 emit_insn_after (seq, entry_of_function ());
5535 pop_topmost_sequence ();
5536
5537 crtl->arg_pointer_save_area_init = true;
5538 }
5539
5540 return ret;
5541 }
5542 \f
5543
5544 /* If debugging dumps are requested, dump information about how the
5545 target handled -fstack-clash-protection for the prologue.
5546
5547 PROBES describes what, if any, probes were emitted.
5548
5549 RESIDUALS indicates if the prologue had any residual allocation
5550 (i.e. total allocation was not a multiple of PROBE_INTERVAL). */
5551
5552 void
5553 dump_stack_clash_frame_info (enum stack_clash_probes probes, bool residuals)
5554 {
5555 if (!dump_file)
5556 return;
5557
5558 switch (probes)
5559 {
5560 case NO_PROBE_NO_FRAME:
5561 fprintf (dump_file,
5562 "Stack clash no probe no stack adjustment in prologue.\n");
5563 break;
5564 case NO_PROBE_SMALL_FRAME:
5565 fprintf (dump_file,
5566 "Stack clash no probe small stack adjustment in prologue.\n");
5567 break;
5568 case PROBE_INLINE:
5569 fprintf (dump_file, "Stack clash inline probes in prologue.\n");
5570 break;
5571 case PROBE_LOOP:
5572 fprintf (dump_file, "Stack clash probe loop in prologue.\n");
5573 break;
5574 }
5575
5576 if (residuals)
5577 fprintf (dump_file, "Stack clash residual allocation in prologue.\n");
5578 else
5579 fprintf (dump_file, "Stack clash no residual allocation in prologue.\n");
5580
5581 if (frame_pointer_needed)
5582 fprintf (dump_file, "Stack clash frame pointer needed.\n");
5583 else
5584 fprintf (dump_file, "Stack clash no frame pointer needed.\n");
5585
5586 if (TREE_THIS_VOLATILE (cfun->decl))
5587 fprintf (dump_file,
5588 "Stack clash noreturn prologue, assuming no implicit"
5589 " probes in caller.\n");
5590 else
5591 fprintf (dump_file,
5592 "Stack clash not noreturn prologue.\n");
5593 }
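/* As an example of the dump output (a sketch; which lines appear
   depends on the target and the function), a prologue that emitted
   inline probes, left a residual allocation, and needed a frame
   pointer would produce:

     Stack clash inline probes in prologue.
     Stack clash residual allocation in prologue.
     Stack clash frame pointer needed.
     Stack clash not noreturn prologue.  */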
5594
5595 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5596 for the first time. */
5597
5598 static void
5599 record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
5600 {
5601 rtx_insn *tmp;
5602 hash_table<insn_cache_hasher> *hash = *hashp;
5603
5604 if (hash == NULL)
5605 *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
5606
5607 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5608 {
5609 rtx *slot = hash->find_slot (tmp, INSERT);
5610 gcc_assert (*slot == NULL);
5611 *slot = tmp;
5612 }
5613 }
5614
5615 /* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
5616 basic block, by splitting, or by a peephole. If INSN is a prologue or
5617 epilogue insn, then record COPY as well. */
5618
5619 void
5620 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5621 {
5622 hash_table<insn_cache_hasher> *hash;
5623 rtx *slot;
5624
5625 hash = epilogue_insn_hash;
5626 if (!hash || !hash->find (insn))
5627 {
5628 hash = prologue_insn_hash;
5629 if (!hash || !hash->find (insn))
5630 return;
5631 }
5632
5633 slot = hash->find_slot (copy, INSERT);
5634 gcc_assert (*slot == NULL);
5635 *slot = copy;
5636 }
5637
5638 /* Determine if any INSNs in HASH are, or are part of, INSN. Because
5639 we can be running after reorg, SEQUENCE rtl is possible. */
5640
5641 static bool
5642 contains (const rtx_insn *insn, hash_table<insn_cache_hasher> *hash)
5643 {
5644 if (hash == NULL)
5645 return false;
5646
5647 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5648 {
5649 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
5650 int i;
5651 for (i = seq->len () - 1; i >= 0; i--)
5652 if (hash->find (seq->element (i)))
5653 return true;
5654 return false;
5655 }
5656
5657 return hash->find (const_cast<rtx_insn *> (insn)) != NULL;
5658 }
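/* For example (a sketch): after delayed-branch scheduling, a branch
   and its delay-slot insn are wrapped together, roughly as

     (insn ... (sequence [(jump_insn ...) (insn ...)]))

   so contains () above must look inside the SEQUENCE and test each
   element against HASH individually. */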
5659
5660 int
5661 prologue_contains (const rtx_insn *insn)
5662 {
5663 return contains (insn, prologue_insn_hash);
5664 }
5665
5666 int
5667 epilogue_contains (const rtx_insn *insn)
5668 {
5669 return contains (insn, epilogue_insn_hash);
5670 }
5671
5672 int
5673 prologue_epilogue_contains (const rtx_insn *insn)
5674 {
5675 if (contains (insn, prologue_insn_hash))
5676 return 1;
5677 if (contains (insn, epilogue_insn_hash))
5678 return 1;
5679 return 0;
5680 }
5681
5682 void
5683 record_prologue_seq (rtx_insn *seq)
5684 {
5685 record_insns (seq, NULL, &prologue_insn_hash);
5686 }
5687
5688 void
5689 record_epilogue_seq (rtx_insn *seq)
5690 {
5691 record_insns (seq, NULL, &epilogue_insn_hash);
5692 }
5693
5694 /* Set JUMP_LABEL for a return insn. */
5695
5696 void
5697 set_return_jump_label (rtx_insn *returnjump)
5698 {
5699 rtx pat = PATTERN (returnjump);
5700 if (GET_CODE (pat) == PARALLEL)
5701 pat = XVECEXP (pat, 0, 0);
5702 if (ANY_RETURN_P (pat))
5703 JUMP_LABEL (returnjump) = pat;
5704 else
5705 JUMP_LABEL (returnjump) = ret_rtx;
5706 }
5707
5708 /* Return a sequence to be used as the split prologue for the current
5709 function, or NULL. */
5710
5711 static rtx_insn *
5712 make_split_prologue_seq (void)
5713 {
5714 if (!flag_split_stack
5715 || lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl)))
5716 return NULL;
5717
5718 start_sequence ();
5719 emit_insn (targetm.gen_split_stack_prologue ());
5720 rtx_insn *seq = get_insns ();
5721 end_sequence ();
5722
5723 record_insns (seq, NULL, &prologue_insn_hash);
5724 set_insn_locations (seq, prologue_location);
5725
5726 return seq;
5727 }
5728
5729 /* Return a sequence to be used as the prologue for the current function,
5730 or NULL. */
5731
5732 static rtx_insn *
5733 make_prologue_seq (void)
5734 {
5735 if (!targetm.have_prologue ())
5736 return NULL;
5737
5738 start_sequence ();
5739 rtx_insn *seq = targetm.gen_prologue ();
5740 emit_insn (seq);
5741
5742 /* Insert an explicit USE for the frame pointer
5743 if profiling is on and the frame pointer is required. */
5744 if (crtl->profile && frame_pointer_needed)
5745 emit_use (hard_frame_pointer_rtx);
5746
5747 /* Retain a map of the prologue insns. */
5748 record_insns (seq, NULL, &prologue_insn_hash);
5749 emit_note (NOTE_INSN_PROLOGUE_END);
5750
5751 /* Ensure that instructions are not moved into the prologue when
5752 profiling is on. The call to the profiling routine can be
5753 emitted within the live range of a call-clobbered register. */
5754 if (!targetm.profile_before_prologue () && crtl->profile)
5755 emit_insn (gen_blockage ());
5756
5757 seq = get_insns ();
5758 end_sequence ();
5759 set_insn_locations (seq, prologue_location);
5760
5761 return seq;
5762 }
5763
5764 /* Return a sequence to be used as the epilogue for the current function,
5765 or NULL. */
5766
5767 static rtx_insn *
5768 make_epilogue_seq (void)
5769 {
5770 if (!targetm.have_epilogue ())
5771 return NULL;
5772
5773 start_sequence ();
5774 emit_note (NOTE_INSN_EPILOGUE_BEG);
5775 rtx_insn *seq = targetm.gen_epilogue ();
5776 if (seq)
5777 emit_jump_insn (seq);
5778
5779 /* Retain a map of the epilogue insns. */
5780 record_insns (seq, NULL, &epilogue_insn_hash);
5781 set_insn_locations (seq, epilogue_location);
5782
5783 seq = get_insns ();
5784 rtx_insn *returnjump = get_last_insn ();
5785 end_sequence ();
5786
5787 if (JUMP_P (returnjump))
5788 set_return_jump_label (returnjump);
5789
5790 return seq;
5791 }
5792
5793
5794 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5795 this into place with notes indicating where the prologue ends and where
5796 the epilogue begins. Update the basic block information when possible.
5797
5798 Notes on epilogue placement:
5799 There are several kinds of edges to the exit block:
5800 * a single fallthru edge from LAST_BB
5801 * possibly, edges from blocks containing sibcalls
5802 * possibly, fake edges from infinite loops
5803
5804 The epilogue is always emitted on the fallthru edge from the last basic
5805 block in the function, LAST_BB, into the exit block.
5806
5807 If LAST_BB is empty except for a label, it is the target of every
5808 other basic block in the function that ends in a return. If a
5809 target has a return or simple_return pattern (possibly with
5810 conditional variants), these basic blocks can be changed so that a
5811 return insn is emitted into them, and their target is adjusted to
5812 the real exit block.
5813
5814 Notes on shrink wrapping: We implement a fairly conservative
5815 version of shrink-wrapping rather than the textbook one. We only
5816 generate a single prologue and a single epilogue. This is
5817 sufficient to catch a number of interesting cases involving early
5818 exits.
5819
5820 First, we identify the blocks that require the prologue to occur before
5821 them. These are the ones that modify a call-saved register, or reference
5822 any of the stack or frame pointer registers. To simplify things, we then
5823 mark everything reachable from these blocks as also requiring a prologue.
5824 This takes care of loops automatically, and avoids the need to examine
5825 whether MEMs reference the frame, since it is sufficient to check for
5826 occurrences of the stack or frame pointer.
5827
5828 We then compute the set of blocks for which the need for a prologue
5829 is anticipatable (borrowing terminology from the shrink-wrapping
5830 description in Muchnick's book). These are the blocks which either
5831 require a prologue themselves, or those that have only successors
5832 where the prologue is anticipatable. The prologue needs to be
5833 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5834 is not. For the moment, we ensure that only one such edge exists.
5835
5836 The epilogue is placed as described above, but we make a
5837 distinction between inserting return and simple_return patterns
5838 when modifying other blocks that end in a return. Blocks that end
5839 in a sibcall omit the sibcall_epilogue if the block is not in
5840 ANTIC. */
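/* As a hedged illustration (hypothetical user code, not from this
   file), the classic case this conservative scheme catches is an
   early exit:

     int expensive_work (int *);

     int
     f (int *p)
     {
       if (!p)
         return -1;
       return expensive_work (p);
     }

   The null test and early return touch no call-saved registers, so
   the prologue can be inserted on the edge into the block that does
   the real work, and the early-exit path runs without it. */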
5841
5842 void
5843 thread_prologue_and_epilogue_insns (void)
5844 {
5845 df_analyze ();
5846
5847 /* Can't deal with multiple successors of the entry block at the
5848 moment. A function should always have at least one entry
5849 point. */
5850 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5851
5852 edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5853 edge orig_entry_edge = entry_edge;
5854
5855 rtx_insn *split_prologue_seq = make_split_prologue_seq ();
5856 rtx_insn *prologue_seq = make_prologue_seq ();
5857 rtx_insn *epilogue_seq = make_epilogue_seq ();
5858
5859 /* Try to perform a kind of shrink-wrapping, making sure the
5860 prologue/epilogue is emitted only around those parts of the
5861 function that require it. */
5862 try_shrink_wrapping (&entry_edge, prologue_seq);
5863
5864 /* If the target can handle splitting the prologue/epilogue into separate
5865 components, try to shrink-wrap these components separately. */
5866 try_shrink_wrapping_separate (entry_edge->dest);
5867
5868 /* If that did anything for any component, we now need to generate the
5869 "main" prologue again. Because some targets require some of these
5870 to be called in a specific order (i386 requires the split prologue
5871 to be first, for example), we create all three sequences again here.
5872 If this does not work for some target, that target should not enable
5873 separate shrink-wrapping. */
5874 if (crtl->shrink_wrapped_separate)
5875 {
5876 split_prologue_seq = make_split_prologue_seq ();
5877 prologue_seq = make_prologue_seq ();
5878 epilogue_seq = make_epilogue_seq ();
5879 }
5880
5881 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5882
5883 /* A small fib -- epilogue is not yet completed, but we wish to re-use
5884 this marker for the splits of EH_RETURN patterns, and nothing else
5885 uses the flag in the meantime. */
5886 epilogue_completed = 1;
5887
5888 /* Find non-fallthru edges that end with EH_RETURN instructions. On
5889 some targets, these get split to a special version of the epilogue
5890 code. In order to be able to properly annotate these with unwind
5891 info, try to split them now. If we get a valid split, drop an
5892 EPILOGUE_BEG note and mark the insns as epilogue insns. */
5893 edge e;
5894 edge_iterator ei;
5895 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5896 {
5897 rtx_insn *prev, *last, *trial;
5898
5899 if (e->flags & EDGE_FALLTHRU)
5900 continue;
5901 last = BB_END (e->src);
5902 if (!eh_returnjump_p (last))
5903 continue;
5904
5905 prev = PREV_INSN (last);
5906 trial = try_split (PATTERN (last), last, 1);
5907 if (trial == last)
5908 continue;
5909
5910 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
5911 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
5912 }
5913
5914 edge exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
5915
5916 if (exit_fallthru_edge)
5917 {
5918 if (epilogue_seq)
5919 {
5920 insert_insn_on_edge (epilogue_seq, exit_fallthru_edge);
5921 commit_edge_insertions ();
5922
5923 /* The epilogue insns we inserted may cause the exit edge to no longer
5924 be fallthru. */
5925 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5926 {
5927 if (((e->flags & EDGE_FALLTHRU) != 0)
5928 && returnjump_p (BB_END (e->src)))
5929 e->flags &= ~EDGE_FALLTHRU;
5930 }
5931 }
5932 else if (next_active_insn (BB_END (exit_fallthru_edge->src)))
5933 {
5934 /* We have a fall-through edge to the exit block, the source is not
5935 at the end of the function, and there will be an assembler epilogue
5936 at the end of the function.
5937 We can't use force_nonfallthru here, because that would try to
5938 use return. Inserting a jump 'by hand' is extremely messy, so
5939 we take advantage of cfg_layout_finalize using
5940 fixup_fallthru_exit_predecessor. */
5941 cfg_layout_initialize (0);
5942 basic_block cur_bb;
5943 FOR_EACH_BB_FN (cur_bb, cfun)
5944 if (cur_bb->index >= NUM_FIXED_BLOCKS
5945 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5946 cur_bb->aux = cur_bb->next_bb;
5947 cfg_layout_finalize ();
5948 }
5949 }
5950
5951 /* Insert the prologue. */
5952
5953 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5954
5955 if (split_prologue_seq || prologue_seq)
5956 {
5957 rtx_insn *split_prologue_insn = split_prologue_seq;
5958 if (split_prologue_seq)
5959 {
5960 while (split_prologue_insn && !NONDEBUG_INSN_P (split_prologue_insn))
5961 split_prologue_insn = NEXT_INSN (split_prologue_insn);
5962 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
5963 }
5964
5965 rtx_insn *prologue_insn = prologue_seq;
5966 if (prologue_seq)
5967 {
5968 while (prologue_insn && !NONDEBUG_INSN_P (prologue_insn))
5969 prologue_insn = NEXT_INSN (prologue_insn);
5970 insert_insn_on_edge (prologue_seq, entry_edge);
5971 }
5972
5973 commit_edge_insertions ();
5974
5975 /* Look for basic blocks within the prologue insns. */
5976 if (split_prologue_insn
5977 && BLOCK_FOR_INSN (split_prologue_insn) == NULL)
5978 split_prologue_insn = NULL;
5979 if (prologue_insn
5980 && BLOCK_FOR_INSN (prologue_insn) == NULL)
5981 prologue_insn = NULL;
5982 if (split_prologue_insn || prologue_insn)
5983 {
5984 auto_sbitmap blocks (last_basic_block_for_fn (cfun));
5985 bitmap_clear (blocks);
5986 if (split_prologue_insn)
5987 bitmap_set_bit (blocks,
5988 BLOCK_FOR_INSN (split_prologue_insn)->index);
5989 if (prologue_insn)
5990 bitmap_set_bit (blocks, BLOCK_FOR_INSN (prologue_insn)->index);
5991 find_many_sub_basic_blocks (blocks);
5992 }
5993 }
5994
5995 default_rtl_profile ();
5996
5997 /* Emit sibling epilogues before any sibling call sites. */
5998 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
5999 (e = ei_safe_edge (ei));
6000 ei_next (&ei))
6001 {
6002 /* Skip those already handled, i.e. the ones that run without a prologue. */
6003 if (e->flags & EDGE_IGNORE)
6004 {
6005 e->flags &= ~EDGE_IGNORE;
6006 continue;
6007 }
6008
6009 rtx_insn *insn = BB_END (e->src);
6010
6011 if (!(CALL_P (insn) && SIBLING_CALL_P (insn)))
6012 continue;
6013
6014 if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ())
6015 {
6016 start_sequence ();
6017 emit_note (NOTE_INSN_EPILOGUE_BEG);
6018 emit_insn (ep_seq);
6019 rtx_insn *seq = get_insns ();
6020 end_sequence ();
6021
6022 /* Retain a map of the epilogue insns. Used in life analysis to
6023 avoid getting rid of sibcall epilogue insns. Do this before we
6024 actually emit the sequence. */
6025 record_insns (seq, NULL, &epilogue_insn_hash);
6026 set_insn_locations (seq, epilogue_location);
6027
6028 emit_insn_before (seq, insn);
6029 }
6030 }
6031
6032 if (epilogue_seq)
6033 {
6034 rtx_insn *insn, *next;
6035
6036 /* Move any NOTE_INSN_FUNCTION_BEG notes that appear within the
6037 epilogue back before it, as those can be relevant for debug
6038 info generation. There is no need to be picky about whether
6039 such a note exists: the loop below simply does nothing if
6040 none is present. */
6041 for (insn = epilogue_seq; insn; insn = next)
6042 {
6043 next = NEXT_INSN (insn);
6044 if (NOTE_P (insn)
6045 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
6046 reorder_insns (insn, insn, PREV_INSN (epilogue_seq));
6047 }
6048 }
6049
6050 /* Threading the prologue and epilogue changes the artificial refs
6051 in the entry and exit blocks. */
6052 epilogue_completed = 1;
6053 df_update_entry_exit_and_calls ();
6054 }
6055
6056 /* Reposition the prologue-end and epilogue-begin notes after
6057 instruction scheduling. */
6058
6059 void
6060 reposition_prologue_and_epilogue_notes (void)
6061 {
6062 if (!targetm.have_prologue ()
6063 && !targetm.have_epilogue ()
6064 && !targetm.have_sibcall_epilogue ())
6065 return;
6066
6067 /* Since the hash table is created on demand, the fact that it is
6068 non-null is a signal that it is non-empty. */
6069 if (prologue_insn_hash != NULL)
6070 {
6071 size_t len = prologue_insn_hash->elements ();
6072 rtx_insn *insn, *last = NULL, *note = NULL;
6073
6074 /* Scan from the beginning until we reach the last prologue insn. */
6075 /* ??? While we do have the CFG intact, there are two problems:
6076 (1) The prologue can contain loops (typically probing the stack),
6077 which means that the end of the prologue isn't in the first bb.
6078 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
6079 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6080 {
6081 if (NOTE_P (insn))
6082 {
6083 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
6084 note = insn;
6085 }
6086 else if (contains (insn, prologue_insn_hash))
6087 {
6088 last = insn;
6089 if (--len == 0)
6090 break;
6091 }
6092 }
6093
6094 if (last)
6095 {
6096 if (note == NULL)
6097 {
6098 /* Scan forward looking for the PROLOGUE_END note. It should
6099 be right at the beginning of the block, possibly with other
6100 insn notes that got moved there. */
6101 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6102 {
6103 if (NOTE_P (note)
6104 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6105 break;
6106 }
6107 }
6108
6109 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
6110 if (LABEL_P (last))
6111 last = NEXT_INSN (last);
6112 reorder_insns (note, note, last);
6113 }
6114 }
6115
6116 if (epilogue_insn_hash != NULL)
6117 {
6118 edge_iterator ei;
6119 edge e;
6120
6121 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6122 {
6123 rtx_insn *insn, *first = NULL, *note = NULL;
6124 basic_block bb = e->src;
6125
6126 /* Scan from the beginning until we reach the first epilogue insn. */
6127 FOR_BB_INSNS (bb, insn)
6128 {
6129 if (NOTE_P (insn))
6130 {
6131 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6132 {
6133 note = insn;
6134 if (first != NULL)
6135 break;
6136 }
6137 }
6138 else if (first == NULL && contains (insn, epilogue_insn_hash))
6139 {
6140 first = insn;
6141 if (note != NULL)
6142 break;
6143 }
6144 }
6145
6146 if (note)
6147 {
6148 /* If the function has a single basic block, and no real
6149 epilogue insns (e.g. sibcall with no cleanup), the
6150 epilogue note can get scheduled before the prologue
6151 note. If we have frame related prologue insns, having
6152 them scanned during the epilogue will result in a crash.
6153 In this case re-order the epilogue note to just before
6154 the last insn in the block. */
6155 if (first == NULL)
6156 first = BB_END (bb);
6157
6158 if (PREV_INSN (first) != note)
6159 reorder_insns (note, note, PREV_INSN (first));
6160 }
6161 }
6162 }
6163 }
6164
6165 /* Returns the name of the function declared by FNDECL. */
6166 const char *
6167 fndecl_name (tree fndecl)
6168 {
6169 if (fndecl == NULL)
6170 return "(nofn)";
6171 return lang_hooks.decl_printable_name (fndecl, 1);
6172 }
6173
6174 /* Returns the name of function FN. */
6175 const char *
6176 function_name (struct function *fn)
6177 {
6178 tree fndecl = (fn == NULL) ? NULL : fn->decl;
6179 return fndecl_name (fndecl);
6180 }
6181
6182 /* Returns the name of the current function. */
6183 const char *
6184 current_function_name (void)
6185 {
6186 return function_name (cfun);
6187 }
6188 \f
6189
6190 static unsigned int
6191 rest_of_handle_check_leaf_regs (void)
6192 {
6193 #ifdef LEAF_REGISTERS
6194 crtl->uses_only_leaf_regs
6195 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6196 #endif
6197 return 0;
6198 }
6199
6200 /* Insert a TYPE into the used types hash table of CFUN. */
6201
6202 static void
6203 used_types_insert_helper (tree type, struct function *func)
6204 {
6205 if (type != NULL && func != NULL)
6206 {
6207 if (func->used_types_hash == NULL)
6208 func->used_types_hash = hash_set<tree>::create_ggc (37);
6209
6210 func->used_types_hash->add (type);
6211 }
6212 }
6213
6214 /* Given a type, insert it into the used types hash table in cfun. */
6215 void
6216 used_types_insert (tree t)
6217 {
6218 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6219 if (TYPE_NAME (t))
6220 break;
6221 else
6222 t = TREE_TYPE (t);
6223 if (TREE_CODE (t) == ERROR_MARK)
6224 return;
6225 if (TYPE_NAME (t) == NULL_TREE
6226 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6227 t = TYPE_MAIN_VARIANT (t);
6228 if (debug_info_level > DINFO_LEVEL_NONE)
6229 {
6230 if (cfun)
6231 used_types_insert_helper (t, cfun);
6232 else
6233 {
6234 /* So this might be a type referenced by a global variable.
6235 Record that type so that we can later decide to emit its
6236 debug information. */
6237 vec_safe_push (types_used_by_cur_var_decl, t);
6238 }
6239 }
6240 }
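/* For example (a sketch): given a use of `struct S **', the loop in
   used_types_insert strips the unnamed pointer types down to
   `struct S', falls back to the main variant when the name is shared
   with it, and records that type, assuming debug info is enabled. */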
6241
6242 /* Helper to hash a struct types_used_by_vars_entry. */
6243
6244 static hashval_t
6245 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6246 {
6247 gcc_assert (entry && entry->var_decl && entry->type);
6248
6249 return iterative_hash_object (entry->type,
6250 iterative_hash_object (entry->var_decl, 0));
6251 }
6252
6253 /* Hash function of the types_used_by_vars_entry hash table. */
6254
6255 hashval_t
6256 used_type_hasher::hash (types_used_by_vars_entry *entry)
6257 {
6258 return hash_types_used_by_vars_entry (entry);
6259 }
6260
6261 /* Equality function of the types_used_by_vars_entry hash table. */
6262
6263 bool
6264 used_type_hasher::equal (types_used_by_vars_entry *e1,
6265 types_used_by_vars_entry *e2)
6266 {
6267 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6268 }
6269
6270 /* Inserts an entry into the types_used_by_vars_hash hash table. */
6271
6272 void
6273 types_used_by_var_decl_insert (tree type, tree var_decl)
6274 {
6275 if (type != NULL && var_decl != NULL)
6276 {
6277 types_used_by_vars_entry **slot;
6278 struct types_used_by_vars_entry e;
6279 e.var_decl = var_decl;
6280 e.type = type;
6281 if (types_used_by_vars_hash == NULL)
6282 types_used_by_vars_hash
6283 = hash_table<used_type_hasher>::create_ggc (37);
6284
6285 slot = types_used_by_vars_hash->find_slot (&e, INSERT);
6286 if (*slot == NULL)
6287 {
6288 struct types_used_by_vars_entry *entry;
6289 entry = ggc_alloc<types_used_by_vars_entry> ();
6290 entry->type = type;
6291 entry->var_decl = var_decl;
6292 *slot = entry;
6293 }
6294 }
6295 }
6296
6297 namespace {
6298
6299 const pass_data pass_data_leaf_regs =
6300 {
6301 RTL_PASS, /* type */
6302 "*leaf_regs", /* name */
6303 OPTGROUP_NONE, /* optinfo_flags */
6304 TV_NONE, /* tv_id */
6305 0, /* properties_required */
6306 0, /* properties_provided */
6307 0, /* properties_destroyed */
6308 0, /* todo_flags_start */
6309 0, /* todo_flags_finish */
6310 };
6311
6312 class pass_leaf_regs : public rtl_opt_pass
6313 {
6314 public:
6315 pass_leaf_regs (gcc::context *ctxt)
6316 : rtl_opt_pass (pass_data_leaf_regs, ctxt)
6317 {}
6318
6319 /* opt_pass methods: */
6320 virtual unsigned int execute (function *)
6321 {
6322 return rest_of_handle_check_leaf_regs ();
6323 }
6324
6325 }; // class pass_leaf_regs
6326
6327 } // anon namespace
6328
6329 rtl_opt_pass *
6330 make_pass_leaf_regs (gcc::context *ctxt)
6331 {
6332 return new pass_leaf_regs (ctxt);
6333 }
6334
6335 static unsigned int
6336 rest_of_handle_thread_prologue_and_epilogue (void)
6337 {
6338 /* prepare_shrink_wrap is sensitive to the block structure of the control
6339 flow graph, so clean it up first. */
6340 if (optimize)
6341 cleanup_cfg (0);
6342
6343 /* On some machines, the prologue and epilogue code, or parts thereof,
6344 can be represented as RTL. Doing so lets us schedule insns between
6345 it and the rest of the code and also allows delayed branch
6346 scheduling to operate in the epilogue. */
6347 thread_prologue_and_epilogue_insns ();
6348
6349 /* Some non-cold blocks may now be only reachable from cold blocks.
6350 Fix that up. */
6351 fixup_partitions ();
6352
6353 /* Shrink-wrapping can result in unreachable edges in the epilogue,
6354 see PR57320. */
6355 cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);
6356
6357 /* The stack usage info is finalized during prologue expansion. */
6358 if (flag_stack_usage_info)
6359 output_stack_usage ();
6360
6361 return 0;
6362 }
6363
6364 namespace {
6365
6366 const pass_data pass_data_thread_prologue_and_epilogue =
6367 {
6368 RTL_PASS, /* type */
6369 "pro_and_epilogue", /* name */
6370 OPTGROUP_NONE, /* optinfo_flags */
6371 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6372 0, /* properties_required */
6373 0, /* properties_provided */
6374 0, /* properties_destroyed */
6375 0, /* todo_flags_start */
6376 ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
6377 };
6378
6379 class pass_thread_prologue_and_epilogue : public rtl_opt_pass
6380 {
6381 public:
6382 pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6383 : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
6384 {}
6385
6386 /* opt_pass methods: */
6387 virtual unsigned int execute (function *)
6388 {
6389 return rest_of_handle_thread_prologue_and_epilogue ();
6390 }
6391
6392 }; // class pass_thread_prologue_and_epilogue
6393
6394 } // anon namespace
6395
6396 rtl_opt_pass *
6397 make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6398 {
6399 return new pass_thread_prologue_and_epilogue (ctxt);
6400 }
6401 \f
6402
6403 /* If CONSTRAINT is a matching constraint, then return its number.
6404 Otherwise, return -1. */
6405
6406 static int
6407 matching_constraint_num (const char *constraint)
6408 {
6409 if (*constraint == '%')
6410 constraint++;
6411
6412 if (IN_RANGE (*constraint, '0', '9'))
6413 return strtoul (constraint, NULL, 10);
6414
6415 return -1;
6416 }
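/* Illustrative cases (a sketch of the contract, not target-specific):

     matching_constraint_num ("0")  => 0
     matching_constraint_num ("%1") => 1    the '%' marker is skipped
     matching_constraint_num ("10") => 10   multi-digit operand number
     matching_constraint_num ("r")  => -1   not a matching constraint  */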
6417
6418 /* This mini-pass fixes fall-out from SSA in asm statements that have
6419 in-out constraints. Say you start with
6420
6421 orig = inout;
6422 asm ("": "+mr" (inout));
6423 use (orig);
6424
6425 which is transformed very early to use explicit output and match operands:
6426
6427 orig = inout;
6428 asm ("": "=mr" (inout) : "0" (inout));
6429 use (orig);
6430
6431 Or, after SSA and copyprop,
6432
6433 asm ("": "=mr" (inout_2) : "0" (inout_1));
6434 use (inout_1);
6435
6436 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6437 they represent two separate values, so they will get different pseudo
6438 registers during expansion. Then, since the two operands need to match
6439 per the constraints, but use different pseudo registers, reload can
6440 only register a reload for these operands. But reloads can only be
6441 satisfied by hardregs, not by memory, so we need a register for this
6442 reload, just because we are presented with non-matching operands.
6443 So, even though we allow memory for this operand, no memory can be
6444 used for it, just because the two operands don't match. This can
6445 cause reload failures on register-starved targets.
6446
6447 So it's a symptom of reload not being able to use memory for reloads,
6448 or, alternatively, of both operands not coming into
6449 reload as matching (in which case the pseudo could go to memory just
6450 fine, as the alternative allows it, and no reload would be necessary).
6451 We fix the latter problem here, by transforming
6452
6453 asm ("": "=mr" (inout_2) : "0" (inout_1));
6454
6455 back to
6456
6457 inout_2 = inout_1;
6458 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
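/* A minimal source-level example (hypothetical) that produces the
   pattern above:

     int
     f (int x)
     {
       int orig = x;
       asm ("" : "+mr" (x));
       return orig + x;
     }

   After going into SSA and through copy propagation, the "+mr"
   operand becomes the "=mr"/"0" pair with two separate values, as
   described above. */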
6459
6460 static void
6461 match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
6462 {
6463 int i;
6464 bool changed = false;
6465 rtx op = SET_SRC (p_sets[0]);
6466 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6467 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
6468 bool *output_matched = XALLOCAVEC (bool, noutputs);
6469
6470 memset (output_matched, 0, noutputs * sizeof (bool));
6471 for (i = 0; i < ninputs; i++)
6472 {
6473 rtx input, output;
6474 rtx_insn *insns;
6475 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6476 int match, j;
6477
6478 match = matching_constraint_num (constraint);
6479 if (match < 0)
6480 continue;
6481
6482 gcc_assert (match < noutputs);
6483 output = SET_DEST (p_sets[match]);
6484 input = RTVEC_ELT (inputs, i);
6485 /* Only do the transformation for pseudos. */
6486 if (! REG_P (output)
6487 || rtx_equal_p (output, input)
6488 || !(REG_P (input) || SUBREG_P (input)
6489 || MEM_P (input) || CONSTANT_P (input))
6490 || !general_operand (input, GET_MODE (output)))
6491 continue;
6492
6493 /* We can't do anything if the output is also used as input,
6494 as we're going to overwrite it. */
6495 for (j = 0; j < ninputs; j++)
6496 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6497 break;
6498 if (j != ninputs)
6499 continue;
6500
6501 /* Avoid changing the same input several times. For
6502 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6503 only change it once (to out1), rather than changing it
6504 first to out1 and afterwards to out2. */
6505 if (i > 0)
6506 {
6507 for (j = 0; j < noutputs; j++)
6508 if (output_matched[j] && input == SET_DEST (p_sets[j]))
6509 break;
6510 if (j != noutputs)
6511 continue;
6512 }
6513 output_matched[match] = true;
6514
6515 start_sequence ();
6516 emit_move_insn (output, copy_rtx (input));
6517 insns = get_insns ();
6518 end_sequence ();
6519 emit_insn_before (insns, insn);
6520
6521 constraint = ASM_OPERANDS_OUTPUT_CONSTRAINT (SET_SRC (p_sets[match]));
6522 bool early_clobber_p = strchr (constraint, '&') != NULL;
6523
6524 /* Now replace all mentions of the input with output. We can't
6525 just replace the occurrence in inputs[i], as the register might
6526 also be used in some other input (or even in an address of an
6527 output), which would mean possibly increasing the number of
6528 inputs by one (namely 'output' in addition), which might pose
6529 a too complicated problem for reload to solve. E.g. this situation:
6530
6531 asm ("" : "=r" (output), "=m" (input) : "0" (input))
6532
6533 Here 'input' is used in two occurrences as input (once for the
6534 input operand, once for the address in the second output operand).
6535 If we replaced only the occurrence in the input operand (to
6536 make them match) we would be left with this:
6537
6538 output = input
6539 asm ("" : "=r" (output), "=m" (input) : "0" (output))
6540
6541 Now we suddenly have two different input values (containing the same
6542 value, but different pseudos) where we formerly had only one.
6543 With more complicated asms this might lead to reload failures
6544 which wouldn't have happened without this pass. So, iterate over
6545 all operands and replace all occurrences of the register used.
6546
6547 However, if one or more of the 'input' uses have a non-matching
6548 constraint and the matched output operand is an early clobber
6549 operand, then do not replace the input operand, since by definition
6550 it conflicts with the output operand and cannot share the same
6551 register. See PR89313 for details. */
6552
6553 for (j = 0; j < noutputs; j++)
6554 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
6555 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6556 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6557 input, output);
6558 for (j = 0; j < ninputs; j++)
6559 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6560 {
6561 if (!early_clobber_p
6562 || match == matching_constraint_num
6563 (ASM_OPERANDS_INPUT_CONSTRAINT (op, j)))
6564 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6565 input, output);
6566 }
6567
6568 changed = true;
6569 }
6570
6571 if (changed)
6572 df_insn_rescan (insn);
6573 }
6574
6575 /* Add the decl D to the local_decls list of FUN. */
6576
6577 void
6578 add_local_decl (struct function *fun, tree d)
6579 {
6580 gcc_assert (VAR_P (d));
6581 vec_safe_push (fun->local_decls, d);
6582 }
6583
6584 namespace {
6585
6586 const pass_data pass_data_match_asm_constraints =
6587 {
6588 RTL_PASS, /* type */
6589 "asmcons", /* name */
6590 OPTGROUP_NONE, /* optinfo_flags */
6591 TV_NONE, /* tv_id */
6592 0, /* properties_required */
6593 0, /* properties_provided */
6594 0, /* properties_destroyed */
6595 0, /* todo_flags_start */
6596 0, /* todo_flags_finish */
6597 };
6598
6599 class pass_match_asm_constraints : public rtl_opt_pass
6600 {
6601 public:
6602 pass_match_asm_constraints (gcc::context *ctxt)
6603 : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
6604 {}
6605
6606 /* opt_pass methods: */
6607 virtual unsigned int execute (function *);
6608
6609 }; // class pass_match_asm_constraints
6610
6611 unsigned
6612 pass_match_asm_constraints::execute (function *fun)
6613 {
6614 basic_block bb;
6615 rtx_insn *insn;
6616 rtx pat, *p_sets;
6617 int noutputs;
6618
6619 if (!crtl->has_asm_statement)
6620 return 0;
6621
6622 df_set_flags (DF_DEFER_INSN_RESCAN);
6623 FOR_EACH_BB_FN (bb, fun)
6624 {
6625 FOR_BB_INSNS (bb, insn)
6626 {
6627 if (!INSN_P (insn))
6628 continue;
6629
6630 pat = PATTERN (insn);
6631 if (GET_CODE (pat) == PARALLEL)
6632 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6633 else if (GET_CODE (pat) == SET)
6634 p_sets = &PATTERN (insn), noutputs = 1;
6635 else
6636 continue;
6637
6638 if (GET_CODE (*p_sets) == SET
6639 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6640 match_asm_constraints_1 (insn, p_sets, noutputs);
6641 }
6642 }
6643
6644 return TODO_df_finish;
6645 }
6646
6647 } // anon namespace
6648
6649 rtl_opt_pass *
6650 make_pass_match_asm_constraints (gcc::context *ctxt)
6651 {
6652 return new pass_match_asm_constraints (ctxt);
6653 }
6654
6655
6656 #include "gt-function.h"