/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "gimple-expr.h"
#include "stringpool.h"
#include "rtl-error.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "optabs-tree.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "tree-pass.h"
#include "cfgcleanup.h"
#include "cfgexpand.h"
#include "shrink-wrap.h"
#include "stringpool.h"
/* So we can assign to cfun in this file.  */
#undef cfun
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
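
/* For example, with an 8-byte alignment (a power of two) on a
   two's-complement machine:

     FLOOR_ROUND (13, 8)  == 13 & ~7         == 8
     FLOOR_ROUND (-13, 8) == -13 & ~7        == -16
     CEIL_ROUND (13, 8)   == (13 + 7) & ~7   == 16

   so FLOOR_ROUND also rounds negative frame offsets away from zero,
   which is what a downward-growing frame needs.  (Worked values for
   this comment only.)  */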
/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;
/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;
/* These hashes record the prologue and epilogue insns.  */

struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
  static bool equal (rtx a, rtx b) { return a == b; }
};

static GTY((cache))
  hash_table<insn_cache_hasher> *prologue_insn_hash;
static GTY((cache))
  hash_table<insn_cache_hasher> *epilogue_insn_hash;

hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
vec<tree, va_gc> *types_used_by_cur_var_decl;
/* Forward declarations.  */

static class temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, machine_mode, tree);
static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
     ATTRIBUTE_UNUSED;
static bool contains (const rtx_insn *, hash_table<insn_cache_hasher> *);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

static vec<function *> function_context_stack;

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  function_context_stack.safe_push (cfun);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = function_context_stack.pop ();
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;
  f->curr_properties &= ~PROP_cfg;

  regno_reg_rtx = NULL;
}
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

poly_int64
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}
/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetics for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (poly_int64 offset, tree func)
{
  poly_uint64 size = FRAME_GROWS_DOWNWARD ? -offset : offset;
  unsigned HOST_WIDE_INT limit
    = ((HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
       /* Leave room for the fixed part of the frame.  */
       - 64 * UNITS_PER_WORD);

  if (!coeffs_in_range_p (size, 0U, limit))
    {
      unsigned HOST_WIDE_INT hwisize;
      if (size.is_constant (&hwisize))
        error_at (DECL_SOURCE_LOCATION (func),
                  "total size of local objects %wu exceeds maximum %wu",
                  hwisize, limit);
      else
        error_at (DECL_SOURCE_LOCATION (func),
                  "total size of local objects exceeds maximum %wu",
                  limit);
      return true;
    }

  return false;
}
/* Return the minimum spill slot alignment for a register of mode MODE.  */

unsigned int
spill_slot_alignment (machine_mode mode ATTRIBUTE_UNUSED)
{
  return STACK_SLOT_ALIGNMENT (NULL_TREE, mode, GET_MODE_ALIGNMENT (mode));
}
/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}
/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (poly_int64 start, poly_int64 length,
                     poly_int64 size, unsigned int alignment,
                     poly_int64_pod *poffset)
{
  poly_int64 this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = targetm.starting_frame_offset () % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (aligned_lower_bound (start + length - size - frame_phase, alignment)
         + frame_phase);
  else
    this_frame_offset
      = aligned_upper_bound (start - frame_phase, alignment) + frame_phase;

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (maybe_lt (this_frame_offset, start))
    {
      if (known_eq (frame_offset, start))
        frame_offset = this_frame_offset;
      else
        return false;
    }
  else if (maybe_gt (this_frame_offset + size, start + length))
    {
      if (known_eq (frame_offset, start + length))
        frame_offset = this_frame_offset + size;
      else
        return false;
    }

  *poffset = this_frame_offset;
  return true;
}
/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (poly_int64 start, poly_int64 end)
{
  class frame_space *space = ggc_alloc<frame_space> ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */
static rtx
assign_stack_local_1 (machine_mode mode, poly_int64 size,
                      int align, int kind)
{
  rtx x, addr;
  poly_int64 bigend_correction = 0;
  poly_int64 slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = aligned_upper_bound (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = MAX_SUPPORTED_STACK_ALIGNMENT / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
        {
          if (!crtl->stack_realign_processed)
            crtl->stack_alignment_estimated = alignment_in_bits;
          else
            {
              /* If stack is realigned and stack alignment value
                 hasn't been finalized, it is OK not to increase
                 stack_alignment_estimated.  The bigger alignment
                 requirement is recorded in stack_alignment_needed
                 below.  */
              gcc_assert (!crtl->stack_realign_finalized);
              if (!crtl->stack_realign_needed)
                {
                  /* It is OK to reduce the alignment as long as the
                     requested size is 0 or the estimated stack
                     alignment >= mode alignment.  */
                  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
                              || known_eq (size, 0)
                              || (crtl->stack_alignment_estimated
                                  >= GET_MODE_ALIGNMENT (mode)));
                  alignment_in_bits = crtl->stack_alignment_estimated;
                  alignment = alignment_in_bits / BITS_PER_UNIT;
                }
            }
        }
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || maybe_ne (size, 0))
    {
      if (kind & ASLK_RECORD_PAD)
        {
          class frame_space **psp;

          for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
            {
              class frame_space *space = *psp;
              if (!try_fit_stack_local (space->start, space->length, size,
                                        alignment, &slot_offset))
                continue;
              *psp = space->next;
              if (known_gt (slot_offset, space->start))
                add_frame_space (space->start, slot_offset);
              if (known_lt (slot_offset + size, space->start + space->length))
                add_frame_space (slot_offset + size,
                                 space->start + space->length);
              goto found_space;
            }
        }
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (known_gt (slot_offset, frame_offset))
            add_frame_space (frame_offset, slot_offset);
          if (known_lt (slot_offset + size, old_frame_offset))
            add_frame_space (slot_offset + size, old_frame_offset);
        }
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment,
                           &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (known_gt (slot_offset, old_frame_offset))
            add_frame_space (old_frame_offset, slot_offset);
          if (known_lt (slot_offset + size, frame_offset))
            add_frame_space (slot_offset + size, frame_offset);
        }
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (mode != BLKmode)
    {
      /* The slot size can sometimes be smaller than the mode size;
         e.g. the rs6000 port allocates slots with a vector mode
         that have the size of only one element.  However, the slot
         size must always be ordered wrt to the mode size, in the
         same way as for a subreg.  */
      gcc_checking_assert (ordered_p (GET_MODE_SIZE (mode), size));
      if (BYTES_BIG_ENDIAN && maybe_lt (GET_MODE_SIZE (mode), size))
        bigend_correction = size - GET_MODE_SIZE (mode);
    }

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (Pmode, frame_pointer_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction
                           + targetm.starting_frame_offset (), Pmode));
  else
    addr = plus_constant (Pmode, virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction,
                           Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  vec_safe_push (stack_slot_list, x);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}
/* Wrap up assign_stack_local_1 with KIND as ASLK_RECORD_PAD.  */

rtx
assign_stack_local (machine_mode mode, poly_int64 size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
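
/* A typical use, as a sketch only (the mode and size below are
   illustrative, not taken from any particular caller):

     rtx slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);

   ALIGN == 0 asks for the natural alignment of DImode, and the
   returned MEM addresses the slot through virtual_stack_vars_rtx
   until instantiate_virtual_regs runs.  */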
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.  */
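
/* For example, while expanding

     f (({ g (); h (); }));

   temporaries created for g () and h () live at the nesting level of
   the ({...}) grouping, but the temporary holding the grouping's
   value is preserved by pretending it was allocated at the enclosing
   level, so popping the grouping's level does not free it.
   (Illustrative sketch of the scheme described above.)  */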
class GTY(()) temp_slot {
public:
  /* Points to next temporary slot.  */
  class temp_slot *next;
  /* Points to previous temporary slot.  */
  class temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  poly_int64 size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  poly_int64 base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  poly_int64 full_size;
};
/* Entry for the below hash table.  */
struct GTY((for_user)) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  class temp_slot *temp_slot;
};

struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
{
  static hashval_t hash (temp_slot_address_entry *);
  static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
};

/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
static size_t n_temp_slots_in_use;
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (class temp_slot *temp, class temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (class temp_slot *temp, class temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}
/* Returns the list of used temp slots at LEVEL.  */

static class temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) vec_safe_length (used_temp_slots))
    vec_safe_grow_cleared (used_temp_slots, level + 1);

  return &(*used_temp_slots)[level];
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return used_temp_slots->length () - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (class temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (class temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
  n_temp_slots_in_use--;
}
/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
                   &do_not_record, NULL, false);
}

/* Return the hash value for an address -> temp slot mapping.  */
hashval_t
temp_address_hasher::hash (temp_slot_address_entry *t)
{
  return t->hash;
}

/* Compare two address -> temp slot mapping entries.  */
bool
temp_address_hasher::equal (temp_slot_address_entry *t1,
                            temp_slot_address_entry *t2)
{
  return exp_equiv_p (t1->address, t2->address, 0, true);
}
/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, class temp_slot *temp_slot)
{
  struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
  t->address = copy_rtx (address);
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
}
/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
int
remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
{
  const struct temp_slot_address_entry *t = *slot;
  if (! t->temp_slot->in_use)
    temp_slot_address_table->clear_slot (slot);
  return 1;
}
/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  /* Use quicker clearing if there aren't any active temp slots.  */
  if (n_temp_slots_in_use)
    temp_slot_address_table->traverse
      <void *, remove_unused_temp_slot_addresses_1> (NULL);
  else
    temp_slot_address_table->empty ();
}
/* Find the temp slot corresponding to the object at address X.  */

static class temp_slot *
find_temp_slot_from_address (rtx x)
{
  class temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  poly_int64 offset;
  if (strip_offset (x, &offset) == virtual_stack_vars_rtx)
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
        for (p = *temp_slots_at_level (i); p; p = p->next)
          if (known_in_range_p (offset, p->base_offset, p->full_size))
            return p;
    }

  return NULL;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (machine_mode mode, poly_int64 size, tree type)
{
  unsigned int align;
  class temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  gcc_assert (known_size_p (size));

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
        {
          if (p->align >= align
              && known_ge (p->size, size)
              && GET_MODE (p->slot) == mode
              && objects_must_conflict_p (p->type, type)
              && (best_p == 0
                  || (known_eq (best_p->size, p->size)
                      ? best_p->align > p->align
                      : known_ge (best_p->size, p->size))))
            {
              if (p->align == align && known_eq (p->size, size))
                {
                  selected = p;
                  cut_slot_from_list (selected, &avail_temp_slots);
                  best_p = 0;
                  break;
                }
              best_p = p;
            }
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          poly_int64 rounded_size = aligned_upper_bound (size, alignment);

          if (known_ge (best_p->size - rounded_size, alignment))
            {
              p = ggc_alloc<temp_slot> ();
              p->in_use = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
              p->align = best_p->align;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              vec_safe_push (stack_slot_list, p->slot);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      poly_int64 frame_offset_old = frame_offset;

      p = ggc_alloc<temp_slot> ();

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
                                      (mode == BLKmode
                                       ? aligned_upper_bound (size,
                                                              (int) align
                                                              / BITS_PER_UNIT)
                                       : size),
                                      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
        p->size = frame_offset_old - frame_offset;
      else
        p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
        {
          p->base_offset = frame_offset;
          p->full_size = frame_offset_old - frame_offset;
        }
      else
        {
          p->base_offset = frame_offset_old;
          p->full_size = frame_offset - frame_offset_old;
        }

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->type = type;
  p->level = temp_slot_level;
  n_temp_slots_in_use++;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  vec_safe_push (stack_slot_list, slot);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First two arguments are same as in preceding function.  */

rtx
assign_stack_temp (machine_mode mode, poly_int64 size)
{
  return assign_stack_temp_for_type (mode, size, NULL_TREE);
}
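
/* For instance, a caller that needs scratch stack memory for an
   SImode value would write (illustrative only):

     rtx tmp = assign_stack_temp (SImode, GET_MODE_SIZE (SImode));

   and the slot is given back for reuse by free_temp_slots () once
   the enclosing statement has been expanded.  */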
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate of the given type.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front
     end.  See also create_tmp_var for the gimplification-time check.  */
  gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));

  if (mode == BLKmode || memory_required)
    {
      poly_int64 size;
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we can find a fixed upper limit on
         the size, so try that instead.  */
      if (!poly_int_tree_p (TYPE_SIZE_UNIT (type), &size))
        size = max_int_size_in_bytes (type);

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (known_eq (size, 0))
        size = 1;

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl
          && !known_size_p (size)
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("size of variable %q+D is too large", decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  class temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (known_eq (p->base_offset + p->full_size, q->base_offset))
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (known_eq (q->base_offset + q->full_size, p->base_offset))
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
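
/* E.g. two freed BLKmode slots with { base_offset 0, full_size 16 }
   and { base_offset 16, full_size 16 } satisfy the first known_eq
   test above and are merged into a single slot of full_size 32.
   (Made-up offsets, for illustration only.)  */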
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  class temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
     in common between them, try a recursive call on those
     values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
        return;

      if (REG_P (new_rtx))
        {
          update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
          update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
          return;
        }
      else if (GET_CODE (new_rtx) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}
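
/* For example (hypothetical RTL): if (plus (reg fp) (const_int 16))
   is known to address a temp slot and the expander copies that sum
   into a pseudo R, this function records R as an alias, so that a
   later find_temp_slot_from_address (R) still finds the slot.  */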
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  class temp_slot *p = 0, *next;

  if (x == 0)
    return;

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    return;

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      if (p->level == temp_slot_level)
        move_slot_to_level (p, temp_slot_level - 1);
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      move_slot_to_level (p, temp_slot_level - 1);
    }
}
/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  class temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  free_temp_slots ();
  temp_slot_level--;
}
/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  vec_alloc (used_temp_slots, 0);
  temp_slot_level = 0;
  n_temp_slots_in_use = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
  else
    temp_slot_address_table->empty ();
}
/* Functions and data structures to keep track of the values hard regs
   had at the start of the function.  */

/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
   and has_hard_reg_initial_val.  */
struct GTY(()) initial_value_pair {
  rtx hard_reg;
  rtx pseudo;
};

/* ???  This could be a VEC but there is currently no way to define an
   opaque VEC type.  This could be worked around by defining struct
   initial_value_pair in function.h.  */
struct GTY(()) initial_value_struct {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
};
/* If a pseudo represents an initial hard reg (or expression), return
   it, else return NULL_RTX.  */

rtx
get_hard_reg_initial_reg (rtx reg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}
/* Make sure that there's a pseudo register of mode MODE that stores the
   initial value of hard register REGNO.  Return an rtx for such a pseudo.  */

rtx
get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  rtx rv;

  rv = has_hard_reg_initial_val (mode, regno);
  if (rv)
    return rv;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs == 0)
    {
      ivs = ggc_alloc<initial_value_struct> ();
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
      crtl->hard_reg_initial_vals = ivs;
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
                                    ivs->max_entries);
    }

  ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);

  return ivs->entries[ivs->num_entries++].pseudo;
}
/* See if get_hard_reg_initial_val has been used to create a pseudo
   for the initial value of hard register REGNO in mode MODE.  Return
   the associated pseudo if so, otherwise return NULL.  */

rtx
has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  int i;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs != 0)
    for (i = 0; i < ivs->num_entries; i++)
      if (GET_MODE (ivs->entries[i].hard_reg) == mode
          && REGNO (ivs->entries[i].hard_reg) == regno)
        return ivs->entries[i].pseudo;

  return NULL_RTX;
}
static unsigned int
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;
  rtx_insn *seq;

  if (ivs == 0)
    return 0;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_at_entry (seq);
  return 0;
}
/* Return the hardreg-pseudoreg initial values pair entry I and
   TRUE if I is a valid entry, or FALSE if I is not a valid entry.  */

bool
initial_value_entry (int i, rtx *hreg, rtx *preg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  if (!ivs || i >= ivs->num_entries)
    return false;

  *hreg = ivs->entries[i].hard_reg;
  *preg = ivs->entries[i].pseudo;
  return true;
}
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static poly_int64 in_arg_offset;
static poly_int64 var_offset;
static poly_int64 dynamic_offset;
static poly_int64 out_arg_offset;
static poly_int64 cfa_offset;
/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

#if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
#define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#ifdef INCOMING_REG_PARM_STACK_SPACE
#define STACK_DYNAMIC_OFFSET(FNDECL)					\
  ((ACCUMULATE_OUTGOING_ARGS						\
    ? (crtl->outgoing_args_size						\
       + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL)			\
					  ? NULL_TREE			\
					  : TREE_TYPE (FNDECL)))	\
	  ? 0								\
	  : INCOMING_REG_PARM_STACK_SPACE (FNDECL)))			\
    : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL)					\
  ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : poly_int64 (0)) \
   + (STACK_POINTER_OFFSET))
#endif
#endif
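
/* Under the default definition above, a target without
   INCOMING_REG_PARM_STACK_SPACE and with ACCUMULATE_OUTGOING_ARGS
   enabled simply gets

     STACK_DYNAMIC_OFFSET (fndecl)
       == crtl->outgoing_args_size + STACK_POINTER_OFFSET

   i.e. dynamic allocations sit just above the outgoing argument
   block.  (Worked expansion of the macro, for illustration.)  */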
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, poly_int64_pod *poffset)
{
  rtx new_rtx;
  poly_int64 offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
        {
          /* Replace virtual_incoming_args_rtx with internal arg
             pointer if DRAP is used to realign stack.  */
          new_rtx = crtl->args.internal_arg_pointer;
          offset = 0;
        }
      else
        new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else if (x == virtual_preferred_stack_boundary_rtx)
    {
      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
      offset = 0;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}
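
/* To summarize the mapping performed above:

     virtual_incoming_args_rtx  -> arg_pointer_rtx   + in_arg_offset
     virtual_stack_vars_rtx     -> frame_pointer_rtx + var_offset
     virtual_stack_dynamic_rtx  -> stack_pointer_rtx + dynamic_offset
     virtual_outgoing_args_rtx  -> stack_pointer_rtx + out_arg_offset

   with the DRAP and CFA special cases handled separately.  */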
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of *LOC.  The expression is simplified,
   as much as possible, but is not to be considered "valid" in any sense
   implied by the target.  Return true if any change is made.  */

static bool
instantiate_virtual_regs_in_rtx (rtx *loc)
{
  if (!*loc)
    return false;
  bool changed = false;
  subrtx_ptr_iterator::array_type array;
  FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
    {
      rtx *loc = *iter;
      if (rtx x = *loc)
        {
          rtx new_rtx;
          poly_int64 offset;
          switch (GET_CODE (x))
            {
            case REG:
              new_rtx = instantiate_new_reg (x, &offset);
              if (new_rtx)
                {
                  *loc = plus_constant (GET_MODE (x), new_rtx, offset);
                  changed = true;
                }
              iter.skip_subrtxes ();
              break;

            case PLUS:
              new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
              if (new_rtx)
                {
                  XEXP (x, 0) = new_rtx;
                  *loc = plus_constant (GET_MODE (x), x, offset, true);
                  changed = true;
                  iter.skip_subrtxes ();
                }

              /* FIXME -- from old code */
              /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
                 we can commute the PLUS and SUBREG because pointers into the
                 frame are well-behaved.  */
              break;

            default:
              break;
            }
        }
    }
  return changed;
}
/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static bool
safe_insn_predicate (int code, int operand, rtx x)
{
  return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
}
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx_insn *insn)
{
  poly_int64 offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new_rtx, x;
  rtx_insn *seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
         to mean that the underlying register gets assigned the inverse
         transformation.  This is used, for example, in the handling of
         non-local gotos; see get_arg_pointer_save_area.  */
      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
      if (new_rtx)
        {
          start_sequence ();

          instantiate_virtual_regs_in_rtx (&SET_SRC (set));
          x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
                                   gen_int_mode (-offset, GET_MODE (new_rtx)));
          x = force_operand (x, new_rtx);
          if (x != new_rtx)
            emit_move_insn (new_rtx, x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      /* Handle a straight copy from a virtual register by generating a
         new add insn.  The difference between this and falling through
         to the generic case is avoiding a new pseudo and eliminating a
         move insn in the initial rtl stream.  */
      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
      if (new_rtx
          && maybe_ne (offset, 0)
          && REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
        {
          start_sequence ();

          x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
                                   gen_int_mode (offset,
                                                 GET_MODE (SET_DEST (set))),
                                   SET_DEST (set), 1, OPTAB_LIB_WIDEN);
          if (x != SET_DEST (set))
            emit_move_insn (SET_DEST (set), x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
         operands remain valid if they're modified in place.  */
      poly_int64 delta;
      if (GET_CODE (SET_SRC (set)) == PLUS
          && recog_data.n_operands >= 3
          && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
          && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
          && poly_int_rtx_p (recog_data.operand[2], &delta)
          && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
        {
          offset += delta;

          /* If the sum is zero, then replace with a plain move.  */
          if (known_eq (offset, 0)
              && REG_P (SET_DEST (set))
              && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
            {
              start_sequence ();
              emit_move_insn (SET_DEST (set), new_rtx);
              seq = get_insns ();
              end_sequence ();

              emit_insn_before (seq, insn);
              delete_insn (insn);
              return;
            }

          x = gen_int_mode (offset, recog_data.operand_mode[2]);

          /* Using validate_change and apply_change_group here leaves
             recog_data in an invalid state.  Since we know exactly what
             we want to check, do those two by hand.  */
          if (safe_insn_predicate (insn_code, 1, new_rtx)
              && safe_insn_predicate (insn_code, 2, x))
            {
              *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
              *recog_data.operand_loc[2] = recog_data.operand[2] = x;
              any_change = true;

              /* Fall through into the regular operand fixup loop in
                 order to take care of operands other than 1 and 2.  */
            }
        }
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
        {
        case MEM:
          {
            rtx addr = XEXP (x, 0);

            if (!instantiate_virtual_regs_in_rtx (&addr))
              continue;

            start_sequence ();
            x = replace_equiv_address (x, addr, true);
            /* It may happen that the address with the virtual reg
               was valid (e.g. based on the virtual stack reg, which might
               be acceptable to the predicates with all offsets), whereas
               the address now isn't anymore, for instance when the address
               is still offsetted, but the base reg isn't virtual-stack-reg
               anymore.  Below we would do a force_reg on the whole operand,
               but this insn might actually only accept memory.  Hence,
               before doing that last resort, try to reload the address into
               a register, so this operand stays a MEM.  */
            if (!safe_insn_predicate (insn_code, i, x))
              {
                addr = force_reg (GET_MODE (addr), addr);
                x = replace_equiv_address (x, addr, true);
              }
            seq = get_insns ();
            end_sequence ();
            if (seq)
              emit_insn_before (seq, insn);
          }
          break;

        case REG:
          new_rtx = instantiate_new_reg (x, &offset);
          if (new_rtx == NULL)
            continue;
          if (known_eq (offset, 0))
            x = new_rtx;
          else
            {
              start_sequence ();

              /* Careful, special mode predicates may have stuff in
                 insn_data[insn_code].operand[i].mode that isn't useful
                 to us for computing a new value.  */
              /* ??? Recognize address_operand and/or "p" constraints
                 to see if (plus new offset) is a valid before we put
                 this through expand_simple_binop.  */
              x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
                                       gen_int_mode (offset, GET_MODE (x)),
                                       NULL_RTX, 1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          break;

        case SUBREG:
          new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
          if (new_rtx == NULL)
            continue;
          if (maybe_ne (offset, 0))
            {
              start_sequence ();
              new_rtx = expand_simple_binop
                (GET_MODE (new_rtx), PLUS, new_rtx,
                 gen_int_mode (offset, GET_MODE (new_rtx)),
                 NULL_RTX, 1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
                                   GET_MODE (new_rtx), SUBREG_BYTE (x));
          gcc_assert (x);
          break;

        default:
          continue;
        }

      /* At this point, X contains the new value for the operand.
         Validate the new value vs the insn predicate.  Note that
         asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
        {
          start_sequence ();
          if (REG_P (x))
            {
              gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
              x = copy_to_reg (x);
            }
          else
            x = force_reg (insn_data[insn_code].operand[i].mode, x);
          seq = get_insns ();
          end_sequence ();
          if (seq)
            emit_insn_before (seq, insn);
        }

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
        *recog_data.dup_loc[i]
          = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
        {
          error_for_asm (insn, "impossible constraint in %<asm%>");
          /* For asm goto, instead of fixing up all the edges
             just clear the template and clear input operands
             (asm goto doesn't have any output operands).  */
          if (JUMP_P (insn))
            {
              rtx asm_op = extract_asm_operands (PATTERN (insn));
              ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
              ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
              ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
            }
          else
            delete_insn (insn);
        }
    }
  else
    {
      if (recog_memoized (insn) < 0)
        fatal_insn_not_found (insn);
    }
}
/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
          && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
              || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
}
/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t))
        {
          if (DECL_RTL_SET_P (t))
            instantiate_decl_rtl (DECL_RTL (t));
          if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
              && DECL_INCOMING_RTL (t))
            instantiate_decl_rtl (DECL_INCOMING_RTL (t));
          if ((VAR_P (t) || TREE_CODE (t) == RESULT_DECL)
              && DECL_HAS_VALUE_EXPR_P (t))
            {
              tree v = DECL_VALUE_EXPR (t);
              walk_tree (&v, instantiate_expr, NULL, NULL);
            }
        }
    }
  return NULL;
}
/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
        instantiate_decl_rtl (DECL_RTL (t));
      if (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t))
        {
          tree v = DECL_VALUE_EXPR (t);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;
  unsigned ix;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  if ((decl = DECL_RESULT (fndecl))
      && TREE_CODE (decl) == RESULT_DECL)
    {
      if (DECL_RTL_SET_P (decl))
        instantiate_decl_rtl (DECL_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Process the saved static chain if it exists.  */
  decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
  if (decl && DECL_HAS_VALUE_EXPR_P (decl))
    instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));

  /* Now process all variables defined in the function or its subblocks.  */
  if (DECL_INITIAL (fndecl))
    instantiate_decls_1 (DECL_INITIAL (fndecl));

  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (DECL_RTL_SET_P (decl))
      instantiate_decl_rtl (DECL_RTL (decl));
  vec_free (cfun->local_decls);
}
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx_insn *insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = targetm.starting_frame_offset ();
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        /* These patterns in the instruction stream can never be recognized.
           Fortunately, they shouldn't contain virtual registers either.  */
        if (GET_CODE (PATTERN (insn)) == USE
            || GET_CODE (PATTERN (insn)) == CLOBBER
            || GET_CODE (PATTERN (insn)) == ASM_INPUT
            || DEBUG_MARKER_INSN_P (insn))
          continue;
        else if (DEBUG_BIND_INSN_P (insn))
          instantiate_virtual_regs_in_rtx (INSN_VAR_LOCATION_PTR (insn));
        else
          instantiate_virtual_regs_in_insn (insn);

        if (insn->deleted ())
          continue;

        instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));

        /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
        if (CALL_P (insn))
          instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  targetm.instantiate_decls ();

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;

  return 0;
}
namespace {

const pass_data pass_data_instantiate_virtual_regs =
{
  RTL_PASS, /* type */
  "vregs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_instantiate_virtual_regs : public rtl_opt_pass
{
public:
  pass_instantiate_virtual_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return instantiate_virtual_regs ();
    }

}; // class pass_instantiate_virtual_regs

} // anon namespace

rtl_opt_pass *
make_pass_instantiate_virtual_regs (gcc::context *ctxt)
{
  return new pass_instantiate_virtual_regs (ctxt);
}
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (const_tree exp, const_tree fntype)
{
  const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
  int i, regno, nregs;
  rtx reg;

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
        {
          tree fndecl = get_callee_fndecl (fntype);
          if (fndecl)
            fntype = TREE_TYPE (fndecl);
          else if (CALL_EXPR_FN (fntype))
            fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
          else
            /* For internal functions, assume nothing needs to be
               returned in memory.  */
            return 0;
        }
        break;
      case FUNCTION_DECL:
        fntype = TREE_TYPE (fntype);
        break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
        break;
      case IDENTIFIER_NODE:
        fntype = NULL_TREE;
        break;
      default:
        /* We don't expect other tree types here.  */
        gcc_unreachable ();
      }

  if (VOID_TYPE_P (type))
    return 0;

  /* If a record should be passed the same as its first (and only) member
     don't pass it as an aggregate.  */
  if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
    return aggregate_value_p (first_field (type), fntype);

  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;

  /* Function types that are TREE_ADDRESSABLE force return in memory.  */
  if (fntype && TREE_ADDRESSABLE (fntype))
    return 1;

  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;

  if (TYPE_EMPTY_P (type))
    return 0;

  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;

  if (targetm.calls.return_in_memory (type, fntype))
    return 1;

  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs (regno, TYPE_MODE (type));
  for (i = 0; i < nregs; i++)
    if (! call_used_or_fixed_reg_p (regno + i))
      return 1;

  return 0;
}
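
/* For instance, on a target whose return_in_memory hook rejects
   structures larger than two registers, a 32-byte struct makes
   aggregate_value_p return 1, so calls returning it pass a hidden
   address and the result is set up in memory.  (Illustrative target
   behavior, not a statement about any specific port.)  */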
/* Return true if we should assign DECL a pseudo register; false if it
   should live on the local stack.  */

bool
use_register_for_decl (const_tree decl)
{
  if (TREE_CODE (decl) == SSA_NAME)
    {
      /* We often try to use the SSA_NAME, instead of its underlying
	 decl, to get type information and guide decisions, to avoid
	 differences of behavior between anonymous and named
	 variables, but in this one case we have to go for the actual
	 variable if there is one.  The main reason is that, at least
	 at -O0, we want to place user variables on the stack, but we
	 don't mind using pseudos for anonymous or ignored temps.
	 Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
	 should go in pseudos, whereas their corresponding variables
	 might have to go on the stack.  So, disregarding the decl
	 here would negatively impact debug info at -O0, enable
	 coalescing between SSA_NAMEs that ought to get different
	 stack/pseudo assignments, and get the incoming argument
	 processing thoroughly confused by PARM_DECLs expected to live
	 in stack slots but assigned to pseudos.  */
      if (!SSA_NAME_VAR (decl))
	return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
	  && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));

      decl = SSA_NAME_VAR (decl);
    }

  /* Honor volatile.  */
  if (TREE_SIDE_EFFECTS (decl))
    return false;

  /* Honor addressability.  */
  if (TREE_ADDRESSABLE (decl))
    return false;

  /* RESULT_DECLs are a bit special in that they're assigned without
     regard to use_register_for_decl, but we generally only store in
     them.  If we coalesce their SSA NAMEs, we'd better return a
     result that matches the assignment in expand_function_start.  */
  if (TREE_CODE (decl) == RESULT_DECL)
    {
      /* If it's not an aggregate, we're going to use a REG or a
	 PARALLEL containing a REG.  */
      if (!aggregate_value_p (decl, current_function_decl))
	return true;

      /* If expand_function_start determines the return value, we'll
	 use MEM if it's not by reference.  */
      if (cfun->returns_pcc_struct
	  || (targetm.calls.struct_value_rtx
	      (TREE_TYPE (current_function_decl), 1)))
	return DECL_BY_REFERENCE (decl);

      /* Otherwise, we're taking an extra all.function_result_decl
	 argument.  It's set up in assign_parms_augmented_arg_list,
	 under the (negated) conditions above, and then it's used to
	 set up the RESULT_DECL rtl in assign_parms, after looping
	 over all parameters.  Now, if the RESULT_DECL is not by
	 reference, we'll use a MEM either way.  */
      if (!DECL_BY_REFERENCE (decl))
	return false;

      /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take
	 the function_result_decl's assignment.  Since it's a pointer,
	 we can short-circuit a number of the tests below, and we must
	 duplicate them because we don't have the
	 function_result_decl to test.  */
      if (!targetm.calls.allocate_stack_slots_for_args ())
	return true;
      /* We don't set DECL_IGNORED_P for the function_result_decl.  */
      if (optimize)
	return true;
      /* We don't set DECL_REGISTER for the function_result_decl.  */
      return false;
    }

  /* Only register-like things go in registers.  */
  if (DECL_MODE (decl) == BLKmode)
    return false;

  /* If -ffloat-store specified, don't put explicit float variables
     into registers.  */
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
     propagates values across these stores, and it probably shouldn't.  */
  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    return false;

  if (!targetm.calls.allocate_stack_slots_for_args ())
    return true;

  /* If we're not interested in tracking debugging information for
     this decl, then we can certainly put it in a register.  */
  if (DECL_IGNORED_P (decl))
    return true;

  if (optimize)
    return true;

  if (!DECL_REGISTER (decl))
    return false;

  /* When not optimizing, disregard register keyword for types that
     could have methods, otherwise the methods won't be callable from
     the debugger.  */
  if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl)))
    return false;

  return true;
}
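
/* A minimal sketch, not in the original source, of how these rules
   play out for locals at -O0 (no optimization, so the DECL_IGNORED_P
   and optimize shortcuts above do not apply):

     void
     f (void)
     {
       int a;             // no register keyword: stack slot
       register int b;    // DECL_REGISTER: pseudo register
       volatile int c;    // TREE_SIDE_EFFECTS: stack slot
       int d, *p = &d;    // TREE_ADDRESSABLE: stack slot
     }
*/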
/* Structures to communicate between the subroutines of assign_parms.
   The first holds data persistent across all parameters, the second
   is cleared out for each parameter.  */

struct assign_parm_data_all
{
  /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
     should become a job of the target or otherwise encapsulated.  */
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  struct args_size stack_args_size;
  tree function_result_decl;
  tree orig_fnargs;
  rtx_insn *first_conversion_insn;
  rtx_insn *last_conversion_insn;
  HOST_WIDE_INT pretend_args_size;
  HOST_WIDE_INT extra_pretend_bytes;
  int reg_parm_stack_space;
};

struct assign_parm_data_one
{
  tree nominal_type;
  function_arg_info arg;
  rtx entry_parm;
  rtx stack_parm;
  machine_mode nominal_mode;
  machine_mode passed_mode;
  struct locate_and_pad_arg_data locate;
  int partial;
};
/* A subroutine of assign_parms.  Initialize ALL.  */

static void
assign_parms_initialize_all (struct assign_parm_data_all *all)
{
  tree fntype ATTRIBUTE_UNUSED;

  memset (all, 0, sizeof (*all));

  fntype = TREE_TYPE (current_function_decl);

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
			current_function_decl, -1);
#endif
  all->args_so_far = pack_cumulative_args (&all->args_so_far_v);

#ifdef INCOMING_REG_PARM_STACK_SPACE
  all->reg_parm_stack_space
    = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
#endif
}
/* If ARGS contains entries with complex types, split the entry into two
   entries of the component type.  Return a new list if substitutions
   are needed, else the old list.  */

static void
split_complex_args (vec<tree> *args)
{
  unsigned i;
  tree p;

  FOR_EACH_VEC_ELT (*args, i, p)
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (type))
	{
	  tree decl;
	  tree subtype = TREE_TYPE (type);
	  bool addressable = TREE_ADDRESSABLE (p);

	  /* Rewrite the PARM_DECL's type with its component.  */
	  p = copy_node (p);
	  TREE_TYPE (p) = subtype;
	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
	  SET_DECL_MODE (p, VOIDmode);
	  DECL_SIZE (p) = NULL;
	  DECL_SIZE_UNIT (p) = NULL;
	  /* If this arg must go in memory, put it in a pseudo here.
	     We can't allow it to go in memory as per normal parms,
	     because the usual place might not have the imag part
	     adjacent to the real part.  */
	  DECL_ARTIFICIAL (p) = addressable;
	  DECL_IGNORED_P (p) = addressable;
	  TREE_ADDRESSABLE (p) = 0;
	  layout_decl (p, 0);
	  (*args)[i] = p;

	  /* Build a second synthetic decl.  */
	  decl = build_decl (EXPR_LOCATION (p),
			     PARM_DECL, NULL_TREE, subtype);
	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
	  DECL_ARTIFICIAL (decl) = addressable;
	  DECL_IGNORED_P (decl) = addressable;
	  layout_decl (decl, 0);
	  args->safe_insert (++i, decl);
	}
    }
}
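
/* Illustrative sketch, not in the original source: with a target hook
   that splits complex arguments, a declaration such as

     double cnorm (_Complex double z);

   is rewritten so the rest of parameter assignment sees two scalar
   parms, roughly

     double cnorm (double z_real, double z_imag);

   and assign_parms_unsplit_complex below recombines the two halves
   into a CONCAT for the original PARM_DECL.  */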
/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
   the hidden struct return argument, and (abi willing) complex args.
   Return the new parameter list.  */

static vec<tree>
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
  tree fndecl = current_function_decl;
  tree fntype = TREE_TYPE (fndecl);
  vec<tree> fnargs = vNULL;
  tree arg;

  for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
    fnargs.safe_push (arg);

  all->orig_fnargs = DECL_ARGUMENTS (fndecl);

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! cfun->returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));
      tree decl;

      decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
			 PARM_DECL, get_identifier (".result_ptr"), type);
      DECL_ARG_TYPE (decl) = type;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_NAMELESS (decl) = 1;
      TREE_CONSTANT (decl) = 1;
      /* We don't set DECL_IGNORED_P or DECL_REGISTER here.  If this
	 changes, the end of the RESULT_DECL handling block in
	 use_register_for_decl must be adjusted to match.  */

      DECL_CHAIN (decl) = all->orig_fnargs;
      all->orig_fnargs = decl;
      fnargs.safe_insert (0, decl);

      all->function_result_decl = decl;
    }

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    split_complex_args (&fnargs);

  return fnargs;
}
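
/* Illustrative sketch, not in the original source: given

     struct big { long a, b, c; };
     struct big make_big (int x);

   a target with no dedicated struct-value register compiles the
   function as if it had been declared

     struct big *make_big (struct big *result_ptr, int x);

   where the first parameter is the artificial ".result_ptr"
   PARM_DECL built above.  */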
/* A subroutine of assign_parms.  Examine PARM and pull out type and mode
   data for the parameter.  Incorporate ABI specifics such as pass-by-
   reference and type promotion.  */

static void
assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
			     struct assign_parm_data_one *data)
{
  int unsignedp;

  *data = assign_parm_data_one ();

  /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'.  */
  if (!cfun->stdarg)
    data->arg.named = 1;  /* No variadic parms.  */
  else if (DECL_CHAIN (parm))
    data->arg.named = 1;  /* Not the last non-variadic parm.  */
  else if (targetm.calls.strict_argument_naming (all->args_so_far))
    data->arg.named = 1;  /* Only variadic ones are unnamed.  */
  else
    data->arg.named = 0;  /* Treat as variadic.  */

  data->nominal_type = TREE_TYPE (parm);
  data->arg.type = DECL_ARG_TYPE (parm);

  /* Look out for errors propagating this far.  Also, if the parameter's
     type is void then its value doesn't matter.  */
  if (TREE_TYPE (parm) == error_mark_node
      /* This can happen after weird syntax errors
	 or if an enum type is defined among the parms.  */
      || TREE_CODE (parm) != PARM_DECL
      || data->arg.type == NULL
      || VOID_TYPE_P (data->nominal_type))
    {
      data->nominal_type = data->arg.type = void_type_node;
      data->nominal_mode = data->passed_mode = data->arg.mode = VOIDmode;
      return;
    }

  /* Find mode of arg as it is passed, and mode of arg as it should be
     during execution of this function.  */
  data->passed_mode = data->arg.mode = TYPE_MODE (data->arg.type);
  data->nominal_mode = TYPE_MODE (data->nominal_type);

  /* If the parm is to be passed as a transparent union or record, use the
     type of the first field for the tests below.  We have already verified
     that the modes are the same.  */
  if (RECORD_OR_UNION_TYPE_P (data->arg.type)
      && TYPE_TRANSPARENT_AGGR (data->arg.type))
    data->arg.type = TREE_TYPE (first_field (data->arg.type));

  /* See if this arg was passed by invisible reference.  */
  if (apply_pass_by_reference_rules (&all->args_so_far_v, data->arg))
    {
      data->nominal_type = data->arg.type;
      data->passed_mode = data->nominal_mode = data->arg.mode;
    }

  /* Find mode as it is passed by the ABI.  */
  unsignedp = TYPE_UNSIGNED (data->arg.type);
  data->arg.mode
    = promote_function_mode (data->arg.type, data->arg.mode, &unsignedp,
			     TREE_TYPE (current_function_decl), 0);
}
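
/* Illustrative sketch, not in the original source: on a target whose
   promote_function_mode widens sub-word integers to full words, a
   parameter declared

     short s;   // TYPE_MODE (short) == HImode

   ends up with data->nominal_mode == HImode (its mode within the
   function) but data->arg.mode == SImode (the mode in which the
   caller actually passed it); assign_parm_setup_reg later reconciles
   the two.  */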
/* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */

static void
assign_parms_setup_varargs (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data, bool no_rtl)
{
  int varargs_pretend_bytes = 0;

  function_arg_info last_named_arg = data->arg;
  last_named_arg.named = true;
  targetm.calls.setup_incoming_varargs (all->args_so_far, last_named_arg,
					&varargs_pretend_bytes, no_rtl);

  /* If the back-end has requested extra stack space, record how much is
     needed.  Do not change pretend_args_size otherwise since it may be
     nonzero from an earlier partial argument.  */
  if (varargs_pretend_bytes > 0)
    all->pretend_args_size = varargs_pretend_bytes;
}
/* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
   the incoming location of the current parameter.  */

static void
assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data)
{
  HOST_WIDE_INT pretend_bytes = 0;
  rtx entry_parm;
  bool in_regs;

  if (data->arg.mode == VOIDmode)
    {
      data->entry_parm = data->stack_parm = const0_rtx;
      return;
    }

  targetm.calls.warn_parameter_passing_abi (all->args_so_far,
					    data->arg.type);

  entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
						    data->arg);
  if (entry_parm == 0)
    data->arg.mode = data->passed_mode;

  /* Determine parm's home in the stack, in case it arrives in the stack
     or we should pretend it did.  Compute the stack position and rtx where
     the argument arrives and its size.

     There is one complexity here:  If this was a parameter that would
     have been passed in registers, but wasn't only because it is
     __builtin_va_alist, we want locate_and_pad_parm to treat it as if
     it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
     In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
     as it was the previous time.  */
  in_regs = (entry_parm != 0);
#ifdef STACK_PARMS_IN_REG_PARM_AREA
  in_regs = true;
#endif
  if (!in_regs && !data->arg.named)
    {
      if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
	{
	  rtx tem;
	  function_arg_info named_arg = data->arg;
	  named_arg.named = true;
	  tem = targetm.calls.function_incoming_arg (all->args_so_far,
						     named_arg);
	  in_regs = tem != NULL;
	}
    }

  /* If this parameter was passed both in registers and in the stack, use
     the copy on the stack.  */
  if (targetm.calls.must_pass_in_stack (data->arg))
    entry_parm = 0;

  if (entry_parm)
    {
      int partial;

      partial = targetm.calls.arg_partial_bytes (all->args_so_far, data->arg);
      data->partial = partial;

      /* The caller might already have allocated stack space for the
	 register parameters.  */
      if (partial != 0 && all->reg_parm_stack_space == 0)
	{
	  /* Part of this argument is passed in registers and part
	     is passed on the stack.  Ask the prologue code to extend
	     the stack part so that we can recreate the full value.

	     PRETEND_BYTES is the size of the registers we need to store.
	     CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
	     stack space that the prologue should allocate.

	     Internally, gcc assumes that the argument pointer is aligned
	     to STACK_BOUNDARY bits.  This is used both for alignment
	     optimizations (see init_emit) and to locate arguments that are
	     aligned to more than PARM_BOUNDARY bits.  We must preserve this
	     invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
	     a stack boundary.  */

	  /* We assume at most one partial arg, and it must be the first
	     argument on the stack.  */
	  gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);

	  pretend_bytes = partial;
	  all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);

	  /* We want to align relative to the actual stack pointer, so
	     don't include this in the stack size until later.  */
	  all->extra_pretend_bytes = all->pretend_args_size;
	}
    }

  locate_and_pad_parm (data->arg.mode, data->arg.type, in_regs,
		       all->reg_parm_stack_space,
		       entry_parm ? data->partial : 0, current_function_decl,
		       &all->stack_args_size, &data->locate);

  /* Update parm_stack_boundary if this parameter is passed in the
     stack.  */
  if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
    crtl->parm_stack_boundary = data->locate.boundary;

  /* Adjust offsets to include the pretend args.  */
  pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
  data->locate.slot_offset.constant += pretend_bytes;
  data->locate.offset.constant += pretend_bytes;

  data->entry_parm = entry_parm;
}
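
/* Worked example, not in the original source: assume STACK_BYTES is
   16 and a partial argument leaves 12 bytes in registers.  Then

     pretend_bytes       = 12
     CEIL_ROUND (12, 16) = (12 + 16 - 1) & ~(16 - 1) = 16

   so the prologue allocates 16 bytes of pretend args, preserving the
   STACK_BOUNDARY alignment of the argument pointer.  */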
/* A subroutine of assign_parms.  If there is actually space on the stack
   for this parm, count it in stack_args_size and return true.  */

static bool
assign_parm_is_stack_parm (struct assign_parm_data_all *all,
			   struct assign_parm_data_one *data)
{
  /* Trivially true if we've no incoming register.  */
  if (data->entry_parm == NULL)
    ;
  /* Also true if we're partially in registers and partially not,
     since we've arranged to drop the entire argument on the stack.  */
  else if (data->partial != 0)
    ;
  /* Also true if the target says that it's passed in both registers
     and on the stack.  */
  else if (GET_CODE (data->entry_parm) == PARALLEL
	   && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
    ;
  /* Also true if the target says that there's stack allocated for
     all register parameters.  */
  else if (all->reg_parm_stack_space > 0)
    ;
  /* Otherwise, no, this parameter has no ABI defined stack slot.  */
  else
    return false;

  all->stack_args_size.constant += data->locate.size.constant;
  if (data->locate.size.var)
    ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);

  return true;
}
/* A subroutine of assign_parms.  Given that this parameter is allocated
   stack space by the ABI, find it.  */

static void
assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
{
  rtx offset_rtx, stack_parm;
  unsigned int align, boundary;

  /* If we're passing this arg using a reg, make its stack home the
     aligned stack slot.  */
  if (data->entry_parm)
    offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
  else
    offset_rtx = ARGS_SIZE_RTX (data->locate.offset);

  stack_parm = crtl->args.internal_arg_pointer;
  if (offset_rtx != const0_rtx)
    stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
  stack_parm = gen_rtx_MEM (data->arg.mode, stack_parm);

  if (!data->arg.pass_by_reference)
    {
      set_mem_attributes (stack_parm, parm, 1);
      /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
	 while promoted mode's size is needed.  */
      if (data->arg.mode != BLKmode
	  && data->arg.mode != DECL_MODE (parm))
	{
	  set_mem_size (stack_parm, GET_MODE_SIZE (data->arg.mode));
	  if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
	    {
	      poly_int64 offset = subreg_lowpart_offset (DECL_MODE (parm),
							 data->arg.mode);
	      if (maybe_ne (offset, 0))
		set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
	    }
	}
    }

  boundary = data->locate.boundary;
  align = BITS_PER_UNIT;

  /* If we're padding upward, we know that the alignment of the slot
     is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
     intentionally forcing upward padding.  Otherwise we have to come
     up with a guess at the alignment based on OFFSET_RTX.  */
  poly_int64 offset;
  if (data->locate.where_pad == PAD_NONE || data->entry_parm)
    align = boundary;
  else if (data->locate.where_pad == PAD_UPWARD)
    {
      align = boundary;
      /* If the argument offset is actually more aligned than the nominal
	 stack slot boundary, take advantage of that excess alignment.
	 Don't make any assumptions if STACK_POINTER_OFFSET is in use.  */
      if (poly_int_rtx_p (offset_rtx, &offset)
	  && known_eq (STACK_POINTER_OFFSET, 0))
	{
	  unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
	  if (offset_align == 0 || offset_align > STACK_BOUNDARY)
	    offset_align = STACK_BOUNDARY;
	  align = MAX (align, offset_align);
	}
    }
  else if (poly_int_rtx_p (offset_rtx, &offset))
    {
      align = least_bit_hwi (boundary);
      unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
      if (offset_align != 0)
	align = MIN (align, offset_align);
    }
  set_mem_align (stack_parm, align);

  if (data->entry_parm)
    set_reg_attrs_for_parm (data->entry_parm, stack_parm);

  data->stack_parm = stack_parm;
}
/* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
   always valid and contiguous.  */

static void
assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;

  /* If this parm was passed part in regs and part in memory, pretend it
     arrived entirely in memory by pushing the register-part onto the stack.
     In the special case of a DImode or DFmode that is split, we could put
     it together in a pseudoreg directly, but for now that's not worth
     bothering with.  */
  if (data->partial != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous
	 locations.  The Irix 6 ABI has examples of this.  */
      if (GET_CODE (entry_parm) == PARALLEL)
	emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
			  data->arg.type, int_size_in_bytes (data->arg.type));
      else
	{
	  gcc_assert (data->partial % UNITS_PER_WORD == 0);
	  move_block_from_reg (REGNO (entry_parm),
			       validize_mem (copy_rtx (stack_parm)),
			       data->partial / UNITS_PER_WORD);
	}

      entry_parm = stack_parm;
    }

  /* If we didn't decide this parm came in a register, by default it came
     on the stack.  */
  else if (entry_parm == NULL)
    entry_parm = stack_parm;

  /* When an argument is passed in multiple locations, we can't make use
     of this information, but we can save some copying if the whole argument
     is passed in a single register.  */
  else if (GET_CODE (entry_parm) == PARALLEL
	   && data->nominal_mode != BLKmode
	   && data->passed_mode != BLKmode)
    {
      size_t i, len = XVECLEN (entry_parm, 0);

      for (i = 0; i < len; i++)
	if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
	    && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
	    && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
		== data->passed_mode)
	    && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
	  {
	    entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
	    break;
	  }
    }

  data->entry_parm = entry_parm;
}
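
/* Illustrative sketch, not in the original source: a PARALLEL for an
   argument passed both on the stack and in a register might look like

     (parallel [(expr_list (nil) (const_int 0))
		(expr_list (reg:DI 3) (const_int 0))])

   where the null register in the first entry is the marker tested by
   assign_parm_is_stack_parm above.  */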
/* A subroutine of assign_parms.  Reconstitute any values which were
   passed in multiple registers and would fit in a single register.  */

static void
assign_parm_remove_parallels (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;

  /* Convert the PARALLEL to a REG of the same mode as the parallel.
     This can be done with register operations rather than on the
     stack, even if we will store the reconstituted parameter on the
     stack later.  */
  if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
    {
      rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
      emit_group_store (parmreg, entry_parm, data->arg.type,
			GET_MODE_SIZE (GET_MODE (entry_parm)));
      entry_parm = parmreg;
    }

  data->entry_parm = entry_parm;
}
/* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
   always valid and properly aligned.  */

static void
assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
{
  rtx stack_parm = data->stack_parm;

  /* If we can't trust the parm stack slot to be aligned enough for its
     ultimate type, don't use that slot after entry.  We'll make another
     stack slot, if we need one.  */
  if (stack_parm
      && ((GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm)
	   && ((optab_handler (movmisalign_optab, data->nominal_mode)
		!= CODE_FOR_nothing)
	       || targetm.slow_unaligned_access (data->nominal_mode,
						 MEM_ALIGN (stack_parm))))
	  || (data->nominal_type
	      && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
	      && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
    stack_parm = NULL;

  /* If parm was passed in memory, and we need to convert it on entry,
     don't store it back in that same slot.  */
  else if (data->entry_parm == stack_parm
	   && data->nominal_mode != BLKmode
	   && data->nominal_mode != data->passed_mode)
    stack_parm = NULL;

  /* If stack protection is in effect for this function, don't leave any
     pointers in their passed stack slots.  */
  else if (crtl->stack_protect_guard
	   && (flag_stack_protect == 2
	       || data->arg.pass_by_reference
	       || POINTER_TYPE_P (data->nominal_type)))
    stack_parm = NULL;

  data->stack_parm = stack_parm;
}
/* A subroutine of assign_parms.  Return true if the current parameter
   should be stored as a BLKmode in the current frame.  */

static bool
assign_parm_setup_block_p (struct assign_parm_data_one *data)
{
  if (data->nominal_mode == BLKmode)
    return true;
  if (GET_MODE (data->entry_parm) == BLKmode)
    return true;

#ifdef BLOCK_REG_PADDING
  /* Only assign_parm_setup_block knows how to deal with register arguments
     that are padded at the least significant end.  */
  if (REG_P (data->entry_parm)
      && known_lt (GET_MODE_SIZE (data->arg.mode), UNITS_PER_WORD)
      && (BLOCK_REG_PADDING (data->passed_mode, data->arg.type, 1)
	  == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
    return true;
#endif

  return false;
}
/* A subroutine of assign_parms.  Arrange for the parameter to be
   present and valid in DATA->STACK_RTL.  */

static void
assign_parm_setup_block (struct assign_parm_data_all *all,
			 tree parm, struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;
  rtx target_reg = NULL_RTX;
  bool in_conversion_seq = false;
  HOST_WIDE_INT size;
  HOST_WIDE_INT size_stored;

  if (GET_CODE (entry_parm) == PARALLEL)
    entry_parm = emit_group_move_into_temps (entry_parm);

  /* If we want the parameter in a pseudo, don't use a stack slot.  */
  if (is_gimple_reg (parm) && use_register_for_decl (parm))
    {
      tree def = ssa_default_def (cfun, parm);
      gcc_assert (def);
      machine_mode mode = promote_ssa_mode (def, NULL);
      rtx reg = gen_reg_rtx (mode);
      if (GET_CODE (reg) != CONCAT)
	stack_parm = reg;
      else
	{
	  target_reg = reg;
	  /* Avoid allocating a stack slot, if there isn't one
	     preallocated by the ABI.  It might seem like we should
	     always prefer a pseudo, but converting between
	     floating-point and integer modes goes through the stack
	     on various machines, so it's better to use the reserved
	     stack slot than to risk wasting it and allocating more
	     for the conversion.  */
	  if (stack_parm == NULL_RTX)
	    {
	      int save = generating_concat_p;
	      generating_concat_p = 0;
	      stack_parm = gen_reg_rtx (mode);
	      generating_concat_p = save;
	    }
	}
      data->stack_parm = NULL;
    }

  size = int_size_in_bytes (data->arg.type);
  size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
  if (stack_parm == 0)
    {
      HOST_WIDE_INT parm_align
	= (STRICT_ALIGNMENT
	   ? MAX (DECL_ALIGN (parm), BITS_PER_WORD) : DECL_ALIGN (parm));

      SET_DECL_ALIGN (parm, parm_align);
      if (DECL_ALIGN (parm) > MAX_SUPPORTED_STACK_ALIGNMENT)
	{
	  rtx allocsize = gen_int_mode (size_stored, Pmode);
	  get_dynamic_stack_size (&allocsize, 0, DECL_ALIGN (parm), NULL);
	  stack_parm = assign_stack_local (BLKmode, UINTVAL (allocsize),
					   MAX_SUPPORTED_STACK_ALIGNMENT);
	  rtx addr = align_dynamic_address (XEXP (stack_parm, 0),
					    DECL_ALIGN (parm));
	  mark_reg_pointer (addr, DECL_ALIGN (parm));
	  stack_parm = gen_rtx_MEM (GET_MODE (stack_parm), addr);
	  MEM_NOTRAP_P (stack_parm) = 1;
	}
      else
	stack_parm = assign_stack_local (BLKmode, size_stored,
					 DECL_ALIGN (parm));
      if (known_eq (GET_MODE_SIZE (GET_MODE (entry_parm)), size))
	PUT_MODE (stack_parm, GET_MODE (entry_parm));
      set_mem_attributes (stack_parm, parm, 1);
    }

  /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
     calls that pass values in multiple non-contiguous locations.  */
  if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
    {
      rtx mem;

      /* Note that we will be storing an integral number of words.
	 So we have to be careful to ensure that we allocate an
	 integral number of words.  We do this above when we call
	 assign_stack_local if space was not allocated in the argument
	 list.  If it was, this will not work if PARM_BOUNDARY is not
	 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
	 if it becomes a problem.  Exception is when BLKmode arrives
	 with arguments not conforming to word_mode.  */

      if (data->stack_parm == 0)
	;
      else if (GET_CODE (entry_parm) == PARALLEL)
	;
      else
	gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));

      mem = validize_mem (copy_rtx (stack_parm));

      /* Handle values in multiple non-contiguous locations.  */
      if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
	emit_group_store (mem, entry_parm, data->arg.type, size);
      else if (GET_CODE (entry_parm) == PARALLEL)
	{
	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_group_store (mem, entry_parm, data->arg.type, size);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();
	  in_conversion_seq = true;
	}
      else if (size == 0)
	;
      /* If SIZE is that of a mode no bigger than a word, just use
	 that mode's store operation.  */
      else if (size <= UNITS_PER_WORD)
	{
	  unsigned int bits = size * BITS_PER_UNIT;
	  machine_mode mode = int_mode_for_size (bits, 0).else_blk ();

	  if (mode != BLKmode
#ifdef BLOCK_REG_PADDING
	      && (size == UNITS_PER_WORD
		  || (BLOCK_REG_PADDING (mode, data->arg.type, 1)
		      != (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
#endif
	      )
	    {
	      rtx reg;

	      /* We are really truncating a word_mode value containing
		 SIZE bytes into a value of mode MODE.  If such an
		 operation requires no actual instructions, we can refer
		 to the value directly in mode MODE, otherwise we must
		 start with the register in word_mode and explicitly
		 convert it.  */
	      if (targetm.truly_noop_truncation (size * BITS_PER_UNIT,
						 BITS_PER_WORD))
		reg = gen_rtx_REG (mode, REGNO (entry_parm));
	      else
		{
		  reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
		  reg = convert_to_mode (mode, copy_to_reg (reg), 1);
		}
	      emit_move_insn (change_address (mem, mode, 0), reg);
	    }

#ifdef BLOCK_REG_PADDING
	  /* Storing the register in memory as a full word, as
	     move_block_from_reg below would do, and then using the
	     MEM in a smaller mode, has the effect of shifting right
	     if BYTES_BIG_ENDIAN.  If we're bypassing memory, the
	     shifting must be explicit.  */
	  else if (!MEM_P (mem))
	    {
	      rtx x;

	      /* If the assert below fails, we should have taken the
		 mode != BLKmode path above, unless we have downward
		 padding of smaller-than-word arguments on a machine
		 with little-endian bytes, which would likely require
		 additional changes to work correctly.  */
	      gcc_checking_assert (BYTES_BIG_ENDIAN
				   && (BLOCK_REG_PADDING (mode,
							  data->arg.type, 1)
				       == PAD_UPWARD));

	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

	      x = gen_rtx_REG (word_mode, REGNO (entry_parm));
	      x = expand_shift (RSHIFT_EXPR, word_mode, x, by,
				NULL_RTX, 1);
	      x = force_reg (word_mode, x);
	      x = gen_lowpart_SUBREG (GET_MODE (mem), x);

	      emit_move_insn (mem, x);
	    }
#endif

	  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
	     machine must be aligned to the left before storing
	     to memory.  Note that the previous test doesn't
	     handle all cases (e.g. SIZE == 3).  */
	  else if (size != UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
		   && (BLOCK_REG_PADDING (mode, data->arg.type, 1)
		       == PAD_DOWNWARD)
#else
		   && BYTES_BIG_ENDIAN
#endif
		   )
	    {
	      rtx tem, x;
	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
	      rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));

	      x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
	      tem = change_address (mem, word_mode, 0);
	      emit_move_insn (tem, x);
	    }
	  else
	    move_block_from_reg (REGNO (entry_parm), mem,
				 size_stored / UNITS_PER_WORD);
	}
      else if (!MEM_P (mem))
	{
	  gcc_checking_assert (size > UNITS_PER_WORD);
#ifdef BLOCK_REG_PADDING
	  gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
						  data->arg.type, 0)
			       == PAD_UPWARD);
#endif
	  emit_move_insn (mem, entry_parm);
	}
      else
	move_block_from_reg (REGNO (entry_parm), mem,
			     size_stored / UNITS_PER_WORD);
    }
  else if (data->stack_parm == 0)
    {
      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
		       BLOCK_OP_NORMAL);
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
      in_conversion_seq = true;
    }

  if (target_reg)
    {
      if (!in_conversion_seq)
	emit_move_insn (target_reg, stack_parm);
      else
	{
	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_move_insn (target_reg, stack_parm);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();
	}
      stack_parm = target_reg;
    }

  data->stack_parm = stack_parm;
  set_parm_rtl (parm, stack_parm);
}
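
/* Worked example, not in the original source: on a big-endian target
   with UNITS_PER_WORD == 4, a 3-byte BLKmode value arriving in a
   register sits in the most significant bytes, so bypassing memory
   requires shifting by

     by = (UNITS_PER_WORD - size) * BITS_PER_UNIT = (4 - 3) * 8 = 8

   bits to bring it to the least significant end.  */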
/* A subroutine of assign_parms.  Allocate a pseudo to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
		       struct assign_parm_data_one *data)
{
  rtx parmreg, validated_mem;
  rtx equiv_stack_parm;
  machine_mode promoted_nominal_mode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
  bool did_conversion = false;
  bool need_conversion, moved;
  enum insn_code icode;
  rtx rtl;

  /* Store the parm in a pseudoregister during the function, but we may
     need to do it in a wider mode.  Using 2 here makes the result
     consistent with promote_decl_mode and thus expand_expr_real_1.  */
  promoted_nominal_mode
    = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
			     TREE_TYPE (current_function_decl), 2);

  parmreg = gen_reg_rtx (promoted_nominal_mode);
  if (!DECL_ARTIFICIAL (parm))
    mark_user_reg (parmreg);

  /* If this was an item that we received a pointer to,
     set rtl appropriately.  */
  if (data->arg.pass_by_reference)
    {
      rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->arg.type)), parmreg);
      set_mem_attributes (rtl, parm, 1);
    }
  else
    rtl = parmreg;

  assign_parm_remove_parallels (data);

  /* Copy the value into the register, thus bridging between
     assign_parm_find_data_types and expand_expr_real_1.  */

  equiv_stack_parm = data->stack_parm;
  validated_mem = validize_mem (copy_rtx (data->entry_parm));

  need_conversion = (data->nominal_mode != data->passed_mode
		     || promoted_nominal_mode != data->arg.mode);
  moved = false;

  if (need_conversion
      && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
      && data->nominal_mode == data->passed_mode
      && data->nominal_mode == GET_MODE (data->entry_parm))
    {
      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
	 mode, by the caller.  We now have to convert it to
	 NOMINAL_MODE, if different.  However, PARMREG may be in
	 a different mode than NOMINAL_MODE if it is being stored
	 promoted.

	 If ENTRY_PARM is a hard register, it might be in a register
	 not valid for operating in its mode (e.g., an odd-numbered
	 register for a DFmode).  In that case, moves are the only
	 thing valid, so we can't do a convert from there.  This
	 occurs when the calling sequence allow such misaligned
	 usages.

	 In addition, the conversion may involve a call, which could
	 clobber parameters which haven't been copied to pseudo
	 registers yet.

	 First, we try to emit an insn which performs the necessary
	 conversion.  We verify that this insn does not clobber any
	 hard registers.  */

      rtx op0, op1;

      icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
			    unsignedp);

      op0 = parmreg;
      op1 = validated_mem;
      if (icode != CODE_FOR_nothing
	  && insn_operand_matches (icode, 0, op0)
	  && insn_operand_matches (icode, 1, op1))
	{
	  enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
	  rtx_insn *insn, *insns;
	  rtx t = op1;
	  HARD_REG_SET hardregs;

	  start_sequence ();
	  /* If op1 is a hard register that is likely spilled, first
	     force it into a pseudo, otherwise combiner might extend
	     its lifetime too much.  */
	  if (GET_CODE (t) == SUBREG)
	    t = SUBREG_REG (t);
	  if (REG_P (t)
	      && HARD_REGISTER_P (t)
	      && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
	      && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
	    {
	      t = gen_reg_rtx (GET_MODE (op1));
	      emit_move_insn (t, op1);
	    }
	  else
	    t = op1;
	  rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
					   data->passed_mode, unsignedp);
	  emit_insn (pat);
	  insns = get_insns ();

	  moved = true;
	  CLEAR_HARD_REG_SET (hardregs);
	  for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
	    {
	      if (INSN_P (insn))
		note_stores (insn, record_hard_reg_sets, &hardregs);
	      if (!hard_reg_set_empty_p (hardregs))
		moved = false;
	    }

	  end_sequence ();

	  if (moved)
	    {
	      emit_insn (insns);
	      if (equiv_stack_parm != NULL_RTX)
		equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
						  equiv_stack_parm);
	    }
	}
    }

  if (moved)
    /* Nothing to do.  */
    ;
  else if (need_conversion)
    {
      /* We did not have an insn to convert directly, or the sequence
	 generated appeared unsafe.  We must first copy the parm to a
	 pseudo reg, and save the conversion until after all
	 parameters have been moved.  */

      int save_tree_used;
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validated_mem);

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);

      if (partial_subreg_p (tempreg)
	  && GET_MODE (tempreg) == data->nominal_mode
	  && REG_P (SUBREG_REG (tempreg))
	  && data->nominal_mode == data->passed_mode
	  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm))
	{
	  /* The argument is already sign/zero extended, so note it
	     into the subreg.  */
	  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
	  SUBREG_PROMOTED_SET (tempreg, unsignedp);
	}

      /* TREE_USED gets set erroneously during expand_assignment.  */
      save_tree_used = TREE_USED (parm);
      SET_DECL_RTL (parm, rtl);
      expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
      SET_DECL_RTL (parm, NULL_RTX);
      TREE_USED (parm) = save_tree_used;
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();

      did_conversion = true;
    }
  else if (MEM_P (data->entry_parm)
	   && GET_MODE_ALIGNMENT (promoted_nominal_mode)
	      > MEM_ALIGN (data->entry_parm)
	   && (((icode = optab_handler (movmisalign_optab,
					promoted_nominal_mode))
		!= CODE_FOR_nothing)
	       || targetm.slow_unaligned_access (promoted_nominal_mode,
						 MEM_ALIGN (data->entry_parm))))
    {
      if (icode != CODE_FOR_nothing)
	emit_insn (GEN_FCN (icode) (parmreg, validated_mem));
      else
	rtl = parmreg = extract_bit_field (validated_mem,
			GET_MODE_BITSIZE (promoted_nominal_mode), 0,
			unsignedp, parmreg,
			promoted_nominal_mode, VOIDmode, false, NULL);
    }
  else
    emit_move_insn (parmreg, validated_mem);

  /* If we were passed a pointer but the actual value can safely live
     in a register, retrieve it and use it directly.  */
  if (data->arg.pass_by_reference && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
    {
      /* We can't use nominal_mode, because it will have been set to
	 Pmode above.  We must use the actual mode of the parm.  */
      if (use_register_for_decl (parm))
	{
	  parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
	  mark_user_reg (parmreg);
	}
      else
	{
	  int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
					    TYPE_MODE (TREE_TYPE (parm)),
					    TYPE_ALIGN (TREE_TYPE (parm)));
	  parmreg
	    = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
				  GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
				  align);
	  set_mem_attributes (parmreg, parm, 1);
	}

      /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for
	 the debug info in case it is not legitimate.  */
      if (GET_MODE (parmreg) != GET_MODE (rtl))
	{
	  rtx tempreg = gen_reg_rtx (GET_MODE (rtl));
	  int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));

	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_move_insn (tempreg, rtl);
	  tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
	  emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg,
			  tempreg);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();

	  did_conversion = true;
	}
      else
	emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl);

      rtl = parmreg;

      /* STACK_PARM is the pointer, not the parm, and PARMREG is
	 now the parm.  */
      data->stack_parm = NULL;
    }

  set_parm_rtl (parm, rtl);

  /* Mark the register as eliminable if we did no conversion and it was
     copied from memory at a fixed offset, and the arg pointer was not
     copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
     offset formed an invalid address, such memory-equivalences as we
     make here would screw up life analysis for it.  */
  if (data->nominal_mode == data->passed_mode
      && !did_conversion
      && data->stack_parm != 0
      && MEM_P (data->stack_parm)
      && data->locate.offset.var == 0
      && reg_mentioned_p (virtual_incoming_args_rtx,
			  XEXP (data->stack_parm, 0)))
    {
      rtx_insn *linsn = get_last_insn ();
      rtx_insn *sinsn;
      rtx set;

      /* Mark complex types separately.  */
      if (GET_CODE (parmreg) == CONCAT)
	{
	  scalar_mode submode = GET_MODE_INNER (GET_MODE (parmreg));
	  int regnor = REGNO (XEXP (parmreg, 0));
	  int regnoi = REGNO (XEXP (parmreg, 1));
	  rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
	  rtx stacki = adjust_address_nv (data->stack_parm, submode,
					  GET_MODE_SIZE (submode));

	  /* Scan backwards for the set of the real and
	     imaginary parts.  */
	  for (sinsn = linsn; sinsn != 0;
	       sinsn = prev_nonnote_insn (sinsn))
	    {
	      set = single_set (sinsn);
	      if (set == 0)
		continue;

	      if (SET_DEST (set) == regno_reg_rtx [regnoi])
		set_unique_reg_note (sinsn, REG_EQUIV, stacki);
	      else if (SET_DEST (set) == regno_reg_rtx [regnor])
		set_unique_reg_note (sinsn, REG_EQUIV, stackr);
	    }
	}
      else
	set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
    }

  /* For pointer data type, suggest pointer register.  */
  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    mark_reg_pointer (parmreg,
		      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
}
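
/* Illustrative sketch, not in the original source: the REG_EQUIV note
   attached above tells reload/LRA that the pseudo holding the parm is
   equivalent to its incoming stack slot, e.g.

     (set (reg:SI 90) (mem:SI (plus (reg) (const_int 4))))
       with note (REG_EQUIV (mem:SI ...))

   so the pseudo can be rematerialized from that slot instead of being
   spilled to a fresh one.  */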
/* A subroutine of assign_parms.  Allocate stack space to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
			 struct assign_parm_data_one *data)
{
  /* Value must be stored in the stack slot STACK_PARM during function
     execution.  */
  bool to_conversion = false;

  assign_parm_remove_parallels (data);

  if (data->arg.mode != data->nominal_mode)
    {
      /* Conversion is required.  */
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      to_conversion = true;

      data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
					  TYPE_UNSIGNED (TREE_TYPE (parm)));

      if (data->stack_parm)
	{
	  poly_int64 offset
	    = subreg_lowpart_offset (data->nominal_mode,
				     GET_MODE (data->stack_parm));
	  /* ??? This may need a big-endian conversion on sparc64.  */
	  data->stack_parm
	    = adjust_address (data->stack_parm, data->nominal_mode, 0);
	  if (maybe_ne (offset, 0) && MEM_OFFSET_KNOWN_P (data->stack_parm))
	    set_mem_offset (data->stack_parm,
			    MEM_OFFSET (data->stack_parm) + offset);
	}
    }

  if (data->entry_parm != data->stack_parm)
    {
      rtx src, dest;

      if (data->stack_parm == 0)
	{
	  int align = STACK_SLOT_ALIGNMENT (data->arg.type,
					    GET_MODE (data->entry_parm),
					    TYPE_ALIGN (data->arg.type));
	  if (align < (int)GET_MODE_ALIGNMENT (GET_MODE (data->entry_parm))
	      && ((optab_handler (movmisalign_optab,
				  GET_MODE (data->entry_parm))
		   != CODE_FOR_nothing)
		  || targetm.slow_unaligned_access (GET_MODE (data->entry_parm),
						    align)))
	    align = GET_MODE_ALIGNMENT (GET_MODE (data->entry_parm));
	  data->stack_parm
	    = assign_stack_local (GET_MODE (data->entry_parm),
				  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
				  align);
	  align = MEM_ALIGN (data->stack_parm);
	  set_mem_attributes (data->stack_parm, parm, 1);
	  set_mem_align (data->stack_parm, align);
	}

      dest = validize_mem (copy_rtx (data->stack_parm));
      src = validize_mem (copy_rtx (data->entry_parm));

      if (MEM_P (src))
	{
	  /* Use a block move to handle potentially misaligned entry_parm.  */
	  if (!to_conversion)
	    push_to_sequence2 (all->first_conversion_insn,
			       all->last_conversion_insn);
	  to_conversion = true;

	  emit_block_move (dest, src,
			   GEN_INT (int_size_in_bytes (data->arg.type)),
			   BLOCK_OP_NORMAL);
	}
      else
	{
	  if (!REG_P (src))
	    src = force_reg (GET_MODE (src), src);
	  emit_move_insn (dest, src);
	}
    }

  if (to_conversion)
    {
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
    }

  set_parm_rtl (parm, data->stack_parm);
}
/* A subroutine of assign_parms.  If the ABI splits complex arguments, then
   undo the frobbing that we did in assign_parms_augmented_arg_list.  */

static void
assign_parms_unsplit_complex (struct assign_parm_data_all *all,
			      vec<tree> fnargs)
{
  tree parm;
  tree orig_fnargs = all->orig_fnargs;
  unsigned i = 0;

  for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
    {
      if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
	{
	  rtx tmp, real, imag;
	  scalar_mode inner = GET_MODE_INNER (DECL_MODE (parm));

	  real = DECL_RTL (fnargs[i]);
	  imag = DECL_RTL (fnargs[i + 1]);
	  if (inner != GET_MODE (real))
	    {
	      real = gen_lowpart_SUBREG (inner, real);
	      imag = gen_lowpart_SUBREG (inner, imag);
	    }

	  if (TREE_ADDRESSABLE (parm))
	    {
	      rtx rmem, imem;
	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
	      int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
						DECL_MODE (parm),
						TYPE_ALIGN (TREE_TYPE (parm)));

	      /* split_complex_arg put the real and imag parts in
		 pseudos.  Move them to memory.  */
	      tmp = assign_stack_local (DECL_MODE (parm), size, align);
	      set_mem_attributes (tmp, parm, 1);
	      rmem = adjust_address_nv (tmp, inner, 0);
	      imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
	      push_to_sequence2 (all->first_conversion_insn,
				 all->last_conversion_insn);
	      emit_move_insn (rmem, real);
	      emit_move_insn (imem, imag);
	      all->first_conversion_insn = get_insns ();
	      all->last_conversion_insn = get_last_insn ();
	      end_sequence ();
	    }
	  else
	    {
	      tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	      set_parm_rtl (parm, tmp);
	    }

	  real = DECL_INCOMING_RTL (fnargs[i]);
	  imag = DECL_INCOMING_RTL (fnargs[i + 1]);
	  if (inner != GET_MODE (real))
	    {
	      real = gen_lowpart_SUBREG (inner, real);
	      imag = gen_lowpart_SUBREG (inner, imag);
	    }
	  tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	  set_decl_incoming_rtl (parm, tmp, false);
	  i++;
	}
    }
}
/* Assign RTL expressions to the function's parameters.  This may involve
   copying them into registers and using those registers as the DECL_RTL.  */

static void
assign_parms (tree fndecl)
{
  struct assign_parm_data_all all;
  tree parm;
  vec<tree> fnargs;
  unsigned i;

  crtl->args.internal_arg_pointer
    = targetm.calls.internal_arg_pointer ();

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode)
	{
	  SET_DECL_RTL (parm, const0_rtx);
	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
	  continue;
	}

      /* Estimate stack alignment from parameter alignment.  */
      if (SUPPORTS_STACK_ALIGNMENT)
	{
	  unsigned int align
	    = targetm.calls.function_arg_boundary (data.arg.mode,
						   data.arg.type);
	  align = MINIMUM_ALIGNMENT (data.arg.type, data.arg.mode, align);
	  if (TYPE_ALIGN (data.nominal_type) > align)
	    align = MINIMUM_ALIGNMENT (data.nominal_type,
				       TYPE_MODE (data.nominal_type),
				       TYPE_ALIGN (data.nominal_type));
	  if (crtl->stack_alignment_estimated < align)
	    {
	      gcc_assert (!crtl->stack_realign_processed);
	      crtl->stack_alignment_estimated = align;
	    }
	}

      /* Find out where the parameter arrives in this function.  */
      assign_parm_find_entry_rtl (&all, &data);

      /* Find out where stack space for this parameter might be.  */
      if (assign_parm_is_stack_parm (&all, &data))
	{
	  assign_parm_find_stack_rtl (parm, &data);
	  assign_parm_adjust_entry_rtl (&data);
	}

      /* Record permanently how this parm was passed.  */
      if (data.arg.pass_by_reference)
	{
	  rtx incoming_rtl
	    = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.arg.type)),
			   data.entry_parm);
	  set_decl_incoming_rtl (parm, incoming_rtl, true);
	}
      else
	set_decl_incoming_rtl (parm, data.entry_parm, false);

      assign_parm_adjust_stack_rtl (&data);

      if (assign_parm_setup_block_p (&data))
	assign_parm_setup_block (&all, parm, &data);
      else if (data.arg.pass_by_reference || use_register_for_decl (parm))
	assign_parm_setup_reg (&all, parm, &data);
      else
	assign_parm_setup_stack (&all, parm, &data);

      if (cfun->stdarg && !DECL_CHAIN (parm))
	assign_parms_setup_varargs (&all, &data, false);

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.arg);
    }

  if (targetm.calls.split_complex_arg)
    assign_parms_unsplit_complex (&all, fnargs);

  fnargs.release ();

  /* Output all parameter conversion instructions (possibly including calls)
     now that all parameters have been copied out of hard registers.  */
  emit_insn (all.first_conversion_insn);

  /* Estimate reload stack alignment from scalar return mode.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (DECL_RESULT (fndecl))
	{
	  tree type = TREE_TYPE (DECL_RESULT (fndecl));
	  machine_mode mode = TYPE_MODE (type);

	  if (mode != BLKmode
	      && mode != VOIDmode
	      && !AGGREGATE_TYPE_P (type))
	    {
	      unsigned int align = GET_MODE_ALIGNMENT (mode);
	      if (crtl->stack_alignment_estimated < align)
		{
		  gcc_assert (!crtl->stack_realign_processed);
		  crtl->stack_alignment_estimated = align;
		}
	    }
	}
    }

  /* If we are receiving a struct value address as the first argument, set up
     the RTL for the function result.  As this might require code to convert
     the transmitted address to Pmode, we do this here to ensure that possible
     preliminary conversions of the address have been emitted already.  */
  if (all.function_result_decl)
    {
      tree result = DECL_RESULT (current_function_decl);
      rtx addr = DECL_RTL (all.function_result_decl);
      rtx x;

      if (DECL_BY_REFERENCE (result))
	{
	  SET_DECL_VALUE_EXPR (result, all.function_result_decl);
	  x = addr;
	}
      else
	{
	  SET_DECL_VALUE_EXPR (result,
			       build1 (INDIRECT_REF, TREE_TYPE (result),
				       all.function_result_decl));
	  addr = convert_memory_address (Pmode, addr);
	  x = gen_rtx_MEM (DECL_MODE (result), addr);
	  set_mem_attributes (x, result, 1);
	}

      DECL_HAS_VALUE_EXPR_P (result) = 1;

      set_parm_rtl (result, x);
    }

  /* We have aligned all the args, so add space for the pretend args.  */
  crtl->args.pretend_args_size = all.pretend_args_size;
  all.stack_args_size.constant += all.extra_pretend_bytes;
  crtl->args.size = all.stack_args_size.constant;

  /* Adjust function incoming argument size for alignment and
     minimum length.  */

  crtl->args.size = upper_bound (crtl->args.size, all.reg_parm_stack_space);
  crtl->args.size = aligned_upper_bound (crtl->args.size,
					 PARM_BOUNDARY / BITS_PER_UNIT);

  if (ARGS_GROW_DOWNWARD)
    {
      crtl->args.arg_offset_rtx
	= (all.stack_args_size.var == 0
	   ? gen_int_mode (-all.stack_args_size.constant, Pmode)
	   : expand_expr (size_diffop (all.stack_args_size.var,
				       size_int (-all.stack_args_size.constant)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL));
    }
  else
    crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
							 TREE_TYPE (fndecl),
							 crtl->args.size);

  /* For stdarg.h function, save info about
     regs and stack space used by the named args.  */

  crtl->args.info = all.args_so_far_v;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  crtl->return_rtx
    = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
       ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      tree decl_result = DECL_RESULT (fndecl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl;

	  real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
							fndecl, true);
	  REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
	  /* The delay slot scheduler assumes that crtl->return_rtx
	     holds the hard register containing the return value, not a
	     temporary pseudo.  */
	  crtl->return_rtx = real_decl_rtl;
	}
    }
}
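
/* Worked example, not in the original source: with PARM_BOUNDARY of
   64 bits (8 bytes) and 20 bytes of named stack arguments,

     aligned_upper_bound (20, 64 / 8) == 24

   so crtl->args.size is padded up to the next PARM_BOUNDARY
   multiple.  */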
/* A subroutine of gimplify_parameters, invoked via walk_tree.
   For all seen types, gimplify their sizes.  */

static tree
gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  *walk_subtrees = 0;
  if (TYPE_P (t))
    {
      if (POINTER_TYPE_P (t))
	*walk_subtrees = 1;
      else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
	       && !TYPE_SIZES_GIMPLIFIED (t))
	{
	  gimplify_type_sizes (t, (gimple_seq *) data);
	  *walk_subtrees = 1;
	}
    }

  return NULL;
}
/* Gimplify the parameter list for current_function_decl.  This involves
   evaluating SAVE_EXPRs of variable sized parameters and generating code
   to implement callee-copies reference parameters.  Returns a sequence of
   statements to add to the beginning of the function.  */

gimple_seq
gimplify_parameters (gimple_seq *cleanup)
{
  struct assign_parm_data_all all;
  tree parm;
  gimple_seq stmts = NULL;
  vec<tree> fnargs;
  unsigned i;

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
	continue;

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.arg);

      /* ??? Once upon a time variable_size stuffed parameter list
	 SAVE_EXPRs (amongst others) onto a pending sizes list.  This
	 turned out to be less than manageable in the gimple world.
	 Now we have to hunt them down ourselves.  */
      walk_tree_without_duplicates (&data.arg.type,
				    gimplify_parm_type, &stmts);

      if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
	{
	  gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
	}

      if (data.arg.pass_by_reference)
	{
	  tree type = TREE_TYPE (data.arg.type);
	  function_arg_info orig_arg (type, data.arg.named);
	  if (reference_callee_copied (&all.args_so_far_v, orig_arg))
	    {
	      tree local, t;

	      /* For constant-sized objects, this is trivial; for
		 variable-sized objects, we have to play games.  */
	      if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
		  && !(flag_stack_check == GENERIC_STACK_CHECK
		       && compare_tree_int (DECL_SIZE_UNIT (parm),
					    STACK_CHECK_MAX_VAR_SIZE) > 0))
		{
		  local = create_tmp_var (type, get_name (parm));
		  DECL_IGNORED_P (local) = 0;
		  /* If PARM was addressable, move that flag over
		     to the local copy, as its address will be taken,
		     not the PARMs.  Keep the parms address taken
		     as we'll query that flag during gimplification.  */
		  if (TREE_ADDRESSABLE (parm))
		    TREE_ADDRESSABLE (local) = 1;
		  else if (TREE_CODE (type) == COMPLEX_TYPE
			   || TREE_CODE (type) == VECTOR_TYPE)
		    DECL_GIMPLE_REG_P (local) = 1;

		  if (!is_gimple_reg (local)
		      && flag_stack_reuse != SR_NONE)
		    {
		      tree clobber = build_clobber (type);
		      gimple *clobber_stmt;
		      clobber_stmt = gimple_build_assign (local, clobber);
		      gimple_seq_add_stmt (cleanup, clobber_stmt);
		    }
		}
	      else
		{
		  tree ptr_type, addr;

		  ptr_type = build_pointer_type (type);
		  addr = create_tmp_reg (ptr_type, get_name (parm));
		  DECL_IGNORED_P (addr) = 0;
		  local = build_fold_indirect_ref (addr);

		  t = build_alloca_call_expr (DECL_SIZE_UNIT (parm),
					      DECL_ALIGN (parm),
					      max_int_size_in_bytes (type));
		  /* The call has been built for a variable-sized object.  */
		  CALL_ALLOCA_FOR_VAR_P (t) = 1;
		  t = fold_convert (ptr_type, t);
		  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
		  gimplify_and_add (t, &stmts);
		}

	      gimplify_assign (local, parm, &stmts);

	      SET_DECL_VALUE_EXPR (parm, local);
	      DECL_HAS_VALUE_EXPR_P (parm) = 1;
	    }
	}
    }

  fnargs.release ();

  return stmts;
}
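
/* Illustrative sketch, not in the original source: on an ABI where
   reference_callee_copied returns true, a function such as

     void use (struct big b);   // passed by invisible reference

   is gimplified roughly as

     void use (struct big *b_ptr)
     {
       struct big b = *b_ptr;   // the callee copy built above
       ...
     }

   so stores to B inside USE cannot be observed by the caller.  */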
/* Compute the size and offset from the start of the stacked arguments for a
   parm passed in mode PASSED_MODE and with type TYPE.

   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.

   The starting offset and size for this parm are returned in
   LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
   nonzero, the offset is that of the stack slot, which is returned in
   LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
   padding required from the initial offset ptr to the stack slot.

   IN_REGS is nonzero if the argument will be passed in registers.  It will
   never be set if REG_PARM_STACK_SPACE is not defined.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   FNDECL is the function in which the argument was defined.

   There are two types of rounding that are done.  The first, controlled by
   TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
   argument list to be aligned to the specified boundary (in bits).  This
   rounding affects the initial and starting offsets, but not the argument
   size.

   The second, controlled by TARGET_FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
   initial offset is not affected by this rounding, while the size always
   is and the starting offset may be.  */

/* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
   INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
   callers pass in the total size of args so far as
   INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */
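
/* A worked example of the two roundings (illustrative only; the
   boundary values below are hypothetical, not taken from any
   particular target): with PARM_BOUNDARY == 32 and a 5-byte argument
   at initial offset 8, the first rounding leaves the starting offset
   at 8, which is already 32-bit aligned, while the second rounds the
   size up to 8 bytes, so the next argument starts at offset 16.  */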
void
locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
		     int reg_parm_stack_space, int partial,
		     tree fndecl ATTRIBUTE_UNUSED,
		     struct args_size *initial_offset_ptr,
		     struct locate_and_pad_arg_data *locate)
{
  tree sizetree;
  pad_direction where_pad;
  unsigned int boundary, round_boundary;
  int part_size_in_regs;

  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      if (reg_parm_stack_space > 0)
	{
	  if (initial_offset_ptr->var
	      || !ordered_p (initial_offset_ptr->constant,
			     reg_parm_stack_space))
	    {
	      initial_offset_ptr->var
		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
			      ssize_int (reg_parm_stack_space));
	      initial_offset_ptr->constant = 0;
	    }
	  else
	    initial_offset_ptr->constant
	      = ordered_max (initial_offset_ptr->constant,
			     reg_parm_stack_space);
	}
    }

  part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);

  sizetree = (type
	      ? arg_size_in_bytes (type)
	      : size_int (GET_MODE_SIZE (passed_mode)));
  where_pad = targetm.calls.function_arg_padding (passed_mode, type);
  boundary = targetm.calls.function_arg_boundary (passed_mode, type);
  round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
							      type);
  locate->where_pad = where_pad;

  /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  locate->boundary = boundary;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      /* stack_alignment_estimated can't change after stack has been
	 realigned.  */
      if (crtl->stack_alignment_estimated < boundary)
	{
	  if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = boundary;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized
			  && crtl->stack_realign_needed);
	    }
	}
    }

  if (ARGS_GROW_DOWNWARD)
    {
      locate->slot_offset.constant = -initial_offset_ptr->constant;
      if (initial_offset_ptr->var)
	locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
					      initial_offset_ptr->var);

      {
	tree s2 = sizetree;
	if (where_pad != PAD_NONE
	    && (!tree_fits_uhwi_p (sizetree)
		|| (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
	  s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
	SUB_PARM_SIZE (locate->slot_offset, s2);
      }

      locate->slot_offset.constant += part_size_in_regs;

      if (!in_regs || reg_parm_stack_space > 0)
	pad_to_arg_alignment (&locate->slot_offset, boundary,
			      &locate->alignment_pad);

      locate->size.constant = (-initial_offset_ptr->constant
			       - locate->slot_offset.constant);
      if (initial_offset_ptr->var)
	locate->size.var = size_binop (MINUS_EXPR,
				       size_binop (MINUS_EXPR,
						   ssize_int (0),
						   initial_offset_ptr->var),
				       locate->slot_offset.var);

      /* Pad_below needs the pre-rounded size to know how much to pad
	 below.  */
      locate->offset = locate->slot_offset;
      if (where_pad == PAD_DOWNWARD)
	pad_below (&locate->offset, passed_mode, sizetree);
    }
  else
    {
      if (!in_regs || reg_parm_stack_space > 0)
	pad_to_arg_alignment (initial_offset_ptr, boundary,
			      &locate->alignment_pad);
      locate->slot_offset = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
      if (passed_mode != BLKmode)
	sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

      /* Pad_below needs the pre-rounded size to know how much to pad below
	 so this must be done before rounding up.  */
      locate->offset = locate->slot_offset;
      if (where_pad == PAD_DOWNWARD)
	pad_below (&locate->offset, passed_mode, sizetree);

      if (where_pad != PAD_NONE
	  && (!tree_fits_uhwi_p (sizetree)
	      || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
	sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);

      ADD_PARM_SIZE (locate->size, sizetree);

      locate->size.constant -= part_size_in_regs;
    }

  locate->offset.constant
    += targetm.calls.function_arg_offset (passed_mode, type);
}
/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */

static void
pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
		      struct args_size *alignment_pad)
{
  tree save_var = NULL_TREE;
  poly_int64 save_constant = 0;
  int boundary_in_bytes = boundary / BITS_PER_UNIT;
  poly_int64 sp_offset = STACK_POINTER_OFFSET;

#ifdef SPARC_STACK_BOUNDARY_HACK
  /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
     the real alignment of %sp.  However, when it does this, the
     alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
  if (SPARC_STACK_BOUNDARY_HACK)
    sp_offset = 0;
#endif

  if (boundary > PARM_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      int misalign;
      if (offset_ptr->var
	  || !known_misalignment (offset_ptr->constant + sp_offset,
				  boundary_in_bytes, &misalign))
	{
	  tree sp_offset_tree = ssize_int (sp_offset);
	  tree offset = size_binop (PLUS_EXPR,
				    ARGS_SIZE_TREE (*offset_ptr),
				    sp_offset_tree);
	  tree rounded;
	  if (ARGS_GROW_DOWNWARD)
	    rounded = round_down (offset, boundary / BITS_PER_UNIT);
	  else
	    rounded = round_up (offset, boundary / BITS_PER_UNIT);

	  offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
	  /* ARGS_SIZE_TREE includes constant term.  */
	  offset_ptr->constant = 0;
	  if (boundary > PARM_BOUNDARY)
	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
					     save_var);
	}
      else
	{
	  if (ARGS_GROW_DOWNWARD)
	    offset_ptr->constant -= misalign;
	  else
	    offset_ptr->constant += -misalign & (boundary_in_bytes - 1);

	  if (boundary > PARM_BOUNDARY)
	    alignment_pad->constant = offset_ptr->constant - save_constant;
	}
    }
}
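
/* An illustrative note on the rounding arithmetic above (example
   values only): for a constant offset the padding added is
   -misalign & (boundary_in_bytes - 1).  E.g. with
   offset + sp_offset == 13 and boundary_in_bytes == 8, misalign is 5,
   so the padding is -5 & 7 == 3, moving the offset to 16, the next
   multiple of 8.  */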
static void
pad_below (struct args_size *offset_ptr, machine_mode passed_mode,
	   tree sizetree)
{
  unsigned int align = PARM_BOUNDARY / BITS_PER_UNIT;
  int misalign;
  if (passed_mode != BLKmode
      && known_misalignment (GET_MODE_SIZE (passed_mode), align, &misalign))
    offset_ptr->constant += -misalign & (align - 1);
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
	  || (TREE_INT_CST_LOW (sizetree) & (align - 1)) != 0)
	{
	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
	  tree s2 = round_up (sizetree, align);
	  ADD_PARM_SIZE (*offset_ptr, s2);
	  SUB_PARM_SIZE (*offset_ptr, sizetree);
	}
    }
}
/* True if register REGNO was alive at a place where `setjmp' was
   called and was set more than once or is an argument.  Such regs may
   be clobbered by `longjmp'.  */

static bool
regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
{
  /* There appear to be cases where some local vars never reach the
     backend but have bogus regnos.  */
  if (regno >= max_reg_num ())
    return false;

  return ((REG_N_SETS (regno) > 1
	   || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
			       regno))
	  && REGNO_REG_SET_P (setjmp_crosses, regno));
}
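
/* A hypothetical example (an illustration, not from the testsuite),
   assuming a global "jmp_buf buf" and an external g () that may call
   longjmp (buf, 1):

     int f (void)
     {
       int i = 1;
       if (setjmp (buf))
	 return i;
       i = 2;
       g ();
       return i;
     }

   If I lives in a register that is live across the setjmp and is set
   again afterwards, a longjmp from g () may resurrect the pre-setjmp
   register contents; this predicate detects such registers so that
   -Wclobbered can warn about I.  */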
/* Walk the tree of blocks describing the binding levels within a
   function and warn about variables that might be killed by setjmp or
   vfork.  This is done after calling flow analysis and before register
   allocation, since that will clobber the pseudo-regs to hard
   regs.  */

static void
setjmp_vars_warning (bitmap setjmp_crosses, tree block)
{
  tree decl, sub;

  for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
    {
      if (VAR_P (decl)
	  && DECL_RTL_SET_P (decl)
	  && REG_P (DECL_RTL (decl))
	  && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
	warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
		 " %<longjmp%> or %<vfork%>", decl);
    }

  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
    setjmp_vars_warning (setjmp_crosses, sub);
}
/* Do the appropriate part of setjmp_vars_warning
   but for arguments instead of local variables.  */

static void
setjmp_args_warning (bitmap setjmp_crosses)
{
  tree decl;
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = DECL_CHAIN (decl))
    if (DECL_RTL (decl) != 0
	&& REG_P (DECL_RTL (decl))
	&& regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
      warning (OPT_Wclobbered,
	       "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
	       decl);
}
/* Generate warning messages for variables live across setjmp.  */

void
generate_setjmp_warnings (void)
{
  bitmap setjmp_crosses = regstat_get_setjmp_crosses ();

  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
      || bitmap_empty_p (setjmp_crosses))
    return;

  setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
  setjmp_args_warning (setjmp_crosses);
}
/* Reverse the order of elements in the fragment chain T of blocks,
   and return the new head of the chain (old last element).
   In addition to that clear BLOCK_SAME_RANGE flags when needed
   and adjust BLOCK_SUPERCONTEXT from the super fragment to
   its super fragment origin.  */

static tree
block_fragments_nreverse (tree t)
{
  tree prev = 0, block, next, prev_super = 0;
  tree super = BLOCK_SUPERCONTEXT (t);
  if (BLOCK_FRAGMENT_ORIGIN (super))
    super = BLOCK_FRAGMENT_ORIGIN (super);
  for (block = t; block; block = next)
    {
      next = BLOCK_FRAGMENT_CHAIN (block);
      BLOCK_FRAGMENT_CHAIN (block) = prev;
      if ((prev && !BLOCK_SAME_RANGE (prev))
	  || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
	      != prev_super))
	BLOCK_SAME_RANGE (block) = 0;
      prev_super = BLOCK_SUPERCONTEXT (block);
      BLOCK_SUPERCONTEXT (block) = super;
      prev = block;
    }
  t = BLOCK_FRAGMENT_ORIGIN (t);
  if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
      != prev_super)
    BLOCK_SAME_RANGE (t) = 0;
  BLOCK_SUPERCONTEXT (t) = super;
  return prev;
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).
   Also do the same on subblocks and reverse the order of elements
   in BLOCK_FRAGMENT_CHAIN as well.  */

static tree
blocks_nreverse_all (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      if (BLOCK_FRAGMENT_CHAIN (block)
	  && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
	{
	  BLOCK_FRAGMENT_CHAIN (block)
	    = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
	  if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
	    BLOCK_SAME_RANGE (block) = 0;
	}
      BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
      prev = block;
    }
  return prev;
}
/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  */
/* ??? Need an option to either create block fragments or to create
   abstract origin duplicates of a source block.  It really depends
   on what optimization has been performed.  */

void
reorder_blocks (void)
{
  tree block = DECL_INITIAL (current_function_decl);

  if (block == NULL_TREE)
    return;

  auto_vec<tree, 10> block_stack;

  /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
  clear_block_marks (block);

  /* Prune the old trees away, so that they don't get in the way.  */
  BLOCK_SUBBLOCKS (block) = NULL_TREE;
  BLOCK_CHAIN (block) = NULL_TREE;

  /* Recreate the block tree from the note nesting.  */
  reorder_blocks_1 (get_insns (), block, &block_stack);
  BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
}
/* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */

void
clear_block_marks (tree block)
{
  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;
      clear_block_marks (BLOCK_SUBBLOCKS (block));
      block = BLOCK_CHAIN (block);
    }
}
static void
reorder_blocks_1 (rtx_insn *insns, tree current_block,
		  vec<tree> *p_block_stack)
{
  rtx_insn *insn;
  tree prev_beg = NULL_TREE, prev_end = NULL_TREE;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	{
	  if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
	    {
	      tree block = NOTE_BLOCK (insn);
	      tree origin;

	      gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
	      origin = block;

	      if (prev_end)
		BLOCK_SAME_RANGE (prev_end) = 0;
	      prev_end = NULL_TREE;

	      /* If we have seen this block before, that means it now
		 spans multiple address regions.  Create a new fragment.  */
	      if (TREE_ASM_WRITTEN (block))
		{
		  tree new_block = copy_node (block);

		  BLOCK_SAME_RANGE (new_block) = 0;
		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
		  BLOCK_FRAGMENT_CHAIN (new_block)
		    = BLOCK_FRAGMENT_CHAIN (origin);
		  BLOCK_FRAGMENT_CHAIN (origin) = new_block;

		  NOTE_BLOCK (insn) = new_block;
		  block = new_block;
		}

	      if (prev_beg == current_block && prev_beg)
		BLOCK_SAME_RANGE (block) = 1;

	      prev_beg = origin;

	      BLOCK_SUBBLOCKS (block) = 0;
	      TREE_ASM_WRITTEN (block) = 1;
	      /* When there's only one block for the entire function,
		 current_block == block and we mustn't do this, it
		 will cause infinite recursion.  */
	      if (block != current_block)
		{
		  tree super;
		  if (block != origin)
		    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
				|| BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
								      (origin))
				   == current_block);
		  if (p_block_stack->is_empty ())
		    super = current_block;
		  else
		    {
		      super = p_block_stack->last ();
		      gcc_assert (super == current_block
				  || BLOCK_FRAGMENT_ORIGIN (super)
				     == current_block);
		    }
		  BLOCK_SUPERCONTEXT (block) = super;
		  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
		  BLOCK_SUBBLOCKS (current_block) = block;
		  current_block = origin;
		}
	      p_block_stack->safe_push (block);
	    }
	  else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
	    {
	      NOTE_BLOCK (insn) = p_block_stack->pop ();
	      current_block = BLOCK_SUPERCONTEXT (current_block);
	      if (BLOCK_FRAGMENT_ORIGIN (current_block))
		current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
	      prev_beg = NULL_TREE;
	      prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
			 ? NOTE_BLOCK (insn) : NULL_TREE;
	    }
	}
      else if (!DEBUG_INSN_P (insn))
	{
	  prev_beg = NULL_TREE;
	  if (prev_end)
	    BLOCK_SAME_RANGE (prev_end) = 0;
	  prev_end = NULL_TREE;
	}
    }
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

tree
blocks_nreverse (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      prev = block;
    }
  return prev;
}
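
/* For example (illustration only): given the chain A -> B -> C linked
   through BLOCK_CHAIN, blocks_nreverse rewrites the links in place to
   C -> B -> A and returns C, without allocating any new nodes.  */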
/* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
   by modifying the last node in chain 1 to point to chain 2.  */

tree
block_chainon (tree op1, tree op2)
{
  tree t1;

  if (!op1)
    return op2;
  if (!op2)
    return op1;

  for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
    continue;
  BLOCK_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    tree t2;
    for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}
/* Count the subblocks of the list starting with BLOCK.  If VECTOR is
   non-NULL, list them all into VECTOR, in a depth-first preorder
   traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (tree block, tree *vector)
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
	vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
			      vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}
/* Return a vector containing all the blocks rooted at BLOCK.  The
   number of elements in the vector is stored in N_BLOCKS_P.  The
   vector is dynamically allocated; it is the caller's responsibility
   to call `free' on the pointer returned.  */

static tree *
get_block_vector (tree block, int *n_blocks_p)
{
  tree *block_vector;

  *n_blocks_p = all_blocks (block, NULL);
  block_vector = XNEWVEC (tree, *n_blocks_p);
  all_blocks (block, block_vector);

  return block_vector;
}
static GTY(()) int next_block_index = 2;

/* Set BLOCK_NUMBER for all the blocks in FN.  */

void
number_blocks (tree fn)
{
  int i;
  int n_blocks;
  tree *block_vector;

  /* For XCOFF debugging output, we start numbering the blocks
     from 1 within each function, rather than keeping a running
     count.  */
#if defined (XCOFF_DEBUGGING_INFO)
  if (write_symbols == XCOFF_DEBUG)
    next_block_index = 1;
#endif

  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);

  /* The top-level BLOCK isn't numbered at all.  */
  for (i = 1; i < n_blocks; ++i)
    /* We number the blocks from two.  */
    BLOCK_NUMBER (block_vector[i]) = next_block_index++;

  free (block_vector);
}
/* If VAR is present in a subblock of BLOCK, return the subblock.  */

DEBUG_FUNCTION tree
debug_find_var_in_block_tree (tree var, tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (t == var)
      return block;

  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree ret = debug_find_var_in_block_tree (var, t);
      if (ret)
	return ret;
    }

  return NULL_TREE;
}
/* Keep track of whether we're in a dummy function context.  If we are,
   we don't want to invoke the set_current_function hook, because we'll
   get into trouble if the hook calls target_reinit () recursively or
   when the initial initialization is not yet complete.  */

static bool in_dummy_function;

/* Invoke the target hook when setting cfun.  Update the optimization options
   if the function uses different options than the default.  */

static void
invoke_set_current_function_hook (tree fndecl)
{
  if (!in_dummy_function)
    {
      tree opts = ((fndecl)
		   ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
		   : optimization_default_node);

      if (!opts)
	opts = optimization_default_node;

      /* Change optimization options if needed.  */
      if (optimization_current_node != opts)
	{
	  optimization_current_node = opts;
	  cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
	}

      targetm.set_current_function (fndecl);
      this_fn_optabs = this_target_optabs;

      /* Initialize global alignment variables after options are set.  */
      parse_alignment_opts ();

      if (opts != optimization_default_node)
	{
	  init_tree_optimization_optabs (opts);
	  if (TREE_OPTIMIZATION_OPTABS (opts))
	    this_fn_optabs = (struct target_optabs *)
	      TREE_OPTIMIZATION_OPTABS (opts);
	}
    }
}
/* cfun should never be set directly; use this function.  */

void
set_cfun (struct function *new_cfun, bool force)
{
  if (cfun != new_cfun || force)
    {
      cfun = new_cfun;
      invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
      redirect_edge_var_map_empty ();
    }
}
/* Initialized with NOGC, making this poisonous to the garbage collector.  */

static vec<function *> cfun_stack;

/* Push the current cfun onto the stack, and set cfun to new_cfun.  Also set
   current_function_decl accordingly.  */

void
push_cfun (struct function *new_cfun)
{
  gcc_assert ((!cfun && !current_function_decl)
	      || (cfun && current_function_decl == cfun->decl));
  cfun_stack.safe_push (cfun);
  current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
  set_cfun (new_cfun);
}

/* Pop cfun from the stack.  Also set current_function_decl accordingly.  */

void
pop_cfun (void)
{
  struct function *new_cfun = cfun_stack.pop ();
  /* When in_dummy_function, we do have a cfun but current_function_decl is
     NULL.  We also allow pushing NULL cfun and subsequently changing
     current_function_decl to something else and have both restored by
     pop_cfun.  */
  gcc_checking_assert (in_dummy_function
		       || !cfun
		       || current_function_decl == cfun->decl);
  set_cfun (new_cfun);
  current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
}
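
/* Typical usage is a balanced pair around work on another function
   (a sketch, not a quote from any particular caller; OTHER_FNDECL is
   hypothetical):

     push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
     ... examine or emit code for OTHER_FNDECL ...
     pop_cfun ();

   which saves and restores both cfun and current_function_decl.  */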
/* Return the value of funcdef_no and increase it.  */

int
get_next_funcdef_no (void)
{
  return funcdef_no++;
}

/* Return the current value of funcdef_no.  */

int
get_last_funcdef_no (void)
{
  return funcdef_no;
}
/* Allocate a function structure for FNDECL and set its contents
   to the defaults.  Set cfun to the newly-allocated object.
   Some of the helper functions invoked during initialization assume
   that cfun has already been set.  Therefore, assign the new object
   directly into cfun and invoke the back end hook explicitly at the
   very end, rather than initializing a temporary and calling set_cfun
   on it.

   ABSTRACT_P is true if this is a function that will never be seen by
   the middle-end.  Such functions are front-end concepts (like C++
   function templates) that do not correspond directly to functions
   placed in object files.  */

void
allocate_struct_function (tree fndecl, bool abstract_p)
{
  tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;

  cfun = ggc_cleared_alloc<function> ();

  init_eh_for_function ();

  if (init_machine_status)
    cfun->machine = (*init_machine_status) ();

#ifdef OVERRIDE_ABI_FORMAT
  OVERRIDE_ABI_FORMAT (fndecl);
#endif

  if (fndecl != NULL_TREE)
    {
      DECL_STRUCT_FUNCTION (fndecl) = cfun;
      cfun->decl = fndecl;
      current_function_funcdef_no = get_next_funcdef_no ();
    }

  invoke_set_current_function_hook (fndecl);

  if (fndecl != NULL_TREE)
    {
      tree result = DECL_RESULT (fndecl);

      if (!abstract_p)
	{
	  /* Now that we have activated any function-specific attributes
	     that might affect layout, particularly vector modes, relayout
	     each of the parameters and the result.  */
	  relayout_decl (result);
	  for (tree parm = DECL_ARGUMENTS (fndecl); parm;
	       parm = DECL_CHAIN (parm))
	    relayout_decl (parm);

	  /* Similarly relayout the function decl.  */
	  targetm.target_option.relayout_function (fndecl);
	}

      if (!abstract_p && aggregate_value_p (result, fndecl))
	{
#ifdef PCC_STATIC_STRUCT_RETURN
	  cfun->returns_pcc_struct = 1;
#endif
	  cfun->returns_struct = 1;
	}

      cfun->stdarg = stdarg_p (fntype);

      /* Assume all registers in stdarg functions need to be saved.  */
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

      /* ??? This could be set on a per-function basis by the front-end
	 but is this worth the hassle?  */
      cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
      cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;

      if (!profile_flag && !flag_instrument_function_entry_exit)
	DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
    }

  /* Don't enable begin stmt markers if var-tracking at assignments is
     disabled.  The markers make little sense without the variable
     binding annotations among them.  */
  cfun->debug_nonbind_markers = lang_hooks.emits_begin_stmt
    && MAY_HAVE_DEBUG_MARKER_STMTS;
}
/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
   instead of just setting it.  */

void
push_struct_function (tree fndecl)
{
  /* When in_dummy_function we might be in the middle of a pop_cfun and
     current_function_decl and cfun may not match.  */
  gcc_assert (in_dummy_function
	      || (!cfun && !current_function_decl)
	      || (cfun && current_function_decl == cfun->decl));
  cfun_stack.safe_push (cfun);
  current_function_decl = fndecl;
  allocate_struct_function (fndecl, false);
}
/* Reset crtl and other non-struct-function variables to defaults as
   appropriate for emitting rtl at the start of a function.  */

static void
prepare_function_start (void)
{
  gcc_assert (!get_last_insn ());
  init_temp_slots ();
  init_emit ();
  init_varasm_status ();
  init_expr ();
  default_rtl_profile ();

  if (flag_stack_usage_info)
    {
      cfun->su = ggc_cleared_alloc<stack_usage> ();
      cfun->su->static_stack_size = -1;
    }

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate that we want CONCATs now.  */
  generating_concat_p = 1;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;
}
void
push_dummy_function (bool with_decl)
{
  tree fn_decl, fn_type, fn_result_decl;

  gcc_assert (!in_dummy_function);
  in_dummy_function = true;

  if (with_decl)
    {
      fn_type = build_function_type_list (void_type_node, NULL_TREE);
      fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
			    fn_type);
      fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
				   NULL_TREE, void_type_node);
      DECL_RESULT (fn_decl) = fn_result_decl;
    }
  else
    fn_decl = NULL_TREE;

  push_struct_function (fn_decl);
}
/* Initialize the rtl expansion mechanism so that we can do simple things
   like generate sequences.  This is used to provide a context during global
   initialization of some passes.  You must call expand_dummy_function_end
   to exit this context.  */

void
init_dummy_function_start (void)
{
  push_dummy_function (false);
  prepare_function_start ();
}
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (tree subr)
{
  /* Initialize backend, if needed.  */
  initialize_rtl ();

  prepare_function_start ();
  decide_function_section (subr);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning (OPT_Waggregate_return, "function returns an aggregate");
}
/* Expand code to verify the stack_protect_guard.  This is invoked at
   the end of a function to be protected.  */

void
stack_protect_epilogue (void)
{
  tree guard_decl = crtl->stack_protect_guard_decl;
  rtx_code_label *label = gen_label_rtx ();
  rtx x, y;
  rtx_insn *seq = NULL;

  x = expand_normal (crtl->stack_protect_guard);

  if (targetm.have_stack_protect_combined_test () && guard_decl)
    {
      gcc_assert (DECL_P (guard_decl));
      y = DECL_RTL (guard_decl);
      /* Allow the target to compute address of Y and compare it with X without
	 leaking Y into a register.  This combined address + compare pattern
	 allows the target to prevent spilling of any intermediate results by
	 splitting it after register allocator.  */
      seq = targetm.gen_stack_protect_combined_test (x, y, label);
    }
  else
    {
      if (guard_decl)
	y = expand_normal (guard_decl);
      else
	y = const0_rtx;

      /* Allow the target to compare Y with X without leaking either into
	 a register.  */
      if (targetm.have_stack_protect_test ())
	seq = targetm.gen_stack_protect_test (x, y, label);
    }

  if (seq)
    emit_insn (seq);
  else
    emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);

  /* The noreturn predictor has been moved to the tree level.  The rtl-level
     predictors estimate this branch about 20%, which isn't enough to get
     things moved out of line.  Since this is the only extant case of adding
     a noreturn function at the rtl level, it doesn't seem worth doing aught
     except adding the prediction by hand.  */
  rtx_insn *tmp = get_last_insn ();
  if (JUMP_P (tmp))
    predict_insn_def (tmp, PRED_NORETURN, TAKEN);

  expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
  free_temp_slots ();
  emit_label (label);
}
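
/* The sequence emitted above behaves roughly like the following C
   sketch (illustrative pseudo-code, not what any particular target
   generates):

     if (frame_guard == __stack_chk_guard)
       goto label;            /@ guard intact: fall into the epilogue @/
     __stack_chk_fail ();     /@ noreturn on mismatch @/
     label:

   with the comparison done via the combined test, the plain test, or
   emit_cmp_and_jump_insns, whichever the target provides.  */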
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.  */

void
expand_function_start (tree subr)
{
  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  crtl->profile
    = (profile_flag
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  crtl->limit_stack
    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));

  /* Make the label for return statements to jump to.  Do not special
     case machines with special return instructions -- they will be
     handled later during jump, ifcvt, or epilogue creation.  */
  return_label = gen_label_rtx ();

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  tree res = DECL_RESULT (subr);
  if (aggregate_value_p (res, subr))
    {
      /* Returning something that won't go in a register.  */
      rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (cfun->returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (res));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (sv)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, sv);
	    }
	}
      if (value_address)
	{
	  rtx x = value_address;
	  if (!DECL_BY_REFERENCE (res))
	    {
	      x = gen_rtx_MEM (DECL_MODE (res), x);
	      set_mem_attributes (x, res, 1);
	    }
	  set_parm_rtl (res, x);
	}
    }
  else if (DECL_MODE (res) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    set_parm_rtl (res, NULL_RTX);
  else
    {
      /* Compute the return values into a pseudo reg, which we will copy
	 into the true return register after the cleanups are done.  */
      tree return_type = TREE_TYPE (res);

      /* If we may coalesce this result, make sure it has the expected mode
	 in case it was promoted.  But we need not bother about BLKmode.  */
      machine_mode promoted_mode
	= flag_tree_coalesce_vars && is_gimple_reg (res)
	  ? promote_ssa_mode (ssa_default_def (cfun, res), NULL)
	  : BLKmode;

      if (promoted_mode != BLKmode)
	set_parm_rtl (res, gen_reg_rtx (promoted_mode));
      else if (TYPE_MODE (return_type) != BLKmode
	       && targetm.calls.return_in_msb (return_type))
	/* expand_function_end will insert the appropriate padding in
	   this case.  Use the return value's natural (unpadded) mode
	   within the function proper.  */
	set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type)));
      else
	{
	  /* In order to figure out what mode to use for the pseudo, we
	     figure out what the mode of the eventual return register will
	     actually be, and use that.  */
	  rtx hard_reg = hard_function_value (return_type, subr, 0, 1);

	  /* Structures that are returned in registers are not
	     aggregate_value_p, so we may see a PARALLEL or a REG.  */
	  if (REG_P (hard_reg))
	    set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg)));
	  else
	    {
	      gcc_assert (GET_CODE (hard_reg) == PARALLEL);
	      set_parm_rtl (res, gen_group_rtx (hard_reg));
	    }
	}

      /* Set DECL_REGISTER flag so that expand_function_end will copy the
	 result to the real return register(s).  */
      DECL_REGISTER (res) = 1;
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */
  assign_parms (subr);

  /* If function gets a static chain arg, store it.  */
  if (cfun->static_chain_decl)
    {
      tree parm = cfun->static_chain_decl;
      rtx local, chain;
      rtx_insn *insn;
      int unsignedp;

      local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp));
      chain = targetm.calls.static_chain (current_function_decl, true);

      set_decl_incoming_rtl (parm, chain, false);
      set_parm_rtl (parm, local);
      mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));

      if (GET_MODE (local) != GET_MODE (chain))
	{
	  convert_move (local, chain, unsignedp);
	  insn = get_last_insn ();
	}
      else
	insn = emit_move_insn (local, chain);

      /* Mark the register as eliminable, similar to parameters.  */
      if (MEM_P (chain)
	  && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
	set_dst_reg_note (insn, REG_EQUIV, chain, local);

      /* If we aren't optimizing, save the static chain onto the stack.  */
      if (!optimize)
	{
	  tree saved_static_chain_decl
	    = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
			  DECL_NAME (parm), TREE_TYPE (parm));
	  rtx saved_static_chain_rtx
	    = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
	  SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
	  emit_move_insn (saved_static_chain_rtx, chain);
	  SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
	  DECL_HAS_VALUE_EXPR_P (parm) = 1;
	}
    }

  /* The following was moved from init_function_start.
     The move was supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NOTE_INSN_FUNCTION_BEG);

  gcc_assert (NOTE_P (get_last_insn ()));

  parm_birth_insn = get_last_insn ();

  /* If the function receives a non-local goto, then store the
     bits we need to restore the frame pointer.  */
  if (cfun->nonlocal_goto_save_area)
    {
      tree t_save;
      rtx r_save;

      tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
      gcc_assert (DECL_RTL_SET_P (var));

      t_save = build4 (ARRAY_REF,
		       TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
		       cfun->nonlocal_goto_save_area,
		       integer_zero_node, NULL_TREE, NULL_TREE);
      r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
      gcc_assert (GET_MODE (r_save) == Pmode);

      emit_move_insn (r_save, hard_frame_pointer_rtx);
      update_nonlocal_goto_save_area ();
    }

  if (crtl->profile)
    {
#ifdef PROFILE_HOOK
      PROFILE_HOOK (current_function_funcdef_no);
#endif
    }

  /* If we are doing generic stack checking, the probe should go here.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
}
void
pop_dummy_function (void)
{
  pop_cfun ();
  in_dummy_function = false;
}

/* Undo the effects of init_dummy_function_start.  */

void
expand_dummy_function_end (void)
{
  gcc_assert (in_dummy_function);

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  pop_dummy_function ();
}
/* Helper for diddle_return_value.  */

void
diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
{
  if (! outgoing)
    return;

  if (REG_P (outgoing))
    (*doit) (outgoing, arg);
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
	{
	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

	  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	    (*doit) (x, arg);
	}
    }
}

/* Call DOIT for each hard register used as a return value from
   the current function.  */

void
diddle_return_value (void (*doit) (rtx, void *), void *arg)
{
  diddle_return_value_1 (doit, arg, crtl->return_rtx);
}

static void
do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_clobber (reg);
}

void
clobber_return_register (void)
{
  diddle_return_value (do_clobber_return_reg, NULL);

  /* In case we do use pseudo to return value, clobber it too.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);
      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
	do_clobber_return_reg (decl_rtl, NULL);
    }
}

static void
do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_use (reg);
}

static void
use_return_register (void)
{
  diddle_return_value (do_use_return_reg, NULL);
}
/* Generate RTL for the end of the current function.  */

void
expand_function_end (void)
{
  /* If arg_pointer_save_area was referenced only from a nested
     function, we will not have initialized it yet.  Do that now.  */
  if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
    get_arg_pointer_save_area ();

  /* If we are doing generic stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    {
      rtx_insn *insn, *seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (CALL_P (insn))
	  {
	    rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
	    start_sequence ();
	    if (STACK_CHECK_MOVING_SP)
	      anti_adjust_stack_and_probe (max_frame_size, true);
	    else
	      probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
	    seq = get_insns ();
	    end_sequence ();
	    set_insn_locations (seq, prologue_location);
	    emit_insn_before (seq, stack_check_probe_note);
	    break;
	  }
    }

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Output a linenumber for the end of the function.
     SDB depended on this.  */
  set_curr_insn_location (input_location);

  /* Before the return label (if any), clobber the return
     registers so that they are not propagated live to the rest of
     the function.  This can only happen with functions that drop
     through; if there had been a return statement, there would
     have either been a return rtx, or a jump to the return label.

     We delay actual code generation after the current_function_value_rtx
     is computed.  */
  rtx_insn *clobber_after = get_last_insn ();

  /* Output the label for the actual return from the function.  */
  emit_label (return_label);

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      /* Let except.c know where it should emit the call to unregister
	 the function context for sjlj exceptions.  */
      if (flag_exceptions)
	sjlj_emit_function_exit_after (get_last_insn ());
    }

  /* If this is an implementation of throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* If stack protection is enabled for this function, check the guard.  */
  if (crtl->stack_protect_guard
      && targetm.stack_protect_runtime_enabled_p ()
      && naked_return_label == NULL_RTX)
    stack_protect_epilogue ();

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl = crtl->return_rtx;
	  complex_mode cmode;

	  /* This should be set in assign_parms.  */
	  gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));

	  /* If this is a BLKmode structure being returned in registers,
	     then use the mode computed in expand_return.  Note that if
	     decl_rtl is memory, then its mode may have been changed,
	     but that crtl->return_rtx has not.  */
	  if (GET_MODE (real_decl_rtl) == BLKmode)
	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));

	  /* If a non-BLKmode return value should be padded at the least
	     significant end of the register, shift it left by the appropriate
	     amount.  BLKmode results are handled using the group load/store
	     machinery.  */
	  if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
	      && REG_P (real_decl_rtl)
	      && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
	    {
	      emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
					   REGNO (real_decl_rtl)),
			      decl_rtl);
	      shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
	    }
	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
	    {
	      /* If expand_function_start has created a PARALLEL for decl_rtl,
		 move the result to the real return registers.  Otherwise, do
		 a group load from decl_rtl for a named return.  */
	      if (GET_CODE (decl_rtl) == PARALLEL)
		emit_group_move (real_decl_rtl, decl_rtl);
	      else
		emit_group_load (real_decl_rtl, decl_rtl,
				 TREE_TYPE (decl_result),
				 int_size_in_bytes (TREE_TYPE (decl_result)));
	    }
	  /* In the case of complex integer modes smaller than a word, we'll
	     need to generate some non-trivial bitfield insertions.  Do that
	     on a pseudo and not the hard register.  */
	  else if (GET_CODE (decl_rtl) == CONCAT
		   && is_complex_int_mode (GET_MODE (decl_rtl), &cmode)
		   && GET_MODE_BITSIZE (cmode) <= BITS_PER_WORD)
	    {
	      int old_generating_concat_p;
	      rtx tmp;

	      old_generating_concat_p = generating_concat_p;
	      generating_concat_p = 0;
	      tmp = gen_reg_rtx (GET_MODE (decl_rtl));
	      generating_concat_p = old_generating_concat_p;

	      emit_move_insn (tmp, decl_rtl);
	      emit_move_insn (real_decl_rtl, tmp);
	    }
	  /* If a named return value dumped decl_return to memory, then
	     we may need to re-do the PROMOTE_MODE signed/unsigned
	     coercion.  */
	  else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
	    {
	      int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
	      promote_function_mode (TREE_TYPE (decl_result),
				     GET_MODE (decl_rtl), &unsignedp,
				     TREE_TYPE (current_function_decl), 1);

	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
	    }
	  else
	    emit_move_insn (real_decl_rtl, decl_rtl);
	}
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And cfun->returns_pcc_struct is not necessarily set.  */
  if ((cfun->returns_struct || cfun->returns_pcc_struct)
      && !targetm.calls.omit_struct_return_reg)
    {
      rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      rtx outgoing;

      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
	type = TREE_TYPE (type);
      else
	value_address = XEXP (value_address, 0);

      outgoing = targetm.calls.function_value (build_pointer_type (type),
					       current_function_decl, true);

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
      scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (outgoing));
      value_address = convert_memory_address (mode, value_address);

      emit_move_insn (outgoing, value_address);

      /* Show return register used to hold result (in this case the address
	 of the result).  */
      crtl->return_rtx = outgoing;
    }

  /* Emit the actual code to clobber return register.  Don't emit
     it if clobber_after is a barrier, then the previous basic block
     certainly doesn't fall thru into the exit block.  */
  if (!BARRIER_P (clobber_after))
    {
      start_sequence ();
      clobber_return_register ();
      rtx_insn *seq = get_insns ();
      end_sequence ();

      emit_insn_after (seq, clobber_after);
    }

  /* Output the label for the naked return from the function.  */
  if (naked_return_label)
    emit_label (naked_return_label);

  /* @@@ This is a kludge.  We want to ensure that instructions that
     may trap are not moved into the epilogue by scheduling, because
     we don't always emit unwind information for the epilogue.  */
  if (cfun->can_throw_non_call_exceptions
      && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
    emit_insn (gen_blockage ());

  /* If stack protection is enabled for this function, check the guard.  */
  if (crtl->stack_protect_guard
      && targetm.stack_protect_runtime_enabled_p ()
      && naked_return_label)
    stack_protect_epilogue ();

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
  if (! EXIT_IGNORE_STACK
      && cfun->calls_alloca)
    {
      rtx tem = 0;

      start_sequence ();
      emit_stack_save (SAVE_FUNCTION, &tem);
      rtx_insn *seq = get_insns ();
      end_sequence ();
      emit_insn_before (seq, parm_birth_insn);

      emit_stack_restore (SAVE_FUNCTION, tem);
    }

  /* ??? This should no longer be necessary since stupid is no longer with
     us, but there are some parts of the compiler (eg reload_combine, and
     sh mach_dep_reorg) that still try and compute their own lifetime info
     instead of using the general framework.  */
  use_return_register ();
}
rtx
get_arg_pointer_save_area (void)
{
  rtx ret = arg_pointer_save_area;

  if (! ret)
    {
      ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      arg_pointer_save_area = ret;
    }

  if (! crtl->arg_pointer_save_area_init)
    {
      /* Save the arg pointer at the beginning of the function.  The
	 generated stack slot may not be a valid memory address, so we
	 have to check it and fix it if necessary.  */
      start_sequence ();
      emit_move_insn (validize_mem (copy_rtx (ret)),
		      crtl->args.internal_arg_pointer);
      rtx_insn *seq = get_insns ();
      end_sequence ();

      push_topmost_sequence ();
      emit_insn_after (seq, entry_of_function ());
      pop_topmost_sequence ();

      crtl->arg_pointer_save_area_init = true;
    }

  return ret;
}
/* If debugging dumps are requested, dump information about how the
   target handled -fstack-check=clash for the prologue.

   PROBES describes what if any probes were emitted.

   RESIDUALS indicates if the prologue had any residual allocation
   (i.e. total allocation was not a multiple of PROBE_INTERVAL).  */

void
dump_stack_clash_frame_info (enum stack_clash_probes probes, bool residuals)
{
  if (!dump_file)
    return;

  switch (probes)
    {
    case NO_PROBE_NO_FRAME:
      fprintf (dump_file,
	       "Stack clash no probe no stack adjustment in prologue.\n");
      break;
    case NO_PROBE_SMALL_FRAME:
      fprintf (dump_file,
	       "Stack clash no probe small stack adjustment in prologue.\n");
      break;
    case PROBE_INLINE:
      fprintf (dump_file, "Stack clash inline probes in prologue.\n");
      break;
    case PROBE_LOOP:
      fprintf (dump_file, "Stack clash probe loop in prologue.\n");
      break;
    }

  if (residuals)
    fprintf (dump_file, "Stack clash residual allocation in prologue.\n");
  else
    fprintf (dump_file, "Stack clash no residual allocation in prologue.\n");

  if (frame_pointer_needed)
    fprintf (dump_file, "Stack clash frame pointer needed.\n");
  else
    fprintf (dump_file, "Stack clash no frame pointer needed.\n");

  if (TREE_THIS_VOLATILE (cfun->decl))
    fprintf (dump_file,
	     "Stack clash noreturn prologue, assuming no implicit"
	     " probes in caller.\n");
  else
    fprintf (dump_file,
	     "Stack clash not noreturn prologue.\n");
}
/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
   for the first time.  */

static void
record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
{
  rtx_insn *tmp;
  hash_table<insn_cache_hasher> *hash = *hashp;

  if (hash == NULL)
    *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);

  for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
    {
      rtx *slot = hash->find_slot (tmp, INSERT);
      gcc_assert (*slot == NULL);
      *slot = tmp;
    }
}
/* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
   basic block, splitting or peepholes.  If INSN is a prologue or epilogue
   insn, then record COPY as well.  */

void
maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
{
  hash_table<insn_cache_hasher> *hash;
  rtx *slot;

  hash = epilogue_insn_hash;
  if (!hash || !hash->find (insn))
    {
      hash = prologue_insn_hash;
      if (!hash || !hash->find (insn))
	return;
    }

  slot = hash->find_slot (copy, INSERT);
  gcc_assert (*slot == NULL);
  *slot = copy;
}
/* Determine if any INSNs in HASH are, or are part of, INSN.  Because
   we can be running after reorg, SEQUENCE rtl is possible.  */

static bool
contains (const rtx_insn *insn, hash_table<insn_cache_hasher> *hash)
{
  if (hash == NULL)
    return false;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
      int i;
      for (i = seq->len () - 1; i >= 0; i--)
	if (hash->find (seq->element (i)))
	  return true;
      return false;
    }

  return hash->find (const_cast<rtx_insn *> (insn)) != NULL;
}
bool
prologue_contains (const rtx_insn *insn)
{
  return contains (insn, prologue_insn_hash);
}

bool
epilogue_contains (const rtx_insn *insn)
{
  return contains (insn, epilogue_insn_hash);
}

bool
prologue_epilogue_contains (const rtx_insn *insn)
{
  if (contains (insn, prologue_insn_hash))
    return true;
  if (contains (insn, epilogue_insn_hash))
    return true;
  return false;
}

void
record_prologue_seq (rtx_insn *seq)
{
  record_insns (seq, NULL, &prologue_insn_hash);
}

void
record_epilogue_seq (rtx_insn *seq)
{
  record_insns (seq, NULL, &epilogue_insn_hash);
}
/* Set JUMP_LABEL for a return insn.  */

void
set_return_jump_label (rtx_insn *returnjump)
{
  rtx pat = PATTERN (returnjump);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (ANY_RETURN_P (pat))
    JUMP_LABEL (returnjump) = pat;
  else
    JUMP_LABEL (returnjump) = ret_rtx;
}
/* Return a sequence to be used as the split prologue for the current
   function, or NULL.  */

static rtx_insn *
make_split_prologue_seq (void)
{
  if (!flag_split_stack
      || lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl)))
    return NULL;

  start_sequence ();
  emit_insn (targetm.gen_split_stack_prologue ());
  rtx_insn *seq = get_insns ();
  end_sequence ();

  record_insns (seq, NULL, &prologue_insn_hash);
  set_insn_locations (seq, prologue_location);

  return seq;
}
/* Return a sequence to be used as the prologue for the current function,
   or NULL.  */

static rtx_insn *
make_prologue_seq (void)
{
  if (!targetm.have_prologue ())
    return NULL;

  start_sequence ();
  rtx_insn *seq = targetm.gen_prologue ();
  emit_insn (seq);

  /* Insert an explicit USE for the frame pointer
     if the profiling is on and the frame pointer is required.  */
  if (crtl->profile && frame_pointer_needed)
    emit_use (hard_frame_pointer_rtx);

  /* Retain a map of the prologue insns.  */
  record_insns (seq, NULL, &prologue_insn_hash);
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Ensure that instructions are not moved into the prologue when
     profiling is on.  The call to the profiling routine can be
     emitted within the live range of a call-clobbered register.  */
  if (!targetm.profile_before_prologue () && crtl->profile)
    emit_insn (gen_blockage ());

  seq = get_insns ();
  end_sequence ();
  set_insn_locations (seq, prologue_location);

  return seq;
}
/* Return a sequence to be used as the epilogue for the current function,
   or NULL.  */

static rtx_insn *
make_epilogue_seq (void)
{
  if (!targetm.have_epilogue ())
    return NULL;

  start_sequence ();
  emit_note (NOTE_INSN_EPILOGUE_BEG);
  rtx_insn *seq = targetm.gen_epilogue ();
  if (seq)
    emit_jump_insn (seq);

  /* Retain a map of the epilogue insns.  */
  record_insns (seq, NULL, &epilogue_insn_hash);
  set_insn_locations (seq, epilogue_location);

  seq = get_insns ();
  rtx_insn *returnjump = get_last_insn ();
  end_sequence ();

  if (JUMP_P (returnjump))
    set_return_jump_label (returnjump);

  return seq;
}
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.

   Notes on epilogue placement:
   There are several kinds of edges to the exit block:
   * a single fallthru edge from LAST_BB
   * possibly, edges from blocks containing sibcalls
   * possibly, fake edges from infinite loops

   The epilogue is always emitted on the fallthru edge from the last basic
   block in the function, LAST_BB, into the exit block.

   If LAST_BB is empty except for a label, it is the target of every
   other basic block in the function that ends in a return.  If a
   target has a return or simple_return pattern (possibly with
   conditional variants), these basic blocks can be changed so that a
   return insn is emitted into them, and their target is adjusted to
   the real exit block.

   Notes on shrink wrapping: We implement a fairly conservative
   version of shrink-wrapping rather than the textbook one.  We only
   generate a single prologue and a single epilogue.  This is
   sufficient to catch a number of interesting cases involving early
   exits.

   First, we identify the blocks that require the prologue to occur before
   them.  These are the ones that modify a call-saved register, or reference
   any of the stack or frame pointer registers.  To simplify things, we then
   mark everything reachable from these blocks as also requiring a prologue.
   This takes care of loops automatically, and avoids the need to examine
   whether MEMs reference the frame, since it is sufficient to check for
   occurrences of the stack or frame pointer.

   We then compute the set of blocks for which the need for a prologue
   is anticipatable (borrowing terminology from the shrink-wrapping
   description in Muchnick's book).  These are the blocks which either
   require a prologue themselves, or those that have only successors
   where the prologue is anticipatable.  The prologue needs to be
   inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
   is not.  For the moment, we ensure that only one such edge exists.

   The epilogue is placed as described above, but we make a
   distinction between inserting return and simple_return patterns
   when modifying other blocks that end in a return.  Blocks that end
   in a sibcall omit the sibcall_epilogue if the block is not in
   ANTIC.  */
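
/* As a small illustration (hypothetical CFG, not taken from a dump):

       entry -> bb1 -> exit          (early return, needs no prologue)
       entry -> bb1 -> bb2 -> exit   (bb2 clobbers a call-saved reg)

   Only bb2 requires the prologue, so the prologue is inserted on the
   edge bb1->bb2 and the early-return path avoids it entirely.  */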
5847 thread_prologue_and_epilogue_insns (void)
5851 /* Can't deal with multiple successors of the entry block at the
5852 moment. Function should always have at least one entry
5854 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun
)));
5856 edge entry_edge
= single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
5857 edge orig_entry_edge
= entry_edge
;
5859 rtx_insn
*split_prologue_seq
= make_split_prologue_seq ();
5860 rtx_insn
*prologue_seq
= make_prologue_seq ();
5861 rtx_insn
*epilogue_seq
= make_epilogue_seq ();
5863 /* Try to perform a kind of shrink-wrapping, making sure the
5864 prologue/epilogue is emitted only around those parts of the
5865 function that require it. */
5866 try_shrink_wrapping (&entry_edge
, prologue_seq
);
5868 /* If the target can handle splitting the prologue/epilogue into separate
5869 components, try to shrink-wrap these components separately. */
5870 try_shrink_wrapping_separate (entry_edge
->dest
);
5872 /* If that did anything for any component we now need the generate the
5873 "main" prologue again. Because some targets require some of these
5874 to be called in a specific order (i386 requires the split prologue
5875 to be first, for example), we create all three sequences again here.
5876 If this does not work for some target, that target should not enable
5877 separate shrink-wrapping. */
5878 if (crtl
->shrink_wrapped_separate
)
5880 split_prologue_seq
= make_split_prologue_seq ();
5881 prologue_seq
= make_prologue_seq ();
5882 epilogue_seq
= make_epilogue_seq ();
5885 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
));
5887 /* A small fib -- epilogue is not yet completed, but we wish to re-use
5888 this marker for the splits of EH_RETURN patterns, and nothing else
5889 uses the flag in the meantime. */
5890 epilogue_completed
= 1;
5892 /* Find non-fallthru edges that end with EH_RETURN instructions. On
5893 some targets, these get split to a special version of the epilogue
5894 code. In order to be able to properly annotate these with unwind
5895 info, try to split them now. If we get a valid split, drop an
5896 EPILOGUE_BEG note and mark the insns as epilogue insns. */
5899 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
5901 rtx_insn
*prev
, *last
, *trial
;
5903 if (e
->flags
& EDGE_FALLTHRU
)
5905 last
= BB_END (e
->src
);
5906 if (!eh_returnjump_p (last
))
5909 prev
= PREV_INSN (last
);
5910 trial
= try_split (PATTERN (last
), last
, 1);
5914 record_insns (NEXT_INSN (prev
), NEXT_INSN (trial
), &epilogue_insn_hash
);
5915 emit_note_after (NOTE_INSN_EPILOGUE_BEG
, prev
);
5918 edge exit_fallthru_edge
= find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
);
5920 if (exit_fallthru_edge
)
5924 insert_insn_on_edge (epilogue_seq
, exit_fallthru_edge
);
5925 commit_edge_insertions ();
5927 /* The epilogue insns we inserted may cause the exit edge to no longer
5929 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
5931 if (((e
->flags
& EDGE_FALLTHRU
) != 0)
5932 && returnjump_p (BB_END (e
->src
)))
5933 e
->flags
&= ~EDGE_FALLTHRU
;
5936 else if (next_active_insn (BB_END (exit_fallthru_edge
->src
)))
5938 /* We have a fall-through edge to the exit block, the source is not
5939 at the end of the function, and there will be an assembler epilogue
5940 at the end of the function.
5941 We can't use force_nonfallthru here, because that would try to
5942 use return. Inserting a jump 'by hand' is extremely messy, so
5943 we take advantage of cfg_layout_finalize using
5944 fixup_fallthru_exit_predecessor. */
5945 cfg_layout_initialize (0);
5947 FOR_EACH_BB_FN (cur_bb
, cfun
)
5948 if (cur_bb
->index
>= NUM_FIXED_BLOCKS
5949 && cur_bb
->next_bb
->index
>= NUM_FIXED_BLOCKS
)
5950 cur_bb
->aux
= cur_bb
->next_bb
;
5951 cfg_layout_finalize ();
  /* Insert the prologue.  */

  rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  if (split_prologue_seq || prologue_seq)
    {
      rtx_insn *split_prologue_insn = split_prologue_seq;
      if (split_prologue_seq)
        {
          while (split_prologue_insn
                 && !NONDEBUG_INSN_P (split_prologue_insn))
            split_prologue_insn = NEXT_INSN (split_prologue_insn);
          insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
        }

      rtx_insn *prologue_insn = prologue_seq;
      if (prologue_seq)
        {
          while (prologue_insn && !NONDEBUG_INSN_P (prologue_insn))
            prologue_insn = NEXT_INSN (prologue_insn);
          insert_insn_on_edge (prologue_seq, entry_edge);
        }

      commit_edge_insertions ();

      /* Look for basic blocks within the prologue insns.  */
      if (split_prologue_insn
          && BLOCK_FOR_INSN (split_prologue_insn) == NULL)
        split_prologue_insn = NULL;
      if (prologue_insn
          && BLOCK_FOR_INSN (prologue_insn) == NULL)
        prologue_insn = NULL;
      if (split_prologue_insn || prologue_insn)
        {
          auto_sbitmap blocks (last_basic_block_for_fn (cfun));
          bitmap_clear (blocks);
          if (split_prologue_insn)
            bitmap_set_bit (blocks,
                            BLOCK_FOR_INSN (split_prologue_insn)->index);
          if (prologue_insn)
            bitmap_set_bit (blocks, BLOCK_FOR_INSN (prologue_insn)->index);
          find_many_sub_basic_blocks (blocks);
        }
    }
  default_rtl_profile ();

  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
       (e = ei_safe_edge (ei));
       ei_next (&ei))
    {
      /* Skip those already handled, the ones that run without prologue.  */
      if (e->flags & EDGE_IGNORE)
        {
          e->flags &= ~EDGE_IGNORE;
          continue;
        }

      rtx_insn *insn = BB_END (e->src);

      if (!(CALL_P (insn) && SIBLING_CALL_P (insn)))
        continue;

      if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ())
        {
          start_sequence ();
          emit_note (NOTE_INSN_EPILOGUE_BEG);
          emit_insn (ep_seq);
          rtx_insn *seq = get_insns ();
          end_sequence ();

          /* Retain a map of the epilogue insns.  Used in life analysis to
             avoid getting rid of sibcall epilogue insns.  Do this before
             we actually emit the sequence.  */
          record_insns (seq, NULL, &epilogue_insn_hash);
          set_insn_locations (seq, epilogue_location);

          emit_insn_before (seq, insn);
        }
    }
  if (epilogue_seq)
    {
      rtx_insn *insn, *next;

      /* Similarly, move any line notes that appear after the epilogue.
         There is no need, however, to be quite so anal about the existence
         of such a note.  Also possibly move
         NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
         info generation.  */
      for (insn = epilogue_seq; insn; insn = next)
        {
          next = NEXT_INSN (insn);
          if (NOTE_P (insn)
              && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
            reorder_insns (insn, insn, PREV_INSN (epilogue_seq));
        }
    }

  /* Threading the prologue and epilogue changes the artificial refs
     in the entry and exit blocks.  */
  epilogue_completed = 1;
  df_update_entry_exit_and_calls ();
}
/* Reposition the prologue-end and epilogue-begin notes after
   instruction scheduling.  */

void
reposition_prologue_and_epilogue_notes (void)
{
  if (!targetm.have_prologue ()
      && !targetm.have_epilogue ()
      && !targetm.have_sibcall_epilogue ())
    return;

  /* Since the hash table is created on demand, the fact that it is
     non-null is a signal that it is non-empty.  */
  if (prologue_insn_hash != NULL)
    {
      size_t len = prologue_insn_hash->elements ();
      rtx_insn *insn, *last = NULL, *note = NULL;

      /* Scan from the beginning until we reach the last prologue insn.  */
      /* ??? While we do have the CFG intact, there are two problems:
         (1) The prologue can contain loops (typically probing the stack),
             which means that the end of the prologue isn't in the first bb.
         (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          if (NOTE_P (insn))
            {
              if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
                note = insn;
            }
          else if (contains (insn, prologue_insn_hash))
            {
              last = insn;
              if (--len == 0)
                break;
            }
        }

      if (last)
        {
          if (note == NULL)
            {
              /* Scan forward looking for the PROLOGUE_END note.  It should
                 be right at the beginning of the block, possibly with other
                 insn notes that got moved there.  */
              for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
                if (NOTE_P (note)
                    && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
                  break;
            }

          /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
          if (LABEL_P (last))
            last = NEXT_INSN (last);
          reorder_insns (note, note, last);
        }
    }

  if (epilogue_insn_hash != NULL)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
        {
          rtx_insn *insn, *first = NULL, *note = NULL;
          basic_block bb = e->src;

          /* Scan from the beginning until we reach the first epilogue
             insn.  */
          FOR_BB_INSNS (bb, insn)
            {
              if (NOTE_P (insn))
                {
                  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
                    {
                      note = insn;
                      if (first != NULL)
                        break;
                    }
                }
              else if (first == NULL && contains (insn, epilogue_insn_hash))
                {
                  first = insn;
                  if (note != NULL)
                    break;
                }
            }

          if (note)
            {
              /* If the function has a single basic block, and no real
                 epilogue insns (e.g. sibcall with no cleanup), the
                 epilogue note can get scheduled before the prologue
                 note.  If we have frame related prologue insns, having
                 them scanned during the epilogue will result in a crash.
                 In this case re-order the epilogue note to just before
                 the last insn in the block.  */
              if (first == NULL)
                first = BB_END (bb);

              if (PREV_INSN (first) != note)
                reorder_insns (note, note, PREV_INSN (first));
            }
        }
    }
}
/* Returns the name of function declared by FNDECL.  */
const char *
fndecl_name (tree fndecl)
{
  if (fndecl == NULL)
    return "(nofn)";
  return lang_hooks.decl_printable_name (fndecl, 1);
}

/* Returns the name of function FN.  */
const char *
function_name (struct function *fn)
{
  tree fndecl = (fn == NULL) ? NULL : fn->decl;
  return fndecl_name (fndecl);
}

/* Returns the name of the current function.  */
const char *
current_function_name (void)
{
  return function_name (cfun);
}

static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  crtl->uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}
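
/* A note on LEAF_REGISTERS: it is a target macro, defined for example by
   SPARC, where a function that touches only "leaf" registers can avoid
   the register-window save/restore.  On targets that do not define it,
   rest_of_handle_check_leaf_regs above compiles to an empty stub.  */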
/* Insert a TYPE into the used types hash table of CFUN.  */

static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      if (func->used_types_hash == NULL)
        func->used_types_hash = hash_set<tree>::create_ggc (37);

      func->used_types_hash->add (type);
    }
}
/* Given a type, insert it into the used hash table in cfun.  */
void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    if (TYPE_NAME (t))
      break;
    else
      t = TREE_TYPE (t);
  if (TREE_CODE (t) == ERROR_MARK)
    return;
  if (TYPE_NAME (t) == NULL_TREE
      || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
    t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    {
      if (cfun)
        used_types_insert_helper (t, cfun);
      else
        /* So this might be a type referenced by a global variable.
           Record that type so that we can later decide to emit its
           debug information.  */
        vec_safe_push (types_used_by_cur_var_decl, t);
    }
}
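
/* Example of the stripping loop above (illustrative): for a variable
   declared as

     struct S **a[10];

   used_types_insert peels ARRAY_TYPE -> POINTER_TYPE -> POINTER_TYPE,
   stopping early only at a named (e.g. typedef'd) intermediate type,
   and finally records the main variant of `struct S'.  */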
/* Helper to hash a struct types_used_by_vars_entry.  */

static hashval_t
hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
{
  gcc_assert (entry && entry->var_decl && entry->type);

  return iterative_hash_object (entry->type,
                                iterative_hash_object (entry->var_decl, 0));
}
/* Hash function of the types_used_by_vars_entry hash table.  */

hashval_t
used_type_hasher::hash (types_used_by_vars_entry *entry)
{
  return hash_types_used_by_vars_entry (entry);
}

/* Equality function of the types_used_by_vars_entry hash table.  */

bool
used_type_hasher::equal (types_used_by_vars_entry *e1,
                         types_used_by_vars_entry *e2)
{
  return (e1->var_decl == e2->var_decl
          && e1->type == e2->type);
}
/* Inserts an entry into the types_used_by_vars_hash hash table.  */

void
types_used_by_var_decl_insert (tree type, tree var_decl)
{
  if (type != NULL && var_decl != NULL)
    {
      types_used_by_vars_entry **slot;
      struct types_used_by_vars_entry e;
      e.var_decl = var_decl;
      e.type = type;
      if (types_used_by_vars_hash == NULL)
        types_used_by_vars_hash
          = hash_table<used_type_hasher>::create_ggc (37);

      slot = types_used_by_vars_hash->find_slot (&e, INSERT);
      if (*slot == NULL)
        {
          struct types_used_by_vars_entry *entry;
          entry = ggc_alloc<types_used_by_vars_entry> ();
          entry->type = type;
          entry->var_decl = var_decl;
          *slot = entry;
        }
    }
}
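
/* Usage sketch (hypothetical caller, for illustration only): a front end
   processing a file-scope variable might do

     tree var = ...;   /- some global VAR_DECL -/
     types_used_by_var_decl_insert (TREE_TYPE (var), var);

   so the decision whether to emit debug info for the type can be made
   once the set of referencing variables is known.  */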
namespace {

const pass_data pass_data_leaf_regs =
{
  RTL_PASS, /* type */
  "*leaf_regs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_leaf_regs : public rtl_opt_pass
{
public:
  pass_leaf_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_leaf_regs, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_check_leaf_regs ();
    }

}; // class pass_leaf_regs

} // anon namespace

rtl_opt_pass *
make_pass_leaf_regs (gcc::context *ctxt)
{
  return new pass_leaf_regs (ctxt);
}
static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  /* prepare_shrink_wrap is sensitive to the block structure of the control
     flow graph, so clean it up first.  */
  if (optimize)
    cleanup_cfg (0);

  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns ();

  /* Some non-cold blocks may now be only reachable from cold blocks.
     Fix that up.  */
  fixup_partitions ();

  /* Shrink-wrapping can result in unreachable edges in the epilogue,
     so clean those up.  */
  cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);

  /* The stack usage info is finalized during prologue expansion.  */
  if (flag_stack_usage_info)
    output_stack_usage ();

  return 0;
}
namespace {

const pass_data pass_data_thread_prologue_and_epilogue =
{
  RTL_PASS, /* type */
  "pro_and_epilogue", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
};

class pass_thread_prologue_and_epilogue : public rtl_opt_pass
{
public:
  pass_thread_prologue_and_epilogue (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_thread_prologue_and_epilogue ();
    }

}; // class pass_thread_prologue_and_epilogue

} // anon namespace

rtl_opt_pass *
make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
{
  return new pass_thread_prologue_and_epilogue (ctxt);
}
/* If CONSTRAINT is a matching constraint, then return its number.
   Otherwise, return -1.  */

static int
matching_constraint_num (const char *constraint)
{
  if (*constraint == '%')
    constraint++;

  if (IN_RANGE (*constraint, '0', '9'))
    return strtoul (constraint, NULL, 10);

  return -1;
}
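
/* Examples (illustrative): matching_constraint_num ("0") == 0,
   matching_constraint_num ("%1") == 1 (the commutative marker is
   skipped), and matching_constraint_num ("r") == -1, since "r" is
   not a matching constraint.  */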
/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     asm ("": "+mr" (inout));

   which is transformed very early to use explicit output and match
   operands:

     asm ("": "=mr" (inout) : "0" (inout));

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads,
   or, alternatively, it's also a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   into

     asm ("": "=mr" (inout_2) : "0" (inout_2));  */
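
/* Net effect, sketched at the source level (an illustration, not a dump
   of the actual RTL): the pass effectively rewrites

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   into

     inout_2 = inout_1;
     asm ("": "=mr" (inout_2) : "0" (inout_2));

   by emitting a move of the old input into the output pseudo and then
   replacing every mention of the input with the output.  */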
static void
match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output;
      rtx_insn *insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      int match, j;

      match = matching_constraint_num (constraint);
      if (match < 0)
        continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
          || rtx_equal_p (output, input)
          || !(REG_P (input) || SUBREG_P (input)
               || MEM_P (input) || CONSTANT_P (input))
          || !general_operand (input, GET_MODE (output)))
        continue;

      /* We can't do anything if the output is also used as input,
         as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
          break;
      if (j != ninputs)
        continue;

      /* Avoid changing the same input several times.  For
         asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
         only change it once (to out1), rather than changing it
         first to out1 and afterwards to out2.  */
      if (i > 0)
        {
          for (j = 0; j < noutputs; j++)
            if (output_matched[j] && input == SET_DEST (p_sets[j]))
              break;
          if (j != noutputs)
            continue;
        }
      output_matched[match] = true;

      start_sequence ();
      emit_move_insn (output, copy_rtx (input));
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);

      constraint
        = ASM_OPERANDS_OUTPUT_CONSTRAINT (SET_SRC (p_sets[match]));
      bool early_clobber_p = strchr (constraint, '&') != NULL;
      /* Now replace all mentions of the input with output.  We can't
         just replace the occurrence in inputs[i], as the register might
         also be used in some other input (or even in an address of an
         output), which would mean possibly increasing the number of
         inputs by one (namely 'output' in addition), which might pose
         too complicated a problem for reload to solve.  E.g. this
         situation:

           asm ("" : "=r" (output), "=m" (input) : "0" (input))

         Here 'input' is used in two occurrences as input (once for the
         input operand, once for the address in the second output operand).
         If we would replace only the occurrence of the input operand (to
         make the matching) we would be left with this:

           asm ("" : "=r" (output), "=m" (input) : "0" (output))

         Now we suddenly have two different input values (containing the
         same value, but different pseudos) where we formerly had only one.
         With more complicated asms this might lead to reload failures
         which wouldn't have happened without this pass.  So, iterate over
         all operands and replace all occurrences of the register used.

         However, if one or more of the 'input' uses have a non-matching
         constraint and the matched output operand is an early clobber
         operand, then do not replace the input operand, since by definition
         it conflicts with the output operand and cannot share the same
         register.  See PR89313 for details.  */
      for (j = 0; j < noutputs; j++)
        if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
            && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
          SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
                                              input, output);
      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
          {
            if (!early_clobber_p
                || match == matching_constraint_num
                              (ASM_OPERANDS_INPUT_CONSTRAINT (op, j)))
              RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
                                                   input, output);
          }

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}
/* Add the decl D to the local_decls list of FUN.  */

void
add_local_decl (struct function *fun, tree d)
{
  gcc_assert (VAR_P (d));
  vec_safe_push (fun->local_decls, d);
}
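
/* Usage sketch (hypothetical, for illustration): code that manufactures a
   new function-local variable typically pairs the decl's creation with a
   registration here, e.g.

     tree t = build_decl (loc, VAR_DECL, name, type);
     DECL_CONTEXT (t) = current_function_decl;
     add_local_decl (cfun, t);

   so later per-function walks over local_decls can see it.  */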
namespace {

const pass_data pass_data_match_asm_constraints =
{
  RTL_PASS, /* type */
  "asmcons", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_match_asm_constraints : public rtl_opt_pass
{
public:
  pass_match_asm_constraints (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_match_asm_constraints
unsigned int
pass_match_asm_constraints::execute (function *fun)
{
  basic_block bb;
  rtx_insn *insn;
  rtx pat, *p_sets;
  int noutputs;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB_FN (bb, fun)
    {
      FOR_BB_INSNS (bb, insn)
        {
          if (!INSN_P (insn))
            continue;

          pat = PATTERN (insn);
          if (GET_CODE (pat) == PARALLEL)
            p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
          else if (GET_CODE (pat) == SET)
            p_sets = &PATTERN (insn), noutputs = 1;
          else
            continue;

          if (GET_CODE (*p_sets) == SET
              && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
            match_asm_constraints_1 (insn, p_sets, noutputs);
        }
    }

  return TODO_df_finish;
}

} // anon namespace
rtl_opt_pass *
make_pass_match_asm_constraints (gcc::context *ctxt)
{
  return new pass_match_asm_constraints (ctxt);
}


#include "gt-function.h"