/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
#include "coretypes.h"
#include "rtl-error.h"
#include "stor-layout.h"
#include "stringpool.h"
#include "hard-reg-set.h"
#include "insn-codes.h"
#include "insn-config.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "gimple-expr.h"
#include "tree-pass.h"
#include "dominance.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "bb-reorder.h"
#include "shrink-wrap.h"
#include "tree-chkp.h"

/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
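/* Illustrative sketch (not part of the original source): how the two
   rounding macros behave on a negative offset, as happens when the frame
   grows downward.  Kept under #if 0 so it is never compiled.  */
#if 0
static void
rounding_macro_examples (void)
{
  /* With a power-of-two alignment of 8, FLOOR_ROUND rounds toward
     more-negative values and CEIL_ROUND toward less-negative ones.
     Plain C division would round toward zero, giving -8 for -13 / 8 * 8,
     which is why the masking form is used instead.  */
  gcc_checking_assert (FLOOR_ROUND (-13, 8) == -16);
  gcc_checking_assert (CEIL_ROUND (-13, 8) == -8);
  gcc_checking_assert (FLOOR_ROUND (13, 8) == 8);
  gcc_checking_assert (CEIL_ROUND (13, 8) == 16);
}
#endif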
/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These hashes record the prologue and epilogue insns.  */
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t prologue_insn_hash;
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t epilogue_insn_hash;

hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
vec<tree, va_gc> *types_used_by_cur_var_decl;

/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, machine_mode, tree);
static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx_insn *, rtx, htab_t *) ATTRIBUTE_UNUSED;
static bool contains (const_rtx, htab_t);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

typedef struct function *function_p;

static vec<function_p> function_context_stack;

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  function_context_stack.safe_push (cfun);
  set_cfun (NULL);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = function_context_stack.pop ();
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  regno_reg_rtx = NULL;
}
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}
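/* Illustrative note (not in the original source): when the frame grows
   downward, frame_offset becomes more negative as slots are allocated,
   so a frame holding 32 bytes of locals has frame_offset == -32 and
   get_frame_size () returns 32.  */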
/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetics for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
	     /* Leave room for the fixed part of the frame.  */
	     - 64 * UNITS_PER_WORD)
    {
      error_at (DECL_SOURCE_LOCATION (func),
		"total size of local objects too large");
      return TRUE;
    }

  return FALSE;
}

/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front-end to (possibly) increase the alignment of this
     type.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}
/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
		     HOST_WIDE_INT size, unsigned int alignment,
		     HOST_WIDE_INT *poffset)
{
  HOST_WIDE_INT this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  /* We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (FLOOR_ROUND (start + length - size - frame_phase,
		      (unsigned HOST_WIDE_INT) alignment)
	 + frame_phase);
  else
    this_frame_offset
      = (CEIL_ROUND (start - frame_phase,
		     (unsigned HOST_WIDE_INT) alignment)
	 + frame_phase);

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (frame_offset == start && this_frame_offset < frame_offset)
    frame_offset = this_frame_offset;
  else if (this_frame_offset < start)
    return false;
  else if (start + length == frame_offset
	   && this_frame_offset + size > start + length)
    frame_offset = this_frame_offset + size;
  else if (this_frame_offset + size > start + length)
    return false;

  *poffset = this_frame_offset;
  return true;
}
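/* Worked example (illustrative, not in the original source): with a
   downward-growing frame, STARTING_FRAME_OFFSET == 0 (so frame_phase
   is 0), a free area at START == -24 of LENGTH 24, SIZE == 8 and
   ALIGNMENT == 16, the candidate offset is
   FLOOR_ROUND (-24 + 24 - 8, 16) == -16, which lies inside [-24, 0),
   so *POFFSET is set to -16 and 8 bytes of padding are left on each
   side of the slot.  */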
/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
{
  struct frame_space *space = ggc_alloc<frame_space> ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}
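/* Illustrative sketch (not part of the original source): when an 8-byte
   slot is carved out at offset -16 of a 24-byte hole at [-24, 0), as in
   the example above, the caller records the two leftover pieces with
     add_frame_space (-24, -16);
     add_frame_space (-8, 0);
   so that later allocations can reuse them.  */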
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
		      int align, int kind)
{
  rtx x, addr;
  int bigend_correction = 0;
  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
	{
	  if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = alignment_in_bits;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized);
	      if (!crtl->stack_realign_needed)
		{
		  /* It is OK to reduce the alignment as long as the
		     requested size is 0 or the estimated stack
		     alignment >= mode alignment.  */
		  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
			      || size == 0
			      || (crtl->stack_alignment_estimated
				  >= GET_MODE_ALIGNMENT (mode)));
		  alignment_in_bits = crtl->stack_alignment_estimated;
		  alignment = alignment_in_bits / BITS_PER_UNIT;
		}
	    }
	}
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || size != 0)
    {
      if (kind & ASLK_RECORD_PAD)
	{
	  struct frame_space **psp;

	  for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
	    {
	      struct frame_space *space = *psp;
	      if (!try_fit_stack_local (space->start, space->length, size,
					alignment, &slot_offset))
		continue;
	      *psp = space->next;
	      if (slot_offset > space->start)
		add_frame_space (space->start, slot_offset);
	      if (slot_offset + size < space->start + space->length)
		add_frame_space (slot_offset + size,
				 space->start + space->length);
	      goto found_space;
	    }
	}
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
	{
	  if (slot_offset > frame_offset)
	    add_frame_space (frame_offset, slot_offset);
	  if (slot_offset + size < old_frame_offset)
	    add_frame_space (slot_offset + size, old_frame_offset);
	}
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment,
			   &slot_offset);

      if (kind & ASLK_RECORD_PAD)
	{
	  if (slot_offset > old_frame_offset)
	    add_frame_space (old_frame_offset, slot_offset);
	  if (slot_offset + size < frame_offset)
	    add_frame_space (slot_offset + size, frame_offset);
	}
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (Pmode, frame_pointer_rtx,
			  trunc_int_for_mode
			  (slot_offset + bigend_correction
			   + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (Pmode, virtual_stack_vars_rtx,
			  trunc_int_for_mode
			  (slot_offset + bigend_correction,
			   Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}
/* Wrap up assign_stack_local_1 with last parameter as false.  */

rtx
assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
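/* Usage sketch (illustrative, not part of the original source): an
   expander might grab an 8-byte, mode-aligned slot with
     rtx slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);
   where ALIGN == 0 asks for DImode's natural alignment.  Before virtual
   register instantiation the returned MEM's address is based on
   virtual_stack_vars_rtx; afterwards it is frame-pointer relative.  */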
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.  */
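/* Illustrative example (not in the original source): in a GNU C
   statement expression such as

     struct big b = ({ struct big tmp = make_big (); tmp; });

   the value of the final statement may live in a temporary slot, so the
   slot must survive past the end of the inner statement.  It is
   "preserved" by moving it to the enclosing nesting level, exactly as
   if it had been allocated there.  */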
struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* Entry for the below hash table.  */
struct GTY((for_user)) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};

struct temp_address_hasher : ggc_hasher<temp_slot_address_entry *>
{
  static hashval_t hash (temp_slot_address_entry *);
  static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
};

/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
static size_t n_temp_slots_in_use;

/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  temp->prev = NULL;
  if (*list)
    (*list)->prev = temp;

  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) vec_safe_length (used_temp_slots))
    vec_safe_grow_cleared (used_temp_slots, level + 1);

  return &(*used_temp_slots)[level];
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return used_temp_slots->length () - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
  n_temp_slots_in_use--;
}

/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
		   &do_not_record, NULL, false);
}

/* Return the hash value for an address -> temp slot mapping.  */
hashval_t
temp_address_hasher::hash (temp_slot_address_entry *t)
{
  return t->hash;
}

/* Compare two address -> temp slot mapping entries.  */
bool
temp_address_hasher::equal (temp_slot_address_entry *t1,
			    temp_slot_address_entry *t2)
{
  return exp_equiv_p (t1->address, t2->address, 0, true);
}

/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
}

/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
int
remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
{
  const struct temp_slot_address_entry *t = *slot;
  if (! t->temp_slot->in_use)
    temp_slot_address_table->clear_slot (slot);
  return 1;
}

/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  /* Use quicker clearing if there aren't any active temp slots.  */
  if (n_temp_slots_in_use)
    temp_slot_address_table->traverse
      <void *, remove_unused_temp_slot_addresses_1> (NULL);
  else
    temp_slot_address_table->empty ();
}

/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && CONST_INT_P (XEXP (x, 1)))
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
	for (p = *temp_slots_at_level (i); p; p = p->next)
	  if (INTVAL (XEXP (x, 1)) >= p->base_offset
	      && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
	    return p;
    }

  return NULL;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size,
			    tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
	{
	  if (p->align >= align && p->size >= size
	      && GET_MODE (p->slot) == mode
	      && objects_must_conflict_p (p->type, type)
	      && (best_p == 0 || best_p->size > p->size
		  || (best_p->size == p->size && best_p->align > p->align)))
	    {
	      if (p->align == align && p->size == size)
		{
		  selected = p;
		  cut_slot_from_list (selected, &avail_temp_slots);
		  best_p = 0;
		  break;
		}
	      best_p = p;
	    }
	}
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = ggc_alloc<temp_slot> ();
	      p->in_use = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
	      p->align = best_p->align;
	      p->type = best_p->type;
	      insert_slot_to_list (p, &avail_temp_slots);

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc<temp_slot> ();

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
				      (mode == BLKmode
				       ? CEIL_ROUND (size,
						     (int) align
						     / BITS_PER_UNIT)
				       : size),
				      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
	p->size = frame_offset_old - frame_offset;
      else
	p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
	{
	  p->base_offset = frame_offset;
	  p->full_size = frame_offset_old - frame_offset;
	}
      else
	{
	  p->base_offset = frame_offset_old;
	  p->full_size = frame_offset - frame_offset_old;
	}

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->type = type;
  p->level = temp_slot_level;
  n_temp_slots_in_use++;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First two arguments are same as in preceding function.  */

rtx
assign_stack_temp (machine_mode mode, HOST_WIDE_INT size)
{
  return assign_stack_temp_for_type (mode, size, NULL_TREE);
}

/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate an object of the given type.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int memory_required,
	     int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
	 problems with allocating the stack space.  */
      if (size == 0)
	size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we can find a fixed upper limit on
	 the size, so try that instead.  */
      else if (size == -1)
	size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
	 this to things that aren't compiler-generated temporaries.  The
	 rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
	{
	  error ("size of variable %q+D is too large", decl);
	  size = 1;
	}

      tmp = assign_stack_temp_for_type (mode, size, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}
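/* Usage sketch (illustrative, not part of the original source): to
   materialize a struct value that must live in memory, a caller might do
     rtx mem = assign_temp (type, 1, 1);
   requesting addressable stack memory (MEMORY_REQUIRED == 1) and no
   promotion.  For a scalar with MEMORY_REQUIRED == 0 the result is simply
   a new pseudo register in the (possibly promoted) mode.  */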
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless at
     high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
	continue;

      for (q = p->next; q; q = next_q)
	{
	  int delete_q = 0;

	  next_q = q->next;

	  if (GET_MODE (q->slot) != BLKmode)
	    continue;

	  if (p->base_offset + p->full_size == q->base_offset)
	    {
	      /* Q comes after P; combine Q into P.  */
	      p->size += q->size;
	      p->full_size += q->full_size;
	      delete_q = 1;
	    }
	  else if (q->base_offset + q->full_size == p->base_offset)
	    {
	      /* P comes after Q; combine P into Q.  */
	      q->size += p->size;
	      q->full_size += p->full_size;
	      delete_p = 1;
	      break;
	    }
	  if (delete_q)
	    cut_slot_from_list (q, &avail_temp_slots);
	}

      /* Either delete P or advance past it.  */
      if (delete_p)
	cut_slot_from_list (p, &avail_temp_slots);
    }
}
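/* Worked example (illustrative, not in the original source): two freed
   BLKmode slots with { base_offset = -32, full_size = 16 } and
   { base_offset = -16, full_size = 16 } satisfy
   p->base_offset + p->full_size == q->base_offset, so they merge into a
   single 32-byte slot at offset -32 that later requests can reuse.  */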
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so and NEW_RTX
     is a register, see if one operand of the PLUS is a temporary
     location; if so, NEW_RTX points into it.  Otherwise, if both OLD_RTX
     and NEW_RTX are a PLUS and there is a register in common between
     them, try a recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
	return;

      if (REG_P (new_rtx))
	{
	  update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
	  update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
	  return;
	}
      else if (GET_CODE (new_rtx) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  if (x == 0)
    return;

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    return;

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      if (p->level == temp_slot_level)
	move_slot_to_level (p, temp_slot_level - 1);
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      move_slot_to_level (p, temp_slot_level - 1);
    }
}

/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  free_temp_slots ();
  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  vec_alloc (used_temp_slots, 0);
  temp_slot_level = 0;
  n_temp_slots_in_use = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
  else
    temp_slot_address_table->empty ();
}
/* Functions and data structures to keep track of the values hard regs
   had at the start of the function.  */

/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
   and has_hard_reg_initial_val.  */
typedef struct GTY(()) initial_value_pair {
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;
/* ???  This could be a VEC but there is currently no way to define an
   opaque VEC type.  This could be worked around by defining struct
   initial_value_pair in function.h.  */
typedef struct GTY(()) initial_value_struct {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;
/* If a pseudo represents an initial hard reg (or expression), return
   it, else return NULL_RTX.  */

rtx
get_hard_reg_initial_reg (rtx reg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}

/* Make sure that there's a pseudo register of mode MODE that stores the
   initial value of hard register REGNO.  Return an rtx for such a pseudo.  */

rtx
get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  rtx rv;

  rv = has_hard_reg_initial_val (mode, regno);
  if (rv)
    return rv;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs == 0)
    {
      ivs = ggc_alloc<initial_value_struct> ();
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
      crtl->hard_reg_initial_vals = ivs;
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
				    ivs->max_entries);
    }

  ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);

  return ivs->entries[ivs->num_entries++].pseudo;
}
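/* Usage sketch (illustrative, not part of the original source): a back
   end that needs the function's incoming return address can ask for
     rtx ra = get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
   where RETURN_ADDR_REGNUM stands in for the target's link register.
   The first call creates the pseudo; emit_initial_value_sets later
   emits the copy from the hard register at function entry.  */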
/* See if get_hard_reg_initial_val has been used to create a pseudo
   for the initial value of hard register REGNO in mode MODE.  Return
   the associated pseudo if so, otherwise return NULL.  */

rtx
has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  int i;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs != 0)
    for (i = 0; i < ivs->num_entries; i++)
      if (GET_MODE (ivs->entries[i].hard_reg) == mode
	  && REGNO (ivs->entries[i].hard_reg) == regno)
	return ivs->entries[i].pseudo;

  return NULL_RTX;
}

unsigned int
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;
  rtx_insn *seq;

  if (ivs == 0)
    return 0;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_at_entry (seq);
  return 0;
}
/* Return the hardreg-pseudoreg initial values pair entry I and
   TRUE if I is a valid entry, or FALSE if I is not a valid entry.  */
bool
initial_value_entry (int i, rtx *hreg, rtx *preg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  if (!ivs || i >= ivs->num_entries)
    return false;

  *hreg = ivs->entries[i].hard_reg;
  *preg = ivs->entries[i].pseudo;
  return true;
}
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

#if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
#define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#ifdef INCOMING_REG_PARM_STACK_SPACE
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS \
  ? (crtl->outgoing_args_size \
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
	: INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
 + (STACK_POINTER_OFFSET))
#endif
#endif
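/* Illustrative note (not part of the original source): on a target with
   ACCUMULATE_OUTGOING_ARGS, 48 bytes of outgoing argument space and
   STACK_POINTER_OFFSET == 0, the fallback definition above gives
   STACK_DYNAMIC_OFFSET (fndecl) == 48, i.e. dynamically allocated stack
   memory starts just above the outgoing-argument block.  */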
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new_rtx;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
	{
	  /* Replace virtual_incoming_args_rtx with internal arg
	     pointer if DRAP is used to realign stack.  */
	  new_rtx = crtl->args.internal_arg_pointer;
	  offset = 0;
	}
      else
	new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else if (x == virtual_preferred_stack_boundary_rtx)
    {
      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
      offset = 0;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}
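/* Illustrative example (not in the original source): after instantiation
   an address such as
     (plus:P (reg virtual-stack-vars) (const_int 8))
   becomes
     (plus:P (reg frame-pointer) (const_int (8 + var_offset)))
   where var_offset is STARTING_FRAME_OFFSET for the function.  */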
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of *LOC.  The expression is simplified,
   as much as possible, but is not to be considered "valid" in any sense
   implied by the target.  Return true if any change is made.  */

static bool
instantiate_virtual_regs_in_rtx (rtx *loc)
{
  if (!*loc)
    return false;
  bool changed = false;
  subrtx_ptr_iterator::array_type array;
  FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
    {
      rtx *loc = *iter;
      if (rtx x = *loc)
	{
	  rtx new_rtx;
	  HOST_WIDE_INT offset;
	  switch (GET_CODE (x))
	    {
	    case REG:
	      new_rtx = instantiate_new_reg (x, &offset);
	      if (new_rtx)
		{
		  *loc = plus_constant (GET_MODE (x), new_rtx, offset);
		  changed = true;
		}
	      iter.skip_subrtxes ();
	      break;

	    case PLUS:
	      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
	      if (new_rtx)
		{
		  XEXP (x, 0) = new_rtx;
		  *loc = plus_constant (GET_MODE (x), x, offset, true);
		  changed = true;
		  iter.skip_subrtxes ();
		  break;
		}

	      /* FIXME -- from old code */
	      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
		 we can commute the PLUS and SUBREG because pointers into the
		 frame are well-behaved.  */
	      break;

	    default:
	      break;
	    }
	}
    }
  return changed;
}
/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static bool
safe_insn_predicate (int code, int operand, rtx x)
{
  return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
}
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx_insn *insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new_rtx, x;
  rtx_insn *seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
	 to mean that the underlying register gets assigned the inverse
	 transformation.  This is used, for example, in the handling of
	 non-local gotos; see expand_builtin_setjmp_receiver.  */
      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
      if (new_rtx)
	{
	  start_sequence ();

	  instantiate_virtual_regs_in_rtx (&SET_SRC (set));
	  x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
				   gen_int_mode (-offset, GET_MODE (new_rtx)));
	  x = force_operand (x, new_rtx);
	  if (x != new_rtx)
	    emit_move_insn (new_rtx, x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      /* Handle a straight copy from a virtual register by generating a
	 new add insn.  The difference between this and falling through
	 to the generic case is avoiding a new pseudo and eliminating a
	 move insn in the initial rtl stream.  */
      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
      if (new_rtx && offset != 0
	  && REG_P (SET_DEST (set))
	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	{
	  start_sequence ();

	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
				   gen_int_mode (offset,
						 GET_MODE (SET_DEST (set))),
				   SET_DEST (set), 1, OPTAB_LIB_WIDEN);
	  if (x != SET_DEST (set))
	    emit_move_insn (SET_DEST (set), x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
	 operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
	  && recog_data.n_operands >= 3
	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
	  && CONST_INT_P (recog_data.operand[2])
	  && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
	{
	  offset += INTVAL (recog_data.operand[2]);

	  /* If the sum is zero, then replace with a plain move.  */
	  if (offset == 0
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	    {
	      start_sequence ();
	      emit_move_insn (SET_DEST (set), new_rtx);
	      seq = get_insns ();
	      end_sequence ();

	      emit_insn_before (seq, insn);
	      delete_insn (insn);
	      return;
	    }

	  x = gen_int_mode (offset, recog_data.operand_mode[2]);

	  /* Using validate_change and apply_change_group here leaves
	     recog_data in an invalid state.  Since we know exactly what
	     we want to check, do those two by hand.  */
	  if (safe_insn_predicate (insn_code, 1, new_rtx)
	      && safe_insn_predicate (insn_code, 2, x))
	    {
	      *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
	      any_change = true;

	      /* Fall through into the regular operand fixup loop in
		 order to take care of operands other than 1 and 2.  */
	    }
	}
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
	{
	case MEM:
	  {
	    rtx addr = XEXP (x, 0);

	    if (!instantiate_virtual_regs_in_rtx (&addr))
	      continue;

	    start_sequence ();
	    x = replace_equiv_address (x, addr, true);
	    /* It may happen that the address with the virtual reg
	       was valid (e.g. based on the virtual stack reg, which might
	       be acceptable to the predicates with all offsets), whereas
	       the address now isn't anymore, for instance when the address
	       is still offsetted, but the base reg isn't virtual-stack-reg
	       anymore.  Below we would do a force_reg on the whole operand,
	       but this insn might actually only accept memory.  Hence,
	       before doing that last resort, try to reload the address into
	       a register, so this operand stays a MEM.  */
	    if (!safe_insn_predicate (insn_code, i, x))
	      {
		addr = force_reg (GET_MODE (addr), addr);
		x = replace_equiv_address (x, addr, true);
	      }
	    seq = get_insns ();
	    end_sequence ();
	    if (seq)
	      emit_insn_before (seq, insn);
	  }
	  break;

	case REG:
	  new_rtx = instantiate_new_reg (x, &offset);
	  if (new_rtx == NULL)
	    continue;
	  if (offset == 0)
	    x = new_rtx;
	  else
	    {
	      start_sequence ();

	      /* Careful, special mode predicates may have stuff in
		 insn_data[insn_code].operand[i].mode that isn't useful
		 to us for computing a new value.  */
	      /* ??? Recognize address_operand and/or "p" constraints
		 to see if (plus new offset) is valid before we put
		 this through expand_simple_binop.  */
	      x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
				       gen_int_mode (offset, GET_MODE (x)),
				       NULL_RTX, 1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  break;

	case SUBREG:
	  new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
	  if (new_rtx == NULL)
	    continue;
	  if (offset != 0)
	    {
	      start_sequence ();
	      new_rtx = expand_simple_binop
		(GET_MODE (new_rtx), PLUS, new_rtx,
		 gen_int_mode (offset, GET_MODE (new_rtx)),
		 NULL_RTX, 1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
				   GET_MODE (new_rtx), SUBREG_BYTE (x));
	  gcc_assert (x);
	  break;

	default:
	  continue;
	}

      /* At this point, X contains the new value for the operand.
	 Validate the new value vs the insn predicate.  Note that
	 asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
	{
	  start_sequence ();
	  if (REG_P (x))
	    {
	      gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
	      x = copy_to_reg (x);
	    }
	  else
	    x = force_reg (insn_data[insn_code].operand[i].mode, x);
	  seq = get_insns ();
	  end_sequence ();
	  if (seq)
	    emit_insn_before (seq, insn);
	}

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
	*recog_data.dup_loc[i]
	  = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
	{
	  error_for_asm (insn, "impossible constraint in %<asm%>");
	  /* For asm goto, instead of fixing up all the edges
	     just clear the template and clear input operands
	     (asm goto doesn't have any output operands).  */
	  if (JUMP_P (insn))
	    {
	      rtx asm_op = extract_asm_operands (PATTERN (insn));
	      ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
	      ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
	      ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
	    }
	  else
	    delete_insn (insn);
	}
    }
  else
    {
      if (recog_memoized (insn) < 0)
	fatal_insn_not_found (insn);
    }
}
/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
}
/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t))
	{
	  if (DECL_RTL_SET_P (t))
	    instantiate_decl_rtl (DECL_RTL (t));
	  if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
	      && DECL_INCOMING_RTL (t))
	    instantiate_decl_rtl (DECL_INCOMING_RTL (t));
	  if ((TREE_CODE (t) == VAR_DECL
	       || TREE_CODE (t) == RESULT_DECL)
	      && DECL_HAS_VALUE_EXPR_P (t))
	    {
	      tree v = DECL_VALUE_EXPR (t);
	      walk_tree (&v, instantiate_expr, NULL, NULL);
	    }
	}
    }
  return NULL;
}
/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
	instantiate_decl_rtl (DECL_RTL (t));
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
	{
	  tree v = DECL_VALUE_EXPR (t);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;
  unsigned ix;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree v = DECL_VALUE_EXPR (decl);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  if ((decl = DECL_RESULT (fndecl))
      && TREE_CODE (decl) == RESULT_DECL)
    {
      if (DECL_RTL_SET_P (decl))
	instantiate_decl_rtl (DECL_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree v = DECL_VALUE_EXPR (decl);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Process the saved static chain if it exists.  */
  decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
  if (decl && DECL_HAS_VALUE_EXPR_P (decl))
    instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));

  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (DECL_RTL_SET_P (decl))
      instantiate_decl_rtl (DECL_RTL (decl));
  vec_free (cfun->local_decls);
}
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx_insn *insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	/* These patterns in the instruction stream can never be recognized.
	   Fortunately, they shouldn't contain virtual registers either.  */
	if (GET_CODE (PATTERN (insn)) == USE
	    || GET_CODE (PATTERN (insn)) == CLOBBER
	    || GET_CODE (PATTERN (insn)) == ASM_INPUT)
	  continue;
	else if (DEBUG_INSN_P (insn))
	  instantiate_virtual_regs_in_rtx (&INSN_VAR_LOCATION (insn));
	else
	  instantiate_virtual_regs_in_insn (insn);

	if (insn->deleted ())
	  continue;

	instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));

	/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
	if (CALL_P (insn))
	  instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  targetm.instantiate_decls ();

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;

  return 0;
}
namespace {

const pass_data pass_data_instantiate_virtual_regs =
{
  RTL_PASS, /* type */
  "vregs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_instantiate_virtual_regs : public rtl_opt_pass
{
public:
  pass_instantiate_virtual_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return instantiate_virtual_regs ();
    }

}; // class pass_instantiate_virtual_regs

} // anon namespace

rtl_opt_pass *
make_pass_instantiate_virtual_regs (gcc::context *ctxt)
{
  return new pass_instantiate_virtual_regs (ctxt);
}
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (const_tree exp, const_tree fntype)
{
  const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
  int i, regno, nregs;
  rtx reg;

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
	{
	  tree fndecl = get_callee_fndecl (fntype);
	  fntype = (fndecl
		    ? TREE_TYPE (fndecl)
		    : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
	}
	break;
      case FUNCTION_DECL:
	fntype = TREE_TYPE (fntype);
	break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
	break;
      case IDENTIFIER_NODE:
	fntype = NULL_TREE;
	break;
      default:
	/* We don't expect other tree types here.  */
	gcc_unreachable ();
      }

  if (VOID_TYPE_P (type))
    return 0;

  /* If a record should be passed the same as its first (and only) member
     don't pass it as an aggregate.  */
  if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
    return aggregate_value_p (first_field (type), fntype);

  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;

  /* Function types that are TREE_ADDRESSABLE force return in memory.  */
  if (fntype && TREE_ADDRESSABLE (fntype))
    return 1;

  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;

  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;

  if (targetm.calls.return_in_memory (type, fntype))
    return 1;

  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;

  return 0;
}
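/* Illustrative example (not in the original source): on a typical
   target a small struct such as
     struct pair { int a, b; };
   fits in call-clobbered return registers, so aggregate_value_p
   returns 0; compiling with -fpcc-struct-return (flag_pcc_struct_return)
   instead forces a result of 1 and the value comes back in memory.  */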
/* Return true if we should assign DECL a pseudo register; false if it
   should live on the local stack.  */

bool
use_register_for_decl (const_tree decl)
{
  if (!targetm.calls.allocate_stack_slots_for_args ())
    return true;

  /* Honor volatile.  */
  if (TREE_SIDE_EFFECTS (decl))
    return false;

  /* Honor addressability.  */
  if (TREE_ADDRESSABLE (decl))
    return false;

  /* Decl is implicitly addressable by bound stores and loads
     if it is an aggregate holding bounds.  */
  if (chkp_function_instrumented_p (current_function_decl)
      && TREE_TYPE (decl)
      && !BOUNDED_P (decl)
      && chkp_type_has_pointer (TREE_TYPE (decl)))
    return false;

  /* Only register-like things go in registers.  */
  if (DECL_MODE (decl) == BLKmode)
    return false;

  /* If -ffloat-store specified, don't put explicit float variables
     into registers.  */
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
     propagates values across these stores, and it probably shouldn't.  */
  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    return false;

  /* If we're not interested in tracking debugging information for
     this decl, then we can certainly put it in a register.  */
  if (DECL_IGNORED_P (decl))
    return true;

  if (optimize)
    return true;

  if (!DECL_REGISTER (decl))
    return false;

  switch (TREE_CODE (TREE_TYPE (decl)))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* When not optimizing, disregard register keyword for variables with
	 types containing methods, otherwise the methods won't be callable
	 from the debugger.  */
      if (TYPE_METHODS (TREE_TYPE (decl)))
	return false;
      break;

    default:
      break;
    }

  return true;
}
/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
		   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
	 forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
	return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return true;

      /* If a record type should be passed the same as its first (and only)
	 member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
	{
	  type = TREE_TYPE (first_field (type));
	  mode = TYPE_MODE (type);
	}
    }

  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
					  type, named_arg);
}

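/* Editor's sketch, not part of GCC: a caller-side use mirroring
   assign_parm_find_data_types below.  Note that TREE_ADDRESSABLE types
   (e.g. C++ classes with non-trivial copy constructors) and types whose
   size is not an INTEGER_CST return true before the target hook is even
   consulted.  */
#if 0
if (pass_by_reference (&all->args_so_far_v, TYPE_MODE (type),
		       type, true))
  type = build_pointer_type (type);  /* rewrite the parm as a pointer */
#endif
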
/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
			 tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
				      named_arg);
}

/* Structures to communicate between the subroutines of assign_parms.
   The first holds data persistent across all parameters, the second
   is cleared out for each parameter.  */

struct assign_parm_data_all
{
  /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
     should become a job of the target or otherwise encapsulated.  */
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  struct args_size stack_args_size;
  tree function_result_decl;
  tree orig_fnargs;
  rtx_insn *first_conversion_insn;
  rtx_insn *last_conversion_insn;
  HOST_WIDE_INT pretend_args_size;
  HOST_WIDE_INT extra_pretend_bytes;
  int reg_parm_stack_space;
};

struct assign_parm_data_one
{
  tree nominal_type;
  tree passed_type;
  rtx entry_parm;
  rtx stack_parm;
  machine_mode nominal_mode;
  machine_mode passed_mode;
  machine_mode promoted_mode;
  struct locate_and_pad_arg_data locate;
  int partial;
  BOOL_BITFIELD named_arg : 1;
  BOOL_BITFIELD passed_pointer : 1;
  BOOL_BITFIELD on_stack : 1;
  BOOL_BITFIELD loaded_in_reg : 1;
};

struct bounds_parm_data
{
  assign_parm_data_one parm_data;
  tree bounds_parm;
  tree ptr_parm;
  rtx ptr_entry;
  int bound_no;
};

/* A subroutine of assign_parms.  Initialize ALL.  */

static void
assign_parms_initialize_all (struct assign_parm_data_all *all)
{
  tree fntype ATTRIBUTE_UNUSED;

  memset (all, 0, sizeof (*all));

  fntype = TREE_TYPE (current_function_decl);

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
			current_function_decl, -1);
#endif
  all->args_so_far = pack_cumulative_args (&all->args_so_far_v);

#ifdef INCOMING_REG_PARM_STACK_SPACE
  all->reg_parm_stack_space
    = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
#endif
}

/* If ARGS contains entries with complex types, split the entry into two
   entries of the component type.  Return a new list if substitutions are
   needed, else the old list.  */

static void
split_complex_args (vec<tree> *args)
{
  unsigned i;
  tree p;

  FOR_EACH_VEC_ELT (*args, i, p)
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (type))
	{
	  tree decl;
	  tree subtype = TREE_TYPE (type);
	  bool addressable = TREE_ADDRESSABLE (p);

	  /* Rewrite the PARM_DECL's type with its component.  */
	  p = copy_node (p);
	  TREE_TYPE (p) = subtype;
	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
	  DECL_MODE (p) = VOIDmode;
	  DECL_SIZE (p) = NULL;
	  DECL_SIZE_UNIT (p) = NULL;
	  /* If this arg must go in memory, put it in a pseudo here.
	     We can't allow it to go in memory as per normal parms,
	     because the usual place might not have the imag part
	     adjacent to the real part.  */
	  DECL_ARTIFICIAL (p) = addressable;
	  DECL_IGNORED_P (p) = addressable;
	  TREE_ADDRESSABLE (p) = 0;
	  layout_decl (p, 0);
	  (*args)[i] = p;

	  /* Build a second synthetic decl.  */
	  decl = build_decl (EXPR_LOCATION (p),
			     PARM_DECL, NULL_TREE, subtype);
	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
	  DECL_ARTIFICIAL (decl) = addressable;
	  DECL_IGNORED_P (decl) = addressable;
	  layout_decl (decl, 0);
	  args->safe_insert (++i, decl);
	}
    }
}

/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
   the hidden struct return argument, and (abi willing) complex args.
   Return the new parameter list.  */

static vec<tree>
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
  tree fndecl = current_function_decl;
  tree fntype = TREE_TYPE (fndecl);
  vec<tree> fnargs = vNULL;
  tree arg;

  for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
    fnargs.safe_push (arg);

  all->orig_fnargs = DECL_ARGUMENTS (fndecl);

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! cfun->returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));
      tree decl;

      decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
			 PARM_DECL, get_identifier (".result_ptr"), type);
      DECL_ARG_TYPE (decl) = type;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_NAMELESS (decl) = 1;
      TREE_CONSTANT (decl) = 1;

      DECL_CHAIN (decl) = all->orig_fnargs;
      all->orig_fnargs = decl;
      fnargs.safe_insert (0, decl);

      all->function_result_decl = decl;

      /* If the function is instrumented then the bounds of the
	 passed structure address are the second argument.  */
      if (chkp_function_instrumented_p (fndecl))
	{
	  decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
			     PARM_DECL, get_identifier (".result_bnd"),
			     pointer_bounds_type_node);
	  DECL_ARG_TYPE (decl) = pointer_bounds_type_node;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_NAMELESS (decl) = 1;
	  TREE_CONSTANT (decl) = 1;

	  DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs);
	  DECL_CHAIN (all->orig_fnargs) = decl;
	  fnargs.safe_insert (1, decl);
	}
    }

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    split_complex_args (&fnargs);

  return fnargs;
}

/* A subroutine of assign_parms.  Examine PARM and pull out type and mode
   data for the parameter.  Incorporate ABI specifics such as pass-by-
   reference and type promotion.  */

static void
assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
			     struct assign_parm_data_one *data)
{
  tree nominal_type, passed_type;
  machine_mode nominal_mode, passed_mode, promoted_mode;
  int unsignedp;

  memset (data, 0, sizeof (*data));

  /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'.  */
  if (!cfun->stdarg)
    data->named_arg = 1;  /* No variadic parms.  */
  else if (DECL_CHAIN (parm))
    data->named_arg = 1;  /* Not the last non-variadic parm.  */
  else if (targetm.calls.strict_argument_naming (all->args_so_far))
    data->named_arg = 1;  /* Only variadic ones are unnamed.  */
  else
    data->named_arg = 0;  /* Treat as variadic.  */

  nominal_type = TREE_TYPE (parm);
  passed_type = DECL_ARG_TYPE (parm);

  /* Look out for errors propagating this far.  Also, if the parameter's
     type is void then its value doesn't matter.  */
  if (TREE_TYPE (parm) == error_mark_node
      /* This can happen after weird syntax errors
	 or if an enum type is defined among the parms.  */
      || TREE_CODE (parm) != PARM_DECL
      || passed_type == NULL
      || VOID_TYPE_P (nominal_type))
    {
      nominal_type = passed_type = void_type_node;
      nominal_mode = passed_mode = promoted_mode = VOIDmode;
      goto egress;
    }

  /* Find mode of arg as it is passed, and mode of arg as it should be
     during execution of this function.  */
  passed_mode = TYPE_MODE (passed_type);
  nominal_mode = TYPE_MODE (nominal_type);

  /* If the parm is to be passed as a transparent union or record, use the
     type of the first field for the tests below.  We have already verified
     that the modes are the same.  */
  if ((TREE_CODE (passed_type) == UNION_TYPE
       || TREE_CODE (passed_type) == RECORD_TYPE)
      && TYPE_TRANSPARENT_AGGR (passed_type))
    passed_type = TREE_TYPE (first_field (passed_type));

  /* See if this arg was passed by invisible reference.  */
  if (pass_by_reference (&all->args_so_far_v, passed_mode,
			 passed_type, data->named_arg))
    {
      passed_type = nominal_type = build_pointer_type (passed_type);
      data->passed_pointer = true;
      passed_mode = nominal_mode = TYPE_MODE (nominal_type);
    }

  /* Find mode as it is passed by the ABI.  */
  unsignedp = TYPE_UNSIGNED (passed_type);
  promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
					 TREE_TYPE (current_function_decl), 0);

 egress:
  data->nominal_type = nominal_type;
  data->passed_type = passed_type;
  data->nominal_mode = nominal_mode;
  data->passed_mode = passed_mode;
  data->promoted_mode = promoted_mode;
}

/* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */

static void
assign_parms_setup_varargs (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data, bool no_rtl)
{
  int varargs_pretend_bytes = 0;

  targetm.calls.setup_incoming_varargs (all->args_so_far,
					data->promoted_mode,
					data->passed_type,
					&varargs_pretend_bytes, no_rtl);

  /* If the back-end has requested extra stack space, record how much is
     needed.  Do not change pretend_args_size otherwise since it may be
     nonzero from an earlier partial argument.  */
  if (varargs_pretend_bytes > 0)
    all->pretend_args_size = varargs_pretend_bytes;
}

/* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
   the incoming location of the current parameter.  */

static void
assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data)
{
  HOST_WIDE_INT pretend_bytes = 0;
  rtx entry_parm;
  bool in_regs;

  if (data->promoted_mode == VOIDmode)
    {
      data->entry_parm = data->stack_parm = const0_rtx;
      return;
    }

  entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
						    data->promoted_mode,
						    data->passed_type,
						    data->named_arg);

  if (entry_parm == 0)
    data->promoted_mode = data->passed_mode;

  /* Determine parm's home in the stack, in case it arrives in the stack
     or we should pretend it did.  Compute the stack position and rtx where
     the argument arrives and its size.

     There is one complexity here:  If this was a parameter that would
     have been passed in registers, but wasn't only because it is
     __builtin_va_alist, we want locate_and_pad_parm to treat it as if
     it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
     In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
     as it was the previous time.  */
  in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type);
#ifdef STACK_PARMS_IN_REG_PARM_AREA
  in_regs = true;
#endif
  if (!in_regs && !data->named_arg)
    {
      if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
	{
	  rtx tem;
	  tem = targetm.calls.function_incoming_arg (all->args_so_far,
						     data->promoted_mode,
						     data->passed_type, true);
	  in_regs = tem != NULL;
	}
    }

  /* If this parameter was passed both in registers and in the stack, use
     the copy on the stack.  */
  if (targetm.calls.must_pass_in_stack (data->promoted_mode,
					data->passed_type))
    entry_parm = 0;

  if (entry_parm)
    {
      int partial;

      partial = targetm.calls.arg_partial_bytes (all->args_so_far,
						 data->promoted_mode,
						 data->passed_type,
						 data->named_arg);
      data->partial = partial;

      /* The caller might already have allocated stack space for the
	 register parameters.  */
      if (partial != 0 && all->reg_parm_stack_space == 0)
	{
	  /* Part of this argument is passed in registers and part
	     is passed on the stack.  Ask the prologue code to extend
	     the stack part so that we can recreate the full value.

	     PRETEND_BYTES is the size of the registers we need to store.
	     CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
	     stack space that the prologue should allocate.

	     Internally, gcc assumes that the argument pointer is aligned
	     to STACK_BOUNDARY bits.  This is used both for alignment
	     optimizations (see init_emit) and to locate arguments that are
	     aligned to more than PARM_BOUNDARY bits.  We must preserve this
	     invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
	     a stack boundary.  */

	  /* We assume at most one partial arg, and it must be the first
	     argument on the stack.  */
	  gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);

	  pretend_bytes = partial;
	  all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);

	  /* We want to align relative to the actual stack pointer, so
	     don't include this in the stack size until later.  */
	  all->extra_pretend_bytes = all->pretend_args_size;
	}
    }

  locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
		       all->reg_parm_stack_space,
		       entry_parm ? data->partial : 0, current_function_decl,
		       &all->stack_args_size, &data->locate);

  /* Update parm_stack_boundary if this parameter is passed in the
     stack.  */
  if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
    crtl->parm_stack_boundary = data->locate.boundary;

  /* Adjust offsets to include the pretend args.  */
  pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
  data->locate.slot_offset.constant += pretend_bytes;
  data->locate.offset.constant += pretend_bytes;

  data->entry_parm = entry_parm;
}

/* A subroutine of assign_parms.  If there is actually space on the stack
   for this parm, count it in stack_args_size and return true.  */

static bool
assign_parm_is_stack_parm (struct assign_parm_data_all *all,
			   struct assign_parm_data_one *data)
{
  /* Bounds are never passed on the stack to keep compatibility
     with non-instrumented code.  */
  if (POINTER_BOUNDS_TYPE_P (data->passed_type))
    return false;
  /* Trivially true if we've no incoming register.  */
  else if (data->entry_parm == NULL)
    ;
  /* Also true if we're partially in registers and partially not,
     since we've arranged to drop the entire argument on the stack.  */
  else if (data->partial != 0)
    ;
  /* Also true if the target says that it's passed in both registers
     and on the stack.  */
  else if (GET_CODE (data->entry_parm) == PARALLEL
	   && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
    ;
  /* Also true if the target says that there's stack allocated for
     all register parameters.  */
  else if (all->reg_parm_stack_space > 0)
    ;
  /* Otherwise, no, this parameter has no ABI defined stack slot.  */
  else
    return false;

  all->stack_args_size.constant += data->locate.size.constant;
  if (data->locate.size.var)
    ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);

  return true;
}

/* A subroutine of assign_parms.  Given that this parameter is allocated
   stack space by the ABI, find it.  */

static void
assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
{
  rtx offset_rtx, stack_parm;
  unsigned int align, boundary;

  /* If we're passing this arg using a reg, make its stack home the
     aligned stack slot.  */
  if (data->entry_parm)
    offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
  else
    offset_rtx = ARGS_SIZE_RTX (data->locate.offset);

  stack_parm = crtl->args.internal_arg_pointer;
  if (offset_rtx != const0_rtx)
    stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
  stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);

  if (!data->passed_pointer)
    {
      set_mem_attributes (stack_parm, parm, 1);
      /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
	 while promoted mode's size is needed.  */
      if (data->promoted_mode != BLKmode
	  && data->promoted_mode != DECL_MODE (parm))
	{
	  set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
	  if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
	    {
	      int offset = subreg_lowpart_offset (DECL_MODE (parm),
						  data->promoted_mode);
	      if (offset)
		set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
	    }
	}
    }

  boundary = data->locate.boundary;
  align = BITS_PER_UNIT;

  /* If we're padding upward, we know that the alignment of the slot
     is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
     intentionally forcing upward padding.  Otherwise we have to come
     up with a guess at the alignment based on OFFSET_RTX.  */
  if (data->locate.where_pad != downward || data->entry_parm)
    align = boundary;
  else if (CONST_INT_P (offset_rtx))
    {
      align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
      align = align & -align;
    }
  set_mem_align (stack_parm, align);

  if (data->entry_parm)
    set_reg_attrs_for_parm (data->entry_parm, stack_parm);

  data->stack_parm = stack_parm;
}

/* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
   always valid and contiguous.  */

static void
assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;

  /* If this parm was passed part in regs and part in memory, pretend it
     arrived entirely in memory by pushing the register-part onto the stack.
     In the special case of a DImode or DFmode that is split, we could put
     it together in a pseudoreg directly, but for now that's not worth
     bothering with.  */
  if (data->partial != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous
	 locations.  The Irix 6 ABI has examples of this.  */
      if (GET_CODE (entry_parm) == PARALLEL)
	emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
			  data->passed_type,
			  int_size_in_bytes (data->passed_type));
      else
	{
	  gcc_assert (data->partial % UNITS_PER_WORD == 0);
	  move_block_from_reg (REGNO (entry_parm),
			       validize_mem (copy_rtx (stack_parm)),
			       data->partial / UNITS_PER_WORD);
	}

      entry_parm = stack_parm;
    }

  /* If we didn't decide this parm came in a register, by default it came
     on the stack.  */
  else if (entry_parm == NULL)
    entry_parm = stack_parm;

  /* When an argument is passed in multiple locations, we can't make use
     of this information, but we can save some copying if the whole argument
     is passed in a single register.  */
  else if (GET_CODE (entry_parm) == PARALLEL
	   && data->nominal_mode != BLKmode
	   && data->passed_mode != BLKmode)
    {
      size_t i, len = XVECLEN (entry_parm, 0);

      for (i = 0; i < len; i++)
	if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
	    && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
	    && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
		== data->passed_mode)
	    && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
	  {
	    entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
	    break;
	  }
    }

  data->entry_parm = entry_parm;
}

/* A subroutine of assign_parms.  Reconstitute any values which were
   passed in multiple registers and would fit in a single register.  */

static void
assign_parm_remove_parallels (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;

  /* Convert the PARALLEL to a REG of the same mode as the parallel.
     This can be done with register operations rather than on the
     stack, even if we will store the reconstituted parameter on the
     stack later.  */
  if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
    {
      rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
      emit_group_store (parmreg, entry_parm, data->passed_type,
			GET_MODE_SIZE (GET_MODE (entry_parm)));
      entry_parm = parmreg;
    }

  data->entry_parm = entry_parm;
}

/* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
   always valid and properly aligned.  */

static void
assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
{
  rtx stack_parm = data->stack_parm;

  /* If we can't trust the parm stack slot to be aligned enough for its
     ultimate type, don't use that slot after entry.  We'll make another
     stack slot, if we need one.  */
  if (stack_parm
      && ((STRICT_ALIGNMENT
	   && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
	  || (data->nominal_type
	      && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
	      && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
    stack_parm = NULL;

  /* If parm was passed in memory, and we need to convert it on entry,
     don't store it back in that same slot.  */
  else if (data->entry_parm == stack_parm
	   && data->nominal_mode != BLKmode
	   && data->nominal_mode != data->passed_mode)
    stack_parm = NULL;

  /* If stack protection is in effect for this function, don't leave any
     pointers in their passed stack slots.  */
  else if (crtl->stack_protect_guard
	   && (flag_stack_protect == 2
	       || data->passed_pointer
	       || POINTER_TYPE_P (data->nominal_type)))
    stack_parm = NULL;

  data->stack_parm = stack_parm;
}

/* A subroutine of assign_parms.  Return true if the current parameter
   should be stored as a BLKmode in the current frame.  */

static bool
assign_parm_setup_block_p (struct assign_parm_data_one *data)
{
  if (data->nominal_mode == BLKmode)
    return true;
  if (GET_MODE (data->entry_parm) == BLKmode)
    return true;

#ifdef BLOCK_REG_PADDING
  /* Only assign_parm_setup_block knows how to deal with register arguments
     that are padded at the least significant end.  */
  if (REG_P (data->entry_parm)
      && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
      && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
	  == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;
#endif

  return false;
}

/* A subroutine of assign_parms.  Arrange for the parameter to be
   present and valid in DATA->STACK_RTL.  */

static void
assign_parm_setup_block (struct assign_parm_data_all *all,
			 tree parm, struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;
  HOST_WIDE_INT size;
  HOST_WIDE_INT size_stored;

  if (GET_CODE (entry_parm) == PARALLEL)
    entry_parm = emit_group_move_into_temps (entry_parm);

  size = int_size_in_bytes (data->passed_type);
  size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
  if (stack_parm == 0)
    {
      DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
      stack_parm = assign_stack_local (BLKmode, size_stored,
				       DECL_ALIGN (parm));
      if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
	PUT_MODE (stack_parm, GET_MODE (entry_parm));
      set_mem_attributes (stack_parm, parm, 1);
    }

  /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
     calls that pass values in multiple non-contiguous locations.  */
  if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
    {
      rtx mem;

      /* Note that we will be storing an integral number of words.
	 So we have to be careful to ensure that we allocate an
	 integral number of words.  We do this above when we call
	 assign_stack_local if space was not allocated in the argument
	 list.  If it was, this will not work if PARM_BOUNDARY is not
	 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
	 if it becomes a problem.  Exception is when BLKmode arrives
	 with arguments not conforming to word_mode.  */

      if (data->stack_parm == 0)
	;
      else if (GET_CODE (entry_parm) == PARALLEL)
	;
      else
	gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));

      mem = validize_mem (copy_rtx (stack_parm));

      /* Handle values in multiple non-contiguous locations.  */
      if (GET_CODE (entry_parm) == PARALLEL)
	{
	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_group_store (mem, entry_parm, data->passed_type, size);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();
	}

      else if (size == 0)
	;

      /* If SIZE is that of a mode no bigger than a word, just use
	 that mode's store operation.  */
      else if (size <= UNITS_PER_WORD)
	{
	  machine_mode mode
	    = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);

	  if (mode != BLKmode
#ifdef BLOCK_REG_PADDING
	      && (size == UNITS_PER_WORD
		  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
		      != (BYTES_BIG_ENDIAN ? upward : downward)))
#endif
	      )
	    {
	      rtx reg;

	      /* We are really truncating a word_mode value containing
		 SIZE bytes into a value of mode MODE.  If such an
		 operation requires no actual instructions, we can refer
		 to the value directly in mode MODE, otherwise we must
		 start with the register in word_mode and explicitly
		 convert it.  */
	      if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
		reg = gen_rtx_REG (mode, REGNO (entry_parm));
	      else
		{
		  reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
		  reg = convert_to_mode (mode, copy_to_reg (reg), 1);
		}
	      emit_move_insn (change_address (mem, mode, 0), reg);
	    }

	  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
	     machine must be aligned to the left before storing
	     to memory.  Note that the previous test doesn't
	     handle all cases (e.g. SIZE == 3).  */
	  else if (size != UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
		   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
		       == downward)
#else
		   && BYTES_BIG_ENDIAN
#endif
		   )
	    {
	      rtx tem, x;
	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
	      rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));

	      x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
	      tem = change_address (mem, word_mode, 0);
	      emit_move_insn (tem, x);
	    }
	  else
	    move_block_from_reg (REGNO (entry_parm), mem,
				 size_stored / UNITS_PER_WORD);
	}
      else
	move_block_from_reg (REGNO (entry_parm), mem,
			     size_stored / UNITS_PER_WORD);
    }
  else if (data->stack_parm == 0)
    {
      push_to_sequence2 (all->first_conversion_insn,
			 all->last_conversion_insn);
      emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
		       BLOCK_OP_NORMAL);
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
    }

  data->stack_parm = stack_parm;
  SET_DECL_RTL (parm, stack_parm);
}

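/* Editor's worked example, not part of GCC: the left-justification
   above, for a 3-byte BLKmode argument on a 32-bit BYTES_BIG_ENDIAN
   target (UNITS_PER_WORD == 4):

     by = (4 - 3) * 8 = 8

   so the incoming register is shifted left 8 bits, moving the three
   significant bytes to the most significant end of the word before the
   word_mode store into the stack slot.  */
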
/* A subroutine of assign_parms.  Allocate a pseudo to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
		       struct assign_parm_data_one *data)
{
  rtx parmreg, validated_mem;
  rtx equiv_stack_parm;
  machine_mode promoted_nominal_mode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
  bool did_conversion = false;
  bool need_conversion, moved;

  /* Store the parm in a pseudoregister during the function, but we may
     need to do it in a wider mode.  Using 2 here makes the result
     consistent with promote_decl_mode and thus expand_expr_real_1.  */
  promoted_nominal_mode
    = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
			     TREE_TYPE (current_function_decl), 2);

  parmreg = gen_reg_rtx (promoted_nominal_mode);

  if (!DECL_ARTIFICIAL (parm))
    mark_user_reg (parmreg);

  /* If this was an item that we received a pointer to,
     set DECL_RTL appropriately.  */
  if (data->passed_pointer)
    {
      rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
      set_mem_attributes (x, parm, 1);
      SET_DECL_RTL (parm, x);
    }
  else
    SET_DECL_RTL (parm, parmreg);

  assign_parm_remove_parallels (data);

  /* Copy the value into the register, thus bridging between
     assign_parm_find_data_types and expand_expr_real_1.  */

  equiv_stack_parm = data->stack_parm;
  validated_mem = validize_mem (copy_rtx (data->entry_parm));

  need_conversion = (data->nominal_mode != data->passed_mode
		     || promoted_nominal_mode != data->promoted_mode);
  moved = false;

  if (need_conversion
      && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
      && data->nominal_mode == data->passed_mode
      && data->nominal_mode == GET_MODE (data->entry_parm))
    {
      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
	 mode, by the caller.  We now have to convert it to
	 NOMINAL_MODE, if different.  However, PARMREG may be in
	 a different mode than NOMINAL_MODE if it is being stored
	 promoted.

	 If ENTRY_PARM is a hard register, it might be in a register
	 not valid for operating in its mode (e.g., an odd-numbered
	 register for a DFmode).  In that case, moves are the only
	 thing valid, so we can't do a convert from there.  This
	 occurs when the calling sequence allow such misaligned
	 usages.

	 In addition, the conversion may involve a call, which could
	 clobber parameters which haven't been copied to pseudo
	 registers yet.

	 First, we try to emit an insn which performs the necessary
	 conversion.  We verify that this insn does not clobber any
	 hard registers.  */

      enum insn_code icode;
      rtx op0, op1;

      icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
			    unsignedp);

      op0 = parmreg;
      op1 = validated_mem;
      if (icode != CODE_FOR_nothing
	  && insn_operand_matches (icode, 0, op0)
	  && insn_operand_matches (icode, 1, op1))
	{
	  enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
	  rtx_insn *insn, *insns;
	  HARD_REG_SET hardregs;

	  start_sequence ();
	  /* If op1 is a hard register that is likely spilled, first
	     force it into a pseudo, otherwise combiner might extend
	     its lifetime too much.  */
	  rtx t = op1;
	  if (GET_CODE (t) == SUBREG)
	    t = SUBREG_REG (t);
	  if (REG_P (t)
	      && HARD_REGISTER_P (t)
	      && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
	      && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
	    {
	      t = gen_reg_rtx (GET_MODE (op1));
	      emit_move_insn (t, op1);
	    }
	  else
	    t = op1;
	  rtx pat = gen_extend_insn (op0, t, promoted_nominal_mode,
				     data->passed_mode, unsignedp);
	  emit_insn (pat);
	  insns = get_insns ();

	  moved = true;
	  CLEAR_HARD_REG_SET (hardregs);
	  for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
	    {
	      if (INSN_P (insn))
		note_stores (PATTERN (insn), record_hard_reg_sets,
			     &hardregs);
	      if (!hard_reg_set_empty_p (hardregs))
		moved = false;
	    }

	  end_sequence ();

	  if (moved)
	    {
	      emit_insn (insns);
	      if (equiv_stack_parm != NULL_RTX)
		equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
						  equiv_stack_parm);
	    }
	}
    }

  if (moved)
    /* Nothing to do.  */
    ;
  else if (need_conversion)
    {
      /* We did not have an insn to convert directly, or the sequence
	 generated appeared unsafe.  We must first copy the parm to a
	 pseudo reg, and save the conversion until after all
	 parameters have been moved.  */

      int save_tree_used;
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validated_mem);

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);

      if (GET_CODE (tempreg) == SUBREG
	  && GET_MODE (tempreg) == data->nominal_mode
	  && REG_P (SUBREG_REG (tempreg))
	  && data->nominal_mode == data->passed_mode
	  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
	  && GET_MODE_SIZE (GET_MODE (tempreg))
	     < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
	{
	  /* The argument is already sign/zero extended, so note it
	     into the subreg.  */
	  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
	  SUBREG_PROMOTED_SET (tempreg, unsignedp);
	}

      /* TREE_USED gets set erroneously during expand_assignment.  */
      save_tree_used = TREE_USED (parm);
      expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
      TREE_USED (parm) = save_tree_used;
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();

      did_conversion = true;
    }
  else
    emit_move_insn (parmreg, validated_mem);

  /* If we were passed a pointer but the actual value can safely live
     in a register, retrieve it and use it directly.  */
  if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
    {
      /* We can't use nominal_mode, because it will have been set to
	 Pmode above.  We must use the actual mode of the parm.  */
      if (use_register_for_decl (parm))
	{
	  parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
	  mark_user_reg (parmreg);
	}
      else
	{
	  int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
					    TYPE_MODE (TREE_TYPE (parm)),
					    TYPE_ALIGN (TREE_TYPE (parm)));
	  parmreg
	    = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
				  GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
				  align);
	  set_mem_attributes (parmreg, parm, 1);
	}

      if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
	{
	  rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
	  int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));

	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_move_insn (tempreg, DECL_RTL (parm));
	  tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
	  emit_move_insn (parmreg, tempreg);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();

	  did_conversion = true;
	}
      else
	emit_move_insn (parmreg, DECL_RTL (parm));

      SET_DECL_RTL (parm, parmreg);

      /* STACK_PARM is the pointer, not the parm, and PARMREG is
	 now the parm.  */
      data->stack_parm = NULL;
    }

  /* Mark the register as eliminable if we did no conversion and it was
     copied from memory at a fixed offset, and the arg pointer was not
     copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
     offset formed an invalid address, such memory-equivalences as we
     make here would screw up life analysis for it.  */
  if (data->nominal_mode == data->passed_mode
      && !did_conversion
      && data->stack_parm != 0
      && MEM_P (data->stack_parm)
      && data->locate.offset.var == 0
      && reg_mentioned_p (virtual_incoming_args_rtx,
			  XEXP (data->stack_parm, 0)))
    {
      rtx_insn *linsn = get_last_insn ();
      rtx_insn *sinsn;
      rtx set;

      /* Mark complex types separately.  */
      if (GET_CODE (parmreg) == CONCAT)
	{
	  machine_mode submode
	    = GET_MODE_INNER (GET_MODE (parmreg));
	  int regnor = REGNO (XEXP (parmreg, 0));
	  int regnoi = REGNO (XEXP (parmreg, 1));
	  rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
	  rtx stacki = adjust_address_nv (data->stack_parm, submode,
					  GET_MODE_SIZE (submode));

	  /* Scan backwards for the set of the real and
	     imaginary parts.  */
	  for (sinsn = linsn; sinsn != 0;
	       sinsn = prev_nonnote_insn (sinsn))
	    {
	      set = single_set (sinsn);
	      if (set == 0)
		continue;

	      if (SET_DEST (set) == regno_reg_rtx [regnoi])
		set_unique_reg_note (sinsn, REG_EQUIV, stacki);
	      else if (SET_DEST (set) == regno_reg_rtx [regnor])
		set_unique_reg_note (sinsn, REG_EQUIV, stackr);
	    }
	}
      else
	set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
    }

  /* For pointer data type, suggest pointer register.  */
  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    mark_reg_pointer (parmreg,
		      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
}

/* A subroutine of assign_parms.  Allocate stack space to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
			 struct assign_parm_data_one *data)
{
  /* Value must be stored in the stack slot STACK_PARM during function
     execution.  */
  bool to_conversion = false;

  assign_parm_remove_parallels (data);

  if (data->promoted_mode != data->nominal_mode)
    {
      /* Conversion is required.  */
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      to_conversion = true;

      data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
					  TYPE_UNSIGNED (TREE_TYPE (parm)));

      if (data->stack_parm)
	{
	  int offset = subreg_lowpart_offset (data->nominal_mode,
					      GET_MODE (data->stack_parm));
	  /* ??? This may need a big-endian conversion on sparc64.  */
	  data->stack_parm
	    = adjust_address (data->stack_parm, data->nominal_mode, 0);
	  if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
	    set_mem_offset (data->stack_parm,
			    MEM_OFFSET (data->stack_parm) + offset);
	}
    }

  if (data->entry_parm != data->stack_parm)
    {
      rtx src, dest;

      if (data->stack_parm == 0)
	{
	  int align = STACK_SLOT_ALIGNMENT (data->passed_type,
					    GET_MODE (data->entry_parm),
					    TYPE_ALIGN (data->passed_type));
	  data->stack_parm
	    = assign_stack_local (GET_MODE (data->entry_parm),
				  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
				  align);
	  set_mem_attributes (data->stack_parm, parm, 1);
	}

      dest = validize_mem (copy_rtx (data->stack_parm));
      src = validize_mem (copy_rtx (data->entry_parm));

      if (MEM_P (src))
	{
	  /* Use a block move to handle potentially misaligned entry_parm.  */
	  if (!to_conversion)
	    push_to_sequence2 (all->first_conversion_insn,
			       all->last_conversion_insn);
	  to_conversion = true;

	  emit_block_move (dest, src,
			   GEN_INT (int_size_in_bytes (data->passed_type)),
			   BLOCK_OP_NORMAL);
	}
      else
	emit_move_insn (dest, src);
    }

  if (to_conversion)
    {
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
    }

  SET_DECL_RTL (parm, data->stack_parm);
}

/* A subroutine of assign_parms.  If the ABI splits complex arguments, then
   undo the frobbing that we did in assign_parms_augmented_arg_list.  */

static void
assign_parms_unsplit_complex (struct assign_parm_data_all *all,
			      vec<tree> fnargs)
{
  tree parm;
  tree orig_fnargs = all->orig_fnargs;
  unsigned i = 0;

  for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
    {
      if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
	{
	  rtx tmp, real, imag;
	  machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));

	  real = DECL_RTL (fnargs[i]);
	  imag = DECL_RTL (fnargs[i + 1]);
	  if (inner != GET_MODE (real))
	    {
	      real = gen_lowpart_SUBREG (inner, real);
	      imag = gen_lowpart_SUBREG (inner, imag);
	    }

	  if (TREE_ADDRESSABLE (parm))
	    {
	      rtx rmem, imem;
	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
	      int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
						DECL_MODE (parm),
						TYPE_ALIGN (TREE_TYPE (parm)));

	      /* split_complex_arg put the real and imag parts in
		 pseudos.  Move them to memory.  */
	      tmp = assign_stack_local (DECL_MODE (parm), size, align);
	      set_mem_attributes (tmp, parm, 1);
	      rmem = adjust_address_nv (tmp, inner, 0);
	      imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
	      push_to_sequence2 (all->first_conversion_insn,
				 all->last_conversion_insn);
	      emit_move_insn (rmem, real);
	      emit_move_insn (imem, imag);
	      all->first_conversion_insn = get_insns ();
	      all->last_conversion_insn = get_last_insn ();
	      end_sequence ();
	    }
	  else
	    tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	  SET_DECL_RTL (parm, tmp);

	  real = DECL_INCOMING_RTL (fnargs[i]);
	  imag = DECL_INCOMING_RTL (fnargs[i + 1]);
	  if (inner != GET_MODE (real))
	    {
	      real = gen_lowpart_SUBREG (inner, real);
	      imag = gen_lowpart_SUBREG (inner, imag);
	    }
	  tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	  set_decl_incoming_rtl (parm, tmp, false);
	  i++;
	}
    }
}

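/* Editor's illustration, not part of GCC: for the split '_Complex
   double x' example earlier, DECL_RTL (x) is reconstituted here as

     (concat:DC (reg:DF <x.real>) (reg:DF <x.imag>))

   or, when x is addressable, as a fresh DCmode stack slot into which
   both halves are stored by the shared conversion sequence.  */
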
/* Load bounds of PARM from bounds table.  */

static void
assign_parm_load_bounds (struct assign_parm_data_one *data,
			 tree parm,
			 rtx entry,
			 unsigned bound_no)
{
  bitmap_iterator bi;
  unsigned i, offs = 0;
  int bnd_no = -1;
  rtx slot = NULL, ptr = NULL;

  if (parm)
    {
      bitmap slots;
      bitmap_obstack_initialize (NULL);
      slots = BITMAP_ALLOC (NULL);
      chkp_find_bound_slots (TREE_TYPE (parm), slots);
      EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi)
	{
	  if (bound_no)
	    bound_no--;
	  else
	    {
	      bnd_no = i;
	      break;
	    }
	}
      BITMAP_FREE (slots);
      bitmap_obstack_release (NULL);
    }

  /* We may have bounds not associated with any pointer.  */
  if (bnd_no != -1)
    offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;

  /* Find associated pointer.  */
  if (bnd_no == -1)
    {
      /* If bounds are not associated with any pointer,
	 then it is passed in a register or special slot.  */
      gcc_assert (data->entry_parm);
      ptr = const0_rtx;
    }
  else if (MEM_P (entry))
    slot = adjust_address (entry, Pmode, offs);
  else if (REG_P (entry))
    ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no);
  else if (GET_CODE (entry) == PARALLEL)
    ptr = chkp_get_value_with_offs (entry, GEN_INT (offs));
  else
    gcc_unreachable ();

  data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr,
							data->entry_parm);
}

/* Assign RTL expressions to the function's bounds parameters BNDARGS.  */

static void
assign_bounds (vec<bounds_parm_data> &bndargs,
	       struct assign_parm_data_all &all)
{
  unsigned i, pass, handled = 0;
  bounds_parm_data *pbdata;

  if (!bndargs.exists ())
    return;

  /* We make a few passes to store input bounds.  Firstly handle bounds
     passed in registers.  After that we load bounds passed in special
     slots.  Finally we load bounds from Bounds Table.  */
  for (pass = 0; pass < 3; pass++)
    FOR_EACH_VEC_ELT (bndargs, i, pbdata)
      {
	/* Pass 0 => regs only.  */
	if (pass == 0
	    && (!pbdata->parm_data.entry_parm
		|| GET_CODE (pbdata->parm_data.entry_parm) != REG))
	  continue;
	/* Pass 1 => slots only.  */
	else if (pass == 1
		 && (!pbdata->parm_data.entry_parm
		     || GET_CODE (pbdata->parm_data.entry_parm) == REG))
	  continue;
	/* Pass 2 => BT only.  */
	else if (pass == 2
		 && pbdata->parm_data.entry_parm)
	  continue;

	if (!pbdata->parm_data.entry_parm
	    || GET_CODE (pbdata->parm_data.entry_parm) != REG)
	  assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm,
				   pbdata->ptr_entry, pbdata->bound_no);

	set_decl_incoming_rtl (pbdata->bounds_parm,
			       pbdata->parm_data.entry_parm, false);

	if (assign_parm_setup_block_p (&pbdata->parm_data))
	  assign_parm_setup_block (&all, pbdata->bounds_parm,
				   &pbdata->parm_data);
	else if (pbdata->parm_data.passed_pointer
		 || use_register_for_decl (pbdata->bounds_parm))
	  assign_parm_setup_reg (&all, pbdata->bounds_parm,
				 &pbdata->parm_data);
	else
	  assign_parm_setup_stack (&all, pbdata->bounds_parm,
				   &pbdata->parm_data);

	/* Count handled bounds to make sure we miss nothing.  */
	handled++;
      }

  gcc_assert (handled == bndargs.length ());
}

/* Assign RTL expressions to the function's parameters.  This may involve
   copying them into registers and using those registers as the DECL_RTL.  */

static void
assign_parms (tree fndecl)
{
  struct assign_parm_data_all all;
  tree parm;
  vec<tree> fnargs;
  unsigned i, bound_no = 0;
  tree last_arg = NULL;
  rtx last_arg_entry = NULL;
  vec<bounds_parm_data> bndargs = vNULL;
  bounds_parm_data bdata;

  crtl->args.internal_arg_pointer
    = targetm.calls.internal_arg_pointer ();

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode)
	{
	  SET_DECL_RTL (parm, const0_rtx);
	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
	  continue;
	}

      /* Estimate stack alignment from parameter alignment.  */
      if (SUPPORTS_STACK_ALIGNMENT)
	{
	  unsigned int align
	    = targetm.calls.function_arg_boundary (data.promoted_mode,
						   data.passed_type);
	  align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
				     align);
	  if (TYPE_ALIGN (data.nominal_type) > align)
	    align = MINIMUM_ALIGNMENT (data.nominal_type,
				       TYPE_MODE (data.nominal_type),
				       TYPE_ALIGN (data.nominal_type));
	  if (crtl->stack_alignment_estimated < align)
	    {
	      gcc_assert (!crtl->stack_realign_processed);
	      crtl->stack_alignment_estimated = align;
	    }
	}

      /* Find out where the parameter arrives in this function.  */
      assign_parm_find_entry_rtl (&all, &data);

      /* Find out where stack space for this parameter might be.  */
      if (assign_parm_is_stack_parm (&all, &data))
	{
	  assign_parm_find_stack_rtl (parm, &data);
	  assign_parm_adjust_entry_rtl (&data);
	}
      if (!POINTER_BOUNDS_TYPE_P (data.passed_type))
	{
	  /* Remember where last non bounds arg was passed in case
	     we have to load associated bounds for it from Bounds
	     Table.  */
	  last_arg = parm;
	  last_arg_entry = data.entry_parm;
	  bound_no = 0;
	}
      /* Record permanently how this parm was passed.  */
      if (data.passed_pointer)
	{
	  rtx incoming_rtl
	    = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
			   data.entry_parm);
	  set_decl_incoming_rtl (parm, incoming_rtl, true);
	}
      else
	set_decl_incoming_rtl (parm, data.entry_parm, false);

      /* Bounds should be loaded in a particular order to
	 have registers allocated correctly.  Collect info about
	 input bounds and load them later.  */
      if (POINTER_BOUNDS_TYPE_P (data.passed_type))
	{
	  /* Expect bounds in instrumented functions only.  */
	  gcc_assert (chkp_function_instrumented_p (fndecl));

	  bdata.parm_data = data;
	  bdata.bounds_parm = parm;
	  bdata.ptr_parm = last_arg;
	  bdata.ptr_entry = last_arg_entry;
	  bdata.bound_no = bound_no;
	  bndargs.safe_push (bdata);
	}
      else
	{
	  assign_parm_adjust_stack_rtl (&data);

	  if (assign_parm_setup_block_p (&data))
	    assign_parm_setup_block (&all, parm, &data);
	  else if (data.passed_pointer || use_register_for_decl (parm))
	    assign_parm_setup_reg (&all, parm, &data);
	  else
	    assign_parm_setup_stack (&all, parm, &data);
	}

      if (cfun->stdarg && !DECL_CHAIN (parm))
	{
	  int pretend_bytes = 0;

	  assign_parms_setup_varargs (&all, &data, false);

	  if (chkp_function_instrumented_p (fndecl))
	    {
	      /* We expect this is the last parm.  Otherwise it is wrong
		 to assign bounds right now.  */
	      gcc_assert (i == (fnargs.length () - 1));
	      assign_bounds (bndargs, all);
	      targetm.calls.setup_incoming_vararg_bounds (all.args_so_far,
							  data.promoted_mode,
							  data.passed_type,
							  &pretend_bytes,
							  false);
	    }
	}

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
					  data.passed_type, data.named_arg);

      if (POINTER_BOUNDS_TYPE_P (data.passed_type))
	bound_no++;
    }

  assign_bounds (bndargs, all);

  if (targetm.calls.split_complex_arg)
    assign_parms_unsplit_complex (&all, fnargs);

  fnargs.release ();

  /* Initialize pic_offset_table_rtx with a pseudo register
     if required.  */
  if (targetm.use_pseudo_pic_reg ())
    pic_offset_table_rtx = gen_reg_rtx (Pmode);

  /* Output all parameter conversion instructions (possibly including calls)
     now that all parameters have been copied out of hard registers.  */
  emit_insn (all.first_conversion_insn);

  /* Estimate reload stack alignment from scalar return mode.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (DECL_RESULT (fndecl))
	{
	  tree type = TREE_TYPE (DECL_RESULT (fndecl));
	  machine_mode mode = TYPE_MODE (type);

	  if (mode != BLKmode
	      && mode != VOIDmode
	      && !AGGREGATE_TYPE_P (type))
	    {
	      unsigned int align = GET_MODE_ALIGNMENT (mode);
	      if (crtl->stack_alignment_estimated < align)
		{
		  gcc_assert (!crtl->stack_realign_processed);
		  crtl->stack_alignment_estimated = align;
		}
	    }
	}
    }

  /* If we are receiving a struct value address as the first argument, set up
     the RTL for the function result. As this might require code to convert
     the transmitted address to Pmode, we do this here to ensure that possible
     preliminary conversions of the address have been emitted already.  */
  if (all.function_result_decl)
    {
      tree result = DECL_RESULT (current_function_decl);
      rtx addr = DECL_RTL (all.function_result_decl);
      rtx x;

      if (DECL_BY_REFERENCE (result))
	{
	  SET_DECL_VALUE_EXPR (result, all.function_result_decl);
	  x = addr;
	}
      else
	{
	  SET_DECL_VALUE_EXPR (result,
			       build1 (INDIRECT_REF, TREE_TYPE (result),
				       all.function_result_decl));
	  addr = convert_memory_address (Pmode, addr);
	  x = gen_rtx_MEM (DECL_MODE (result), addr);
	  set_mem_attributes (x, result, 1);
	}

      DECL_HAS_VALUE_EXPR_P (result) = 1;

      SET_DECL_RTL (result, x);
    }

  /* We have aligned all the args, so add space for the pretend args.  */
  crtl->args.pretend_args_size = all.pretend_args_size;
  all.stack_args_size.constant += all.extra_pretend_bytes;
  crtl->args.size = all.stack_args_size.constant;

  /* Adjust function incoming argument size for alignment and
     minimum length.  */

  crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
  crtl->args.size = CEIL_ROUND (crtl->args.size,
				PARM_BOUNDARY / BITS_PER_UNIT);

#ifdef ARGS_GROW_DOWNWARD
  crtl->args.arg_offset_rtx
    = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
       : expand_expr (size_diffop (all.stack_args_size.var,
				   size_int (-all.stack_args_size.constant)),
		      NULL_RTX, VOIDmode, EXPAND_NORMAL));
#else
  crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
#endif

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
							 TREE_TYPE (fndecl),
							 crtl->args.size);

  /* For stdarg.h function, save info about
     regs and stack space used by the named args.  */

  crtl->args.info = all.args_so_far_v;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  crtl->return_rtx
    = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
       ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      tree decl_result = DECL_RESULT (fndecl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl;

	  real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
							fndecl, true);
	  if (chkp_function_instrumented_p (fndecl))
	    crtl->return_bnd
	      = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result),
							  fndecl, true);
	  REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
	  /* The delay slot scheduler assumes that crtl->return_rtx
	     holds the hard register containing the return value, not a
	     temporary pseudo.  */
	  crtl->return_rtx = real_decl_rtl;
	}
    }
}

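/* Editor's worked example, not part of GCC: the incoming-size
   adjustment above with 20 bytes of stack args, reg_parm_stack_space
   == 32 and PARM_BOUNDARY == 64:

     crtl->args.size = MAX (20, 32) = 32
     CEIL_ROUND (32, 64 / 8)        = 32	(already a multiple of 8)

   whereas 20 bytes with no reserved register-parameter area would
   round up to 24.  */
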
/* A subroutine of gimplify_parameters, invoked via walk_tree.
   For all seen types, gimplify their sizes.  */

static tree
gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  *walk_subtrees = 0;
  if (TYPE_P (t))
    {
      if (POINTER_TYPE_P (t))
	*walk_subtrees = 1;
      else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
	       && !TYPE_SIZES_GIMPLIFIED (t))
	{
	  gimplify_type_sizes (t, (gimple_seq *) data);
	  *walk_subtrees = 1;
	}
    }

  return NULL;
}

/* Gimplify the parameter list for current_function_decl.  This involves
   evaluating SAVE_EXPRs of variable sized parameters and generating code
   to implement callee-copies reference parameters.  Returns a sequence of
   statements to add to the beginning of the function.  */

gimple_seq
gimplify_parameters (void)
{
  struct assign_parm_data_all all;
  tree parm;
  gimple_seq stmts = NULL;
  vec<tree> fnargs;
  unsigned i;

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
	continue;

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
					  data.passed_type, data.named_arg);

      /* ??? Once upon a time variable_size stuffed parameter list
	 SAVE_EXPRs (amongst others) onto a pending sizes list.  This
	 turned out to be less than manageable in the gimple world.
	 Now we have to hunt them down ourselves.  */
      walk_tree_without_duplicates (&data.passed_type,
				    gimplify_parm_type, &stmts);

      if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
	{
	  gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
	}

      if (data.passed_pointer)
	{
	  tree type = TREE_TYPE (data.passed_type);
	  if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
				       type, data.named_arg))
	    {
	      tree local, t;

	      /* For constant-sized objects, this is trivial; for
		 variable-sized objects, we have to play games.  */
	      if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
		  && !(flag_stack_check == GENERIC_STACK_CHECK
		       && compare_tree_int (DECL_SIZE_UNIT (parm),
					    STACK_CHECK_MAX_VAR_SIZE) > 0))
		{
		  local = create_tmp_var (type, get_name (parm));
		  DECL_IGNORED_P (local) = 0;
		  /* If PARM was addressable, move that flag over
		     to the local copy, as its address will be taken,
		     not the PARM's.  Keep the parm's address taken
		     as we'll query that flag during gimplification.  */
		  if (TREE_ADDRESSABLE (parm))
		    TREE_ADDRESSABLE (local) = 1;
		  else if (TREE_CODE (type) == COMPLEX_TYPE
			   || TREE_CODE (type) == VECTOR_TYPE)
		    DECL_GIMPLE_REG_P (local) = 1;
		}
	      else
		{
		  tree ptr_type, addr;

		  ptr_type = build_pointer_type (type);
		  addr = create_tmp_reg (ptr_type, get_name (parm));
		  DECL_IGNORED_P (addr) = 0;
		  local = build_fold_indirect_ref (addr);

		  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		  t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
				       size_int (DECL_ALIGN (parm)));
		  /* The call has been built for a variable-sized object.  */
		  CALL_ALLOCA_FOR_VAR_P (t) = 1;
		  t = fold_convert (ptr_type, t);
		  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
		  gimplify_and_add (t, &stmts);
		}

	      gimplify_assign (local, parm, &stmts);

	      SET_DECL_VALUE_EXPR (parm, local);
	      DECL_HAS_VALUE_EXPR_P (parm) = 1;
	    }
	}
    }

  fnargs.release ();

  return stmts;
}

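/* Editor's sketch, not part of GCC: for a callee-copied reference
   parameter 'struct S s' of constant size, the loop above effectively
   prepends something like

     S.1 = *s_ptr;	// gimplify_assign of the parm into the copy

   to the function body and rewrites uses of the parm to the local S.1
   via SET_DECL_VALUE_EXPR; for variable-sized S the copy instead lives
   in memory obtained from __builtin_alloca_with_align.  */
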
/* Compute the size and offset from the start of the stacked arguments for a
   parm passed in mode PASSED_MODE and with type TYPE.

   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.

   The starting offset and size for this parm are returned in
   LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
   nonzero, the offset is that of stack slot, which is returned in
   LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
   padding required from the initial offset ptr to the stack slot.

   IN_REGS is nonzero if the argument will be passed in registers.  It will
   never be set if REG_PARM_STACK_SPACE is not defined.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   FNDECL is the function in which the argument was defined.

   There are two types of rounding that are done.  The first, controlled by
   TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
   argument list to be aligned to the specific boundary (in bits).  This
   rounding affects the initial and starting offsets, but not the argument
   size.

   The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
   initial offset is not affected by this rounding, while the size always
   is and the starting offset may be.  */

/*  LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
    INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
    callers pass in the total size of args so far as
    INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */

void
locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
		     int reg_parm_stack_space, int partial,
		     tree fndecl ATTRIBUTE_UNUSED,
		     struct args_size *initial_offset_ptr,
		     struct locate_and_pad_arg_data *locate)
{
  tree sizetree;
  enum direction where_pad;
  unsigned int boundary, round_boundary;
  int part_size_in_regs;

  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      if (reg_parm_stack_space > 0)
	{
	  if (initial_offset_ptr->var)
	    {
	      initial_offset_ptr->var
		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
			      ssize_int (reg_parm_stack_space));
	      initial_offset_ptr->constant = 0;
	    }
	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
	    initial_offset_ptr->constant = reg_parm_stack_space;
	}
    }

  part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);

  sizetree
    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
  where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
  boundary = targetm.calls.function_arg_boundary (passed_mode, type);
  round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
							      type);
  locate->where_pad = where_pad;

  /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  locate->boundary = boundary;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      /* stack_alignment_estimated can't change after stack has been
	 realigned.  */
      if (crtl->stack_alignment_estimated < boundary)
	{
	  if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = boundary;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized
			  && crtl->stack_realign_needed);
	    }
	}
    }

  /* Remember if the outgoing parameter requires extra alignment on the
     calling function side.  */
  if (crtl->stack_alignment_needed < boundary)
    crtl->stack_alignment_needed = boundary;
  if (crtl->preferred_stack_boundary < boundary)
    crtl->preferred_stack_boundary = boundary;

#ifdef ARGS_GROW_DOWNWARD
  locate->slot_offset.constant = -initial_offset_ptr->constant;
  if (initial_offset_ptr->var)
    locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
					  initial_offset_ptr->var);

  {
    tree s2 = sizetree;
    if (where_pad != none
	&& (!tree_fits_uhwi_p (sizetree)
	    || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
      s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
    SUB_PARM_SIZE (locate->slot_offset, s2);
  }

  locate->slot_offset.constant += part_size_in_regs;

  if (!in_regs || reg_parm_stack_space > 0)
    pad_to_arg_alignment (&locate->slot_offset, boundary,
			  &locate->alignment_pad);

  locate->size.constant = (-initial_offset_ptr->constant
			   - locate->slot_offset.constant);
  if (initial_offset_ptr->var)
    locate->size.var = size_binop (MINUS_EXPR,
				   size_binop (MINUS_EXPR,
					       ssize_int (0),
					       initial_offset_ptr->var),
				   locate->slot_offset.var);

  /* Pad_below needs the pre-rounded size to know how much to pad
     below.  */
  locate->offset = locate->slot_offset;
  if (where_pad == downward)
    pad_below (&locate->offset, passed_mode, sizetree);

#else /* !ARGS_GROW_DOWNWARD */
  if (!in_regs || reg_parm_stack_space > 0)
    pad_to_arg_alignment (initial_offset_ptr, boundary,
			  &locate->alignment_pad);
  locate->slot_offset = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
  if (passed_mode != BLKmode)
    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

  /* Pad_below needs the pre-rounded size to know how much to pad below
     so this must be done before rounding up.  */
  locate->offset = locate->slot_offset;
  if (where_pad == downward)
    pad_below (&locate->offset, passed_mode, sizetree);

  if (where_pad != none
      && (!tree_fits_uhwi_p (sizetree)
	  || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
    sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);

  ADD_PARM_SIZE (locate->size, sizetree);

  locate->size.constant -= part_size_in_regs;
#endif /* ARGS_GROW_DOWNWARD */

#ifdef FUNCTION_ARG_OFFSET
  locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
#endif
}

/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */

static void
pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
		      struct args_size *alignment_pad)
{
  tree save_var = NULL_TREE;
  HOST_WIDE_INT save_constant = 0;
  int boundary_in_bytes = boundary / BITS_PER_UNIT;
  HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;

#ifdef SPARC_STACK_BOUNDARY_HACK
  /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
     the real alignment of %sp.  However, when it does this, the
     alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
  if (SPARC_STACK_BOUNDARY_HACK)
    sp_offset = 0;
#endif

  if (boundary > PARM_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
	{
	  tree sp_offset_tree = ssize_int (sp_offset);
	  tree offset = size_binop (PLUS_EXPR,
				    ARGS_SIZE_TREE (*offset_ptr),
				    sp_offset_tree);
#ifdef ARGS_GROW_DOWNWARD
	  tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
#else
	  tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
#endif

	  offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
	  /* ARGS_SIZE_TREE includes constant term.  */
	  offset_ptr->constant = 0;
	  if (boundary > PARM_BOUNDARY)
	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
					     save_var);
	}
      else
	{
	  offset_ptr->constant = -sp_offset +
#ifdef ARGS_GROW_DOWNWARD
	    FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#else
	    CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#endif
	  if (boundary > PARM_BOUNDARY)
	    alignment_pad->constant = offset_ptr->constant - save_constant;
	}
    }
}
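/* Example of the arithmetic above (assumed values): with boundary ==
   128 bits, boundary_in_bytes == 16, and STACK_POINTER_OFFSET == 8, a
   constant offset of 20 becomes

       -8 + CEIL_ROUND (20 + 8, 16) == -8 + 32 == 24,

   so the slot address sp + 8 + 24 == sp + 32 is 16-byte aligned, and
   alignment_pad->constant records the 4 bytes of padding added.  */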
static void
pad_below (struct args_size *offset_ptr, machine_mode passed_mode,
	   tree sizetree)
{
  if (passed_mode != BLKmode)
    {
      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
	offset_ptr->constant
	  += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
	       / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
	      - GET_MODE_SIZE (passed_mode));
    }
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
	  || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
	{
	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
	  tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
	  ADD_PARM_SIZE (*offset_ptr, s2);
	  SUB_PARM_SIZE (*offset_ptr, sizetree);
	}
    }
}
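/* Example (assuming PARM_BOUNDARY == 32): an HImode value occupies 2
   bytes but its slot is padded to 4, so the constant offset grows by
   4 - 2 == 2 bytes, placing the value at the high end of its slot.
   For a BLKmode size of 5 bytes, s2 == 8 and the net adjustment is
   8 - 5 == 3 bytes of padding below the value.  */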
/* True if register REGNO was alive at a place where `setjmp' was
   called and was set more than once or is an argument.  Such regs may
   be clobbered by `longjmp'.  */

static bool
regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
{
  /* There appear to be cases where some local vars never reach the
     backend but have bogus regnos.  */
  if (regno >= max_reg_num ())
    return false;

  return ((REG_N_SETS (regno) > 1
	   || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
			       regno))
	  && REGNO_REG_SET_P (setjmp_crosses, regno));
}
/* Walk the tree of blocks describing the binding levels within a
   function and warn about variables that might be killed by setjmp or
   vfork.  This is done after calling flow_analysis and before register
   allocation, since that will clobber the pseudo-regs to hard
   regs.  */

static void
setjmp_vars_warning (bitmap setjmp_crosses, tree block)
{
  tree decl, sub;

  for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
    {
      if (TREE_CODE (decl) == VAR_DECL
	  && DECL_RTL_SET_P (decl)
	  && REG_P (DECL_RTL (decl))
	  && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
	warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
		 " %<longjmp%> or %<vfork%>", decl);
    }

  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
    setjmp_vars_warning (setjmp_crosses, sub);
}
/* Do the appropriate part of setjmp_vars_warning
   but for arguments instead of local variables.  */

static void
setjmp_args_warning (bitmap setjmp_crosses)
{
  tree decl;
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = DECL_CHAIN (decl))
    if (DECL_RTL (decl) != 0
	&& REG_P (DECL_RTL (decl))
	&& regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
      warning (OPT_Wclobbered,
	       "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
	       decl);
}
/* Generate warning messages for variables live across setjmp.  */

void
generate_setjmp_warnings (void)
{
  bitmap setjmp_crosses = regstat_get_setjmp_crosses ();

  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
      || bitmap_empty_p (setjmp_crosses))
    return;

  setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
  setjmp_args_warning (setjmp_crosses);
}
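/* A user-level sketch of what these warnings catch (hypothetical
   example, compiled with optimization and -Wclobbered):

       #include <setjmp.h>
       extern void g (void);
       jmp_buf env;
       int f (void)
       {
	 int n = 0;
	 if (setjmp (env) == 0)
	   {
	     n = 1;
	     g ();
	   }
	 return n;
       }

   If N lives in a call-saved register and is set again after the
   setjmp, a longjmp out of g () may restore the pre-setjmp value, so
   "variable 'n' might be clobbered" is reported; declaring N volatile
   forces it to memory and silences the warning.  */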
/* Reverse the order of elements in the fragment chain T of blocks,
   and return the new head of the chain (old last element).
   In addition to that clear BLOCK_SAME_RANGE flags when needed
   and adjust BLOCK_SUPERCONTEXT from the super fragment to
   its super fragment origin.  */

static tree
block_fragments_nreverse (tree t)
{
  tree prev = 0, block, next, prev_super = 0;
  tree super = BLOCK_SUPERCONTEXT (t);
  if (BLOCK_FRAGMENT_ORIGIN (super))
    super = BLOCK_FRAGMENT_ORIGIN (super);
  for (block = t; block; block = next)
    {
      next = BLOCK_FRAGMENT_CHAIN (block);
      BLOCK_FRAGMENT_CHAIN (block) = prev;
      if ((prev && !BLOCK_SAME_RANGE (prev))
	  || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
	      != prev_super))
	BLOCK_SAME_RANGE (block) = 0;
      prev_super = BLOCK_SUPERCONTEXT (block);
      BLOCK_SUPERCONTEXT (block) = super;
      prev = block;
    }
  t = BLOCK_FRAGMENT_ORIGIN (t);
  if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
      != prev_super)
    BLOCK_SAME_RANGE (t) = 0;
  BLOCK_SUPERCONTEXT (t) = super;
  return prev;
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).
   Also do the same on subblocks and reverse the order of elements
   in BLOCK_FRAGMENT_CHAIN as well.  */

static tree
blocks_nreverse_all (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      if (BLOCK_FRAGMENT_CHAIN (block)
	  && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
	{
	  BLOCK_FRAGMENT_CHAIN (block)
	    = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
	  if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
	    BLOCK_SAME_RANGE (block) = 0;
	}
      BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
      prev = block;
    }
  return prev;
}
/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  */
/* ??? Need an option to either create block fragments or to create
   abstract origin duplicates of a source block.  It really depends
   on what optimization has been performed.  */

void
reorder_blocks (void)
{
  tree block = DECL_INITIAL (current_function_decl);

  if (block == NULL_TREE)
    return;

  auto_vec<tree, 10> block_stack;

  /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
  clear_block_marks (block);

  /* Prune the old trees away, so that they don't get in the way.  */
  BLOCK_SUBBLOCKS (block) = NULL_TREE;
  BLOCK_CHAIN (block) = NULL_TREE;

  /* Recreate the block tree from the note nesting.  */
  reorder_blocks_1 (get_insns (), block, &block_stack);
  BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
}
/* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */

void
clear_block_marks (tree block)
{
  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;
      clear_block_marks (BLOCK_SUBBLOCKS (block));
      block = BLOCK_CHAIN (block);
    }
}
static void
reorder_blocks_1 (rtx_insn *insns, tree current_block,
		  vec<tree> *p_block_stack)
{
  rtx_insn *insn;
  tree prev_beg = NULL_TREE, prev_end = NULL_TREE;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	{
	  if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
	    {
	      tree block = NOTE_BLOCK (insn);
	      tree origin;

	      gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
	      origin = block;

	      if (prev_end)
		BLOCK_SAME_RANGE (prev_end) = 0;
	      prev_end = NULL_TREE;

	      /* If we have seen this block before, that means it now
		 spans multiple address regions.  Create a new fragment.  */
	      if (TREE_ASM_WRITTEN (block))
		{
		  tree new_block = copy_node (block);

		  BLOCK_SAME_RANGE (new_block) = 0;
		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
		  BLOCK_FRAGMENT_CHAIN (new_block)
		    = BLOCK_FRAGMENT_CHAIN (origin);
		  BLOCK_FRAGMENT_CHAIN (origin) = new_block;

		  NOTE_BLOCK (insn) = new_block;
		  block = new_block;
		}

	      if (prev_beg == current_block && prev_beg)
		BLOCK_SAME_RANGE (block) = 1;

	      prev_beg = origin;

	      BLOCK_SUBBLOCKS (block) = 0;
	      TREE_ASM_WRITTEN (block) = 1;
	      /* When there's only one block for the entire function,
		 current_block == block and we mustn't do this, it
		 will cause infinite recursion.  */
	      if (block != current_block)
		{
		  tree super;
		  if (block != origin)
		    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
				|| BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
							  (origin))
				   == current_block);
		  if (p_block_stack->is_empty ())
		    super = current_block;
		  else
		    {
		      super = p_block_stack->last ();
		      gcc_assert (super == current_block
				  || BLOCK_FRAGMENT_ORIGIN (super)
				     == current_block);
		    }
		  BLOCK_SUPERCONTEXT (block) = super;
		  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
		  BLOCK_SUBBLOCKS (current_block) = block;
		  current_block = origin;
		}
	      p_block_stack->safe_push (block);
	    }
	  else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
	    {
	      NOTE_BLOCK (insn) = p_block_stack->pop ();
	      current_block = BLOCK_SUPERCONTEXT (current_block);
	      if (BLOCK_FRAGMENT_ORIGIN (current_block))
		current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
	      prev_beg = NULL_TREE;
	      prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
			 ? NOTE_BLOCK (insn) : NULL_TREE;
	    }
	}
      else
	{
	  prev_beg = NULL_TREE;
	  if (prev_end)
	    BLOCK_SAME_RANGE (prev_end) = 0;
	  prev_end = NULL_TREE;
	}
    }
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

tree
blocks_nreverse (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      prev = block;
    }
  return prev;
}
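/* E.g. the chain A -> B -> C is rewired in place to C -> B -> A, and
   C is returned as the new head.  */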
/* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
   by modifying the last node in chain 1 to point to chain 2.  */

tree
block_chainon (tree op1, tree op2)
{
  tree t1;

  if (!op1)
    return op2;
  if (!op2)
    return op1;

  for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
    continue;
  BLOCK_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    tree t2;
    for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}
/* Count the subblocks of the list starting with BLOCK.  If VECTOR is
   non-NULL, list them all into VECTOR, in a depth-first preorder
   traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (tree block, tree *vector)
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
	vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
			      vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}
/* Return a vector containing all the blocks rooted at BLOCK.  The
   number of elements in the vector is stored in N_BLOCKS_P.  The
   vector is dynamically allocated; it is the caller's responsibility
   to call `free' on the pointer returned.  */

static tree *
get_block_vector (tree block, int *n_blocks_p)
{
  tree *block_vector;

  *n_blocks_p = all_blocks (block, NULL);
  block_vector = XNEWVEC (tree, *n_blocks_p);
  all_blocks (block, block_vector);

  return block_vector;
}
static GTY(()) int next_block_index = 2;

/* Set BLOCK_NUMBER for all the blocks in FN.  */

void
number_blocks (tree fn)
{
  int i;
  int n_blocks;
  tree *block_vector;

  /* For SDB and XCOFF debugging output, we start numbering the blocks
     from 1 within each function, rather than keeping a running
     count.  */
#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
  if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
    next_block_index = 1;
#endif

  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);

  /* The top-level BLOCK isn't numbered at all.  */
  for (i = 1; i < n_blocks; ++i)
    /* We number the blocks from two.  */
    BLOCK_NUMBER (block_vector[i]) = next_block_index++;

  free (block_vector);
}
/* If VAR is present in a subblock of BLOCK, return the subblock.  */

DEBUG_FUNCTION tree
debug_find_var_in_block_tree (tree var, tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (t == var)
      return block;

  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree ret = debug_find_var_in_block_tree (var, t);
      if (ret)
	return ret;
    }

  return NULL_TREE;
}
/* Keep track of whether we're in a dummy function context.  If we are,
   we don't want to invoke the set_current_function hook, because we'll
   get into trouble if the hook calls target_reinit () recursively or
   when the initial initialization is not yet complete.  */

static bool in_dummy_function;

/* Invoke the target hook when setting cfun.  Update the optimization options
   if the function uses different options than the default.  */

static void
invoke_set_current_function_hook (tree fndecl)
{
  if (!in_dummy_function)
    {
      tree opts = ((fndecl)
		   ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
		   : optimization_default_node);

      if (!opts)
	opts = optimization_default_node;

      /* Change optimization options if needed.  */
      if (optimization_current_node != opts)
	{
	  optimization_current_node = opts;
	  cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
	}

      targetm.set_current_function (fndecl);
      this_fn_optabs = this_target_optabs;

      if (opts != optimization_default_node)
	{
	  init_tree_optimization_optabs (opts);
	  if (TREE_OPTIMIZATION_OPTABS (opts))
	    this_fn_optabs = (struct target_optabs *)
	      TREE_OPTIMIZATION_OPTABS (opts);
	}
    }
}
/* cfun should never be set directly; use this function.  */

void
set_cfun (struct function *new_cfun)
{
  if (cfun != new_cfun)
    {
      cfun = new_cfun;
      invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
    }
}
/* Initialized with NOGC, making this poisonous to the garbage collector.  */

static vec<function_p> cfun_stack;

/* Push the current cfun onto the stack, and set cfun to new_cfun.  Also set
   current_function_decl accordingly.  */

void
push_cfun (struct function *new_cfun)
{
  gcc_assert ((!cfun && !current_function_decl)
	      || (cfun && current_function_decl == cfun->decl));
  cfun_stack.safe_push (cfun);
  current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
  set_cfun (new_cfun);
}
/* Pop cfun from the stack.  Also set current_function_decl accordingly.  */

void
pop_cfun (void)
{
  struct function *new_cfun = cfun_stack.pop ();
  /* When in_dummy_function, we do have a cfun but current_function_decl is
     NULL.  We also allow pushing NULL cfun and subsequently changing
     current_function_decl to something else and have both restored by
     pop_cfun.  */
  gcc_checking_assert (in_dummy_function
		       || !cfun
		       || current_function_decl == cfun->decl);
  set_cfun (new_cfun);
  current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
}
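/* Typical usage (sketch): passes that need to inspect or modify
   another function temporarily bracket the work with this pair, e.g.

       push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
       ... work on OTHER_FNDECL's body ...
       pop_cfun ();

   which keeps cfun and current_function_decl consistent throughout.  */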
/* Return value of funcdef and increase it.  */
int
get_next_funcdef_no (void)
{
  return funcdef_no++;
}

/* Return value of funcdef.  */
int
get_last_funcdef_no (void)
{
  return funcdef_no;
}
/* Allocate a function structure for FNDECL and set its contents
   to the defaults.  Set cfun to the newly-allocated object.
   Some of the helper functions invoked during initialization assume
   that cfun has already been set.  Therefore, assign the new object
   directly into cfun and invoke the back end hook explicitly at the
   very end, rather than initializing a temporary and calling set_cfun
   on it.

   ABSTRACT_P is true if this is a function that will never be seen by
   the middle-end.  Such functions are front-end concepts (like C++
   function templates) that do not correspond directly to functions
   placed in object files.  */

void
allocate_struct_function (tree fndecl, bool abstract_p)
{
  tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;

  cfun = ggc_cleared_alloc<function> ();

  init_eh_for_function ();

  if (init_machine_status)
    cfun->machine = (*init_machine_status) ();

#ifdef OVERRIDE_ABI_FORMAT
  OVERRIDE_ABI_FORMAT (fndecl);
#endif

  if (fndecl != NULL_TREE)
    {
      DECL_STRUCT_FUNCTION (fndecl) = cfun;
      cfun->decl = fndecl;
      current_function_funcdef_no = get_next_funcdef_no ();
    }

  invoke_set_current_function_hook (fndecl);

  if (fndecl != NULL_TREE)
    {
      tree result = DECL_RESULT (fndecl);
      if (!abstract_p && aggregate_value_p (result, fndecl))
	{
#ifdef PCC_STATIC_STRUCT_RETURN
	  cfun->returns_pcc_struct = 1;
#endif
	  cfun->returns_struct = 1;
	}

      cfun->stdarg = stdarg_p (fntype);

      /* Assume all registers in stdarg functions need to be saved.  */
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

      /* ??? This could be set on a per-function basis by the front-end
	 but is this worth the hassle?  */
      cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
      cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;

      if (!profile_flag && !flag_instrument_function_entry_exit)
	DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
    }
}
/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
   instead of just setting it.  */

void
push_struct_function (tree fndecl)
{
  /* When in_dummy_function we might be in the middle of a pop_cfun and
     current_function_decl and cfun may not match.  */
  gcc_assert (in_dummy_function
	      || (!cfun && !current_function_decl)
	      || (cfun && current_function_decl == cfun->decl));
  cfun_stack.safe_push (cfun);
  current_function_decl = fndecl;
  allocate_struct_function (fndecl, false);
}
/* Reset crtl and other non-struct-function variables to defaults as
   appropriate for emitting rtl at the start of a function.  */

static void
prepare_function_start (void)
{
  gcc_assert (!crtl->emit.x_last_insn);
  init_temp_slots ();
  init_emit ();
  init_varasm_status ();
  init_expr ();
  default_rtl_profile ();

  if (flag_stack_usage_info)
    {
      cfun->su = ggc_cleared_alloc<stack_usage> ();
      cfun->su->static_stack_size = -1;
    }

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate that we want CONCATs now.  */
  generating_concat_p = 1;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;
}
/* Initialize the rtl expansion mechanism so that we can do simple things
   like generate sequences.  This is used to provide a context during global
   initialization of some passes.  You must call expand_dummy_function_end
   to exit this context.  */

void
init_dummy_function_start (void)
{
  gcc_assert (!in_dummy_function);
  in_dummy_function = true;
  push_struct_function (NULL_TREE);
  prepare_function_start ();
}
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (tree subr)
{
  if (subr && DECL_STRUCT_FUNCTION (subr))
    set_cfun (DECL_STRUCT_FUNCTION (subr));
  else
    allocate_struct_function (subr, false);

  /* Initialize backend, if needed.  */
  initialize_rtl ();

  prepare_function_start ();
  decide_function_section (subr);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning (OPT_Waggregate_return, "function returns an aggregate");
}
/* Expand code to verify the stack_protect_guard.  This is invoked at
   the end of a function to be protected.  */

#ifndef HAVE_stack_protect_test
# define HAVE_stack_protect_test		0
# define gen_stack_protect_test(x, y, z)	(gcc_unreachable (), NULL_RTX)
#endif

void
stack_protect_epilogue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx_code_label *label = gen_label_rtx ();
  rtx x, y, tmp;

  x = expand_normal (crtl->stack_protect_guard);
  y = expand_normal (guard_decl);

  /* Allow the target to compare Y with X without leaking either into
     a register.  */
  switch ((int) (HAVE_stack_protect_test != 0))
    {
    case 1:
      tmp = gen_stack_protect_test (x, y, label);
      if (tmp)
	{
	  emit_insn (tmp);
	  break;
	}
      /* FALLTHRU */

    default:
      emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
      break;
    }

  /* The noreturn predictor has been moved to the tree level.  The rtl-level
     predictors estimate this branch about 20%, which isn't enough to get
     things moved out of line.  Since this is the only extant case of adding
     a noreturn function at the rtl level, it doesn't seem worth doing aught
     except adding the prediction by hand.  */
  tmp = get_last_insn ();
  if (JUMP_P (tmp))
    predict_insn_def (as_a <rtx_insn *> (tmp), PRED_NORETURN, TAKEN);

  expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
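/* The emitted check is conceptually (pseudo-code sketch):

       if (guard_slot == guard_value) goto label;
       fail ();

   where the comparison uses either the target's stack_protect_test
   pattern or a plain compare-and-jump in ptr_mode, and the failure
   path typically ends up calling __stack_chk_fail, which does not
   return.  */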
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (tree subr)
{
  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  crtl->profile
    = (profile_flag
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  crtl->limit_stack
    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));

  /* Make the label for return statements to jump to.  Do not special
     case machines with special return instructions -- they will be
     handled later during jump, ifcvt, or epilogue creation.  */
  return_label = gen_label_rtx ();

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr), subr))
    {
      /* Returning something that won't go in a register.  */
      rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (cfun->returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (sv)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, sv);
	    }
	}
      if (value_address)
	{
	  rtx x = value_address;
	  if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
	    {
	      x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
	      set_mem_attributes (x, DECL_RESULT (subr), 1);
	    }
	  SET_DECL_RTL (DECL_RESULT (subr), x);
	}
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
  else
    {
      /* Compute the return values into a pseudo reg, which we will copy
	 into the true return register after the cleanups are done.  */
      tree return_type = TREE_TYPE (DECL_RESULT (subr));
      if (TYPE_MODE (return_type) != BLKmode
	  && targetm.calls.return_in_msb (return_type))
	/* expand_function_end will insert the appropriate padding in
	   this case.  Use the return value's natural (unpadded) mode
	   within the function proper.  */
	SET_DECL_RTL (DECL_RESULT (subr),
		      gen_reg_rtx (TYPE_MODE (return_type)));
      else
	{
	  /* In order to figure out what mode to use for the pseudo, we
	     figure out what the mode of the eventual return register will
	     actually be, and use that.  */
	  rtx hard_reg = hard_function_value (return_type, subr, 0, 1);

	  /* Structures that are returned in registers are not
	     aggregate_value_p, so we may see a PARALLEL or a REG.  */
	  if (REG_P (hard_reg))
	    SET_DECL_RTL (DECL_RESULT (subr),
			  gen_reg_rtx (GET_MODE (hard_reg)));
	  else
	    {
	      gcc_assert (GET_CODE (hard_reg) == PARALLEL);
	      SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
	    }
	}

      /* Set DECL_REGISTER flag so that expand_function_end will copy the
	 result to the real return register(s).  */
      DECL_REGISTER (DECL_RESULT (subr)) = 1;

      if (chkp_function_instrumented_p (current_function_decl))
	{
	  tree return_type = TREE_TYPE (DECL_RESULT (subr));
	  rtx bounds = targetm.calls.chkp_function_value_bounds (return_type,
								 subr, 1);
	  SET_DECL_BOUNDS_RTL (DECL_RESULT (subr), bounds);
	}
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */
  assign_parms (subr);

  /* If function gets a static chain arg, store it.  */
  if (cfun->static_chain_decl)
    {
      tree parm = cfun->static_chain_decl;
      rtx local, chain, insn;

      local = gen_reg_rtx (Pmode);
      chain = targetm.calls.static_chain (current_function_decl, true);

      set_decl_incoming_rtl (parm, chain, false);
      SET_DECL_RTL (parm, local);
      mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));

      insn = emit_move_insn (local, chain);

      /* Mark the register as eliminable, similar to parameters.  */
      if (MEM_P (chain)
	  && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
	set_dst_reg_note (insn, REG_EQUIV, chain, local);

      /* If we aren't optimizing, save the static chain onto the stack.  */
      if (!optimize)
	{
	  tree saved_static_chain_decl
	    = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
			  DECL_NAME (parm), TREE_TYPE (parm));
	  rtx saved_static_chain_rtx
	    = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
	  SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
	  emit_move_insn (saved_static_chain_rtx, chain);
	  SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
	  DECL_HAS_VALUE_EXPR_P (parm) = 1;
	}
    }

  /* If the function receives a non-local goto, then store the
     bits we need to restore the frame pointer.  */
  if (cfun->nonlocal_goto_save_area)
    {
      tree t_save;
      rtx r_save;

      tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
      gcc_assert (DECL_RTL_SET_P (var));

      t_save = build4 (ARRAY_REF,
		       TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
		       cfun->nonlocal_goto_save_area,
		       integer_zero_node, NULL_TREE, NULL_TREE);
      r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
      gcc_assert (GET_MODE (r_save) == Pmode);

      emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
      update_nonlocal_goto_save_area ();
    }

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NOTE_INSN_FUNCTION_BEG);

  gcc_assert (NOTE_P (get_last_insn ()));

  parm_birth_insn = get_last_insn ();

  if (crtl->profile)
    {
#ifdef PROFILE_HOOK
      PROFILE_HOOK (current_function_funcdef_no);
#endif
    }

  /* If we are doing generic stack checking, the probe should go here.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
}
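/* For example (GNU C nested function, illustrative only):

       int outer (int x)
       {
	 int inner (void) { return x; }
	 return inner ();
       }

   INNER receives a static chain pointing at OUTER's frame; the code
   above copies the incoming chain into a pseudo and, when not
   optimizing, also into a named stack slot so debuggers can locate
   it.  */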
/* Undo the effects of init_dummy_function_start.  */
void
expand_dummy_function_end (void)
{
  gcc_assert (in_dummy_function);

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  pop_cfun ();
  in_dummy_function = false;
}
/* Helper for diddle_return_value.  */

void
diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
{
  if (! outgoing)
    return;

  if (REG_P (outgoing))
    (*doit) (outgoing, arg);
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
	{
	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

	  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	    (*doit) (x, arg);
	}
    }
}

/* Call DOIT for each hard register used as a return value from
   the current function.  */

void
diddle_return_value (void (*doit) (rtx, void *), void *arg)
{
  diddle_return_value_1 (doit, arg, crtl->return_rtx);
  diddle_return_value_1 (doit, arg, crtl->return_bnd);
}

static void
do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_clobber (reg);
}

void
clobber_return_register (void)
{
  diddle_return_value (do_clobber_return_reg, NULL);

  /* In case we use a pseudo to return the value, clobber it too.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);
      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
	do_clobber_return_reg (decl_rtl, NULL);
    }
}

static void
do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_use (reg);
}

static void
use_return_register (void)
{
  diddle_return_value (do_use_return_reg, NULL);
}
/* Possibly warn about unused parameters.  */
void
do_warn_unused_parameter (tree fn)
{
  tree decl;

  for (decl = DECL_ARGUMENTS (fn);
       decl; decl = DECL_CHAIN (decl))
    if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
	&& DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
	&& !TREE_NO_WARNING (decl))
      warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
}
/* Set the location of the insn chain starting at INSN to LOC.  */

static void
set_insn_locations (rtx_insn *insn, int loc)
{
  while (insn != NULL)
    {
      if (INSN_P (insn))
	INSN_LOCATION (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}
/* Generate RTL for the end of the current function.  */

void
expand_function_end (void)
{
  rtx clobber_after;

  /* If arg_pointer_save_area was referenced only from a nested
     function, we will not have initialized it yet.  Do that now.  */
  if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
    get_arg_pointer_save_area ();

  /* If we are doing generic stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    {
      rtx_insn *insn, *seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (CALL_P (insn))
	  {
	    rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
	    start_sequence ();
	    if (STACK_CHECK_MOVING_SP)
	      anti_adjust_stack_and_probe (max_frame_size, true);
	    else
	      probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
	    seq = get_insns ();
	    end_sequence ();
	    set_insn_locations (seq, prologue_location);
	    emit_insn_before (seq, stack_check_probe_note);
	    break;
	  }
    }

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  set_curr_insn_location (input_location);

  /* Before the return label (if any), clobber the return
     registers so that they are not propagated live to the rest of
     the function.  This can only happen with functions that drop
     through; if there had been a return statement, there would
     have either been a return rtx, or a jump to the return label.

     We delay actual code generation after the current_function_value_rtx
     is computed.  */
  clobber_after = get_last_insn ();

  /* Output the label for the actual return from the function.  */
  emit_label (return_label);

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      /* Let except.c know where it should emit the call to unregister
	 the function context for sjlj exceptions.  */
      if (flag_exceptions)
	sjlj_emit_function_exit_after (get_last_insn ());
    }
  else
    {
      /* We want to ensure that instructions that may trap are not
	 moved into the epilogue by scheduling, because we don't
	 always emit unwind information for the epilogue.  */
      if (cfun->can_throw_non_call_exceptions)
	emit_insn (gen_blockage ());
    }

  /* If this is an implementation of throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl = crtl->return_rtx;

	  /* This should be set in assign_parms.  */
	  gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));

	  /* If this is a BLKmode structure being returned in registers,
	     then use the mode computed in expand_return.  Note that if
	     decl_rtl is memory, then its mode may have been changed,
	     but that crtl->return_rtx has not.  */
	  if (GET_MODE (real_decl_rtl) == BLKmode)
	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));

	  /* If a non-BLKmode return value should be padded at the least
	     significant end of the register, shift it left by the appropriate
	     amount.  BLKmode results are handled using the group load/store
	     machinery.  */
	  if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
	      && REG_P (real_decl_rtl)
	      && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
	    {
	      emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
					   REGNO (real_decl_rtl)),
			      decl_rtl);
	      shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
	    }
	  /* If a named return value dumped decl_return to memory, then
	     we may need to re-do the PROMOTE_MODE signed/unsigned
	     extension.  */
	  else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
	    {
	      int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
	      promote_function_mode (TREE_TYPE (decl_result),
				     GET_MODE (decl_rtl), &unsignedp,
				     TREE_TYPE (current_function_decl), 1);

	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
	    }
	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
	    {
	      /* If expand_function_start has created a PARALLEL for decl_rtl,
		 move the result to the real return registers.  Otherwise, do
		 a group load from decl_rtl for a named return.  */
	      if (GET_CODE (decl_rtl) == PARALLEL)
		emit_group_move (real_decl_rtl, decl_rtl);
	      else
		emit_group_load (real_decl_rtl, decl_rtl,
				 TREE_TYPE (decl_result),
				 int_size_in_bytes (TREE_TYPE (decl_result)));
	    }
	  /* In the case of complex integer modes smaller than a word, we'll
	     need to generate some non-trivial bitfield insertions.  Do that
	     on a pseudo and not the hard register.  */
	  else if (GET_CODE (decl_rtl) == CONCAT
		   && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
		   && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
	    {
	      int old_generating_concat_p;
	      rtx tmp;

	      old_generating_concat_p = generating_concat_p;
	      generating_concat_p = 0;
	      tmp = gen_reg_rtx (GET_MODE (decl_rtl));
	      generating_concat_p = old_generating_concat_p;

	      emit_move_insn (tmp, decl_rtl);
	      emit_move_insn (real_decl_rtl, tmp);
	    }
	  else
	    emit_move_insn (real_decl_rtl, decl_rtl);
	}
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And cfun->returns_pcc_struct is not necessarily set.  */
  if ((cfun->returns_struct || cfun->returns_pcc_struct)
      && !targetm.calls.omit_struct_return_reg)
    {
      rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      rtx outgoing;

      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
	type = TREE_TYPE (type);
      else
	value_address = XEXP (value_address, 0);

      outgoing = targetm.calls.function_value (build_pointer_type (type),
					       current_function_decl, true);

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
      value_address = convert_memory_address (GET_MODE (outgoing),
					      value_address);

      emit_move_insn (outgoing, value_address);

      /* Show return register used to hold result (in this case the address
	 of the result).  */
      crtl->return_rtx = outgoing;
    }

  /* Emit the actual code to clobber return register.  Don't emit
     it if clobber_after is a barrier, then the previous basic block
     certainly doesn't fall thru into the exit block.  */
  if (!BARRIER_P (clobber_after))
    {
      rtx seq;

      start_sequence ();
      clobber_return_register ();
      seq = get_insns ();
      end_sequence ();

      emit_insn_after (seq, clobber_after);
    }

  /* Output the label for the naked return from the function.  */
  if (naked_return_label)
    emit_label (naked_return_label);

  /* @@@ This is a kludge.  We want to ensure that instructions that
     may trap are not moved into the epilogue by scheduling, because
     we don't always emit unwind information for the epilogue.  */
  if (cfun->can_throw_non_call_exceptions
      && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
    emit_insn (gen_blockage ());

  /* If stack protection is enabled for this function, check the guard.  */
  if (crtl->stack_protect_guard)
    stack_protect_epilogue ();

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
  if (! EXIT_IGNORE_STACK
      && cfun->calls_alloca)
    {
      rtx tem = 0, seq;

      start_sequence ();
      emit_stack_save (SAVE_FUNCTION, &tem);
      seq = get_insns ();
      end_sequence ();
      emit_insn_before (seq, parm_birth_insn);

      emit_stack_restore (SAVE_FUNCTION, tem);
    }

  /* ??? This should no longer be necessary since stupid is no longer with
     us, but there are some parts of the compiler (eg reload_combine, and
     sh mach_dep_reorg) that still try and compute their own lifetime info
     instead of using the general framework.  */
  use_return_register ();
}
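/* Example of the PROMOTE_MODE fixup above (assumed target that
   promotes sub-word return values to SImode): for

       signed char g (void) { return -1; }

   decl_rtl may be a QImode pseudo while crtl->return_rtx is an SImode
   hard register, so the final copy goes through convert_move with the
   signedness recomputed by promote_function_mode.  */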
rtx
get_arg_pointer_save_area (void)
{
  rtx ret = arg_pointer_save_area;

  if (! ret)
    {
      ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      arg_pointer_save_area = ret;
    }

  if (! crtl->arg_pointer_save_area_init)
    {
      rtx seq;

      start_sequence ();
      /* Save the arg pointer at the beginning of the function.  The
	 generated stack slot may not be a valid memory address, so we
	 have to check it and fix it if necessary.  */
      emit_move_insn (validize_mem (copy_rtx (ret)),
		      crtl->args.internal_arg_pointer);
      seq = get_insns ();
      end_sequence ();

      push_topmost_sequence ();
      emit_insn_after (seq, entry_of_function ());
      pop_topmost_sequence ();

      crtl->arg_pointer_save_area_init = true;
    }

  return ret;
}
/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
   for the first time.  */

static void
record_insns (rtx_insn *insns, rtx end, htab_t *hashp)
{
  rtx_insn *tmp;
  htab_t hash = *hashp;

  if (hash == NULL)
    *hashp = hash
      = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);

  for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
    {
      void **slot = htab_find_slot (hash, tmp, INSERT);
      gcc_assert (*slot == NULL);
      *slot = tmp;
    }
}
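/* Intended usage (sketch): after emitting a prologue or epilogue
   sequence SEQ, a caller does

       record_insns (seq, NULL, &prologue_insn_hash);

   so that contains () and prologue_epilogue_contains () can later
   recognize those insns, even once they are wrapped in SEQUENCEs.  */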
/* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
   basic block, by splitting, or by peepholes.  If INSN is a prologue or
   epilogue insn, then record COPY as well.  */

void
maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
{
  htab_t hash;
  void **slot;

  hash = epilogue_insn_hash;
  if (!hash || !htab_find (hash, insn))
    {
      hash = prologue_insn_hash;
      if (!hash || !htab_find (hash, insn))
	return;
    }

  slot = htab_find_slot (hash, copy, INSERT);
  gcc_assert (*slot == NULL);
  *slot = copy;
}
/* Determine if any INSNs in HASH are, or are part of, INSN.  Because
   we can be running after reorg, SEQUENCE rtl is possible.  */

static bool
contains (const_rtx insn, htab_t hash)
{
  if (hash == NULL)
    return false;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
      int i;
      for (i = seq->len () - 1; i >= 0; i--)
	if (htab_find (hash, seq->element (i)))
	  return true;
      return false;
    }

  return htab_find (hash, insn) != NULL;
}
int
prologue_epilogue_contains (const_rtx insn)
{
  if (contains (insn, prologue_insn_hash))
    return 1;
  if (contains (insn, epilogue_insn_hash))
    return 1;
  return 0;
}
/* Insert use of return register before the end of BB.  */

static void
emit_use_return_register_into_block (basic_block bb)
{
  rtx seq, insn;
  start_sequence ();
  use_return_register ();
  seq = get_insns ();
  end_sequence ();
  insn = BB_END (bb);
#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    insn = prev_cc0_setter (insn);
#endif
  emit_insn_before (seq, insn);
}
/* Create a return pattern, either simple_return or return, depending on
   simple_p.  */

static rtx
gen_return_pattern (bool simple_p)
{
#ifdef HAVE_simple_return
  return simple_p ? gen_simple_return () : gen_return ();
#else
  gcc_assert (!simple_p);
  return gen_return ();
#endif
}
/* Insert an appropriate return pattern at the end of block BB.  This
   also means updating block_for_insn appropriately.  SIMPLE_P is
   the same as in gen_return_pattern and passed to it.  */

void
emit_return_into_block (bool simple_p, basic_block bb)
{
  rtx jump, pat;
  jump = emit_jump_insn_after (gen_return_pattern (simple_p), BB_END (bb));
  pat = PATTERN (jump);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  gcc_assert (ANY_RETURN_P (pat));
  JUMP_LABEL (jump) = pat;
}
/* Set JUMP_LABEL for a return insn.  */

void
set_return_jump_label (rtx returnjump)
{
  rtx pat = PATTERN (returnjump);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (ANY_RETURN_P (pat))
    JUMP_LABEL (returnjump) = pat;
  else
    JUMP_LABEL (returnjump) = ret_rtx;
}
#if defined (HAVE_return) || defined (HAVE_simple_return)
/* Return true if there are any active insns between HEAD and TAIL.  */
bool
active_insn_between (rtx_insn *head, rtx_insn *tail)
{
  while (tail)
    {
      if (active_insn_p (tail))
	return true;
      if (tail == head)
	return false;
      tail = PREV_INSN (tail);
    }
  return false;
}
/* LAST_BB is a block that exits, and empty of active instructions.
   Examine its predecessors for jumps that can be converted to
   (conditional) returns.  */
static vec<edge>
convert_jumps_to_returns (basic_block last_bb, bool simple_p,
			  vec<edge> unconverted ATTRIBUTE_UNUSED)
{
  int i;
  basic_block bb;
  rtx label;
  edge_iterator ei;
  edge e;
  auto_vec<basic_block> src_bbs (EDGE_COUNT (last_bb->preds));

  FOR_EACH_EDGE (e, ei, last_bb->preds)
    if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
      src_bbs.quick_push (e->src);

  label = BB_HEAD (last_bb);

  FOR_EACH_VEC_ELT (src_bbs, i, bb)
    {
      rtx_insn *jump = BB_END (bb);

      if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
	continue;

      e = find_edge (bb, last_bb);

      /* If we have an unconditional jump, we can replace that
	 with a simple return instruction.  */
      if (simplejump_p (jump))
	{
	  /* The use of the return register might be present in the exit
	     fallthru block.  Either:
	     - removing the use is safe, and we should remove the use in
	       the exit fallthru block, or
	     - removing the use is not safe, and we should add it here.
	     For now, we conservatively choose the latter.  Either of the
	     2 helps in crossjumping.  */
	  emit_use_return_register_into_block (bb);

	  emit_return_into_block (simple_p, bb);
	  delete_insn (jump);
	}

      /* If we have a conditional jump branching to the last
	 block, we can try to replace that with a conditional
	 return instruction.  */
      else if (condjump_p (jump))
	{
	  rtx dest;

	  if (simple_p)
	    dest = simple_return_rtx;
	  else
	    dest = ret_rtx;
	  if (!redirect_jump (jump, dest, 0))
	    {
#ifdef HAVE_simple_return
	      if (simple_p)
		{
		  if (dump_file)
		    fprintf (dump_file,
			     "Failed to redirect bb %d branch.\n", bb->index);
		  unconverted.safe_push (e);
		}
#endif
	      continue;
	    }

	  /* See comment in simplejump_p case above.  */
	  emit_use_return_register_into_block (bb);

	  /* If this block has only one successor, it both jumps
	     and falls through to the fallthru block, so we can't
	     delete the edge.  */
	  if (single_succ_p (bb))
	    continue;
	}
      else
	{
#ifdef HAVE_simple_return
	  if (simple_p)
	    {
	      if (dump_file)
		fprintf (dump_file,
			 "Failed to redirect bb %d branch.\n", bb->index);
	      unconverted.safe_push (e);
	    }
#endif
	  continue;
	}

      /* Fix up the CFG for the successful change we just made.  */
      redirect_edge_succ (e, EXIT_BLOCK_PTR_FOR_FN (cfun));
      e->flags &= ~EDGE_CROSSING;
    }
  return unconverted;
}
/* Emit a return insn for the exit fallthru block.  */
static basic_block
emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
{
  basic_block last_bb = exit_fallthru_edge->src;

  if (JUMP_P (BB_END (last_bb)))
    {
      last_bb = split_edge (exit_fallthru_edge);
      exit_fallthru_edge = single_succ_edge (last_bb);
    }
  emit_barrier_after (BB_END (last_bb));
  emit_return_into_block (simple_p, last_bb);
  exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
  return last_bb;
}
#endif
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.

   Notes on epilogue placement:
   There are several kinds of edges to the exit block:
   * a single fallthru edge from LAST_BB
   * possibly, edges from blocks containing sibcalls
   * possibly, fake edges from infinite loops

   The epilogue is always emitted on the fallthru edge from the last basic
   block in the function, LAST_BB, into the exit block.

   If LAST_BB is empty except for a label, it is the target of every
   other basic block in the function that ends in a return.  If a
   target has a return or simple_return pattern (possibly with
   conditional variants), these basic blocks can be changed so that a
   return insn is emitted into them, and their target is adjusted to
   the real exit block.

   Notes on shrink wrapping: We implement a fairly conservative
   version of shrink-wrapping rather than the textbook one.  We only
   generate a single prologue and a single epilogue.  This is
   sufficient to catch a number of interesting cases involving early
   exits.

   First, we identify the blocks that require the prologue to occur before
   them.  These are the ones that modify a call-saved register, or reference
   any of the stack or frame pointer registers.  To simplify things, we then
   mark everything reachable from these blocks as also requiring a prologue.
   This takes care of loops automatically, and avoids the need to examine
   whether MEMs reference the frame, since it is sufficient to check for
   occurrences of the stack or frame pointer.

   We then compute the set of blocks for which the need for a prologue
   is anticipatable (borrowing terminology from the shrink-wrapping
   description in Muchnick's book).  These are the blocks which either
   require a prologue themselves, or those that have only successors
   where the prologue is anticipatable.  The prologue needs to be
   inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
   is not.  For the moment, we ensure that only one such edge exists.

   The epilogue is placed as described above, but we make a
   distinction between inserting return and simple_return patterns
   when modifying other blocks that end in a return.  Blocks that end
   in a sibcall omit the sibcall_epilogue if the block is not in
   ANTIC.  */
5849 in a sibcall omit the sibcall_epilogue if the block is not in
5853 thread_prologue_and_epilogue_insns (void)
5856 #ifdef HAVE_simple_return
5857 vec
<edge
> unconverted_simple_returns
= vNULL
;
5858 bitmap_head bb_flags
;
5860 rtx_insn
*returnjump
;
5861 rtx_insn
*epilogue_end ATTRIBUTE_UNUSED
;
5862 rtx_insn
*prologue_seq ATTRIBUTE_UNUSED
, *split_prologue_seq ATTRIBUTE_UNUSED
;
5863 edge e
, entry_edge
, orig_entry_edge
, exit_fallthru_edge
;
5868 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
5871 epilogue_end
= NULL
;
5874 /* Can't deal with multiple successors of the entry block at the
5875 moment. Function should always have at least one entry
5877 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun
)));
5878 entry_edge
= single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
5879 orig_entry_edge
= entry_edge
;
5881 split_prologue_seq
= NULL
;
5882 if (flag_split_stack
5883 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun
->decl
))
5886 #ifndef HAVE_split_stack_prologue
5889 gcc_assert (HAVE_split_stack_prologue
);
5892 emit_insn (gen_split_stack_prologue ());
5893 split_prologue_seq
= get_insns ();
5896 record_insns (split_prologue_seq
, NULL
, &prologue_insn_hash
);
5897 set_insn_locations (split_prologue_seq
, prologue_location
);
5901 prologue_seq
= NULL
;
5902 #ifdef HAVE_prologue
5906 rtx_insn
*seq
= safe_as_a
<rtx_insn
*> (gen_prologue ());
5909 /* Insert an explicit USE for the frame pointer
5910 if the profiling is on and the frame pointer is required. */
5911 if (crtl
->profile
&& frame_pointer_needed
)
5912 emit_use (hard_frame_pointer_rtx
);
5914 /* Retain a map of the prologue insns. */
5915 record_insns (seq
, NULL
, &prologue_insn_hash
);
5916 emit_note (NOTE_INSN_PROLOGUE_END
);
5918 /* Ensure that instructions are not moved into the prologue when
5919 profiling is on. The call to the profiling routine can be
5920 emitted within the live range of a call-clobbered register. */
5921 if (!targetm
.profile_before_prologue () && crtl
->profile
)
5922 emit_insn (gen_blockage ());
5924 prologue_seq
= get_insns ();
5926 set_insn_locations (prologue_seq
, prologue_location
);
5930 #ifdef HAVE_simple_return
5931 bitmap_initialize (&bb_flags
, &bitmap_default_obstack
);
5933 /* Try to perform a kind of shrink-wrapping, making sure the
5934 prologue/epilogue is emitted only around those parts of the
5935 function that require it. */
5937 try_shrink_wrapping (&entry_edge
, orig_entry_edge
, &bb_flags
, prologue_seq
);
5940 if (split_prologue_seq
!= NULL_RTX
)
5942 insert_insn_on_edge (split_prologue_seq
, orig_entry_edge
);
5945 if (prologue_seq
!= NULL_RTX
)
5947 insert_insn_on_edge (prologue_seq
, entry_edge
);
5951 /* If the exit block has no non-fake predecessors, we don't need
5953 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
5954 if ((e
->flags
& EDGE_FAKE
) == 0)
5959 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
));
5961 exit_fallthru_edge
= find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
);
5963 #ifdef HAVE_simple_return
5964 if (entry_edge
!= orig_entry_edge
)
5966 = get_unconverted_simple_return (exit_fallthru_edge
, bb_flags
,
5967 &unconverted_simple_returns
,
5973 if (exit_fallthru_edge
== NULL
)
5978 basic_block last_bb
= exit_fallthru_edge
->src
;
5980 if (LABEL_P (BB_HEAD (last_bb
))
5981 && !active_insn_between (BB_HEAD (last_bb
), BB_END (last_bb
)))
5982 convert_jumps_to_returns (last_bb
, false, vNULL
);
5984 if (EDGE_COUNT (last_bb
->preds
) != 0
5985 && single_succ_p (last_bb
))
5987 last_bb
= emit_return_for_exit (exit_fallthru_edge
, false);
5988 epilogue_end
= returnjump
= BB_END (last_bb
);
5989 #ifdef HAVE_simple_return
5990 /* Emitting the return may add a basic block.
5991 Fix bb_flags for the added block. */
5992 if (last_bb
!= exit_fallthru_edge
->src
)
5993 bitmap_set_bit (&bb_flags
, last_bb
->index
);
6001 /* A small fib -- epilogue is not yet completed, but we wish to re-use
6002 this marker for the splits of EH_RETURN patterns, and nothing else
6003 uses the flag in the meantime. */
6004 epilogue_completed
= 1;
6006 #ifdef HAVE_eh_return
6007 /* Find non-fallthru edges that end with EH_RETURN instructions. On
6008 some targets, these get split to a special version of the epilogue
6009 code. In order to be able to properly annotate these with unwind
6010 info, try to split them now. If we get a valid split, drop an
6011 EPILOGUE_BEG note and mark the insns as epilogue insns. */
6012 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
6014 rtx_insn
*prev
, *last
, *trial
;
6016 if (e
->flags
& EDGE_FALLTHRU
)
6018 last
= BB_END (e
->src
);
6019 if (!eh_returnjump_p (last
))
6022 prev
= PREV_INSN (last
);
6023 trial
= try_split (PATTERN (last
), last
, 1);
6027 record_insns (NEXT_INSN (prev
), NEXT_INSN (trial
), &epilogue_insn_hash
);
6028 emit_note_after (NOTE_INSN_EPILOGUE_BEG
, prev
);
6032 /* If nothing falls through into the exit block, we don't need an
6035 if (exit_fallthru_edge
== NULL
)
6038 #ifdef HAVE_epilogue
6042 epilogue_end
= emit_note (NOTE_INSN_EPILOGUE_BEG
);
6043 rtx_insn
*seq
= as_a
<rtx_insn
*> (gen_epilogue ());
6045 emit_jump_insn (seq
);
6047 /* Retain a map of the epilogue insns. */
6048 record_insns (seq
, NULL
, &epilogue_insn_hash
);
6049 set_insn_locations (seq
, epilogue_location
);
6052 returnjump
= get_last_insn ();
6055 insert_insn_on_edge (seq
, exit_fallthru_edge
);
6058 if (JUMP_P (returnjump
))
6059 set_return_jump_label (returnjump
);
6066 if (! next_active_insn (BB_END (exit_fallthru_edge
->src
)))
6068 /* We have a fall-through edge to the exit block, the source is not
6069 at the end of the function, and there will be an assembler epilogue
6070 at the end of the function.
6071 We can't use force_nonfallthru here, because that would try to
6072 use return. Inserting a jump 'by hand' is extremely messy, so
6073 we take advantage of cfg_layout_finalize using
6074 fixup_fallthru_exit_predecessor. */
6075 cfg_layout_initialize (0);
6076 FOR_EACH_BB_FN (cur_bb
, cfun
)
6077 if (cur_bb
->index
>= NUM_FIXED_BLOCKS
6078 && cur_bb
->next_bb
->index
>= NUM_FIXED_BLOCKS
)
6079 cur_bb
->aux
= cur_bb
->next_bb
;
6080 cfg_layout_finalize ();
epilogue_done:

  default_rtl_profile ();

  if (inserted)
    {
      sbitmap blocks;

      commit_edge_insertions ();

      /* Look for basic blocks within the prologue insns.  */
      blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (blocks);
      bitmap_set_bit (blocks, entry_edge->dest->index);
      bitmap_set_bit (blocks, orig_entry_edge->dest->index);
      find_many_sub_basic_blocks (blocks);
      sbitmap_free (blocks);

      /* The epilogue insns we inserted may cause the exit edge to no longer
         be fallthru.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
        {
          if (((e->flags & EDGE_FALLTHRU) != 0)
              && returnjump_p (BB_END (e->src)))
            e->flags &= ~EDGE_FALLTHRU;
        }
    }
#ifdef HAVE_simple_return
  convert_to_simple_return (entry_edge, orig_entry_edge, bb_flags, returnjump,
                            unconverted_simple_returns);
#endif
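
/* For illustration (an assumed example, not from the original source):
   shrink-wrapping plus simple_return pays off for functions with a cheap
   early exit, e.g.

     int
     f (int x)
     {
       if (x == 0)
         return 0;     // fast path: no frame, can use a simple return
       return g (x);   // slow path: full prologue and epilogue
     }

   The fast path can return without ever having set up a stack frame.  */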
#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
       (e = ei_safe_edge (ei)); )
    {
      basic_block bb = e->src;
      rtx_insn *insn = BB_END (bb);
      rtx ep_seq;

      if (!CALL_P (insn)
          || ! SIBLING_CALL_P (insn)
#ifdef HAVE_simple_return
          || (entry_edge != orig_entry_edge
              && !bitmap_bit_p (&bb_flags, bb->index))
#endif
          )
        {
          ei_next (&ei);
          continue;
        }

      ep_seq = gen_sibcall_epilogue ();
      if (ep_seq)
        {
          start_sequence ();
          emit_note (NOTE_INSN_EPILOGUE_BEG);
          emit_insn (ep_seq);
          rtx_insn *seq = get_insns ();
          end_sequence ();

          /* Retain a map of the epilogue insns.  Used in life analysis to
             avoid getting rid of sibcall epilogue insns.  Do this before we
             actually emit the sequence.  */
          record_insns (seq, NULL, &epilogue_insn_hash);
          set_insn_locations (seq, epilogue_location);

          emit_insn_before (seq, insn);
        }
      ei_next (&ei);
    }
#endif
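
/* Illustrative example (assumed, not from the original source): a call in
   tail position such as

     int f (int x) { return g (x + 1); }

   can be emitted as a sibling call.  The sibcall epilogue inserted above
   restores the caller's frame before the jump to g, since control never
   returns to f.  */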
#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx_insn *insn, *next;

      /* Similarly, move any line notes that appear after the epilogue.
         There is no need, however, to be quite so anal about the existence
         of such a note.  Also possibly move
         NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
         info generation.  */
      for (insn = epilogue_end; insn; insn = next)
        {
          next = NEXT_INSN (insn);
          if (NOTE_P (insn)
              && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
            reorder_insns (insn, insn, PREV_INSN (epilogue_end));
        }
    }
#endif
#ifdef HAVE_simple_return
  bitmap_clear (&bb_flags);
#endif

  /* Threading the prologue and epilogue changes the artificial refs
     in the entry and exit blocks.  */
  epilogue_completed = 1;
  df_update_entry_exit_and_calls ();
}
/* Reposition the prologue-end and epilogue-begin notes after
   instruction scheduling.  */

void
reposition_prologue_and_epilogue_notes (void)
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue) \
    || defined (HAVE_sibcall_epilogue)
  /* Since the hash table is created on demand, the fact that it is
     non-null is a signal that it is non-empty.  */
  if (prologue_insn_hash != NULL)
    {
      size_t len = htab_elements (prologue_insn_hash);
      rtx_insn *insn, *last = NULL, *note = NULL;

      /* Scan from the beginning until we reach the last prologue insn.  */
      /* ??? While we do have the CFG intact, there are two problems:
         (1) The prologue can contain loops (typically probing the stack),
             which means that the end of the prologue isn't in the first bb.
         (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          if (NOTE_P (insn))
            {
              if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
                note = insn;
            }
          else if (contains (insn, prologue_insn_hash))
            {
              last = insn;
              if (--len == 0)
                break;
            }
        }

      if (last)
        {
          if (note == NULL)
            {
              /* Scan forward looking for the PROLOGUE_END note.  It should
                 be right at the beginning of the block, possibly with other
                 insn notes that got moved there.  */
              for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
                if (NOTE_P (note)
                    && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
                  break;
            }

          /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
          if (LABEL_P (last))
            last = NEXT_INSN (last);
          reorder_insns (note, note, last);
        }
    }
  if (epilogue_insn_hash != NULL)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
        {
          rtx_insn *insn, *first = NULL, *note = NULL;
          basic_block bb = e->src;

          /* Scan from the beginning until we reach the first epilogue
             insn.  */
          FOR_BB_INSNS (bb, insn)
            {
              if (NOTE_P (insn))
                {
                  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
                    {
                      note = insn;
                      if (first != NULL)
                        break;
                    }
                }
              else if (first == NULL && contains (insn, epilogue_insn_hash))
                {
                  first = insn;
                  if (note != NULL)
                    break;
                }
            }

          if (note)
            {
              /* If the function has a single basic block, and no real
                 epilogue insns (e.g. sibcall with no cleanup), the
                 epilogue note can get scheduled before the prologue
                 note.  If we have frame related prologue insns, having
                 them scanned during the epilogue will result in a crash.
                 In this case re-order the epilogue note to just before
                 the last insn in the block.  */
              if (first == NULL)
                first = BB_END (bb);

              if (PREV_INSN (first) != note)
                reorder_insns (note, note, PREV_INSN (first));
            }
        }
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
/* Returns the name of function declared by FNDECL.  */

const char *
fndecl_name (tree fndecl)
{
  if (fndecl == NULL)
    return "(nofn)";
  return lang_hooks.decl_printable_name (fndecl, 2);
}

/* Returns the name of function FN.  */

const char *
function_name (struct function *fn)
{
  tree fndecl = (fn == NULL) ? NULL : fn->decl;
  return fndecl_name (fndecl);
}

/* Returns the name of the current function.  */

const char *
current_function_name (void)
{
  return function_name (cfun);
}
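
/* Illustrative usage (assumed, not from the original source): these
   helpers are convenient in dump and diagnostic code, e.g.

     if (dump_file)
       fprintf (dump_file, ";; processing %s\n", current_function_name ());  */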
static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  crtl->uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}
/* Insert a TYPE into the used types hash table of CFUN.  */

static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      if (func->used_types_hash == NULL)
        func->used_types_hash = hash_set<tree>::create_ggc (37);

      func->used_types_hash->add (type);
    }
}
/* Given a type, insert it into the used hash table in cfun.  */

void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    if (TYPE_NAME (t))
      break;
    else
      t = TREE_TYPE (t);
  if (TREE_CODE (t) == ERROR_MARK)
    return;
  if (TYPE_NAME (t) == NULL_TREE
      || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
    t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    {
      if (cfun)
        used_types_insert_helper (t, cfun);
      else
        /* So this might be a type referenced by a global variable.
           Record that type so that we can later decide to emit its
           debug information.  */
        vec_safe_push (types_used_by_cur_var_decl, t);
    }
}
/* Helper to hash a struct types_used_by_vars_entry.  */

static hashval_t
hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
{
  gcc_assert (entry && entry->var_decl && entry->type);

  return iterative_hash_object (entry->type,
                                iterative_hash_object (entry->var_decl, 0));
}
/* Hash function of the types_used_by_vars_entry hash table.  */

hashval_t
used_type_hasher::hash (types_used_by_vars_entry *entry)
{
  return hash_types_used_by_vars_entry (entry);
}

/* Equality function of the types_used_by_vars_entry hash table.  */

bool
used_type_hasher::equal (types_used_by_vars_entry *e1,
                         types_used_by_vars_entry *e2)
{
  return (e1->var_decl == e2->var_decl && e1->type == e2->type);
}
/* Inserts an entry into the types_used_by_vars_hash hash table.  */

void
types_used_by_var_decl_insert (tree type, tree var_decl)
{
  if (type != NULL && var_decl != NULL)
    {
      types_used_by_vars_entry **slot;
      struct types_used_by_vars_entry e;
      e.var_decl = var_decl;
      e.type = type;
      if (types_used_by_vars_hash == NULL)
        types_used_by_vars_hash
          = hash_table<used_type_hasher>::create_ggc (37);

      slot = types_used_by_vars_hash->find_slot (&e, INSERT);
      if (*slot == NULL)
        {
          struct types_used_by_vars_entry *entry;
          entry = ggc_alloc<types_used_by_vars_entry> ();
          entry->type = type;
          entry->var_decl = var_decl;
          *slot = entry;
        }
    }
}
namespace {

const pass_data pass_data_leaf_regs =
{
  RTL_PASS, /* type */
  "*leaf_regs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_leaf_regs : public rtl_opt_pass
{
public:
  pass_leaf_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_leaf_regs, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_check_leaf_regs ();
    }

}; // class pass_leaf_regs

} // anon namespace

rtl_opt_pass *
make_pass_leaf_regs (gcc::context *ctxt)
{
  return new pass_leaf_regs (ctxt);
}
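
/* Note (an assumption about usage, not from this file): pass factories
   such as make_pass_leaf_regs are wired into the compilation pipeline
   from passes.def, e.g.

     NEXT_PASS (pass_leaf_regs);  */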
static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE);

  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns ();

  /* Shrink-wrapping can result in unreachable edges in the epilogue,
     so clean up the CFG again.  */
  cleanup_cfg (0);

  /* The stack usage info is finalized during prologue expansion.  */
  if (flag_stack_usage_info)
    output_stack_usage ();

  return 0;
}
namespace {

const pass_data pass_data_thread_prologue_and_epilogue =
{
  RTL_PASS, /* type */
  "pro_and_epilogue", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
};

class pass_thread_prologue_and_epilogue : public rtl_opt_pass
{
public:
  pass_thread_prologue_and_epilogue (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_thread_prologue_and_epilogue ();
    }

}; // class pass_thread_prologue_and_epilogue

} // anon namespace

rtl_opt_pass *
make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
{
  return new pass_thread_prologue_and_epilogue (ctxt);
}
/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     orig = inout;
     asm ("": "+mr" (inout));
     use (orig);

   which is transformed very early to use explicit output and match operands:

     orig = inout;
     asm ("": "=mr" (inout) : "0" (inout));
     use (orig);

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));
     use (inout_1);

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads,
   or, alternatively, it's also a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     inout_2 = inout_1;
     asm ("": "=mr" (inout_2) : "0" (inout_2));  */
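
/* In RTL terms (an illustrative sketch, not from the original source),
   the fix amounts to emitting

     (set (reg:SI inout_2) (reg:SI inout_1))

   immediately before the asm insn and then rewriting every use of
   inout_1 inside the asm operands to inout_2, which is what
   match_asm_constraints_1 below does via emit_move_insn and
   replace_rtx.  */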
static void
match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output;
      rtx_insn *insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match, j;

      if (*constraint == '%')
        constraint++;

      match = strtoul (constraint, &end, 10);
      if (end == constraint)
        continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
          || rtx_equal_p (output, input)
          || (GET_MODE (input) != VOIDmode
              && GET_MODE (input) != GET_MODE (output)))
        continue;

      /* We can't do anything if the output is also used as input,
         as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
          break;
      if (j != ninputs)
        continue;

      /* Avoid changing the same input several times.  For
         asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
         only change in once (to out1), rather than changing it
         first to out1 and afterwards to out2.  */
      if (i > 0)
        {
          for (j = 0; j < noutputs; j++)
            if (output_matched[j] && input == SET_DEST (p_sets[j]))
              break;
          if (j != noutputs)
            continue;
        }
      output_matched[match] = true;

      start_sequence ();
      emit_move_insn (output, input);
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);
      /* Now replace all mentions of the input with output.  We can't
         just replace the occurrence in inputs[i], as the register might
         also be used in some other input (or even in an address of an
         output), which would mean possibly increasing the number of
         inputs by one (namely 'output' in addition), which might pose
         a too complicated problem for reload to solve.  E.g. this situation:

           asm ("" : "=r" (output), "=m" (input) : "0" (input))

         Here 'input' is used in two occurrences as input (once for the
         input operand, once for the address in the second output operand).
         If we would replace only the occurrence of the input operand (to
         make the matching) we would be left with this:

           output = input
           asm ("" : "=r" (output), "=m" (input) : "0" (output))

         Now we suddenly have two different input values (containing the same
         value, but different pseudos) where we formerly had only one.
         With more complicated asms this might lead to reload failures
         which wouldn't have happened without this pass.  So, iterate over
         all operands and replace all occurrences of the register used.  */
      for (j = 0; j < noutputs; j++)
        if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
            && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
          SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
                                              input, output);

      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
          RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
                                               input, output);

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}
/* Add the decl D to the local_decls list of FUN.  */

void
add_local_decl (struct function *fun, tree d)
{
  gcc_assert (TREE_CODE (d) == VAR_DECL);
  vec_safe_push (fun->local_decls, d);
}
namespace {

const pass_data pass_data_match_asm_constraints =
{
  RTL_PASS, /* type */
  "asmcons", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_match_asm_constraints : public rtl_opt_pass
{
public:
  pass_match_asm_constraints (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_match_asm_constraints
unsigned
pass_match_asm_constraints::execute (function *fun)
{
  basic_block bb;
  rtx_insn *insn;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB_FN (bb, fun)
    {
      FOR_BB_INSNS (bb, insn)
        {
          rtx pat, *p_sets;
          int noutputs;

          if (!INSN_P (insn))
            continue;

          pat = PATTERN (insn);
          if (GET_CODE (pat) == PARALLEL)
            p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
          else if (GET_CODE (pat) == SET)
            p_sets = &PATTERN (insn), noutputs = 1;
          else
            continue;

          if (GET_CODE (*p_sets) == SET
              && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
            match_asm_constraints_1 (insn, p_sets, noutputs);
        }
    }

  return TODO_df_finish;
}

} // anon namespace

rtl_opt_pass *
make_pass_match_asm_constraints (gcc::context *ctxt)
{
  return new pass_match_asm_constraints (ctxt);
}


#include "gt-function.h"