1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register. */
38 #include "coretypes.h"
49 #include "hard-reg-set.h"
50 #include "insn-config.h"
53 #include "basic-block.h"
58 #include "integrate.h"
59 #include "langhooks.h"
61 #include "cfglayout.h"
63 #ifndef LOCAL_ALIGNMENT
64 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
67 #ifndef STACK_ALIGNMENT_NEEDED
68 #define STACK_ALIGNMENT_NEEDED 1
71 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
73 /* Some systems use __main in a way incompatible with its use in gcc, in these
74 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
75 give the same symbol without quotes for an alternative entry point. You
76 must define both, or neither. */
78 #define NAME__MAIN "__main"
81 /* Round a value to the lowest integer less than it that is a multiple of
82 the required alignment. Avoid using division in case the value is
83 negative. Assume the alignment is a power of two. */
84 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
86 /* Similar, but round to the next highest integer that meets the
   alignment.  */
88 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
90 /* Nonzero if function being compiled doesn't contain any calls
91 (ignoring the prologue and epilogue). This is set prior to
92 local register allocation and is valid for the remaining
94 int current_function_is_leaf
;
96 /* Nonzero if function being compiled doesn't modify the stack pointer
97 (ignoring the prologue and epilogue). This is only valid after
98 life_analysis has run. */
99 int current_function_sp_is_unchanging
;
101 /* Nonzero if the function being compiled is a leaf function which only
102 uses leaf registers. This is valid after reload (specifically after
103 sched2) and is useful only if the port defines LEAF_REGISTERS. */
104 int current_function_uses_only_leaf_regs
;
106 /* Nonzero once virtual register instantiation has been done.
107 assign_stack_local uses frame_pointer_rtx when this is nonzero.
108 calls.c:emit_library_call_value_1 uses it to set up
109 post-instantiation libcalls. */
110 int virtuals_instantiated
;
112 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
113 static GTY(()) int funcdef_no
;
115 /* These variables hold pointers to functions to create and destroy
116 target specific, per-function data structures. */
117 struct machine_function
* (*init_machine_status
) (void);
119 /* The currently compiled function. */
120 struct function
*cfun
= 0;
122 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
123 static GTY(()) varray_type prologue
;
124 static GTY(()) varray_type epilogue
;
126 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
128 static GTY(()) varray_type sibcall_epilogue
;
130 /* In order to evaluate some expressions, such as function calls returning
131 structures in memory, we need to temporarily allocate stack locations.
132 We record each allocated temporary in the following structure.
134 Associated with each temporary slot is a nesting level. When we pop up
135 one level, all temporaries associated with the previous level are freed.
136 Normally, all temporaries are freed after the execution of the statement
137 in which they were created. However, if we are inside a ({...}) grouping,
138 the result may be in a temporary and hence must be preserved. If the
139 result could be in a temporary, we preserve it if we can determine which
140 one it is in. If we cannot determine which temporary may contain the
141 result, all temporaries are preserved. A temporary is preserved by
142 pretending it was allocated at the previous nesting level.
144 Automatic variables are also assigned temporary slots, at the nesting
145 level where they are defined. They are marked a "kept" so that
146 free_temp_slots will not free them. */
148 struct temp_slot
GTY(())
150 /* Points to next temporary slot. */
151 struct temp_slot
*next
;
152 /* Points to previous temporary slot. */
153 struct temp_slot
*prev
;
155 /* The rtx to used to reference the slot. */
157 /* The rtx used to represent the address if not the address of the
158 slot above. May be an EXPR_LIST if multiple addresses exist. */
160 /* The alignment (in bits) of the slot. */
162 /* The size, in units, of the slot. */
164 /* The type of the object in the slot, or zero if it doesn't correspond
165 to a type. We use this to determine whether a slot can be reused.
166 It can be reused if objects of the type of the new slot will always
167 conflict with objects of the type of the old slot. */
169 /* Nonzero if this temporary is currently in use. */
171 /* Nonzero if this temporary has its address taken. */
173 /* Nesting level at which this slot is being used. */
175 /* Nonzero if this should survive a call to free_temp_slots. */
177 /* The offset of the slot from the frame_pointer, including extra space
178 for alignment. This info is for combine_temp_slots. */
179 HOST_WIDE_INT base_offset
;
180 /* The size of the slot, including extra space for alignment. This
181 info is for combine_temp_slots. */
182 HOST_WIDE_INT full_size
;
185 /* Forward declarations. */
187 static rtx
assign_stack_local_1 (enum machine_mode
, HOST_WIDE_INT
, int,
189 static struct temp_slot
*find_temp_slot_from_address (rtx
);
190 static void instantiate_decls (tree
, int);
191 static void instantiate_decls_1 (tree
, int);
192 static void instantiate_decl (rtx
, HOST_WIDE_INT
, int);
193 static rtx
instantiate_new_reg (rtx
, HOST_WIDE_INT
*);
194 static int instantiate_virtual_regs_1 (rtx
*, rtx
, int);
195 static void pad_to_arg_alignment (struct args_size
*, int, struct args_size
*);
196 static void pad_below (struct args_size
*, enum machine_mode
, tree
);
197 static void reorder_blocks_1 (rtx
, tree
, varray_type
*);
198 static void reorder_fix_fragments (tree
);
199 static int all_blocks (tree
, tree
*);
200 static tree
*get_block_vector (tree
, int *);
201 extern tree
debug_find_var_in_block_tree (tree
, tree
);
202 /* We always define `record_insns' even if it's not used so that we
203 can always export `prologue_epilogue_contains'. */
204 static void record_insns (rtx
, varray_type
*) ATTRIBUTE_UNUSED
;
205 static int contains (rtx
, varray_type
);
207 static void emit_return_into_block (basic_block
, rtx
);
209 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
210 static rtx
keep_stack_depressed (rtx
);
212 static void prepare_function_start (tree
);
213 static void do_clobber_return_reg (rtx
, void *);
214 static void do_use_return_reg (rtx
, void *);
215 static void instantiate_virtual_regs_lossage (rtx
);
216 static void set_insn_locators (rtx
, int) ATTRIBUTE_UNUSED
;
218 /* Pointer to chain of `struct function' for containing functions. */
219 struct function
*outer_function_chain
;
221 /* Given a function decl for a containing function,
222 return the `struct function' for it. */
/* Walk outer_function_chain looking for the saved `struct function'
   whose decl is DECL.
   NOTE(review): extraction lost this function's return-type line, the
   loop body that compares p->decl against DECL, and the not-found path
   (original lines 226-233).  Visible fragments kept verbatim.  */
225 find_function_data (tree decl
)
229 for (p
= outer_function_chain
; p
; p
= p
->outer
)
236 /* Save the current context for compilation of a nested function.
237 This is called from language-specific code. The caller should use
238 the enter_nested langhook to save any language-specific state,
239 since this function knows only about language-independent
/* Save the current function's compilation state so a nested function
   can be compiled; the saved state is pushed onto outer_function_chain
   and the language hook is told to save its own per-function state.
   NOTE(review): several interior lines (original 244-248, 251-252,
   255-261, 264) are missing from this extraction, including the
   declaration/initialization of `p'.  Visible fragments kept verbatim.  */
243 push_function_context_to (tree context
)
/* If the nested function's context is the function currently being
   compiled, mark the current function as containing functions.  */
249 if (context
== current_function_decl
)
250 cfun
->contains_functions
= 1;
253 struct function
*containing
= find_function_data (context
);
/* Otherwise mark the (already saved) containing function.  */
254 containing
->contains_functions
= 1;
259 init_dummy_function_start ();
/* Link the saved state onto the chain of outer functions.  */
262 p
->outer
= outer_function_chain
;
263 outer_function_chain
= p
;
/* Let the front end save any language-specific state for P.  */
265 lang_hooks
.function
.enter_nested (p
);
/* Convenience wrapper: save the current context using the current
   function's decl as the nested function's context.
   NOTE(review): return-type line and braces lost in extraction.  */
271 push_function_context (void)
273 push_function_context_to (current_function_decl
);
276 /* Restore the last saved context, at the end of a nested function.
277 This function is called from language-specific code. */
/* Restore the most recently saved function context from
   outer_function_chain: pop the chain, reinstate the saved decl and
   emit status, and let the front end restore its state.
   NOTE(review): original lines 283-284, 286, 288-289, 291, 293 and
   297-298 are missing from this extraction.  Visible fragments kept
   verbatim.  */
280 pop_function_context_from (tree context ATTRIBUTE_UNUSED
)
282 struct function
*p
= outer_function_chain
;
/* Pop P off the chain of saved outer functions.  */
285 outer_function_chain
= p
->outer
;
287 current_function_decl
= p
->decl
;
290 restore_emit_status (p
);
292 lang_hooks
.function
.leave_nested (p
);
294 /* Reset variables that have known state during rtx generation. */
295 virtuals_instantiated
= 0;
296 generating_concat_p
= 1;
/* Convenience wrapper: restore the last saved context, passing the
   current function's decl (which the callee ignores).
   NOTE(review): return-type line and braces lost in extraction.  */
300 pop_function_context (void)
302 pop_function_context_from (current_function_decl
);
305 /* Clear out all parts of the state in F that can safely be discarded
306 after the function has been parsed, but not compiled, to let
307 garbage collection reclaim the memory. */
/* Release the parts of F's state that are no longer needed once the
   function has been parsed (but not yet compiled), then give the front
   end its `final' hook.  The comments below record which sub-structures
   must NOT be freed because code generation still uses them.
   NOTE(review): the actual freeing statements (original lines 311, 316,
   318-319) are missing from this extraction.  */
310 free_after_parsing (struct function
*f
)
312 /* f->expr->forced_labels is used by code generation. */
313 /* f->emit->regno_reg_rtx is used by code generation. */
314 /* f->varasm is used by code generation. */
315 /* f->eh->eh_return_stub_label is used by code generation. */
317 lang_hooks
.function
.final (f
);
321 /* Clear out all parts of the state in F that can safely be discarded
322 after the function has been compiled, to let garbage collection
323 reclaim the memory. */
/* After F has been fully compiled, null out the per-function pointers
   it holds so the garbage collector can reclaim the referenced memory.
   NOTE(review): original lines 327-333 (and everything after 348) are
   missing from this extraction; only the pointer-clearing assignments
   below survived, kept verbatim.  */
326 free_after_compilation (struct function
*f
)
334 f
->x_avail_temp_slots
= NULL
;
335 f
->x_used_temp_slots
= NULL
;
336 f
->arg_offset_rtx
= NULL
;
337 f
->return_rtx
= NULL
;
338 f
->internal_arg_pointer
= NULL
;
339 f
->x_nonlocal_goto_handler_labels
= NULL
;
340 f
->x_return_label
= NULL
;
341 f
->x_naked_return_label
= NULL
;
342 f
->x_stack_slot_list
= NULL
;
343 f
->x_tail_recursion_reentry
= NULL
;
344 f
->x_arg_pointer_save_area
= NULL
;
345 f
->x_parm_birth_insn
= NULL
;
346 f
->original_arg_vector
= NULL
;
347 f
->original_decl_initial
= NULL
;
348 f
->epilogue_delay_list
= NULL
;
351 /* Allocate fixed slots in the stack frame of the current function. */
353 /* Return size needed for stack frame based on slots so far allocated in
355 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
356 the caller may have to do that. */
/* Return the stack-frame size (counting from zero, unrounded) for
   function F, based on the slots allocated so far.
   NOTE(review): return-type line, braces and the #else/#endif of the
   conditional (original lines 360, 363, 365-366) are missing.  */
359 get_func_frame_size (struct function
*f
)
361 #ifdef FRAME_GROWS_DOWNWARD
/* When the frame grows downward, x_frame_offset is negative; negate it
   to yield a positive size.  */
362 return -f
->x_frame_offset
;
364 return f
->x_frame_offset
;
368 /* Return size needed for stack frame based on slots so far allocated.
369 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
370 the caller may have to do that. */
/* Frame size of the function currently being compiled; simply
   delegates to get_func_frame_size with the global CFUN.
   NOTE(review): return-type line and braces lost in extraction.  */
372 get_frame_size (void)
374 return get_func_frame_size (cfun
);
377 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
378 with machine mode MODE.
380 ALIGN controls the amount of alignment for the address of the slot:
381 0 means according to MODE,
382 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
383 -2 means use BITS_PER_UNIT,
384 positive specifies alignment boundary in bits.
386 We do not round to stack_boundary here.
388 FUNCTION specifies the function to allocate in. */
391 assign_stack_local_1 (enum machine_mode mode
, HOST_WIDE_INT size
, int align
,
392 struct function
*function
)
395 int bigend_correction
= 0;
396 unsigned int alignment
;
397 int frame_off
, frame_alignment
, frame_phase
;
404 alignment
= BIGGEST_ALIGNMENT
;
406 alignment
= GET_MODE_ALIGNMENT (mode
);
408 /* Allow the target to (possibly) increase the alignment of this
410 type
= lang_hooks
.types
.type_for_mode (mode
, 0);
412 alignment
= LOCAL_ALIGNMENT (type
, alignment
);
414 alignment
/= BITS_PER_UNIT
;
416 else if (align
== -1)
418 alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
419 size
= CEIL_ROUND (size
, alignment
);
421 else if (align
== -2)
422 alignment
= 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
424 alignment
= align
/ BITS_PER_UNIT
;
426 #ifdef FRAME_GROWS_DOWNWARD
427 function
->x_frame_offset
-= size
;
430 /* Ignore alignment we can't do with expected alignment of the boundary. */
431 if (alignment
* BITS_PER_UNIT
> PREFERRED_STACK_BOUNDARY
)
432 alignment
= PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
;
434 if (function
->stack_alignment_needed
< alignment
* BITS_PER_UNIT
)
435 function
->stack_alignment_needed
= alignment
* BITS_PER_UNIT
;
437 /* Calculate how many bytes the start of local variables is off from
439 frame_alignment
= PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
;
440 frame_off
= STARTING_FRAME_OFFSET
% frame_alignment
;
441 frame_phase
= frame_off
? frame_alignment
- frame_off
: 0;
443 /* Round the frame offset to the specified alignment. The default is
444 to always honor requests to align the stack but a port may choose to
445 do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. */
446 if (STACK_ALIGNMENT_NEEDED
450 /* We must be careful here, since FRAME_OFFSET might be negative and
451 division with a negative dividend isn't as well defined as we might
452 like. So we instead assume that ALIGNMENT is a power of two and
453 use logical operations which are unambiguous. */
454 #ifdef FRAME_GROWS_DOWNWARD
455 function
->x_frame_offset
456 = (FLOOR_ROUND (function
->x_frame_offset
- frame_phase
, alignment
)
459 function
->x_frame_offset
460 = (CEIL_ROUND (function
->x_frame_offset
- frame_phase
, alignment
)
465 /* On a big-endian machine, if we are allocating more space than we will use,
466 use the least significant bytes of those that are allocated. */
467 if (BYTES_BIG_ENDIAN
&& mode
!= BLKmode
)
468 bigend_correction
= size
- GET_MODE_SIZE (mode
);
470 /* If we have already instantiated virtual registers, return the actual
471 address relative to the frame pointer. */
472 if (function
== cfun
&& virtuals_instantiated
)
473 addr
= plus_constant (frame_pointer_rtx
,
475 (frame_offset
+ bigend_correction
476 + STARTING_FRAME_OFFSET
, Pmode
));
478 addr
= plus_constant (virtual_stack_vars_rtx
,
480 (function
->x_frame_offset
+ bigend_correction
,
483 #ifndef FRAME_GROWS_DOWNWARD
484 function
->x_frame_offset
+= size
;
487 x
= gen_rtx_MEM (mode
, addr
);
489 function
->x_stack_slot_list
490 = gen_rtx_EXPR_LIST (VOIDmode
, x
, function
->x_stack_slot_list
);
495 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
/* Allocate a stack slot of SIZE bytes with mode MODE and alignment
   ALIGN in the CURRENT function — thin wrapper passing CFUN to
   assign_stack_local_1.  See that function for ALIGN's encoding.
   NOTE(review): return-type line and braces lost in extraction.  */
499 assign_stack_local (enum machine_mode mode
, HOST_WIDE_INT size
, int align
)
501 return assign_stack_local_1 (mode
, size
, align
, cfun
);
505 /* Removes temporary slot TEMP from LIST. */
/* Unlink temporary slot TEMP from the doubly linked list headed at
   *LIST, clearing TEMP's link fields afterwards.
   NOTE(review): the guard conditions around the two re-link statements
   and the update of the list head when TEMP is first (original lines
   509-510, 512, 514-516) are missing from this extraction — as written
   the fragment would dereference NULL for a slot at either end of the
   list.  Visible fragments kept verbatim.  */
508 cut_slot_from_list (struct temp_slot
*temp
, struct temp_slot
**list
)
511 temp
->next
->prev
= temp
->prev
;
513 temp
->prev
->next
= temp
->next
;
517 temp
->prev
= temp
->next
= NULL
;
520 /* Inserts temporary slot TEMP to LIST. */
/* Insert temporary slot TEMP at the head of the list at *LIST.
   NOTE(review): most of the body (original lines 524-526, 528-530) is
   missing — only the back-link of the old head survived; the statements
   setting TEMP's own links and *LIST are gone.  Kept verbatim.  */
523 insert_slot_to_list (struct temp_slot
*temp
, struct temp_slot
**list
)
527 (*list
)->prev
= temp
;
532 /* Returns the list of used temp slots at LEVEL. */
/* Return the address of the head of the used-temp-slot list for
   nesting LEVEL, creating and growing the used_temp_slots varray
   lazily so that index LEVEL is valid.  */
534 static struct temp_slot
**
535 temp_slots_at_level (int level
)
/* Lazily create the varray the first time any level is requested.  */
539 if (!used_temp_slots
)
540 VARRAY_GENERIC_PTR_INIT (used_temp_slots
, 3, "used_temp_slots");
/* Extend with empty (NULL) lists until LEVEL is indexable.  */
542 while (level
>= (int) VARRAY_ACTIVE_SIZE (used_temp_slots
))
543 VARRAY_PUSH_GENERIC_PTR (used_temp_slots
, NULL
);
545 return (struct temp_slot
**) &VARRAY_GENERIC_PTR (used_temp_slots
, level
);
548 /* Returns the maximal temporary slot level. */
/* Return the highest temporary-slot level currently recorded, derived
   from the active size of the used_temp_slots varray.
   NOTE(review): the return for the no-varray case (original lines
   554-555) is missing from this extraction.  */
551 max_slot_level (void)
553 if (!used_temp_slots
)
556 return VARRAY_ACTIVE_SIZE (used_temp_slots
) - 1;
559 /* Moves temporary slot TEMP to LEVEL. */
/* Move temporary slot TEMP to nesting LEVEL: unlink it from the list
   of its current level, then insert it into LEVEL's list.
   NOTE(review): the trailing update of TEMP's own level field (original
   line 566) appears to be missing from this extraction — confirm.  */
562 move_slot_to_level (struct temp_slot
*temp
, int level
)
564 cut_slot_from_list (temp
, temp_slots_at_level (temp
->level
));
565 insert_slot_to_list (temp
, temp_slots_at_level (level
));
569 /* Make temporary slot TEMP available. */
/* Make temporary slot TEMP available for reuse: unlink it from its
   level's in-use list and put it on the avail_temp_slots free list.
   NOTE(review): trailing bookkeeping (original lines 576-577, e.g.
   clearing the in-use state) is missing from this extraction.  */
572 make_slot_available (struct temp_slot
*temp
)
574 cut_slot_from_list (temp
, temp_slots_at_level (temp
->level
));
575 insert_slot_to_list (temp
, &avail_temp_slots
);
580 /* Allocate a temporary stack slot and record it for possible later
583 MODE is the machine mode to be given to the returned rtx.
585 SIZE is the size in units of the space required. We do no rounding here
586 since assign_stack_local will do any required rounding.
588 KEEP is 1 if this slot is to be retained after a call to
589 free_temp_slots. Automatic variables for a block are allocated
590 with this flag. KEEP is 2 if we allocate a longer term temporary,
591 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
592 if we are to allocate something at an inner level to be treated as
593 a variable in the block (e.g., a SAVE_EXPR).
595 TYPE is the type that will be used for the stack slot. */
598 assign_stack_temp_for_type (enum machine_mode mode
, HOST_WIDE_INT size
, int keep
,
602 struct temp_slot
*p
, *best_p
= 0, *selected
= NULL
, **pp
;
605 /* If SIZE is -1 it means that somebody tried to allocate a temporary
606 of a variable size. */
611 align
= BIGGEST_ALIGNMENT
;
613 align
= GET_MODE_ALIGNMENT (mode
);
616 type
= lang_hooks
.types
.type_for_mode (mode
, 0);
619 align
= LOCAL_ALIGNMENT (type
, align
);
621 /* Try to find an available, already-allocated temporary of the proper
622 mode which meets the size and alignment requirements. Choose the
623 smallest one with the closest alignment. */
624 for (p
= avail_temp_slots
; p
; p
= p
->next
)
626 if (p
->align
>= align
&& p
->size
>= size
&& GET_MODE (p
->slot
) == mode
627 && objects_must_conflict_p (p
->type
, type
)
628 && (best_p
== 0 || best_p
->size
> p
->size
629 || (best_p
->size
== p
->size
&& best_p
->align
> p
->align
)))
631 if (p
->align
== align
&& p
->size
== size
)
634 cut_slot_from_list (selected
, &avail_temp_slots
);
642 /* Make our best, if any, the one to use. */
646 cut_slot_from_list (selected
, &avail_temp_slots
);
648 /* If there are enough aligned bytes left over, make them into a new
649 temp_slot so that the extra bytes don't get wasted. Do this only
650 for BLKmode slots, so that we can be sure of the alignment. */
651 if (GET_MODE (best_p
->slot
) == BLKmode
)
653 int alignment
= best_p
->align
/ BITS_PER_UNIT
;
654 HOST_WIDE_INT rounded_size
= CEIL_ROUND (size
, alignment
);
656 if (best_p
->size
- rounded_size
>= alignment
)
658 p
= ggc_alloc (sizeof (struct temp_slot
));
659 p
->in_use
= p
->addr_taken
= 0;
660 p
->size
= best_p
->size
- rounded_size
;
661 p
->base_offset
= best_p
->base_offset
+ rounded_size
;
662 p
->full_size
= best_p
->full_size
- rounded_size
;
663 p
->slot
= gen_rtx_MEM (BLKmode
,
664 plus_constant (XEXP (best_p
->slot
, 0),
666 p
->align
= best_p
->align
;
668 p
->type
= best_p
->type
;
669 insert_slot_to_list (p
, &avail_temp_slots
);
671 stack_slot_list
= gen_rtx_EXPR_LIST (VOIDmode
, p
->slot
,
674 best_p
->size
= rounded_size
;
675 best_p
->full_size
= rounded_size
;
680 /* If we still didn't find one, make a new temporary. */
683 HOST_WIDE_INT frame_offset_old
= frame_offset
;
685 p
= ggc_alloc (sizeof (struct temp_slot
));
687 /* We are passing an explicit alignment request to assign_stack_local.
688 One side effect of that is assign_stack_local will not round SIZE
689 to ensure the frame offset remains suitably aligned.
691 So for requests which depended on the rounding of SIZE, we go ahead
692 and round it now. We also make sure ALIGNMENT is at least
693 BIGGEST_ALIGNMENT. */
694 if (mode
== BLKmode
&& align
< BIGGEST_ALIGNMENT
)
696 p
->slot
= assign_stack_local (mode
,
698 ? CEIL_ROUND (size
, (int) align
/ BITS_PER_UNIT
)
704 /* The following slot size computation is necessary because we don't
705 know the actual size of the temporary slot until assign_stack_local
706 has performed all the frame alignment and size rounding for the
707 requested temporary. Note that extra space added for alignment
708 can be either above or below this stack slot depending on which
709 way the frame grows. We include the extra space if and only if it
710 is above this slot. */
711 #ifdef FRAME_GROWS_DOWNWARD
712 p
->size
= frame_offset_old
- frame_offset
;
717 /* Now define the fields used by combine_temp_slots. */
718 #ifdef FRAME_GROWS_DOWNWARD
719 p
->base_offset
= frame_offset
;
720 p
->full_size
= frame_offset_old
- frame_offset
;
722 p
->base_offset
= frame_offset_old
;
723 p
->full_size
= frame_offset
- frame_offset_old
;
737 p
->level
= target_temp_slot_level
;
742 p
->level
= var_temp_slot_level
;
747 p
->level
= temp_slot_level
;
751 pp
= temp_slots_at_level (p
->level
);
752 insert_slot_to_list (p
, pp
);
754 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
755 slot
= gen_rtx_MEM (mode
, XEXP (p
->slot
, 0));
756 stack_slot_list
= gen_rtx_EXPR_LIST (VOIDmode
, slot
, stack_slot_list
);
758 /* If we know the alias set for the memory that will be used, use
759 it. If there's no TYPE, then we don't know anything about the
760 alias set for the memory. */
761 set_mem_alias_set (slot
, type
? get_alias_set (type
) : 0);
762 set_mem_align (slot
, align
);
764 /* If a type is specified, set the relevant flags. */
767 RTX_UNCHANGING_P (slot
) = (lang_hooks
.honor_readonly
768 && TYPE_READONLY (type
));
769 MEM_VOLATILE_P (slot
) = TYPE_VOLATILE (type
);
770 MEM_SET_IN_STRUCT_P (slot
, AGGREGATE_TYPE_P (type
));
776 /* Allocate a temporary stack slot and record it for possible later
777 reuse. First three arguments are same as in preceding function. */
/* Allocate a temporary stack slot with no associated type: thin
   wrapper passing NULL_TREE to assign_stack_temp_for_type.  MODE, SIZE
   and KEEP are as for that function.
   NOTE(review): return-type line and braces lost in extraction.  */
780 assign_stack_temp (enum machine_mode mode
, HOST_WIDE_INT size
, int keep
)
782 return assign_stack_temp_for_type (mode
, size
, keep
, NULL_TREE
);
785 /* Assign a temporary.
786 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
787 and so that should be used in error messages. In either case, we
788 allocate of the given type.
789 KEEP is as for assign_stack_temp.
790 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
791 it is 0 if a register is OK.
792 DONT_PROMOTE is 1 if we should not promote values in register
796 assign_temp (tree type_or_decl
, int keep
, int memory_required
,
797 int dont_promote ATTRIBUTE_UNUSED
)
800 enum machine_mode mode
;
805 if (DECL_P (type_or_decl
))
806 decl
= type_or_decl
, type
= TREE_TYPE (decl
);
808 decl
= NULL
, type
= type_or_decl
;
810 mode
= TYPE_MODE (type
);
812 unsignedp
= TYPE_UNSIGNED (type
);
815 if (mode
== BLKmode
|| memory_required
)
817 HOST_WIDE_INT size
= int_size_in_bytes (type
);
821 /* Zero sized arrays are GNU C extension. Set size to 1 to avoid
822 problems with allocating the stack space. */
826 /* Unfortunately, we don't yet know how to allocate variable-sized
827 temporaries. However, sometimes we have a fixed upper limit on
828 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
829 instead. This is the case for Chill variable-sized strings. */
830 if (size
== -1 && TREE_CODE (type
) == ARRAY_TYPE
831 && TYPE_ARRAY_MAX_SIZE (type
) != NULL_TREE
832 && host_integerp (TYPE_ARRAY_MAX_SIZE (type
), 1))
833 size
= tree_low_cst (TYPE_ARRAY_MAX_SIZE (type
), 1);
835 /* If we still haven't been able to get a size, see if the language
836 can compute a maximum size. */
838 && (size_tree
= lang_hooks
.types
.max_size (type
)) != 0
839 && host_integerp (size_tree
, 1))
840 size
= tree_low_cst (size_tree
, 1);
842 /* The size of the temporary may be too large to fit into an integer. */
843 /* ??? Not sure this should happen except for user silliness, so limit
844 this to things that aren't compiler-generated temporaries. The
845 rest of the time we'll abort in assign_stack_temp_for_type. */
846 if (decl
&& size
== -1
847 && TREE_CODE (TYPE_SIZE_UNIT (type
)) == INTEGER_CST
)
849 error ("%Jsize of variable '%D' is too large", decl
, decl
);
853 tmp
= assign_stack_temp_for_type (mode
, size
, keep
, type
);
859 mode
= promote_mode (type
, mode
, &unsignedp
, 0);
862 return gen_reg_rtx (mode
);
865 /* Combine temporary stack slots which are adjacent on the stack.
867 This allows for better use of already allocated stack space. This is only
868 done for BLKmode slots because we can be sure that we won't have alignment
869 problems in this case. */
872 combine_temp_slots (void)
874 struct temp_slot
*p
, *q
, *next
, *next_q
;
877 /* We can't combine slots, because the information about which slot
878 is in which alias set will be lost. */
879 if (flag_strict_aliasing
)
882 /* If there are a lot of temp slots, don't do anything unless
883 high levels of optimization. */
884 if (! flag_expensive_optimizations
)
885 for (p
= avail_temp_slots
, num_slots
= 0; p
; p
= p
->next
, num_slots
++)
886 if (num_slots
> 100 || (num_slots
> 10 && optimize
== 0))
889 for (p
= avail_temp_slots
; p
; p
= next
)
895 if (GET_MODE (p
->slot
) != BLKmode
)
898 for (q
= p
->next
; q
; q
= next_q
)
904 if (GET_MODE (q
->slot
) != BLKmode
)
907 if (p
->base_offset
+ p
->full_size
== q
->base_offset
)
909 /* Q comes after P; combine Q into P. */
911 p
->full_size
+= q
->full_size
;
914 else if (q
->base_offset
+ q
->full_size
== p
->base_offset
)
916 /* P comes after Q; combine P into Q. */
918 q
->full_size
+= p
->full_size
;
923 cut_slot_from_list (q
, &avail_temp_slots
);
926 /* Either delete P or advance past it. */
928 cut_slot_from_list (p
, &avail_temp_slots
);
932 /* Find the temp slot corresponding to the object at address X. */
934 static struct temp_slot
*
935 find_temp_slot_from_address (rtx x
)
941 for (i
= max_slot_level (); i
>= 0; i
--)
942 for (p
= *temp_slots_at_level (i
); p
; p
= p
->next
)
944 if (XEXP (p
->slot
, 0) == x
946 || (GET_CODE (x
) == PLUS
947 && XEXP (x
, 0) == virtual_stack_vars_rtx
948 && GET_CODE (XEXP (x
, 1)) == CONST_INT
949 && INTVAL (XEXP (x
, 1)) >= p
->base_offset
950 && INTVAL (XEXP (x
, 1)) < p
->base_offset
+ p
->full_size
))
953 else if (p
->address
!= 0 && GET_CODE (p
->address
) == EXPR_LIST
)
954 for (next
= p
->address
; next
; next
= XEXP (next
, 1))
955 if (XEXP (next
, 0) == x
)
959 /* If we have a sum involving a register, see if it points to a temp
961 if (GET_CODE (x
) == PLUS
&& REG_P (XEXP (x
, 0))
962 && (p
= find_temp_slot_from_address (XEXP (x
, 0))) != 0)
964 else if (GET_CODE (x
) == PLUS
&& REG_P (XEXP (x
, 1))
965 && (p
= find_temp_slot_from_address (XEXP (x
, 1))) != 0)
971 /* Indicate that NEW is an alternate way of referring to the temp slot
972 that previously was known by OLD. */
975 update_temp_slot_address (rtx old
, rtx
new)
979 if (rtx_equal_p (old
, new))
982 p
= find_temp_slot_from_address (old
);
984 /* If we didn't find one, see if both OLD is a PLUS. If so, and NEW
985 is a register, see if one operand of the PLUS is a temporary
986 location. If so, NEW points into it. Otherwise, if both OLD and
987 NEW are a PLUS and if there is a register in common between them.
988 If so, try a recursive call on those values. */
991 if (GET_CODE (old
) != PLUS
)
996 update_temp_slot_address (XEXP (old
, 0), new);
997 update_temp_slot_address (XEXP (old
, 1), new);
1000 else if (GET_CODE (new) != PLUS
)
1003 if (rtx_equal_p (XEXP (old
, 0), XEXP (new, 0)))
1004 update_temp_slot_address (XEXP (old
, 1), XEXP (new, 1));
1005 else if (rtx_equal_p (XEXP (old
, 1), XEXP (new, 0)))
1006 update_temp_slot_address (XEXP (old
, 0), XEXP (new, 1));
1007 else if (rtx_equal_p (XEXP (old
, 0), XEXP (new, 1)))
1008 update_temp_slot_address (XEXP (old
, 1), XEXP (new, 0));
1009 else if (rtx_equal_p (XEXP (old
, 1), XEXP (new, 1)))
1010 update_temp_slot_address (XEXP (old
, 0), XEXP (new, 0));
1015 /* Otherwise add an alias for the temp's address. */
1016 else if (p
->address
== 0)
1020 if (GET_CODE (p
->address
) != EXPR_LIST
)
1021 p
->address
= gen_rtx_EXPR_LIST (VOIDmode
, p
->address
, NULL_RTX
);
1023 p
->address
= gen_rtx_EXPR_LIST (VOIDmode
, new, p
->address
);
1027 /* If X could be a reference to a temporary slot, mark the fact that its
1028 address was taken. */
/* If X may reference a temporary stack slot, record that the slot's
   address has been taken.
   NOTE(review): the early return for a null X, the return after the
   not-in-memory test, and the final store marking the found slot
   (original lines 1034-1037, 1041-1042, 1044-1045) are missing from
   this extraction.  Visible fragments kept verbatim.  */
1031 mark_temp_addr_taken (rtx x
)
1033 struct temp_slot
*p
;
1038 /* If X is not in memory or is at a constant address, it cannot be in
1039 a temporary slot. */
1040 if (!MEM_P (x
) || CONSTANT_P (XEXP (x
, 0)))
1043 p
= find_temp_slot_from_address (XEXP (x
, 0));
1048 /* If X could be a reference to a temporary slot, mark that slot as
1049 belonging to the to one level higher than the current level. If X
1050 matched one of our slots, just mark that one. Otherwise, we can't
1051 easily predict which it is, so upgrade all of them. Kept slots
1052 need not be touched.
1054 This is called when an ({...}) construct occurs and a statement
1055 returns a value in memory. */
1058 preserve_temp_slots (rtx x
)
1060 struct temp_slot
*p
= 0, *next
;
1062 /* If there is no result, we still might have some objects whose address
1063 were taken, so we need to make sure they stay around. */
1066 for (p
= *temp_slots_at_level (temp_slot_level
); p
; p
= next
)
1071 move_slot_to_level (p
, temp_slot_level
- 1);
1077 /* If X is a register that is being used as a pointer, see if we have
1078 a temporary slot we know it points to. To be consistent with
1079 the code below, we really should preserve all non-kept slots
1080 if we can't find a match, but that seems to be much too costly. */
1081 if (REG_P (x
) && REG_POINTER (x
))
1082 p
= find_temp_slot_from_address (x
);
1084 /* If X is not in memory or is at a constant address, it cannot be in
1085 a temporary slot, but it can contain something whose address was
1087 if (p
== 0 && (!MEM_P (x
) || CONSTANT_P (XEXP (x
, 0))))
1089 for (p
= *temp_slots_at_level (temp_slot_level
); p
; p
= next
)
1094 move_slot_to_level (p
, temp_slot_level
- 1);
1100 /* First see if we can find a match. */
1102 p
= find_temp_slot_from_address (XEXP (x
, 0));
1106 /* Move everything at our level whose address was taken to our new
1107 level in case we used its address. */
1108 struct temp_slot
*q
;
1110 if (p
->level
== temp_slot_level
)
1112 for (q
= *temp_slots_at_level (temp_slot_level
); q
; q
= next
)
1116 if (p
!= q
&& q
->addr_taken
)
1117 move_slot_to_level (q
, temp_slot_level
- 1);
1120 move_slot_to_level (p
, temp_slot_level
- 1);
1126 /* Otherwise, preserve all non-kept slots at this level. */
1127 for (p
= *temp_slots_at_level (temp_slot_level
); p
; p
= next
)
1132 move_slot_to_level (p
, temp_slot_level
- 1);
1136 /* Free all temporaries used so far. This is normally called at the
1137 end of generating code for a statement. */
/* Free all temporaries at the current temp_slot_level, then coalesce
   adjacent freed slots.  Normally called at the end of a statement.
   NOTE(review): the loop-body lines that advance NEXT and skip kept /
   address-taken slots (original lines 1145-1148, 1150-1151) are
   missing from this extraction.  Visible fragments kept verbatim.  */
1140 free_temp_slots (void)
1142 struct temp_slot
*p
, *next
;
1144 for (p
= *temp_slots_at_level (temp_slot_level
); p
; p
= next
)
1149 make_slot_available (p
);
1152 combine_temp_slots ();
1155 /* Push deeper into the nesting level for stack temporaries. */
/* Enter a deeper nesting level for stack temporaries.
   NOTE(review): the body (original lines 1159-1161, presumably the
   increment of temp_slot_level) is missing from this extraction.  */
1158 push_temp_slots (void)
1163 /* Pop a temporary nesting level. All slots in use in the current level
/* Pop one temporary nesting level: free the slots in use at the
   current level, coalesce the freed slots, and leave the level.
   NOTE(review): the loop-body lines advancing NEXT / filtering slots
   and the final level decrement (original lines 1172-1173, 1175-1176,
   1178-1179) are missing from this extraction.  Kept verbatim.  */
1167 pop_temp_slots (void)
1169 struct temp_slot
*p
, *next
;
1171 for (p
= *temp_slots_at_level (temp_slot_level
); p
; p
= next
)
1174 make_slot_available (p
);
1177 combine_temp_slots ();
1182 /* Initialize temporary slots. */
/* Reset all temporary-slot bookkeeping to its initial empty state at
   the start of a function: no free or in-use slots, and all nesting
   levels back at zero.
   NOTE(review): return-type line and braces lost in extraction.  */
1185 init_temp_slots (void)
1187 /* We have not allocated any temporaries yet. */
1188 avail_temp_slots
= 0;
1189 used_temp_slots
= 0;
1190 temp_slot_level
= 0;
1191 var_temp_slot_level
= 0;
1192 target_temp_slot_level
= 0;
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;	/* virtual_incoming_args_rtx -> arg pointer.  */
static int var_offset;		/* virtual_stack_vars_rtx -> frame pointer.  */
static int dynamic_offset;	/* virtual_stack_dynamic_rtx -> stack ptr.  */
static int out_arg_offset;	/* virtual_outgoing_args_rtx -> stack ptr.  */
static int cfa_offset;		/* virtual_cfa_rtx -> arg pointer.  */
/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET	0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
((ACCUMULATE_OUTGOING_ARGS						      \
  ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
 + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0)	      \
 + (STACK_POINTER_OFFSET))
#endif
#endif

/* On most machines, the CFA coincides with the first incoming parm.  */

#ifndef ARG_POINTER_CFA_OFFSET
#define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
#endif
1249 /* Pass through the INSNS of function FNDECL and convert virtual register
1250 references to hard register references. */
1253 instantiate_virtual_regs (void)
1257 /* Compute the offsets to use for this function. */
1258 in_arg_offset
= FIRST_PARM_OFFSET (current_function_decl
);
1259 var_offset
= STARTING_FRAME_OFFSET
;
1260 dynamic_offset
= STACK_DYNAMIC_OFFSET (current_function_decl
);
1261 out_arg_offset
= STACK_POINTER_OFFSET
;
1262 cfa_offset
= ARG_POINTER_CFA_OFFSET (current_function_decl
);
1264 /* Scan all variables and parameters of this function. For each that is
1265 in memory, instantiate all virtual registers if the result is a valid
1266 address. If not, we do it later. That will handle most uses of virtual
1267 regs on many machines. */
1268 instantiate_decls (current_function_decl
, 1);
1270 /* Initialize recognition, indicating that volatile is OK. */
1273 /* Scan through all the insns, instantiating every virtual register still
1275 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
1276 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == JUMP_INSN
1277 || GET_CODE (insn
) == CALL_INSN
)
1279 instantiate_virtual_regs_1 (&PATTERN (insn
), insn
, 1);
1280 if (INSN_DELETED_P (insn
))
1282 instantiate_virtual_regs_1 (®_NOTES (insn
), NULL_RTX
, 0);
1283 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1284 if (GET_CODE (insn
) == CALL_INSN
)
1285 instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn
),
1288 /* Past this point all ASM statements should match. Verify that
1289 to avoid failures later in the compilation process. */
1290 if (asm_noperands (PATTERN (insn
)) >= 0
1291 && ! check_asm_operands (PATTERN (insn
)))
1292 instantiate_virtual_regs_lossage (insn
);
1295 /* Now instantiate the remaining register equivalences for debugging info.
1296 These will not be valid addresses. */
1297 instantiate_decls (current_function_decl
, 0);
1299 /* Indicate that, from now on, assign_stack_local should use
1300 frame_pointer_rtx. */
1301 virtuals_instantiated
= 1;
1304 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1305 all virtual registers in their DECL_RTL's.
1307 If VALID_ONLY, do this only if the resulting address is still valid.
1308 Otherwise, always do it. */
1311 instantiate_decls (tree fndecl
, int valid_only
)
1315 /* Process all parameters of the function. */
1316 for (decl
= DECL_ARGUMENTS (fndecl
); decl
; decl
= TREE_CHAIN (decl
))
1318 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (decl
));
1319 HOST_WIDE_INT size_rtl
;
1321 instantiate_decl (DECL_RTL (decl
), size
, valid_only
);
1323 /* If the parameter was promoted, then the incoming RTL mode may be
1324 larger than the declared type size. We must use the larger of
1326 size_rtl
= GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl
)));
1327 size
= MAX (size_rtl
, size
);
1328 instantiate_decl (DECL_INCOMING_RTL (decl
), size
, valid_only
);
1331 /* Now process all variables defined in the function or its subblocks. */
1332 instantiate_decls_1 (DECL_INITIAL (fndecl
), valid_only
);
1335 /* Subroutine of instantiate_decls: Process all decls in the given
1336 BLOCK node and all its subblocks. */
1339 instantiate_decls_1 (tree let
, int valid_only
)
1343 for (t
= BLOCK_VARS (let
); t
; t
= TREE_CHAIN (t
))
1344 if (DECL_RTL_SET_P (t
))
1345 instantiate_decl (DECL_RTL (t
),
1346 int_size_in_bytes (TREE_TYPE (t
)),
1349 /* Process all subblocks. */
1350 for (t
= BLOCK_SUBBLOCKS (let
); t
; t
= TREE_CHAIN (t
))
1351 instantiate_decls_1 (t
, valid_only
);
1354 /* Subroutine of the preceding procedures: Given RTL representing a
1355 decl and the size of the object, do any instantiation required.
1357 If VALID_ONLY is nonzero, it means that the RTL should only be
1358 changed if the new address is valid. */
1361 instantiate_decl (rtx x
, HOST_WIDE_INT size
, int valid_only
)
1363 enum machine_mode mode
;
1366 /* If this is not a MEM, no need to do anything. Similarly if the
1367 address is a constant or a register that is not a virtual register. */
1369 if (x
== 0 || !MEM_P (x
))
1373 if (CONSTANT_P (addr
)
1375 && (REGNO (addr
) < FIRST_VIRTUAL_REGISTER
1376 || REGNO (addr
) > LAST_VIRTUAL_REGISTER
)))
1379 /* If we should only do this if the address is valid, copy the address.
1380 We need to do this so we can undo any changes that might make the
1381 address invalid. This copy is unfortunate, but probably can't be
1385 addr
= copy_rtx (addr
);
1387 instantiate_virtual_regs_1 (&addr
, NULL_RTX
, 0);
1389 if (valid_only
&& size
>= 0)
1391 unsigned HOST_WIDE_INT decl_size
= size
;
1393 /* Now verify that the resulting address is valid for every integer or
1394 floating-point mode up to and including SIZE bytes long. We do this
1395 since the object might be accessed in any mode and frame addresses
1398 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1399 mode
!= VOIDmode
&& GET_MODE_SIZE (mode
) <= decl_size
;
1400 mode
= GET_MODE_WIDER_MODE (mode
))
1401 if (! memory_address_p (mode
, addr
))
1404 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
);
1405 mode
!= VOIDmode
&& GET_MODE_SIZE (mode
) <= decl_size
;
1406 mode
= GET_MODE_WIDER_MODE (mode
))
1407 if (! memory_address_p (mode
, addr
))
1411 /* Put back the address now that we have updated it and we either know
1412 it is valid or we don't care whether it is valid. */
1417 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1418 is a virtual register, return the equivalent hard register and set the
1419 offset indirectly through the pointer. Otherwise, return 0. */
1422 instantiate_new_reg (rtx x
, HOST_WIDE_INT
*poffset
)
1425 HOST_WIDE_INT offset
;
1427 if (x
== virtual_incoming_args_rtx
)
1428 new = arg_pointer_rtx
, offset
= in_arg_offset
;
1429 else if (x
== virtual_stack_vars_rtx
)
1430 new = frame_pointer_rtx
, offset
= var_offset
;
1431 else if (x
== virtual_stack_dynamic_rtx
)
1432 new = stack_pointer_rtx
, offset
= dynamic_offset
;
1433 else if (x
== virtual_outgoing_args_rtx
)
1434 new = stack_pointer_rtx
, offset
= out_arg_offset
;
1435 else if (x
== virtual_cfa_rtx
)
1436 new = arg_pointer_rtx
, offset
= cfa_offset
;
1445 /* Called when instantiate_virtual_regs has failed to update the instruction.
1446 Usually this means that non-matching instruction has been emit, however for
1447 asm statements it may be the problem in the constraints. */
1449 instantiate_virtual_regs_lossage (rtx insn
)
1451 if (asm_noperands (PATTERN (insn
)) >= 0)
1453 error_for_asm (insn
, "impossible constraint in `asm'");
1459 /* Given a pointer to a piece of rtx and an optional pointer to the
1460 containing object, instantiate any virtual registers present in it.
1462 If EXTRA_INSNS, we always do the replacement and generate
1463 any extra insns before OBJECT. If it zero, we do nothing if replacement
1466 Return 1 if we either had nothing to do or if we were able to do the
1467 needed replacement. Return 0 otherwise; we only return zero if
1468 EXTRA_INSNS is zero.
1470 We first try some simple transformations to avoid the creation of extra
1474 instantiate_virtual_regs_1 (rtx
*loc
, rtx object
, int extra_insns
)
1479 HOST_WIDE_INT offset
= 0;
1485 /* Re-start here to avoid recursion in common cases. */
1492 /* We may have detected and deleted invalid asm statements. */
1493 if (object
&& INSN_P (object
) && INSN_DELETED_P (object
))
1496 code
= GET_CODE (x
);
1498 /* Check for some special cases. */
1516 /* We are allowed to set the virtual registers. This means that
1517 the actual register should receive the source minus the
1518 appropriate offset. This is used, for example, in the handling
1519 of non-local gotos. */
1520 if ((new = instantiate_new_reg (SET_DEST (x
), &offset
)) != 0)
1522 rtx src
= SET_SRC (x
);
1524 /* We are setting the register, not using it, so the relevant
1525 offset is the negative of the offset to use were we using
1528 instantiate_virtual_regs_1 (&src
, NULL_RTX
, 0);
1530 /* The only valid sources here are PLUS or REG. Just do
1531 the simplest possible thing to handle them. */
1532 if (!REG_P (src
) && GET_CODE (src
) != PLUS
)
1534 instantiate_virtual_regs_lossage (object
);
1540 temp
= force_operand (src
, NULL_RTX
);
1543 temp
= force_operand (plus_constant (temp
, offset
), NULL_RTX
);
1547 emit_insn_before (seq
, object
);
1550 if (! validate_change (object
, &SET_SRC (x
), temp
, 0)
1552 instantiate_virtual_regs_lossage (object
);
1557 instantiate_virtual_regs_1 (&SET_DEST (x
), object
, extra_insns
);
1562 /* Handle special case of virtual register plus constant. */
1563 if (CONSTANT_P (XEXP (x
, 1)))
1565 rtx old
, new_offset
;
1567 /* Check for (plus (plus VIRT foo) (const_int)) first. */
1568 if (GET_CODE (XEXP (x
, 0)) == PLUS
)
1570 if ((new = instantiate_new_reg (XEXP (XEXP (x
, 0), 0), &offset
)))
1572 instantiate_virtual_regs_1 (&XEXP (XEXP (x
, 0), 1), object
,
1574 new = gen_rtx_PLUS (Pmode
, new, XEXP (XEXP (x
, 0), 1));
1583 #ifdef POINTERS_EXTEND_UNSIGNED
1584 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1585 we can commute the PLUS and SUBREG because pointers into the
1586 frame are well-behaved. */
1587 else if (GET_CODE (XEXP (x
, 0)) == SUBREG
&& GET_MODE (x
) == ptr_mode
1588 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1590 = instantiate_new_reg (SUBREG_REG (XEXP (x
, 0)),
1592 && validate_change (object
, loc
,
1593 plus_constant (gen_lowpart (ptr_mode
,
1596 + INTVAL (XEXP (x
, 1))),
1600 else if ((new = instantiate_new_reg (XEXP (x
, 0), &offset
)) == 0)
1602 /* We know the second operand is a constant. Unless the
1603 first operand is a REG (which has been already checked),
1604 it needs to be checked. */
1605 if (!REG_P (XEXP (x
, 0)))
1613 new_offset
= plus_constant (XEXP (x
, 1), offset
);
1615 /* If the new constant is zero, try to replace the sum with just
1617 if (new_offset
== const0_rtx
1618 && validate_change (object
, loc
, new, 0))
1621 /* Next try to replace the register and new offset.
1622 There are two changes to validate here and we can't assume that
1623 in the case of old offset equals new just changing the register
1624 will yield a valid insn. In the interests of a little efficiency,
1625 however, we only call validate change once (we don't queue up the
1626 changes and then call apply_change_group). */
1630 ? ! validate_change (object
, &XEXP (x
, 0), new, 0)
1631 : (XEXP (x
, 0) = new,
1632 ! validate_change (object
, &XEXP (x
, 1), new_offset
, 0)))
1640 /* Otherwise copy the new constant into a register and replace
1641 constant with that register. */
1642 temp
= gen_reg_rtx (Pmode
);
1644 if (validate_change (object
, &XEXP (x
, 1), temp
, 0))
1645 emit_insn_before (gen_move_insn (temp
, new_offset
), object
);
1648 /* If that didn't work, replace this expression with a
1649 register containing the sum. */
1652 new = gen_rtx_PLUS (Pmode
, new, new_offset
);
1655 temp
= force_operand (new, NULL_RTX
);
1659 emit_insn_before (seq
, object
);
1660 if (! validate_change (object
, loc
, temp
, 0)
1661 && ! validate_replace_rtx (x
, temp
, object
))
1663 instantiate_virtual_regs_lossage (object
);
1672 /* Fall through to generic two-operand expression case. */
1678 case DIV
: case UDIV
:
1679 case MOD
: case UMOD
:
1680 case AND
: case IOR
: case XOR
:
1681 case ROTATERT
: case ROTATE
:
1682 case ASHIFTRT
: case LSHIFTRT
: case ASHIFT
:
1684 case GE
: case GT
: case GEU
: case GTU
:
1685 case LE
: case LT
: case LEU
: case LTU
:
1686 if (XEXP (x
, 1) && ! CONSTANT_P (XEXP (x
, 1)))
1687 instantiate_virtual_regs_1 (&XEXP (x
, 1), object
, extra_insns
);
1692 /* Most cases of MEM that convert to valid addresses have already been
1693 handled by our scan of decls. The only special handling we
1694 need here is to make a copy of the rtx to ensure it isn't being
1695 shared if we have to change it to a pseudo.
1697 If the rtx is a simple reference to an address via a virtual register,
1698 it can potentially be shared. In such cases, first try to make it
1699 a valid address, which can also be shared. Otherwise, copy it and
1702 First check for common cases that need no processing. These are
1703 usually due to instantiation already being done on a previous instance
1707 if (CONSTANT_ADDRESS_P (temp
)
1708 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1709 || temp
== arg_pointer_rtx
1711 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1712 || temp
== hard_frame_pointer_rtx
1714 || temp
== frame_pointer_rtx
)
1717 if (GET_CODE (temp
) == PLUS
1718 && CONSTANT_ADDRESS_P (XEXP (temp
, 1))
1719 && (XEXP (temp
, 0) == frame_pointer_rtx
1720 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1721 || XEXP (temp
, 0) == hard_frame_pointer_rtx
1723 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1724 || XEXP (temp
, 0) == arg_pointer_rtx
1729 if (temp
== virtual_stack_vars_rtx
1730 || temp
== virtual_incoming_args_rtx
1731 || (GET_CODE (temp
) == PLUS
1732 && CONSTANT_ADDRESS_P (XEXP (temp
, 1))
1733 && (XEXP (temp
, 0) == virtual_stack_vars_rtx
1734 || XEXP (temp
, 0) == virtual_incoming_args_rtx
)))
1736 /* This MEM may be shared. If the substitution can be done without
1737 the need to generate new pseudos, we want to do it in place
1738 so all copies of the shared rtx benefit. The call below will
1739 only make substitutions if the resulting address is still
1742 Note that we cannot pass X as the object in the recursive call
1743 since the insn being processed may not allow all valid
1744 addresses. However, if we were not passed on object, we can
1745 only modify X without copying it if X will have a valid
1748 ??? Also note that this can still lose if OBJECT is an insn that
1749 has less restrictions on an address that some other insn.
1750 In that case, we will modify the shared address. This case
1751 doesn't seem very likely, though. One case where this could
1752 happen is in the case of a USE or CLOBBER reference, but we
1753 take care of that below. */
1755 if (instantiate_virtual_regs_1 (&XEXP (x
, 0),
1756 object
? object
: x
, 0))
1759 /* Otherwise make a copy and process that copy. We copy the entire
1760 RTL expression since it might be a PLUS which could also be
1762 *loc
= x
= copy_rtx (x
);
1765 /* Fall through to generic unary operation case. */
1768 case STRICT_LOW_PART
:
1770 case PRE_DEC
: case PRE_INC
: case POST_DEC
: case POST_INC
:
1771 case SIGN_EXTEND
: case ZERO_EXTEND
:
1772 case TRUNCATE
: case FLOAT_EXTEND
: case FLOAT_TRUNCATE
:
1773 case FLOAT
: case FIX
:
1774 case UNSIGNED_FIX
: case UNSIGNED_FLOAT
:
1779 case POPCOUNT
: case PARITY
:
1780 /* These case either have just one operand or we know that we need not
1781 check the rest of the operands. */
1787 /* If the operand is a MEM, see if the change is a valid MEM. If not,
1788 go ahead and make the invalid one, but do it to a copy. For a REG,
1789 just make the recursive call, since there's no chance of a problem. */
1791 if ((MEM_P (XEXP (x
, 0))
1792 && instantiate_virtual_regs_1 (&XEXP (XEXP (x
, 0), 0), XEXP (x
, 0),
1794 || (REG_P (XEXP (x
, 0))
1795 && instantiate_virtual_regs_1 (&XEXP (x
, 0), object
, 0)))
1798 XEXP (x
, 0) = copy_rtx (XEXP (x
, 0));
1803 /* Try to replace with a PLUS. If that doesn't work, compute the sum
1804 in front of this insn and substitute the temporary. */
1805 if ((new = instantiate_new_reg (x
, &offset
)) != 0)
1807 temp
= plus_constant (new, offset
);
1808 if (!validate_change (object
, loc
, temp
, 0))
1814 temp
= force_operand (temp
, NULL_RTX
);
1818 emit_insn_before (seq
, object
);
1819 if (! validate_change (object
, loc
, temp
, 0)
1820 && ! validate_replace_rtx (x
, temp
, object
))
1821 instantiate_virtual_regs_lossage (object
);
1831 /* Scan all subexpressions. */
1832 fmt
= GET_RTX_FORMAT (code
);
1833 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
1836 if (!instantiate_virtual_regs_1 (&XEXP (x
, i
), object
, extra_insns
))
1839 else if (*fmt
== 'E')
1840 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1841 if (! instantiate_virtual_regs_1 (&XVECEXP (x
, i
, j
), object
,
1848 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1849 This means a type for which function calls must pass an address to the
1850 function or get an address back from the function.
1851 EXP may be a type node or an expression (whose type is tested). */
1854 aggregate_value_p (tree exp
, tree fntype
)
1856 int i
, regno
, nregs
;
1859 tree type
= (TYPE_P (exp
)) ? exp
: TREE_TYPE (exp
);
1862 switch (TREE_CODE (fntype
))
1865 fntype
= get_callee_fndecl (fntype
);
1866 fntype
= fntype
? TREE_TYPE (fntype
) : 0;
1869 fntype
= TREE_TYPE (fntype
);
1874 case IDENTIFIER_NODE
:
1878 /* We don't expect other rtl types here. */
1882 if (TREE_CODE (type
) == VOID_TYPE
)
1884 if (targetm
.calls
.return_in_memory (type
, fntype
))
1886 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1887 and thus can't be returned in registers. */
1888 if (TREE_ADDRESSABLE (type
))
1890 if (flag_pcc_struct_return
&& AGGREGATE_TYPE_P (type
))
1892 /* Make sure we have suitable call-clobbered regs to return
1893 the value in; if not, we must return it in memory. */
1894 reg
= hard_function_value (type
, 0, 0);
1896 /* If we have something other than a REG (e.g. a PARALLEL), then assume
1901 regno
= REGNO (reg
);
1902 nregs
= hard_regno_nregs
[regno
][TYPE_MODE (type
)];
1903 for (i
= 0; i
< nregs
; i
++)
1904 if (! call_used_regs
[regno
+ i
])
1909 /* Return true if we should assign DECL a pseudo register; false if it
1910 should live on the local stack. */
1913 use_register_for_decl (tree decl
)
1915 /* Honor volatile. */
1916 if (TREE_SIDE_EFFECTS (decl
))
1919 /* Honor addressability. */
1920 if (TREE_ADDRESSABLE (decl
))
1923 /* Only register-like things go in registers. */
1924 if (DECL_MODE (decl
) == BLKmode
)
1927 /* If -ffloat-store specified, don't put explicit float variables
1929 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1930 propagates values across these stores, and it probably shouldn't. */
1931 if (flag_float_store
&& FLOAT_TYPE_P (TREE_TYPE (decl
)))
1934 /* Compiler-generated temporaries can always go in registers. */
1935 if (DECL_ARTIFICIAL (decl
))
1938 #ifdef NON_SAVING_SETJMP
1939 /* Protect variables not declared "register" from setjmp. */
1940 if (NON_SAVING_SETJMP
1941 && current_function_calls_setjmp
1942 && !DECL_REGISTER (decl
))
1946 return (optimize
|| DECL_REGISTER (decl
));
1949 /* Return true if TYPE should be passed by invisible reference. */
1952 pass_by_reference (CUMULATIVE_ARGS
*ca
, enum machine_mode mode
,
1953 tree type
, bool named_arg
)
1957 /* If this type contains non-trivial constructors, then it is
1958 forbidden for the middle-end to create any new copies. */
1959 if (TREE_ADDRESSABLE (type
))
1962 /* GCC post 3.4 passes *all* variable sized types by reference. */
1963 if (!TYPE_SIZE (type
) || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
1967 return targetm
.calls
.pass_by_reference (ca
, mode
, type
, named_arg
);
1970 /* Structures to communicate between the subroutines of assign_parms.
1971 The first holds data persistent across all parameters, the second
1972 is cleared out for each parameter. */
1974 struct assign_parm_data_all
1976 CUMULATIVE_ARGS args_so_far
;
1977 struct args_size stack_args_size
;
1978 tree function_result_decl
;
1980 rtx conversion_insns
;
1981 HOST_WIDE_INT pretend_args_size
;
1982 HOST_WIDE_INT extra_pretend_bytes
;
1983 int reg_parm_stack_space
;
1986 struct assign_parm_data_one
1992 enum machine_mode nominal_mode
;
1993 enum machine_mode passed_mode
;
1994 enum machine_mode promoted_mode
;
1995 struct locate_and_pad_arg_data locate
;
1997 BOOL_BITFIELD named_arg
: 1;
1998 BOOL_BITFIELD last_named
: 1;
1999 BOOL_BITFIELD passed_pointer
: 1;
2000 BOOL_BITFIELD on_stack
: 1;
2001 BOOL_BITFIELD loaded_in_reg
: 1;
2004 /* A subroutine of assign_parms. Initialize ALL. */
2007 assign_parms_initialize_all (struct assign_parm_data_all
*all
)
2011 memset (all
, 0, sizeof (*all
));
2013 fntype
= TREE_TYPE (current_function_decl
);
2015 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2016 INIT_CUMULATIVE_INCOMING_ARGS (all
->args_so_far
, fntype
, NULL_RTX
);
2018 INIT_CUMULATIVE_ARGS (all
->args_so_far
, fntype
, NULL_RTX
,
2019 current_function_decl
, -1);
2022 #ifdef REG_PARM_STACK_SPACE
2023 all
->reg_parm_stack_space
= REG_PARM_STACK_SPACE (current_function_decl
);
2027 /* If ARGS contains entries with complex types, split the entry into two
2028 entries of the component type. Return a new list of substitutions are
2029 needed, else the old list. */
2032 split_complex_args (tree args
)
2036 /* Before allocating memory, check for the common case of no complex. */
2037 for (p
= args
; p
; p
= TREE_CHAIN (p
))
2039 tree type
= TREE_TYPE (p
);
2040 if (TREE_CODE (type
) == COMPLEX_TYPE
2041 && targetm
.calls
.split_complex_arg (type
))
2047 args
= copy_list (args
);
2049 for (p
= args
; p
; p
= TREE_CHAIN (p
))
2051 tree type
= TREE_TYPE (p
);
2052 if (TREE_CODE (type
) == COMPLEX_TYPE
2053 && targetm
.calls
.split_complex_arg (type
))
2056 tree subtype
= TREE_TYPE (type
);
2058 /* Rewrite the PARM_DECL's type with its component. */
2059 TREE_TYPE (p
) = subtype
;
2060 DECL_ARG_TYPE (p
) = TREE_TYPE (DECL_ARG_TYPE (p
));
2061 DECL_MODE (p
) = VOIDmode
;
2062 DECL_SIZE (p
) = NULL
;
2063 DECL_SIZE_UNIT (p
) = NULL
;
2066 /* Build a second synthetic decl. */
2067 decl
= build_decl (PARM_DECL
, NULL_TREE
, subtype
);
2068 DECL_ARG_TYPE (decl
) = DECL_ARG_TYPE (p
);
2069 layout_decl (decl
, 0);
2071 /* Splice it in; skip the new decl. */
2072 TREE_CHAIN (decl
) = TREE_CHAIN (p
);
2073 TREE_CHAIN (p
) = decl
;
2081 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2082 the hidden struct return argument, and (abi willing) complex args.
2083 Return the new parameter list. */
2086 assign_parms_augmented_arg_list (struct assign_parm_data_all
*all
)
2088 tree fndecl
= current_function_decl
;
2089 tree fntype
= TREE_TYPE (fndecl
);
2090 tree fnargs
= DECL_ARGUMENTS (fndecl
);
2092 /* If struct value address is treated as the first argument, make it so. */
2093 if (aggregate_value_p (DECL_RESULT (fndecl
), fndecl
)
2094 && ! current_function_returns_pcc_struct
2095 && targetm
.calls
.struct_value_rtx (TREE_TYPE (fndecl
), 1) == 0)
2097 tree type
= build_pointer_type (TREE_TYPE (fntype
));
2100 decl
= build_decl (PARM_DECL
, NULL_TREE
, type
);
2101 DECL_ARG_TYPE (decl
) = type
;
2102 DECL_ARTIFICIAL (decl
) = 1;
2104 TREE_CHAIN (decl
) = fnargs
;
2106 all
->function_result_decl
= decl
;
2109 all
->orig_fnargs
= fnargs
;
2111 /* If the target wants to split complex arguments into scalars, do so. */
2112 if (targetm
.calls
.split_complex_arg
)
2113 fnargs
= split_complex_args (fnargs
);
2118 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2119 data for the parameter. Incorporate ABI specifics such as pass-by-
2120 reference and type promotion. */
2123 assign_parm_find_data_types (struct assign_parm_data_all
*all
, tree parm
,
2124 struct assign_parm_data_one
*data
)
2126 tree nominal_type
, passed_type
;
2127 enum machine_mode nominal_mode
, passed_mode
, promoted_mode
;
2129 memset (data
, 0, sizeof (*data
));
2131 /* Set LAST_NAMED if this is last named arg before last anonymous args. */
2132 if (current_function_stdarg
)
2135 for (tem
= TREE_CHAIN (parm
); tem
; tem
= TREE_CHAIN (tem
))
2136 if (DECL_NAME (tem
))
2139 data
->last_named
= true;
2142 /* Set NAMED_ARG if this arg should be treated as a named arg. For
2143 most machines, if this is a varargs/stdarg function, then we treat
2144 the last named arg as if it were anonymous too. */
2145 if (targetm
.calls
.strict_argument_naming (&all
->args_so_far
))
2146 data
->named_arg
= 1;
2148 data
->named_arg
= !data
->last_named
;
2150 nominal_type
= TREE_TYPE (parm
);
2151 passed_type
= DECL_ARG_TYPE (parm
);
2153 /* Look out for errors propagating this far. Also, if the parameter's
2154 type is void then its value doesn't matter. */
2155 if (TREE_TYPE (parm
) == error_mark_node
2156 /* This can happen after weird syntax errors
2157 or if an enum type is defined among the parms. */
2158 || TREE_CODE (parm
) != PARM_DECL
2159 || passed_type
== NULL
2160 || VOID_TYPE_P (nominal_type
))
2162 nominal_type
= passed_type
= void_type_node
;
2163 nominal_mode
= passed_mode
= promoted_mode
= VOIDmode
;
2167 /* Find mode of arg as it is passed, and mode of arg as it should be
2168 during execution of this function. */
2169 passed_mode
= TYPE_MODE (passed_type
);
2170 nominal_mode
= TYPE_MODE (nominal_type
);
2172 /* If the parm is to be passed as a transparent union, use the type of
2173 the first field for the tests below. We have already verified that
2174 the modes are the same. */
2175 if (DECL_TRANSPARENT_UNION (parm
)
2176 || (TREE_CODE (passed_type
) == UNION_TYPE
2177 && TYPE_TRANSPARENT_UNION (passed_type
)))
2178 passed_type
= TREE_TYPE (TYPE_FIELDS (passed_type
));
2180 /* See if this arg was passed by invisible reference. */
2181 if (pass_by_reference (&all
->args_so_far
, passed_mode
,
2182 passed_type
, data
->named_arg
))
2184 passed_type
= nominal_type
= build_pointer_type (passed_type
);
2185 data
->passed_pointer
= true;
2186 passed_mode
= nominal_mode
= Pmode
;
2188 /* See if the frontend wants to pass this by invisible reference. */
2189 else if (passed_type
!= nominal_type
2190 && POINTER_TYPE_P (passed_type
)
2191 && TREE_TYPE (passed_type
) == nominal_type
)
2193 nominal_type
= passed_type
;
2194 data
->passed_pointer
= 1;
2195 passed_mode
= nominal_mode
= Pmode
;
2198 /* Find mode as it is passed by the ABI. */
2199 promoted_mode
= passed_mode
;
2200 if (targetm
.calls
.promote_function_args (TREE_TYPE (current_function_decl
)))
2202 int unsignedp
= TYPE_UNSIGNED (passed_type
);
2203 promoted_mode
= promote_mode (passed_type
, promoted_mode
,
2208 data
->nominal_type
= nominal_type
;
2209 data
->passed_type
= passed_type
;
2210 data
->nominal_mode
= nominal_mode
;
2211 data
->passed_mode
= passed_mode
;
2212 data
->promoted_mode
= promoted_mode
;
2215 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2218 assign_parms_setup_varargs (struct assign_parm_data_all
*all
,
2219 struct assign_parm_data_one
*data
, bool no_rtl
)
2221 int varargs_pretend_bytes
= 0;
2223 targetm
.calls
.setup_incoming_varargs (&all
->args_so_far
,
2224 data
->promoted_mode
,
2226 &varargs_pretend_bytes
, no_rtl
);
2228 /* If the back-end has requested extra stack space, record how much is
2229 needed. Do not change pretend_args_size otherwise since it may be
2230 nonzero from an earlier partial argument. */
2231 if (varargs_pretend_bytes
> 0)
2232 all
->pretend_args_size
= varargs_pretend_bytes
;
2235 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2236 the incoming location of the current parameter. */
2239 assign_parm_find_entry_rtl (struct assign_parm_data_all
*all
,
2240 struct assign_parm_data_one
*data
)
2242 HOST_WIDE_INT pretend_bytes
= 0;
2246 if (data
->promoted_mode
== VOIDmode
)
2248 data
->entry_parm
= data
->stack_parm
= const0_rtx
;
2252 #ifdef FUNCTION_INCOMING_ARG
2253 entry_parm
= FUNCTION_INCOMING_ARG (all
->args_so_far
, data
->promoted_mode
,
2254 data
->passed_type
, data
->named_arg
);
2256 entry_parm
= FUNCTION_ARG (all
->args_so_far
, data
->promoted_mode
,
2257 data
->passed_type
, data
->named_arg
);
2260 if (entry_parm
== 0)
2261 data
->promoted_mode
= data
->passed_mode
;
2263 /* Determine parm's home in the stack, in case it arrives in the stack
2264 or we should pretend it did. Compute the stack position and rtx where
2265 the argument arrives and its size.
2267 There is one complexity here: If this was a parameter that would
2268 have been passed in registers, but wasn't only because it is
2269 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2270 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2271 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2272 as it was the previous time. */
2273 in_regs
= entry_parm
!= 0;
2274 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2277 if (!in_regs
&& !data
->named_arg
)
2279 if (targetm
.calls
.pretend_outgoing_varargs_named (&all
->args_so_far
))
2282 #ifdef FUNCTION_INCOMING_ARG
2283 tem
= FUNCTION_INCOMING_ARG (all
->args_so_far
, data
->promoted_mode
,
2284 data
->passed_type
, true);
2286 tem
= FUNCTION_ARG (all
->args_so_far
, data
->promoted_mode
,
2287 data
->passed_type
, true);
2289 in_regs
= tem
!= NULL
;
2293 /* If this parameter was passed both in registers and in the stack, use
2294 the copy on the stack. */
2295 if (targetm
.calls
.must_pass_in_stack (data
->promoted_mode
,
2303 partial
= FUNCTION_ARG_PARTIAL_NREGS (all
->args_so_far
,
2304 data
->promoted_mode
,
2307 data
->partial
= partial
;
2309 /* The caller might already have allocated stack space for the
2310 register parameters. */
2311 if (partial
!= 0 && all
->reg_parm_stack_space
== 0)
2313 /* Part of this argument is passed in registers and part
2314 is passed on the stack. Ask the prologue code to extend
2315 the stack part so that we can recreate the full value.
2317 PRETEND_BYTES is the size of the registers we need to store.
2318 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2319 stack space that the prologue should allocate.
2321 Internally, gcc assumes that the argument pointer is aligned
2322 to STACK_BOUNDARY bits. This is used both for alignment
2323 optimizations (see init_emit) and to locate arguments that are
2324 aligned to more than PARM_BOUNDARY bits. We must preserve this
2325 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2326 a stack boundary. */
2328 /* We assume at most one partial arg, and it must be the first
2329 argument on the stack. */
2330 if (all
->extra_pretend_bytes
|| all
->pretend_args_size
)
2333 pretend_bytes
= partial
* UNITS_PER_WORD
;
2334 all
->pretend_args_size
= CEIL_ROUND (pretend_bytes
, STACK_BYTES
);
2336 /* We want to align relative to the actual stack pointer, so
2337 don't include this in the stack size until later. */
2338 all
->extra_pretend_bytes
= all
->pretend_args_size
;
2342 locate_and_pad_parm (data
->promoted_mode
, data
->passed_type
, in_regs
,
2343 entry_parm
? data
->partial
: 0, current_function_decl
,
2344 &all
->stack_args_size
, &data
->locate
);
2346 /* Adjust offsets to include the pretend args. */
2347 pretend_bytes
= all
->extra_pretend_bytes
- pretend_bytes
;
2348 data
->locate
.slot_offset
.constant
+= pretend_bytes
;
2349 data
->locate
.offset
.constant
+= pretend_bytes
;
2351 data
->entry_parm
= entry_parm
;
2354 /* A subroutine of assign_parms. If there is actually space on the stack
2355 for this parm, count it in stack_args_size and return true. */
2358 assign_parm_is_stack_parm (struct assign_parm_data_all
*all
,
2359 struct assign_parm_data_one
*data
)
2361 /* Trivially true if we've no incomming register. */
2362 if (data
->entry_parm
== NULL
)
2364 /* Also true if we're partially in registers and partially not,
2365 since we've arranged to drop the entire argument on the stack. */
2366 else if (data
->partial
!= 0)
2368 /* Also true if the target says that it's passed in both registers
2369 and on the stack. */
2370 else if (GET_CODE (data
->entry_parm
) == PARALLEL
2371 && XEXP (XVECEXP (data
->entry_parm
, 0, 0), 0) == NULL_RTX
)
2373 /* Also true if the target says that there's stack allocated for
2374 all register parameters. */
2375 else if (all
->reg_parm_stack_space
> 0)
2377 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2381 all
->stack_args_size
.constant
+= data
->locate
.size
.constant
;
2382 if (data
->locate
.size
.var
)
2383 ADD_PARM_SIZE (all
->stack_args_size
, data
->locate
.size
.var
);
2388 /* A subroutine of assign_parms. Given that this parameter is allocated
2389 stack space by the ABI, find it. */
2392 assign_parm_find_stack_rtl (tree parm
, struct assign_parm_data_one
*data
)
2394 rtx offset_rtx
, stack_parm
;
2395 unsigned int align
, boundary
;
2397 /* If we're passing this arg using a reg, make its stack home the
2398 aligned stack slot. */
2399 if (data
->entry_parm
)
2400 offset_rtx
= ARGS_SIZE_RTX (data
->locate
.slot_offset
);
2402 offset_rtx
= ARGS_SIZE_RTX (data
->locate
.offset
);
2404 stack_parm
= current_function_internal_arg_pointer
;
2405 if (offset_rtx
!= const0_rtx
)
2406 stack_parm
= gen_rtx_PLUS (Pmode
, stack_parm
, offset_rtx
);
2407 stack_parm
= gen_rtx_MEM (data
->promoted_mode
, stack_parm
);
2409 set_mem_attributes (stack_parm
, parm
, 1);
2411 boundary
= FUNCTION_ARG_BOUNDARY (data
->promoted_mode
, data
->passed_type
);
2414 /* If we're padding upward, we know that the alignment of the slot
2415 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2416 intentionally forcing upward padding. Otherwise we have to come
2417 up with a guess at the alignment based on OFFSET_RTX. */
2418 if (data
->locate
.where_pad
== upward
|| data
->entry_parm
)
2420 else if (GET_CODE (offset_rtx
) == CONST_INT
)
2422 align
= INTVAL (offset_rtx
) * BITS_PER_UNIT
| boundary
;
2423 align
= align
& -align
;
2426 set_mem_align (stack_parm
, align
);
2428 if (data
->entry_parm
)
2429 set_reg_attrs_for_parm (data
->entry_parm
, stack_parm
);
2431 data
->stack_parm
= stack_parm
;
2434 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2435 always valid and contiguous. */
2438 assign_parm_adjust_entry_rtl (struct assign_parm_data_one
*data
)
2440 rtx entry_parm
= data
->entry_parm
;
2441 rtx stack_parm
= data
->stack_parm
;
2443 /* If this parm was passed part in regs and part in memory, pretend it
2444 arrived entirely in memory by pushing the register-part onto the stack.
2445 In the special case of a DImode or DFmode that is split, we could put
2446 it together in a pseudoreg directly, but for now that's not worth
2448 if (data
->partial
!= 0)
2450 /* Handle calls that pass values in multiple non-contiguous
2451 locations. The Irix 6 ABI has examples of this. */
2452 if (GET_CODE (entry_parm
) == PARALLEL
)
2453 emit_group_store (validize_mem (stack_parm
), entry_parm
,
2455 int_size_in_bytes (data
->passed_type
));
2457 move_block_from_reg (REGNO (entry_parm
), validize_mem (stack_parm
),
2460 entry_parm
= stack_parm
;
2463 /* If we didn't decide this parm came in a register, by default it came
2465 else if (entry_parm
== NULL
)
2466 entry_parm
= stack_parm
;
2468 /* When an argument is passed in multiple locations, we can't make use
2469 of this information, but we can save some copying if the whole argument
2470 is passed in a single register. */
2471 else if (GET_CODE (entry_parm
) == PARALLEL
2472 && data
->nominal_mode
!= BLKmode
2473 && data
->passed_mode
!= BLKmode
)
2475 size_t i
, len
= XVECLEN (entry_parm
, 0);
2477 for (i
= 0; i
< len
; i
++)
2478 if (XEXP (XVECEXP (entry_parm
, 0, i
), 0) != NULL_RTX
2479 && REG_P (XEXP (XVECEXP (entry_parm
, 0, i
), 0))
2480 && (GET_MODE (XEXP (XVECEXP (entry_parm
, 0, i
), 0))
2481 == data
->passed_mode
)
2482 && INTVAL (XEXP (XVECEXP (entry_parm
, 0, i
), 1)) == 0)
2484 entry_parm
= XEXP (XVECEXP (entry_parm
, 0, i
), 0);
2489 data
->entry_parm
= entry_parm
;
2492 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2493 always valid and properly aligned. */
2497 assign_parm_adjust_stack_rtl (struct assign_parm_data_one
*data
)
2499 rtx stack_parm
= data
->stack_parm
;
2501 /* If we can't trust the parm stack slot to be aligned enough for its
2502 ultimate type, don't use that slot after entry. We'll make another
2503 stack slot, if we need one. */
2504 if (STRICT_ALIGNMENT
&& stack_parm
2505 && GET_MODE_ALIGNMENT (data
->nominal_mode
) > MEM_ALIGN (stack_parm
))
2508 /* If parm was passed in memory, and we need to convert it on entry,
2509 don't store it back in that same slot. */
2510 else if (data
->entry_parm
== stack_parm
2511 && data
->nominal_mode
!= BLKmode
2512 && data
->nominal_mode
!= data
->passed_mode
)
2515 data
->stack_parm
= stack_parm
;
2518 /* A subroutine of assign_parms. Return true if the current parameter
2519 should be stored as a BLKmode in the current frame. */
2522 assign_parm_setup_block_p (struct assign_parm_data_one
*data
)
2524 if (data
->nominal_mode
== BLKmode
)
2526 if (GET_CODE (data
->entry_parm
) == PARALLEL
)
2529 #ifdef BLOCK_REG_PADDING
2530 if (data
->locate
.where_pad
== (BYTES_BIG_ENDIAN
? upward
: downward
)
2531 && GET_MODE_SIZE (data
->promoted_mode
) < UNITS_PER_WORD
)
2538 /* A subroutine of assign_parms. Arrange for the parameter to be
2539 present and valid in DATA->STACK_RTL. */
2542 assign_parm_setup_block (tree parm
, struct assign_parm_data_one
*data
)
2544 rtx entry_parm
= data
->entry_parm
;
2545 rtx stack_parm
= data
->stack_parm
;
2547 /* If we've a non-block object that's nevertheless passed in parts,
2548 reconstitute it in register operations rather than on the stack. */
2549 if (GET_CODE (entry_parm
) == PARALLEL
2550 && data
->nominal_mode
!= BLKmode
2551 && XVECLEN (entry_parm
, 0) > 1
2554 rtx parmreg
= gen_reg_rtx (data
->nominal_mode
);
2556 emit_group_store (parmreg
, entry_parm
, data
->nominal_type
,
2557 int_size_in_bytes (data
->nominal_type
));
2558 SET_DECL_RTL (parm
, parmreg
);
2562 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2563 calls that pass values in multiple non-contiguous locations. */
2564 if (REG_P (entry_parm
) || GET_CODE (entry_parm
) == PARALLEL
)
2566 HOST_WIDE_INT size
= int_size_in_bytes (data
->passed_type
);
2567 HOST_WIDE_INT size_stored
= CEIL_ROUND (size
, UNITS_PER_WORD
);
2570 /* Note that we will be storing an integral number of words.
2571 So we have to be careful to ensure that we allocate an
2572 integral number of words. We do this below in the
2573 assign_stack_local if space was not allocated in the argument
2574 list. If it was, this will not work if PARM_BOUNDARY is not
2575 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2576 if it becomes a problem. Exception is when BLKmode arrives
2577 with arguments not conforming to word_mode. */
2579 if (stack_parm
== 0)
2581 stack_parm
= assign_stack_local (BLKmode
, size_stored
, 0);
2582 data
->stack_parm
= stack_parm
;
2583 PUT_MODE (stack_parm
, GET_MODE (entry_parm
));
2584 set_mem_attributes (stack_parm
, parm
, 1);
2586 else if (GET_CODE (entry_parm
) == PARALLEL
)
2588 else if (size
!= 0 && PARM_BOUNDARY
% BITS_PER_WORD
!= 0)
2591 mem
= validize_mem (stack_parm
);
2593 /* Handle values in multiple non-contiguous locations. */
2594 if (GET_CODE (entry_parm
) == PARALLEL
)
2595 emit_group_store (mem
, entry_parm
, data
->passed_type
, size
);
2600 /* If SIZE is that of a mode no bigger than a word, just use
2601 that mode's store operation. */
2602 else if (size
<= UNITS_PER_WORD
)
2604 enum machine_mode mode
2605 = mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
, 0);
2608 #ifdef BLOCK_REG_PADDING
2609 && (size
== UNITS_PER_WORD
2610 || (BLOCK_REG_PADDING (mode
, data
->passed_type
, 1)
2611 != (BYTES_BIG_ENDIAN
? upward
: downward
)))
2615 rtx reg
= gen_rtx_REG (mode
, REGNO (entry_parm
));
2616 emit_move_insn (change_address (mem
, mode
, 0), reg
);
2619 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2620 machine must be aligned to the left before storing
2621 to memory. Note that the previous test doesn't
2622 handle all cases (e.g. SIZE == 3). */
2623 else if (size
!= UNITS_PER_WORD
2624 #ifdef BLOCK_REG_PADDING
2625 && (BLOCK_REG_PADDING (mode
, data
->passed_type
, 1)
2633 int by
= (UNITS_PER_WORD
- size
) * BITS_PER_UNIT
;
2634 rtx reg
= gen_rtx_REG (word_mode
, REGNO (data
->entry_parm
));
2636 x
= expand_shift (LSHIFT_EXPR
, word_mode
, reg
,
2637 build_int_2 (by
, 0), NULL_RTX
, 1);
2638 tem
= change_address (mem
, word_mode
, 0);
2639 emit_move_insn (tem
, x
);
2642 move_block_from_reg (REGNO (data
->entry_parm
), mem
,
2643 size_stored
/ UNITS_PER_WORD
);
2646 move_block_from_reg (REGNO (data
->entry_parm
), mem
,
2647 size_stored
/ UNITS_PER_WORD
);
2650 SET_DECL_RTL (parm
, stack_parm
);
2653 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2654 parameter. Get it there. Perform all ABI specified conversions. */
2657 assign_parm_setup_reg (struct assign_parm_data_all
*all
, tree parm
,
2658 struct assign_parm_data_one
*data
)
2661 enum machine_mode promoted_nominal_mode
;
2662 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (parm
));
2663 bool did_conversion
= false;
2665 /* Store the parm in a pseudoregister during the function, but we may
2666 need to do it in a wider mode. */
2668 promoted_nominal_mode
2669 = promote_mode (data
->nominal_type
, data
->nominal_mode
, &unsignedp
, 0);
2671 parmreg
= gen_reg_rtx (promoted_nominal_mode
);
2673 if (!DECL_ARTIFICIAL (parm
))
2674 mark_user_reg (parmreg
);
2676 /* If this was an item that we received a pointer to,
2677 set DECL_RTL appropriately. */
2678 if (data
->passed_pointer
)
2680 rtx x
= gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data
->passed_type
)), parmreg
);
2681 set_mem_attributes (x
, parm
, 1);
2682 SET_DECL_RTL (parm
, x
);
2686 SET_DECL_RTL (parm
, parmreg
);
2687 maybe_set_unchanging (DECL_RTL (parm
), parm
);
2690 /* Copy the value into the register. */
2691 if (data
->nominal_mode
!= data
->passed_mode
2692 || promoted_nominal_mode
!= data
->promoted_mode
)
2696 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2697 mode, by the caller. We now have to convert it to
2698 NOMINAL_MODE, if different. However, PARMREG may be in
2699 a different mode than NOMINAL_MODE if it is being stored
2702 If ENTRY_PARM is a hard register, it might be in a register
2703 not valid for operating in its mode (e.g., an odd-numbered
2704 register for a DFmode). In that case, moves are the only
2705 thing valid, so we can't do a convert from there. This
2706 occurs when the calling sequence allow such misaligned
2709 In addition, the conversion may involve a call, which could
2710 clobber parameters which haven't been copied to pseudo
2711 registers yet. Therefore, we must first copy the parm to
2712 a pseudo reg here, and save the conversion until after all
2713 parameters have been moved. */
2715 rtx tempreg
= gen_reg_rtx (GET_MODE (data
->entry_parm
));
2717 emit_move_insn (tempreg
, validize_mem (data
->entry_parm
));
2719 push_to_sequence (all
->conversion_insns
);
2720 tempreg
= convert_to_mode (data
->nominal_mode
, tempreg
, unsignedp
);
2722 if (GET_CODE (tempreg
) == SUBREG
2723 && GET_MODE (tempreg
) == data
->nominal_mode
2724 && REG_P (SUBREG_REG (tempreg
))
2725 && data
->nominal_mode
== data
->passed_mode
2726 && GET_MODE (SUBREG_REG (tempreg
)) == GET_MODE (data
->entry_parm
)
2727 && GET_MODE_SIZE (GET_MODE (tempreg
))
2728 < GET_MODE_SIZE (GET_MODE (data
->entry_parm
)))
2730 /* The argument is already sign/zero extended, so note it
2732 SUBREG_PROMOTED_VAR_P (tempreg
) = 1;
2733 SUBREG_PROMOTED_UNSIGNED_SET (tempreg
, unsignedp
);
2736 /* TREE_USED gets set erroneously during expand_assignment. */
2737 save_tree_used
= TREE_USED (parm
);
2738 expand_assignment (parm
, make_tree (data
->nominal_type
, tempreg
), 0);
2739 TREE_USED (parm
) = save_tree_used
;
2740 all
->conversion_insns
= get_insns ();
2743 did_conversion
= true;
2746 emit_move_insn (parmreg
, validize_mem (data
->entry_parm
));
2748 /* If we were passed a pointer but the actual value can safely live
2749 in a register, put it in one. */
2750 if (data
->passed_pointer
2751 && TYPE_MODE (TREE_TYPE (parm
)) != BLKmode
2752 /* If by-reference argument was promoted, demote it. */
2753 && (TYPE_MODE (TREE_TYPE (parm
)) != GET_MODE (DECL_RTL (parm
))
2754 || use_register_for_decl (parm
)))
2756 /* We can't use nominal_mode, because it will have been set to
2757 Pmode above. We must use the actual mode of the parm. */
2758 parmreg
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm
)));
2759 mark_user_reg (parmreg
);
2761 if (GET_MODE (parmreg
) != GET_MODE (DECL_RTL (parm
)))
2763 rtx tempreg
= gen_reg_rtx (GET_MODE (DECL_RTL (parm
)));
2764 int unsigned_p
= TYPE_UNSIGNED (TREE_TYPE (parm
));
2766 push_to_sequence (all
->conversion_insns
);
2767 emit_move_insn (tempreg
, DECL_RTL (parm
));
2768 tempreg
= convert_to_mode (GET_MODE (parmreg
), tempreg
, unsigned_p
);
2769 emit_move_insn (parmreg
, tempreg
);
2770 all
->conversion_insns
= get_insns();
2773 did_conversion
= true;
2776 emit_move_insn (parmreg
, DECL_RTL (parm
));
2778 SET_DECL_RTL (parm
, parmreg
);
2780 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2782 data
->stack_parm
= NULL
;
2785 /* If we are passed an arg by reference and it is our responsibility
2786 to make a copy, do it now.
2787 PASSED_TYPE and PASSED mode now refer to the pointer, not the
2788 original argument, so we must recreate them in the call to
2789 FUNCTION_ARG_CALLEE_COPIES. */
2790 /* ??? Later add code to handle the case that if the argument isn't
2791 modified, don't do the copy. */
2793 else if (data
->passed_pointer
)
2795 tree type
= TREE_TYPE (data
->passed_type
);
2797 if (FUNCTION_ARG_CALLEE_COPIES (all
->args_so_far
, TYPE_MODE (type
),
2798 type
, data
->named_arg
)
2799 && !TREE_ADDRESSABLE (type
))
2803 /* This sequence may involve a library call perhaps clobbering
2804 registers that haven't been copied to pseudos yet. */
2806 push_to_sequence (all
->conversion_insns
);
2808 if (!COMPLETE_TYPE_P (type
)
2809 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
2811 /* This is a variable sized object. */
2812 copy
= allocate_dynamic_stack_space (expr_size (parm
), NULL_RTX
,
2814 copy
= gen_rtx_MEM (BLKmode
, copy
);
2817 copy
= assign_stack_temp (TYPE_MODE (type
),
2818 int_size_in_bytes (type
), 1);
2819 set_mem_attributes (copy
, parm
, 1);
2821 store_expr (parm
, copy
, 0);
2822 emit_move_insn (parmreg
, XEXP (copy
, 0));
2823 all
->conversion_insns
= get_insns ();
2826 did_conversion
= true;
2830 /* Mark the register as eliminable if we did no conversion and it was
2831 copied from memory at a fixed offset, and the arg pointer was not
2832 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2833 offset formed an invalid address, such memory-equivalences as we
2834 make here would screw up life analysis for it. */
2835 if (data
->nominal_mode
== data
->passed_mode
2837 && data
->stack_parm
!= 0
2838 && MEM_P (data
->stack_parm
)
2839 && data
->locate
.offset
.var
== 0
2840 && reg_mentioned_p (virtual_incoming_args_rtx
,
2841 XEXP (data
->stack_parm
, 0)))
2843 rtx linsn
= get_last_insn ();
2846 /* Mark complex types separately. */
2847 if (GET_CODE (parmreg
) == CONCAT
)
2849 enum machine_mode submode
2850 = GET_MODE_INNER (GET_MODE (parmreg
));
2851 int regnor
= REGNO (gen_realpart (submode
, parmreg
));
2852 int regnoi
= REGNO (gen_imagpart (submode
, parmreg
));
2853 rtx stackr
= gen_realpart (submode
, data
->stack_parm
);
2854 rtx stacki
= gen_imagpart (submode
, data
->stack_parm
);
2856 /* Scan backwards for the set of the real and
2858 for (sinsn
= linsn
; sinsn
!= 0;
2859 sinsn
= prev_nonnote_insn (sinsn
))
2861 set
= single_set (sinsn
);
2865 if (SET_DEST (set
) == regno_reg_rtx
[regnoi
])
2867 = gen_rtx_EXPR_LIST (REG_EQUIV
, stacki
,
2869 else if (SET_DEST (set
) == regno_reg_rtx
[regnor
])
2871 = gen_rtx_EXPR_LIST (REG_EQUIV
, stackr
,
2875 else if ((set
= single_set (linsn
)) != 0
2876 && SET_DEST (set
) == parmreg
)
2878 = gen_rtx_EXPR_LIST (REG_EQUIV
,
2879 data
->stack_parm
, REG_NOTES (linsn
));
2882 /* For pointer data type, suggest pointer register. */
2883 if (POINTER_TYPE_P (TREE_TYPE (parm
)))
2884 mark_reg_pointer (parmreg
,
2885 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm
))));
2888 /* A subroutine of assign_parms. Allocate stack space to hold the current
2889 parameter. Get it there. Perform all ABI specified conversions. */
2892 assign_parm_setup_stack (struct assign_parm_data_all
*all
, tree parm
,
2893 struct assign_parm_data_one
*data
)
2895 /* Value must be stored in the stack slot STACK_PARM during function
2898 if (data
->promoted_mode
!= data
->nominal_mode
)
2900 /* Conversion is required. */
2901 rtx tempreg
= gen_reg_rtx (GET_MODE (data
->entry_parm
));
2903 emit_move_insn (tempreg
, validize_mem (data
->entry_parm
));
2905 push_to_sequence (all
->conversion_insns
);
2906 data
->entry_parm
= convert_to_mode (data
->nominal_mode
, tempreg
,
2907 TYPE_UNSIGNED (TREE_TYPE (parm
)));
2909 if (data
->stack_parm
)
2910 /* ??? This may need a big-endian conversion on sparc64. */
2912 = adjust_address (data
->stack_parm
, data
->nominal_mode
, 0);
2914 all
->conversion_insns
= get_insns ();
2918 if (data
->entry_parm
!= data
->stack_parm
)
2920 if (data
->stack_parm
== 0)
2923 = assign_stack_local (GET_MODE (data
->entry_parm
),
2924 GET_MODE_SIZE (GET_MODE (data
->entry_parm
)),
2926 set_mem_attributes (data
->stack_parm
, parm
, 1);
2929 if (data
->promoted_mode
!= data
->nominal_mode
)
2931 push_to_sequence (all
->conversion_insns
);
2932 emit_move_insn (validize_mem (data
->stack_parm
),
2933 validize_mem (data
->entry_parm
));
2934 all
->conversion_insns
= get_insns ();
2938 emit_move_insn (validize_mem (data
->stack_parm
),
2939 validize_mem (data
->entry_parm
));
2942 SET_DECL_RTL (parm
, data
->stack_parm
);
2945 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
2946 undo the frobbing that we did in assign_parms_augmented_arg_list. */
2949 assign_parms_unsplit_complex (tree orig_fnargs
, tree fnargs
)
2953 for (parm
= orig_fnargs
; parm
; parm
= TREE_CHAIN (parm
))
2955 if (TREE_CODE (TREE_TYPE (parm
)) == COMPLEX_TYPE
2956 && targetm
.calls
.split_complex_arg (TREE_TYPE (parm
)))
2958 rtx tmp
, real
, imag
;
2959 enum machine_mode inner
= GET_MODE_INNER (DECL_MODE (parm
));
2961 real
= DECL_RTL (fnargs
);
2962 imag
= DECL_RTL (TREE_CHAIN (fnargs
));
2963 if (inner
!= GET_MODE (real
))
2965 real
= gen_lowpart_SUBREG (inner
, real
);
2966 imag
= gen_lowpart_SUBREG (inner
, imag
);
2968 tmp
= gen_rtx_CONCAT (DECL_MODE (parm
), real
, imag
);
2969 SET_DECL_RTL (parm
, tmp
);
2971 real
= DECL_INCOMING_RTL (fnargs
);
2972 imag
= DECL_INCOMING_RTL (TREE_CHAIN (fnargs
));
2973 if (inner
!= GET_MODE (real
))
2975 real
= gen_lowpart_SUBREG (inner
, real
);
2976 imag
= gen_lowpart_SUBREG (inner
, imag
);
2978 tmp
= gen_rtx_CONCAT (DECL_MODE (parm
), real
, imag
);
2979 set_decl_incoming_rtl (parm
, tmp
);
2980 fnargs
= TREE_CHAIN (fnargs
);
2984 SET_DECL_RTL (parm
, DECL_RTL (fnargs
));
2985 set_decl_incoming_rtl (parm
, DECL_INCOMING_RTL (fnargs
));
2987 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2988 instead of the copy of decl, i.e. FNARGS. */
2989 if (DECL_INCOMING_RTL (parm
) && MEM_P (DECL_INCOMING_RTL (parm
)))
2990 set_mem_expr (DECL_INCOMING_RTL (parm
), parm
);
2993 fnargs
= TREE_CHAIN (fnargs
);
2997 /* Assign RTL expressions to the function's parameters. This may involve
2998 copying them into registers and using those registers as the DECL_RTL. */
3001 assign_parms (tree fndecl
)
3003 struct assign_parm_data_all all
;
3005 rtx internal_arg_pointer
;
3006 int varargs_setup
= 0;
3008 /* If the reg that the virtual arg pointer will be translated into is
3009 not a fixed reg or is the stack pointer, make a copy of the virtual
3010 arg pointer, and address parms via the copy. The frame pointer is
3011 considered fixed even though it is not marked as such.
3013 The second time through, simply use ap to avoid generating rtx. */
3015 if ((ARG_POINTER_REGNUM
== STACK_POINTER_REGNUM
3016 || ! (fixed_regs
[ARG_POINTER_REGNUM
]
3017 || ARG_POINTER_REGNUM
== FRAME_POINTER_REGNUM
)))
3018 internal_arg_pointer
= copy_to_reg (virtual_incoming_args_rtx
);
3020 internal_arg_pointer
= virtual_incoming_args_rtx
;
3021 current_function_internal_arg_pointer
= internal_arg_pointer
;
3023 assign_parms_initialize_all (&all
);
3024 fnargs
= assign_parms_augmented_arg_list (&all
);
3026 for (parm
= fnargs
; parm
; parm
= TREE_CHAIN (parm
))
3028 struct assign_parm_data_one data
;
3030 /* Extract the type of PARM; adjust it according to ABI. */
3031 assign_parm_find_data_types (&all
, parm
, &data
);
3033 /* Early out for errors and void parameters. */
3034 if (data
.passed_mode
== VOIDmode
)
3036 SET_DECL_RTL (parm
, const0_rtx
);
3037 DECL_INCOMING_RTL (parm
) = DECL_RTL (parm
);
3041 /* Handle stdargs. LAST_NAMED is a slight mis-nomer; it's also true
3042 for the unnamed dummy argument following the last named argument.
3043 See ABI silliness wrt strict_argument_naming and NAMED_ARG. So
3044 we only want to do this when we get to the actual last named
3045 argument, which will be the first time LAST_NAMED gets set. */
3046 if (data
.last_named
&& !varargs_setup
)
3048 varargs_setup
= true;
3049 assign_parms_setup_varargs (&all
, &data
, false);
3052 /* Find out where the parameter arrives in this function. */
3053 assign_parm_find_entry_rtl (&all
, &data
);
3055 /* Find out where stack space for this parameter might be. */
3056 if (assign_parm_is_stack_parm (&all
, &data
))
3058 assign_parm_find_stack_rtl (parm
, &data
);
3059 assign_parm_adjust_entry_rtl (&data
);
3062 /* Record permanently how this parm was passed. */
3063 set_decl_incoming_rtl (parm
, data
.entry_parm
);
3065 /* Update info on where next arg arrives in registers. */
3066 FUNCTION_ARG_ADVANCE (all
.args_so_far
, data
.promoted_mode
,
3067 data
.passed_type
, data
.named_arg
);
3069 assign_parm_adjust_stack_rtl (&data
);
3071 if (assign_parm_setup_block_p (&data
))
3072 assign_parm_setup_block (parm
, &data
);
3073 else if (data
.passed_pointer
|| use_register_for_decl (parm
))
3074 assign_parm_setup_reg (&all
, parm
, &data
);
3076 assign_parm_setup_stack (&all
, parm
, &data
);
3079 if (targetm
.calls
.split_complex_arg
&& fnargs
!= all
.orig_fnargs
)
3080 assign_parms_unsplit_complex (all
.orig_fnargs
, fnargs
);
3082 /* Output all parameter conversion instructions (possibly including calls)
3083 now that all parameters have been copied out of hard registers. */
3084 emit_insn (all
.conversion_insns
);
3086 /* If we are receiving a struct value address as the first argument, set up
3087 the RTL for the function result. As this might require code to convert
3088 the transmitted address to Pmode, we do this here to ensure that possible
3089 preliminary conversions of the address have been emitted already. */
3090 if (all
.function_result_decl
)
3092 tree result
= DECL_RESULT (current_function_decl
);
3093 rtx addr
= DECL_RTL (all
.function_result_decl
);
3096 addr
= convert_memory_address (Pmode
, addr
);
3097 x
= gen_rtx_MEM (DECL_MODE (result
), addr
);
3098 set_mem_attributes (x
, result
, 1);
3099 SET_DECL_RTL (result
, x
);
3102 /* We have aligned all the args, so add space for the pretend args. */
3103 current_function_pretend_args_size
= all
.pretend_args_size
;
3104 all
.stack_args_size
.constant
+= all
.extra_pretend_bytes
;
3105 current_function_args_size
= all
.stack_args_size
.constant
;
3107 /* Adjust function incoming argument size for alignment and
3110 #ifdef REG_PARM_STACK_SPACE
3111 current_function_args_size
= MAX (current_function_args_size
,
3112 REG_PARM_STACK_SPACE (fndecl
));
3115 current_function_args_size
3116 = ((current_function_args_size
+ STACK_BYTES
- 1)
3117 / STACK_BYTES
) * STACK_BYTES
;
3119 #ifdef ARGS_GROW_DOWNWARD
3120 current_function_arg_offset_rtx
3121 = (all
.stack_args_size
.var
== 0 ? GEN_INT (-all
.stack_args_size
.constant
)
3122 : expand_expr (size_diffop (all
.stack_args_size
.var
,
3123 size_int (-all
.stack_args_size
.constant
)),
3124 NULL_RTX
, VOIDmode
, 0));
3126 current_function_arg_offset_rtx
= ARGS_SIZE_RTX (all
.stack_args_size
);
3129 /* See how many bytes, if any, of its args a function should try to pop
3132 current_function_pops_args
= RETURN_POPS_ARGS (fndecl
, TREE_TYPE (fndecl
),
3133 current_function_args_size
);
3135 /* For stdarg.h function, save info about
3136 regs and stack space used by the named args. */
3138 current_function_args_info
= all
.args_so_far
;
3140 /* Set the rtx used for the function return value. Put this in its
3141 own variable so any optimizers that need this information don't have
3142 to include tree.h. Do this here so it gets done when an inlined
3143 function gets output. */
3145 current_function_return_rtx
3146 = (DECL_RTL_SET_P (DECL_RESULT (fndecl
))
3147 ? DECL_RTL (DECL_RESULT (fndecl
)) : NULL_RTX
);
3149 /* If scalar return value was computed in a pseudo-reg, or was a named
3150 return value that got dumped to the stack, copy that to the hard
3152 if (DECL_RTL_SET_P (DECL_RESULT (fndecl
)))
3154 tree decl_result
= DECL_RESULT (fndecl
);
3155 rtx decl_rtl
= DECL_RTL (decl_result
);
3157 if (REG_P (decl_rtl
)
3158 ? REGNO (decl_rtl
) >= FIRST_PSEUDO_REGISTER
3159 : DECL_REGISTER (decl_result
))
3163 #ifdef FUNCTION_OUTGOING_VALUE
3164 real_decl_rtl
= FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result
),
3167 real_decl_rtl
= FUNCTION_VALUE (TREE_TYPE (decl_result
),
3170 REG_FUNCTION_VALUE_P (real_decl_rtl
) = 1;
3171 /* The delay slot scheduler assumes that current_function_return_rtx
3172 holds the hard register containing the return value, not a
3173 temporary pseudo. */
3174 current_function_return_rtx
= real_decl_rtl
;
3179 /* Indicate whether REGNO is an incoming argument to the current function
3180 that was promoted to a wider mode. If so, return the RTX for the
3181 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
3182 that REGNO is promoted from and whether the promotion was signed or
3186 promoted_input_arg (unsigned int regno
, enum machine_mode
*pmode
, int *punsignedp
)
3190 for (arg
= DECL_ARGUMENTS (current_function_decl
); arg
;
3191 arg
= TREE_CHAIN (arg
))
3192 if (REG_P (DECL_INCOMING_RTL (arg
))
3193 && REGNO (DECL_INCOMING_RTL (arg
)) == regno
3194 && TYPE_MODE (DECL_ARG_TYPE (arg
)) == TYPE_MODE (TREE_TYPE (arg
)))
3196 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (arg
));
3197 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (arg
));
3199 mode
= promote_mode (TREE_TYPE (arg
), mode
, &unsignedp
, 1);
3200 if (mode
== GET_MODE (DECL_INCOMING_RTL (arg
))
3201 && mode
!= DECL_MODE (arg
))
3203 *pmode
= DECL_MODE (arg
);
3204 *punsignedp
= unsignedp
;
3205 return DECL_INCOMING_RTL (arg
);
3213 /* Compute the size and offset from the start of the stacked arguments for a
3214 parm passed in mode PASSED_MODE and with type TYPE.
3216 INITIAL_OFFSET_PTR points to the current offset into the stacked
3219 The starting offset and size for this parm are returned in
3220 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3221 nonzero, the offset is that of stack slot, which is returned in
3222 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3223 padding required from the initial offset ptr to the stack slot.
3225 IN_REGS is nonzero if the argument will be passed in registers. It will
3226 never be set if REG_PARM_STACK_SPACE is not defined.
3228 FNDECL is the function in which the argument was defined.
3230 There are two types of rounding that are done. The first, controlled by
3231 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3232 list to be aligned to the specific boundary (in bits). This rounding
3233 affects the initial and starting offsets, but not the argument size.
3235 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3236 optionally rounds the size of the parm to PARM_BOUNDARY. The
3237 initial offset is not affected by this rounding, while the size always
3238 is and the starting offset may be. */
3240 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3241 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3242 callers pass in the total size of args so far as
3243 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3246 locate_and_pad_parm (enum machine_mode passed_mode
, tree type
, int in_regs
,
3247 int partial
, tree fndecl ATTRIBUTE_UNUSED
,
3248 struct args_size
*initial_offset_ptr
,
3249 struct locate_and_pad_arg_data
*locate
)
3252 enum direction where_pad
;
3254 int reg_parm_stack_space
= 0;
3255 int part_size_in_regs
;
3257 #ifdef REG_PARM_STACK_SPACE
3258 reg_parm_stack_space
= REG_PARM_STACK_SPACE (fndecl
);
3260 /* If we have found a stack parm before we reach the end of the
3261 area reserved for registers, skip that area. */
3264 if (reg_parm_stack_space
> 0)
3266 if (initial_offset_ptr
->var
)
3268 initial_offset_ptr
->var
3269 = size_binop (MAX_EXPR
, ARGS_SIZE_TREE (*initial_offset_ptr
),
3270 ssize_int (reg_parm_stack_space
));
3271 initial_offset_ptr
->constant
= 0;
3273 else if (initial_offset_ptr
->constant
< reg_parm_stack_space
)
3274 initial_offset_ptr
->constant
= reg_parm_stack_space
;
3277 #endif /* REG_PARM_STACK_SPACE */
3279 part_size_in_regs
= 0;
3280 if (reg_parm_stack_space
== 0)
3281 part_size_in_regs
= ((partial
* UNITS_PER_WORD
)
3282 / (PARM_BOUNDARY
/ BITS_PER_UNIT
)
3283 * (PARM_BOUNDARY
/ BITS_PER_UNIT
));
3286 = type
? size_in_bytes (type
) : size_int (GET_MODE_SIZE (passed_mode
));
3287 where_pad
= FUNCTION_ARG_PADDING (passed_mode
, type
);
3288 boundary
= FUNCTION_ARG_BOUNDARY (passed_mode
, type
);
3289 locate
->where_pad
= where_pad
;
3291 #ifdef ARGS_GROW_DOWNWARD
3292 locate
->slot_offset
.constant
= -initial_offset_ptr
->constant
;
3293 if (initial_offset_ptr
->var
)
3294 locate
->slot_offset
.var
= size_binop (MINUS_EXPR
, ssize_int (0),
3295 initial_offset_ptr
->var
);
3299 if (where_pad
!= none
3300 && (!host_integerp (sizetree
, 1)
3301 || (tree_low_cst (sizetree
, 1) * BITS_PER_UNIT
) % PARM_BOUNDARY
))
3302 s2
= round_up (s2
, PARM_BOUNDARY
/ BITS_PER_UNIT
);
3303 SUB_PARM_SIZE (locate
->slot_offset
, s2
);
3306 locate
->slot_offset
.constant
+= part_size_in_regs
;
3309 #ifdef REG_PARM_STACK_SPACE
3310 || REG_PARM_STACK_SPACE (fndecl
) > 0
3313 pad_to_arg_alignment (&locate
->slot_offset
, boundary
,
3314 &locate
->alignment_pad
);
3316 locate
->size
.constant
= (-initial_offset_ptr
->constant
3317 - locate
->slot_offset
.constant
);
3318 if (initial_offset_ptr
->var
)
3319 locate
->size
.var
= size_binop (MINUS_EXPR
,
3320 size_binop (MINUS_EXPR
,
3322 initial_offset_ptr
->var
),
3323 locate
->slot_offset
.var
);
3325 /* Pad_below needs the pre-rounded size to know how much to pad
3327 locate
->offset
= locate
->slot_offset
;
3328 if (where_pad
== downward
)
3329 pad_below (&locate
->offset
, passed_mode
, sizetree
);
3331 #else /* !ARGS_GROW_DOWNWARD */
3333 #ifdef REG_PARM_STACK_SPACE
3334 || REG_PARM_STACK_SPACE (fndecl
) > 0
3337 pad_to_arg_alignment (initial_offset_ptr
, boundary
,
3338 &locate
->alignment_pad
);
3339 locate
->slot_offset
= *initial_offset_ptr
;
3341 #ifdef PUSH_ROUNDING
3342 if (passed_mode
!= BLKmode
)
3343 sizetree
= size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree
)));
3346 /* Pad_below needs the pre-rounded size to know how much to pad below
3347 so this must be done before rounding up. */
3348 locate
->offset
= locate
->slot_offset
;
3349 if (where_pad
== downward
)
3350 pad_below (&locate
->offset
, passed_mode
, sizetree
);
3352 if (where_pad
!= none
3353 && (!host_integerp (sizetree
, 1)
3354 || (tree_low_cst (sizetree
, 1) * BITS_PER_UNIT
) % PARM_BOUNDARY
))
3355 sizetree
= round_up (sizetree
, PARM_BOUNDARY
/ BITS_PER_UNIT
);
3357 ADD_PARM_SIZE (locate
->size
, sizetree
);
3359 locate
->size
.constant
-= part_size_in_regs
;
3360 #endif /* ARGS_GROW_DOWNWARD */
3363 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3364 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3367 pad_to_arg_alignment (struct args_size
*offset_ptr
, int boundary
,
3368 struct args_size
*alignment_pad
)
3370 tree save_var
= NULL_TREE
;
3371 HOST_WIDE_INT save_constant
= 0;
3372 int boundary_in_bytes
= boundary
/ BITS_PER_UNIT
;
3373 HOST_WIDE_INT sp_offset
= STACK_POINTER_OFFSET
;
3375 #ifdef SPARC_STACK_BOUNDARY_HACK
3376 /* The sparc port has a bug. It sometimes claims a STACK_BOUNDARY
3377 higher than the real alignment of %sp. However, when it does this,
3378 the alignment of %sp+STACK_POINTER_OFFSET will be STACK_BOUNDARY.
3379 This is a temporary hack while the sparc port is fixed. */
3380 if (SPARC_STACK_BOUNDARY_HACK
)
3384 if (boundary
> PARM_BOUNDARY
&& boundary
> STACK_BOUNDARY
)
3386 save_var
= offset_ptr
->var
;
3387 save_constant
= offset_ptr
->constant
;
3390 alignment_pad
->var
= NULL_TREE
;
3391 alignment_pad
->constant
= 0;
3393 if (boundary
> BITS_PER_UNIT
)
3395 if (offset_ptr
->var
)
3397 tree sp_offset_tree
= ssize_int (sp_offset
);
3398 tree offset
= size_binop (PLUS_EXPR
,
3399 ARGS_SIZE_TREE (*offset_ptr
),
3401 #ifdef ARGS_GROW_DOWNWARD
3402 tree rounded
= round_down (offset
, boundary
/ BITS_PER_UNIT
);
3404 tree rounded
= round_up (offset
, boundary
/ BITS_PER_UNIT
);
3407 offset_ptr
->var
= size_binop (MINUS_EXPR
, rounded
, sp_offset_tree
);
3408 /* ARGS_SIZE_TREE includes constant term. */
3409 offset_ptr
->constant
= 0;
3410 if (boundary
> PARM_BOUNDARY
&& boundary
> STACK_BOUNDARY
)
3411 alignment_pad
->var
= size_binop (MINUS_EXPR
, offset_ptr
->var
,
3416 offset_ptr
->constant
= -sp_offset
+
3417 #ifdef ARGS_GROW_DOWNWARD
3418 FLOOR_ROUND (offset_ptr
->constant
+ sp_offset
, boundary_in_bytes
);
3420 CEIL_ROUND (offset_ptr
->constant
+ sp_offset
, boundary_in_bytes
);
3422 if (boundary
> PARM_BOUNDARY
&& boundary
> STACK_BOUNDARY
)
3423 alignment_pad
->constant
= offset_ptr
->constant
- save_constant
;
3429 pad_below (struct args_size
*offset_ptr
, enum machine_mode passed_mode
, tree sizetree
)
3431 if (passed_mode
!= BLKmode
)
3433 if (GET_MODE_BITSIZE (passed_mode
) % PARM_BOUNDARY
)
3434 offset_ptr
->constant
3435 += (((GET_MODE_BITSIZE (passed_mode
) + PARM_BOUNDARY
- 1)
3436 / PARM_BOUNDARY
* PARM_BOUNDARY
/ BITS_PER_UNIT
)
3437 - GET_MODE_SIZE (passed_mode
));
3441 if (TREE_CODE (sizetree
) != INTEGER_CST
3442 || (TREE_INT_CST_LOW (sizetree
) * BITS_PER_UNIT
) % PARM_BOUNDARY
)
3444 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3445 tree s2
= round_up (sizetree
, PARM_BOUNDARY
/ BITS_PER_UNIT
);
3447 ADD_PARM_SIZE (*offset_ptr
, s2
);
3448 SUB_PARM_SIZE (*offset_ptr
, sizetree
);
3453 /* Walk the tree of blocks describing the binding levels within a function
3454 and warn about variables the might be killed by setjmp or vfork.
3455 This is done after calling flow_analysis and before global_alloc
3456 clobbers the pseudo-regs to hard regs. */
3459 setjmp_vars_warning (tree block
)
3463 for (decl
= BLOCK_VARS (block
); decl
; decl
= TREE_CHAIN (decl
))
3465 if (TREE_CODE (decl
) == VAR_DECL
3466 && DECL_RTL_SET_P (decl
)
3467 && REG_P (DECL_RTL (decl
))
3468 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl
))))
3469 warning ("%Jvariable '%D' might be clobbered by `longjmp' or `vfork'",
3473 for (sub
= BLOCK_SUBBLOCKS (block
); sub
; sub
= TREE_CHAIN (sub
))
3474 setjmp_vars_warning (sub
);
3477 /* Do the appropriate part of setjmp_vars_warning
3478 but for arguments instead of local variables. */
3481 setjmp_args_warning (void)
3484 for (decl
= DECL_ARGUMENTS (current_function_decl
);
3485 decl
; decl
= TREE_CHAIN (decl
))
3486 if (DECL_RTL (decl
) != 0
3487 && REG_P (DECL_RTL (decl
))
3488 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl
))))
3489 warning ("%Jargument '%D' might be clobbered by `longjmp' or `vfork'",
3494 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3495 and create duplicate blocks. */
3496 /* ??? Need an option to either create block fragments or to create
3497 abstract origin duplicates of a source block. It really depends
3498 on what optimization has been performed. */
3501 reorder_blocks (void)
3503 tree block
= DECL_INITIAL (current_function_decl
);
3504 varray_type block_stack
;
3506 if (block
== NULL_TREE
)
3509 VARRAY_TREE_INIT (block_stack
, 10, "block_stack");
3511 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3512 clear_block_marks (block
);
3514 /* Prune the old trees away, so that they don't get in the way. */
3515 BLOCK_SUBBLOCKS (block
) = NULL_TREE
;
3516 BLOCK_CHAIN (block
) = NULL_TREE
;
3518 /* Recreate the block tree from the note nesting. */
3519 reorder_blocks_1 (get_insns (), block
, &block_stack
);
3520 BLOCK_SUBBLOCKS (block
) = blocks_nreverse (BLOCK_SUBBLOCKS (block
));
3522 /* Remove deleted blocks from the block fragment chains. */
3523 reorder_fix_fragments (block
);
3526 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3529 clear_block_marks (tree block
)
3533 TREE_ASM_WRITTEN (block
) = 0;
3534 clear_block_marks (BLOCK_SUBBLOCKS (block
));
3535 block
= BLOCK_CHAIN (block
);
3540 reorder_blocks_1 (rtx insns
, tree current_block
, varray_type
*p_block_stack
)
3544 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
3548 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
)
3550 tree block
= NOTE_BLOCK (insn
);
3552 /* If we have seen this block before, that means it now
3553 spans multiple address regions. Create a new fragment. */
3554 if (TREE_ASM_WRITTEN (block
))
3556 tree new_block
= copy_node (block
);
3559 origin
= (BLOCK_FRAGMENT_ORIGIN (block
)
3560 ? BLOCK_FRAGMENT_ORIGIN (block
)
3562 BLOCK_FRAGMENT_ORIGIN (new_block
) = origin
;
3563 BLOCK_FRAGMENT_CHAIN (new_block
)
3564 = BLOCK_FRAGMENT_CHAIN (origin
);
3565 BLOCK_FRAGMENT_CHAIN (origin
) = new_block
;
3567 NOTE_BLOCK (insn
) = new_block
;
3571 BLOCK_SUBBLOCKS (block
) = 0;
3572 TREE_ASM_WRITTEN (block
) = 1;
3573 /* When there's only one block for the entire function,
3574 current_block == block and we mustn't do this, it
3575 will cause infinite recursion. */
3576 if (block
!= current_block
)
3578 BLOCK_SUPERCONTEXT (block
) = current_block
;
3579 BLOCK_CHAIN (block
) = BLOCK_SUBBLOCKS (current_block
);
3580 BLOCK_SUBBLOCKS (current_block
) = block
;
3581 current_block
= block
;
3583 VARRAY_PUSH_TREE (*p_block_stack
, block
);
3585 else if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
)
3587 NOTE_BLOCK (insn
) = VARRAY_TOP_TREE (*p_block_stack
);
3588 VARRAY_POP (*p_block_stack
);
3589 BLOCK_SUBBLOCKS (current_block
)
3590 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block
));
3591 current_block
= BLOCK_SUPERCONTEXT (current_block
);
3597 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
3598 appears in the block tree, select one of the fragments to become
3599 the new origin block. */
3602 reorder_fix_fragments (tree block
)
3606 tree dup_origin
= BLOCK_FRAGMENT_ORIGIN (block
);
3607 tree new_origin
= NULL_TREE
;
3611 if (! TREE_ASM_WRITTEN (dup_origin
))
3613 new_origin
= BLOCK_FRAGMENT_CHAIN (dup_origin
);
3615 /* Find the first of the remaining fragments. There must
3616 be at least one -- the current block. */
3617 while (! TREE_ASM_WRITTEN (new_origin
))
3618 new_origin
= BLOCK_FRAGMENT_CHAIN (new_origin
);
3619 BLOCK_FRAGMENT_ORIGIN (new_origin
) = NULL_TREE
;
3622 else if (! dup_origin
)
3625 /* Re-root the rest of the fragments to the new origin. In the
3626 case that DUP_ORIGIN was null, that means BLOCK was the origin
3627 of a chain of fragments and we want to remove those fragments
3628 that didn't make it to the output. */
3631 tree
*pp
= &BLOCK_FRAGMENT_CHAIN (new_origin
);
3636 if (TREE_ASM_WRITTEN (chain
))
3638 BLOCK_FRAGMENT_ORIGIN (chain
) = new_origin
;
3640 pp
= &BLOCK_FRAGMENT_CHAIN (chain
);
3642 chain
= BLOCK_FRAGMENT_CHAIN (chain
);
3647 reorder_fix_fragments (BLOCK_SUBBLOCKS (block
));
3648 block
= BLOCK_CHAIN (block
);
3652 /* Reverse the order of elements in the chain T of blocks,
3653 and return the new head of the chain (old last element). */
3656 blocks_nreverse (tree t
)
3658 tree prev
= 0, decl
, next
;
3659 for (decl
= t
; decl
; decl
= next
)
3661 next
= BLOCK_CHAIN (decl
);
3662 BLOCK_CHAIN (decl
) = prev
;
3668 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3669 non-NULL, list them all into VECTOR, in a depth-first preorder
3670 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
3674 all_blocks (tree block
, tree
*vector
)
3680 TREE_ASM_WRITTEN (block
) = 0;
3682 /* Record this block. */
3684 vector
[n_blocks
] = block
;
3688 /* Record the subblocks, and their subblocks... */
3689 n_blocks
+= all_blocks (BLOCK_SUBBLOCKS (block
),
3690 vector
? vector
+ n_blocks
: 0);
3691 block
= BLOCK_CHAIN (block
);
3697 /* Return a vector containing all the blocks rooted at BLOCK. The
3698 number of elements in the vector is stored in N_BLOCKS_P. The
3699 vector is dynamically allocated; it is the caller's responsibility
3700 to call `free' on the pointer returned. */
3703 get_block_vector (tree block
, int *n_blocks_p
)
3707 *n_blocks_p
= all_blocks (block
, NULL
);
3708 block_vector
= xmalloc (*n_blocks_p
* sizeof (tree
));
3709 all_blocks (block
, block_vector
);
3711 return block_vector
;
3714 static GTY(()) int next_block_index
= 2;
3716 /* Set BLOCK_NUMBER for all the blocks in FN. */
3719 number_blocks (tree fn
)
3725 /* For SDB and XCOFF debugging output, we start numbering the blocks
3726 from 1 within each function, rather than keeping a running
3728 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3729 if (write_symbols
== SDB_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
3730 next_block_index
= 1;
3733 block_vector
= get_block_vector (DECL_INITIAL (fn
), &n_blocks
);
3735 /* The top-level BLOCK isn't numbered at all. */
3736 for (i
= 1; i
< n_blocks
; ++i
)
3737 /* We number the blocks from two. */
3738 BLOCK_NUMBER (block_vector
[i
]) = next_block_index
++;
3740 free (block_vector
);
3745 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3748 debug_find_var_in_block_tree (tree var
, tree block
)
3752 for (t
= BLOCK_VARS (block
); t
; t
= TREE_CHAIN (t
))
3756 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= TREE_CHAIN (t
))
3758 tree ret
= debug_find_var_in_block_tree (var
, t
);
3766 /* Allocate a function structure for FNDECL and set its contents
3770 allocate_struct_function (tree fndecl
)
3773 tree fntype
= fndecl
? TREE_TYPE (fndecl
) : NULL_TREE
;
3775 cfun
= ggc_alloc_cleared (sizeof (struct function
));
3777 cfun
->stack_alignment_needed
= STACK_BOUNDARY
;
3778 cfun
->preferred_stack_boundary
= STACK_BOUNDARY
;
3780 current_function_funcdef_no
= funcdef_no
++;
3782 cfun
->function_frequency
= FUNCTION_FREQUENCY_NORMAL
;
3784 init_stmt_for_function ();
3785 init_eh_for_function ();
3787 lang_hooks
.function
.init (cfun
);
3788 if (init_machine_status
)
3789 cfun
->machine
= (*init_machine_status
) ();
3794 DECL_STRUCT_FUNCTION (fndecl
) = cfun
;
3795 cfun
->decl
= fndecl
;
3797 result
= DECL_RESULT (fndecl
);
3798 if (aggregate_value_p (result
, fndecl
))
3800 #ifdef PCC_STATIC_STRUCT_RETURN
3801 current_function_returns_pcc_struct
= 1;
3803 current_function_returns_struct
= 1;
3806 current_function_returns_pointer
= POINTER_TYPE_P (TREE_TYPE (result
));
3808 current_function_stdarg
3810 && TYPE_ARG_TYPES (fntype
) != 0
3811 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
3812 != void_type_node
));
3815 /* Reset cfun, and other non-struct-function variables to defaults as
3816 appropriate for emitting rtl at the start of a function. */
3819 prepare_function_start (tree fndecl
)
3821 if (fndecl
&& DECL_STRUCT_FUNCTION (fndecl
))
3822 cfun
= DECL_STRUCT_FUNCTION (fndecl
);
3824 allocate_struct_function (fndecl
);
3826 init_varasm_status (cfun
);
3829 cse_not_expected
= ! optimize
;
3831 /* Caller save not needed yet. */
3832 caller_save_needed
= 0;
3834 /* We haven't done register allocation yet. */
3837 /* Indicate that we have not instantiated virtual registers yet. */
3838 virtuals_instantiated
= 0;
3840 /* Indicate that we want CONCATs now. */
3841 generating_concat_p
= 1;
3843 /* Indicate we have no need of a frame pointer yet. */
3844 frame_pointer_needed
= 0;
3847 /* Initialize the rtl expansion mechanism so that we can do simple things
3848 like generate sequences. This is used to provide a context during global
3849 initialization of some passes. */
3851 init_dummy_function_start (void)
3853 prepare_function_start (NULL
);
3856 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3857 and initialize static variables for generating RTL for the statements
3861 init_function_start (tree subr
)
3863 prepare_function_start (subr
);
3865 /* Prevent ever trying to delete the first instruction of a
3866 function. Also tell final how to output a linenum before the
3867 function prologue. Note linenums could be missing, e.g. when
3868 compiling a Java .class file. */
3869 if (! DECL_IS_BUILTIN (subr
))
3870 emit_line_note (DECL_SOURCE_LOCATION (subr
));
3872 /* Make sure first insn is a note even if we don't want linenums.
3873 This makes sure the first insn will never be deleted.
3874 Also, final expects a note to appear there. */
3875 emit_note (NOTE_INSN_DELETED
);
3877 /* Warn if this value is an aggregate type,
3878 regardless of which calling convention we are using for it. */
3879 if (warn_aggregate_return
3880 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr
))))
3881 warning ("function returns an aggregate");
3884 /* Make sure all values used by the optimization passes have sane
3887 init_function_for_compilation (void)
3891 /* No prologue/epilogue insns yet. */
3892 VARRAY_GROW (prologue
, 0);
3893 VARRAY_GROW (epilogue
, 0);
3894 VARRAY_GROW (sibcall_epilogue
, 0);
3897 /* Expand a call to __main at the beginning of a possible main function. */
3899 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
3900 #undef HAS_INIT_SECTION
3901 #define HAS_INIT_SECTION
3905 expand_main_function (void)
3907 #ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
3908 if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
)
3910 int align
= PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
;
3914 /* Forcibly align the stack. */
3915 #ifdef STACK_GROWS_DOWNWARD
3916 tmp
= expand_simple_binop (Pmode
, AND
, stack_pointer_rtx
, GEN_INT(-align
),
3917 stack_pointer_rtx
, 1, OPTAB_WIDEN
);
3919 tmp
= expand_simple_binop (Pmode
, PLUS
, stack_pointer_rtx
,
3920 GEN_INT (align
- 1), NULL_RTX
, 1, OPTAB_WIDEN
);
3921 tmp
= expand_simple_binop (Pmode
, AND
, tmp
, GEN_INT (-align
),
3922 stack_pointer_rtx
, 1, OPTAB_WIDEN
);
3924 if (tmp
!= stack_pointer_rtx
)
3925 emit_move_insn (stack_pointer_rtx
, tmp
);
3927 /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
3928 tmp
= force_reg (Pmode
, const0_rtx
);
3929 allocate_dynamic_stack_space (tmp
, NULL_RTX
, BIGGEST_ALIGNMENT
);
3933 for (tmp
= get_last_insn (); tmp
; tmp
= PREV_INSN (tmp
))
3934 if (NOTE_P (tmp
) && NOTE_LINE_NUMBER (tmp
) == NOTE_INSN_FUNCTION_BEG
)
3937 emit_insn_before (seq
, tmp
);
3943 #ifndef HAS_INIT_SECTION
3944 emit_library_call (init_one_libfunc (NAME__MAIN
), LCT_NORMAL
, VOIDmode
, 0);
3948 /* The PENDING_SIZES represent the sizes of variable-sized types.
3949 Create RTL for the various sizes now (using temporary variables),
3950 so that we can refer to the sizes from the RTL we are generating
3951 for the current function. The PENDING_SIZES are a TREE_LIST. The
3952 TREE_VALUE of each node is a SAVE_EXPR. */
3955 expand_pending_sizes (tree pending_sizes
)
3959 /* Evaluate now the sizes of any types declared among the arguments. */
3960 for (tem
= pending_sizes
; tem
; tem
= TREE_CHAIN (tem
))
3961 expand_expr (TREE_VALUE (tem
), const0_rtx
, VOIDmode
, 0);
3964 /* Start the RTL for a new function, and set variables used for
3966 SUBR is the FUNCTION_DECL node.
3967 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
3968 the function's parameters, which must be run at any return statement. */
3971 expand_function_start (tree subr
)
3973 /* Make sure volatile mem refs aren't considered
3974 valid operands of arithmetic insns. */
3975 init_recog_no_volatile ();
3977 current_function_profile
3979 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr
));
3981 current_function_limit_stack
3982 = (stack_limit_rtx
!= NULL_RTX
&& ! DECL_NO_LIMIT_STACK (subr
));
3984 /* Make the label for return statements to jump to. Do not special
3985 case machines with special return instructions -- they will be
3986 handled later during jump, ifcvt, or epilogue creation. */
3987 return_label
= gen_label_rtx ();
3989 /* Initialize rtx used to return the value. */
3990 /* Do this before assign_parms so that we copy the struct value address
3991 before any library calls that assign parms might generate. */
3993 /* Decide whether to return the value in memory or in a register. */
3994 if (aggregate_value_p (DECL_RESULT (subr
), subr
))
3996 /* Returning something that won't go in a register. */
3997 rtx value_address
= 0;
3999 #ifdef PCC_STATIC_STRUCT_RETURN
4000 if (current_function_returns_pcc_struct
)
4002 int size
= int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr
)));
4003 value_address
= assemble_static_space (size
);
4008 rtx sv
= targetm
.calls
.struct_value_rtx (TREE_TYPE (subr
), 1);
4009 /* Expect to be passed the address of a place to store the value.
4010 If it is passed as an argument, assign_parms will take care of
4014 value_address
= gen_reg_rtx (Pmode
);
4015 emit_move_insn (value_address
, sv
);
4020 rtx x
= gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr
)), value_address
);
4021 set_mem_attributes (x
, DECL_RESULT (subr
), 1);
4022 SET_DECL_RTL (DECL_RESULT (subr
), x
);
4025 else if (DECL_MODE (DECL_RESULT (subr
)) == VOIDmode
)
4026 /* If return mode is void, this decl rtl should not be used. */
4027 SET_DECL_RTL (DECL_RESULT (subr
), NULL_RTX
);
4030 /* Compute the return values into a pseudo reg, which we will copy
4031 into the true return register after the cleanups are done. */
4033 /* In order to figure out what mode to use for the pseudo, we
4034 figure out what the mode of the eventual return register will
4035 actually be, and use that. */
4037 = hard_function_value (TREE_TYPE (DECL_RESULT (subr
)),
4040 /* Structures that are returned in registers are not aggregate_value_p,
4041 so we may see a PARALLEL or a REG. */
4042 if (REG_P (hard_reg
))
4043 SET_DECL_RTL (DECL_RESULT (subr
), gen_reg_rtx (GET_MODE (hard_reg
)));
4044 else if (GET_CODE (hard_reg
) == PARALLEL
)
4045 SET_DECL_RTL (DECL_RESULT (subr
), gen_group_rtx (hard_reg
));
4049 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4050 result to the real return register(s). */
4051 DECL_REGISTER (DECL_RESULT (subr
)) = 1;
4054 /* Initialize rtx for parameters and local variables.
4055 In some cases this requires emitting insns. */
4056 assign_parms (subr
);
4058 /* If function gets a static chain arg, store it. */
4059 if (cfun
->static_chain_decl
)
4061 tree parm
= cfun
->static_chain_decl
;
4062 rtx local
= gen_reg_rtx (Pmode
);
4064 set_decl_incoming_rtl (parm
, static_chain_incoming_rtx
);
4065 SET_DECL_RTL (parm
, local
);
4066 maybe_set_unchanging (local
, parm
);
4067 mark_reg_pointer (local
, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm
))));
4069 emit_move_insn (local
, static_chain_incoming_rtx
);
4072 /* If the function receives a non-local goto, then store the
4073 bits we need to restore the frame pointer. */
4074 if (cfun
->nonlocal_goto_save_area
)
4079 /* ??? We need to do this save early. Unfortunately here is
4080 before the frame variable gets declared. Help out... */
4081 expand_var (TREE_OPERAND (cfun
->nonlocal_goto_save_area
, 0));
4083 t_save
= build4 (ARRAY_REF
, ptr_type_node
,
4084 cfun
->nonlocal_goto_save_area
,
4085 integer_zero_node
, NULL_TREE
, NULL_TREE
);
4086 r_save
= expand_expr (t_save
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4087 r_save
= convert_memory_address (Pmode
, r_save
);
4089 emit_move_insn (r_save
, virtual_stack_vars_rtx
);
4090 update_nonlocal_goto_save_area ();
4093 /* The following was moved from init_function_start.
4094 The move is supposed to make sdb output more accurate. */
4095 /* Indicate the beginning of the function body,
4096 as opposed to parm setup. */
4097 emit_note (NOTE_INSN_FUNCTION_BEG
);
4099 if (!NOTE_P (get_last_insn ()))
4100 emit_note (NOTE_INSN_DELETED
);
4101 parm_birth_insn
= get_last_insn ();
4103 if (current_function_profile
)
4106 PROFILE_HOOK (current_function_funcdef_no
);
4110 /* After the display initializations is where the tail-recursion label
4111 should go, if we end up needing one. Ensure we have a NOTE here
4112 since some things (like trampolines) get placed before this. */
4113 tail_recursion_reentry
= emit_note (NOTE_INSN_DELETED
);
4115 /* Evaluate now the sizes of any types declared among the arguments. */
4116 expand_pending_sizes (nreverse (get_pending_sizes ()));
4118 /* Make sure there is a line number after the function entry setup code. */
4119 force_next_line_note ();
4122 /* Undo the effects of init_dummy_function_start. */
4124 expand_dummy_function_end (void)
4126 /* End any sequences that failed to be closed due to syntax errors. */
4127 while (in_sequence_p ())
4130 /* Outside function body, can't compute type's actual size
4131 until next function's body starts. */
4133 free_after_parsing (cfun
);
4134 free_after_compilation (cfun
);
4138 /* Call DOIT for each hard register used as a return value from
4139 the current function. */
4142 diddle_return_value (void (*doit
) (rtx
, void *), void *arg
)
4144 rtx outgoing
= current_function_return_rtx
;
4149 if (REG_P (outgoing
))
4150 (*doit
) (outgoing
, arg
);
4151 else if (GET_CODE (outgoing
) == PARALLEL
)
4155 for (i
= 0; i
< XVECLEN (outgoing
, 0); i
++)
4157 rtx x
= XEXP (XVECEXP (outgoing
, 0, i
), 0);
4159 if (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
4166 do_clobber_return_reg (rtx reg
, void *arg ATTRIBUTE_UNUSED
)
4168 emit_insn (gen_rtx_CLOBBER (VOIDmode
, reg
));
4172 clobber_return_register (void)
4174 diddle_return_value (do_clobber_return_reg
, NULL
);
4176 /* In case we do use pseudo to return value, clobber it too. */
4177 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl
)))
4179 tree decl_result
= DECL_RESULT (current_function_decl
);
4180 rtx decl_rtl
= DECL_RTL (decl_result
);
4181 if (REG_P (decl_rtl
) && REGNO (decl_rtl
) >= FIRST_PSEUDO_REGISTER
)
4183 do_clobber_return_reg (decl_rtl
, NULL
);
4189 do_use_return_reg (rtx reg
, void *arg ATTRIBUTE_UNUSED
)
4191 emit_insn (gen_rtx_USE (VOIDmode
, reg
));
4195 use_return_register (void)
4197 diddle_return_value (do_use_return_reg
, NULL
);
4200 /* Possibly warn about unused parameters. */
4202 do_warn_unused_parameter (tree fn
)
4206 for (decl
= DECL_ARGUMENTS (fn
);
4207 decl
; decl
= TREE_CHAIN (decl
))
4208 if (!TREE_USED (decl
) && TREE_CODE (decl
) == PARM_DECL
4209 && DECL_NAME (decl
) && !DECL_ARTIFICIAL (decl
))
4210 warning ("%Junused parameter '%D'", decl
, decl
);
4213 static GTY(()) rtx initial_trampoline
;
4215 /* Generate RTL for the end of the current function. */
4218 expand_function_end (void)
4222 /* If arg_pointer_save_area was referenced only from a nested
4223 function, we will not have initialized it yet. Do that now. */
4224 if (arg_pointer_save_area
&& ! cfun
->arg_pointer_save_area_init
)
4225 get_arg_pointer_save_area (cfun
);
4227 /* If we are doing stack checking and this function makes calls,
4228 do a stack probe at the start of the function to ensure we have enough
4229 space for another stack frame. */
4230 if (flag_stack_check
&& ! STACK_CHECK_BUILTIN
)
4234 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4238 probe_stack_range (STACK_CHECK_PROTECT
,
4239 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE
));
4242 emit_insn_before (seq
, tail_recursion_reentry
);
4247 /* Possibly warn about unused parameters.
4248 When frontend does unit-at-a-time, the warning is already
4249 issued at finalization time. */
4250 if (warn_unused_parameter
4251 && !lang_hooks
.callgraph
.expand_function
)
4252 do_warn_unused_parameter (current_function_decl
);
4254 /* End any sequences that failed to be closed due to syntax errors. */
4255 while (in_sequence_p ())
4258 clear_pending_stack_adjust ();
4259 do_pending_stack_adjust ();
4261 /* @@@ This is a kludge. We want to ensure that instructions that
4262 may trap are not moved into the epilogue by scheduling, because
4263 we don't always emit unwind information for the epilogue.
4264 However, not all machine descriptions define a blockage insn, so
4265 emit an ASM_INPUT to act as one. */
4266 if (flag_non_call_exceptions
)
4267 emit_insn (gen_rtx_ASM_INPUT (VOIDmode
, ""));
4269 /* Mark the end of the function body.
4270 If control reaches this insn, the function can drop through
4271 without returning a value. */
4272 emit_note (NOTE_INSN_FUNCTION_END
);
4274 /* Must mark the last line number note in the function, so that the test
4275 coverage code can avoid counting the last line twice. This just tells
4276 the code to ignore the immediately following line note, since there
4277 already exists a copy of this note somewhere above. This line number
4278 note is still needed for debugging though, so we can't delete it. */
4279 if (flag_test_coverage
)
4280 emit_note (NOTE_INSN_REPEATED_LINE_NUMBER
);
4282 /* Output a linenumber for the end of the function.
4283 SDB depends on this. */
4284 force_next_line_note ();
4285 emit_line_note (input_location
);
4287 /* Before the return label (if any), clobber the return
4288 registers so that they are not propagated live to the rest of
4289 the function. This can only happen with functions that drop
4290 through; if there had been a return statement, there would
4291 have either been a return rtx, or a jump to the return label.
4293 We delay actual code generation after the current_function_value_rtx
4295 clobber_after
= get_last_insn ();
4297 /* Output the label for the actual return from the function,
4298 if one is expected. This happens either because a function epilogue
4299 is used instead of a return instruction, or because a return was done
4300 with a goto in order to run local cleanups, or because of pcc-style
4301 structure returning. */
4303 emit_label (return_label
);
4305 /* Let except.c know where it should emit the call to unregister
4306 the function context for sjlj exceptions. */
4307 if (flag_exceptions
&& USING_SJLJ_EXCEPTIONS
)
4308 sjlj_emit_function_exit_after (get_last_insn ());
4310 /* If we had calls to alloca, and this machine needs
4311 an accurate stack pointer to exit the function,
4312 insert some code to save and restore the stack pointer. */
4313 if (! EXIT_IGNORE_STACK
4314 && current_function_calls_alloca
)
4318 emit_stack_save (SAVE_FUNCTION
, &tem
, parm_birth_insn
);
4319 emit_stack_restore (SAVE_FUNCTION
, tem
, NULL_RTX
);
4322 /* If scalar return value was computed in a pseudo-reg, or was a named
4323 return value that got dumped to the stack, copy that to the hard
4325 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl
)))
4327 tree decl_result
= DECL_RESULT (current_function_decl
);
4328 rtx decl_rtl
= DECL_RTL (decl_result
);
4330 if (REG_P (decl_rtl
)
4331 ? REGNO (decl_rtl
) >= FIRST_PSEUDO_REGISTER
4332 : DECL_REGISTER (decl_result
))
4334 rtx real_decl_rtl
= current_function_return_rtx
;
4336 /* This should be set in assign_parms. */
4337 if (! REG_FUNCTION_VALUE_P (real_decl_rtl
))
4340 /* If this is a BLKmode structure being returned in registers,
4341 then use the mode computed in expand_return. Note that if
4342 decl_rtl is memory, then its mode may have been changed,
4343 but that current_function_return_rtx has not. */
4344 if (GET_MODE (real_decl_rtl
) == BLKmode
)
4345 PUT_MODE (real_decl_rtl
, GET_MODE (decl_rtl
));
4347 /* If a named return value dumped decl_return to memory, then
4348 we may need to re-do the PROMOTE_MODE signed/unsigned
4350 if (GET_MODE (real_decl_rtl
) != GET_MODE (decl_rtl
))
4352 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (decl_result
));
4354 if (targetm
.calls
.promote_function_return (TREE_TYPE (current_function_decl
)))
4355 promote_mode (TREE_TYPE (decl_result
), GET_MODE (decl_rtl
),
4358 convert_move (real_decl_rtl
, decl_rtl
, unsignedp
);
4360 else if (GET_CODE (real_decl_rtl
) == PARALLEL
)
4362 /* If expand_function_start has created a PARALLEL for decl_rtl,
4363 move the result to the real return registers. Otherwise, do
4364 a group load from decl_rtl for a named return. */
4365 if (GET_CODE (decl_rtl
) == PARALLEL
)
4366 emit_group_move (real_decl_rtl
, decl_rtl
);
4368 emit_group_load (real_decl_rtl
, decl_rtl
,
4369 TREE_TYPE (decl_result
),
4370 int_size_in_bytes (TREE_TYPE (decl_result
)));
4373 emit_move_insn (real_decl_rtl
, decl_rtl
);
4377 /* If returning a structure, arrange to return the address of the value
4378 in a place where debuggers expect to find it.
4380 If returning a structure PCC style,
4381 the caller also depends on this value.
4382 And current_function_returns_pcc_struct is not necessarily set. */
4383 if (current_function_returns_struct
4384 || current_function_returns_pcc_struct
)
4387 = XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
4388 tree type
= TREE_TYPE (DECL_RESULT (current_function_decl
));
4389 #ifdef FUNCTION_OUTGOING_VALUE
4391 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type
),
4392 current_function_decl
);
4395 = FUNCTION_VALUE (build_pointer_type (type
), current_function_decl
);
4398 /* Mark this as a function return value so integrate will delete the
4399 assignment and USE below when inlining this function. */
4400 REG_FUNCTION_VALUE_P (outgoing
) = 1;
4402 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4403 value_address
= convert_memory_address (GET_MODE (outgoing
),
4406 emit_move_insn (outgoing
, value_address
);
4408 /* Show return register used to hold result (in this case the address
4410 current_function_return_rtx
= outgoing
;
4413 /* If this is an implementation of throw, do what's necessary to
4414 communicate between __builtin_eh_return and the epilogue. */
4415 expand_eh_return ();
4417 /* Emit the actual code to clobber return register. */
4422 clobber_return_register ();
4426 after
= emit_insn_after (seq
, clobber_after
);
4429 /* Output the label for the naked return from the function, if one is
4430 expected. This is currently used only by __builtin_return. */
4431 if (naked_return_label
)
4432 emit_label (naked_return_label
);
4434 /* ??? This should no longer be necessary since stupid is no longer with
4435 us, but there are some parts of the compiler (eg reload_combine, and
4436 sh mach_dep_reorg) that still try and compute their own lifetime info
4437 instead of using the general framework. */
4438 use_return_register ();
4442 get_arg_pointer_save_area (struct function
*f
)
4444 rtx ret
= f
->x_arg_pointer_save_area
;
4448 ret
= assign_stack_local_1 (Pmode
, GET_MODE_SIZE (Pmode
), 0, f
);
4449 f
->x_arg_pointer_save_area
= ret
;
4452 if (f
== cfun
&& ! f
->arg_pointer_save_area_init
)
4456 /* Save the arg pointer at the beginning of the function. The
4457 generated stack slot may not be a valid memory address, so we
4458 have to check it and fix it if necessary. */
4460 emit_move_insn (validize_mem (ret
), virtual_incoming_args_rtx
);
4464 push_topmost_sequence ();
4465 emit_insn_after (seq
, get_insns ());
4466 pop_topmost_sequence ();
4472 /* Extend a vector that records the INSN_UIDs of INSNS
4473 (a list of one or more insns). */
4476 record_insns (rtx insns
, varray_type
*vecp
)
4483 while (tmp
!= NULL_RTX
)
4486 tmp
= NEXT_INSN (tmp
);
4489 i
= VARRAY_SIZE (*vecp
);
4490 VARRAY_GROW (*vecp
, i
+ len
);
4492 while (tmp
!= NULL_RTX
)
4494 VARRAY_INT (*vecp
, i
) = INSN_UID (tmp
);
4496 tmp
= NEXT_INSN (tmp
);
4500 /* Set the locator of the insn chain starting at INSN to LOC. */
4502 set_insn_locators (rtx insn
, int loc
)
4504 while (insn
!= NULL_RTX
)
4507 INSN_LOCATOR (insn
) = loc
;
4508 insn
= NEXT_INSN (insn
);
4512 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4513 be running after reorg, SEQUENCE rtl is possible. */
4516 contains (rtx insn
, varray_type vec
)
4520 if (NONJUMP_INSN_P (insn
)
4521 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
4524 for (i
= XVECLEN (PATTERN (insn
), 0) - 1; i
>= 0; i
--)
4525 for (j
= VARRAY_SIZE (vec
) - 1; j
>= 0; --j
)
4526 if (INSN_UID (XVECEXP (PATTERN (insn
), 0, i
)) == VARRAY_INT (vec
, j
))
4532 for (j
= VARRAY_SIZE (vec
) - 1; j
>= 0; --j
)
4533 if (INSN_UID (insn
) == VARRAY_INT (vec
, j
))
4540 prologue_epilogue_contains (rtx insn
)
4542 if (contains (insn
, prologue
))
4544 if (contains (insn
, epilogue
))
4550 sibcall_epilogue_contains (rtx insn
)
4552 if (sibcall_epilogue
)
4553 return contains (insn
, sibcall_epilogue
);
#ifdef HAVE_return
/* Insert gen_return at the end of block BB.  This also means updating
   block_for_insn appropriately.  LINE_NOTE, if nonzero, is a line-number
   note to copy just before the return insn for the debugger's benefit.  */

static void
emit_return_into_block (basic_block bb, rtx line_note)
{
  emit_jump_insn_after (gen_return (), BB_END (bb));
  if (line_note)
    emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
}
#endif /* HAVE_return */
4570 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4572 /* These functions convert the epilogue into a variant that does not modify the
4573 stack pointer. This is used in cases where a function returns an object
4574 whose size is not known until it is computed. The called function leaves the
4575 object on the stack, leaves the stack depressed, and returns a pointer to
4578 What we need to do is track all modifications and references to the stack
4579 pointer, deleting the modifications and changing the references to point to
4580 the location the stack pointer would have pointed to had the modifications
4583 These functions need to be portable so we need to make as few assumptions
4584 about the epilogue as we can. However, the epilogue basically contains
4585 three things: instructions to reset the stack pointer, instructions to
4586 reload registers, possibly including the frame pointer, and an
4587 instruction to return to the caller.
4589 If we can't be sure of what a relevant epilogue insn is doing, we abort.
4590 We also make no attempt to validate the insns we make since if they are
4591 invalid, we probably can't do anything valid. The intent is that these
4592 routines get "smarter" as more and more machines start to use them and
4593 they try operating on different epilogues.
4595 We use the following structure to track what the part of the epilogue that
4596 we've already processed has done. We keep two copies of the SP equivalence,
4597 one for use during the insn we are processing and one for use in the next
4598 insn. The difference is because one part of a PARALLEL may adjust SP
4599 and the other may use it. */
4603 rtx sp_equiv_reg
; /* REG that SP is set from, perhaps SP. */
4604 HOST_WIDE_INT sp_offset
; /* Offset from SP_EQUIV_REG of present SP. */
4605 rtx new_sp_equiv_reg
; /* REG to be used at end of insn. */
4606 HOST_WIDE_INT new_sp_offset
; /* Offset to be used at end of insn. */
4607 rtx equiv_reg_src
; /* If nonzero, the value that SP_EQUIV_REG
4608 should be set to once we no longer need
4610 rtx const_equiv
[FIRST_PSEUDO_REGISTER
]; /* Any known constant equivalences
4614 static void handle_epilogue_set (rtx
, struct epi_info
*);
4615 static void update_epilogue_consts (rtx
, rtx
, void *);
4616 static void emit_equiv_load (struct epi_info
*);
4618 /* Modify INSN, a list of one or more insns that is part of the epilogue, to
4619 no modifications to the stack pointer. Return the new list of insns. */
4622 keep_stack_depressed (rtx insns
)
4625 struct epi_info info
;
4628 /* If the epilogue is just a single instruction, it must be OK as is. */
4629 if (NEXT_INSN (insns
) == NULL_RTX
)
4632 /* Otherwise, start a sequence, initialize the information we have, and
4633 process all the insns we were given. */
4636 info
.sp_equiv_reg
= stack_pointer_rtx
;
4638 info
.equiv_reg_src
= 0;
4640 for (j
= 0; j
< FIRST_PSEUDO_REGISTER
; j
++)
4641 info
.const_equiv
[j
] = 0;
4645 while (insn
!= NULL_RTX
)
4647 next
= NEXT_INSN (insn
);
4656 /* If this insn references the register that SP is equivalent to and
4657 we have a pending load to that register, we must force out the load
4658 first and then indicate we no longer know what SP's equivalent is. */
4659 if (info
.equiv_reg_src
!= 0
4660 && reg_referenced_p (info
.sp_equiv_reg
, PATTERN (insn
)))
4662 emit_equiv_load (&info
);
4663 info
.sp_equiv_reg
= 0;
4666 info
.new_sp_equiv_reg
= info
.sp_equiv_reg
;
4667 info
.new_sp_offset
= info
.sp_offset
;
4669 /* If this is a (RETURN) and the return address is on the stack,
4670 update the address and change to an indirect jump. */
4671 if (GET_CODE (PATTERN (insn
)) == RETURN
4672 || (GET_CODE (PATTERN (insn
)) == PARALLEL
4673 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == RETURN
))
4675 rtx retaddr
= INCOMING_RETURN_ADDR_RTX
;
4677 HOST_WIDE_INT offset
= 0;
4678 rtx jump_insn
, jump_set
;
4680 /* If the return address is in a register, we can emit the insn
4681 unchanged. Otherwise, it must be a MEM and we see what the
4682 base register and offset are. In any case, we have to emit any
4683 pending load to the equivalent reg of SP, if any. */
4684 if (REG_P (retaddr
))
4686 emit_equiv_load (&info
);
4691 else if (MEM_P (retaddr
)
4692 && REG_P (XEXP (retaddr
, 0)))
4693 base
= gen_rtx_REG (Pmode
, REGNO (XEXP (retaddr
, 0))), offset
= 0;
4694 else if (MEM_P (retaddr
)
4695 && GET_CODE (XEXP (retaddr
, 0)) == PLUS
4696 && REG_P (XEXP (XEXP (retaddr
, 0), 0))
4697 && GET_CODE (XEXP (XEXP (retaddr
, 0), 1)) == CONST_INT
)
4699 base
= gen_rtx_REG (Pmode
, REGNO (XEXP (XEXP (retaddr
, 0), 0)));
4700 offset
= INTVAL (XEXP (XEXP (retaddr
, 0), 1));
4705 /* If the base of the location containing the return pointer
4706 is SP, we must update it with the replacement address. Otherwise,
4707 just build the necessary MEM. */
4708 retaddr
= plus_constant (base
, offset
);
4709 if (base
== stack_pointer_rtx
)
4710 retaddr
= simplify_replace_rtx (retaddr
, stack_pointer_rtx
,
4711 plus_constant (info
.sp_equiv_reg
,
4714 retaddr
= gen_rtx_MEM (Pmode
, retaddr
);
4716 /* If there is a pending load to the equivalent register for SP
4717 and we reference that register, we must load our address into
4718 a scratch register and then do that load. */
4719 if (info
.equiv_reg_src
4720 && reg_overlap_mentioned_p (info
.equiv_reg_src
, retaddr
))
4725 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
4726 if (HARD_REGNO_MODE_OK (regno
, Pmode
)
4727 && !fixed_regs
[regno
]
4728 && TEST_HARD_REG_BIT (regs_invalidated_by_call
, regno
)
4729 && !REGNO_REG_SET_P (EXIT_BLOCK_PTR
->global_live_at_start
,
4731 && !refers_to_regno_p (regno
,
4732 regno
+ hard_regno_nregs
[regno
]
4734 info
.equiv_reg_src
, NULL
)
4735 && info
.const_equiv
[regno
] == 0)
4738 if (regno
== FIRST_PSEUDO_REGISTER
)
4741 reg
= gen_rtx_REG (Pmode
, regno
);
4742 emit_move_insn (reg
, retaddr
);
4746 emit_equiv_load (&info
);
4747 jump_insn
= emit_jump_insn (gen_indirect_jump (retaddr
));
4749 /* Show the SET in the above insn is a RETURN. */
4750 jump_set
= single_set (jump_insn
);
4754 SET_IS_RETURN_P (jump_set
) = 1;
4757 /* If SP is not mentioned in the pattern and its equivalent register, if
4758 any, is not modified, just emit it. Otherwise, if neither is set,
4759 replace the reference to SP and emit the insn. If none of those are
4760 true, handle each SET individually. */
4761 else if (!reg_mentioned_p (stack_pointer_rtx
, PATTERN (insn
))
4762 && (info
.sp_equiv_reg
== stack_pointer_rtx
4763 || !reg_set_p (info
.sp_equiv_reg
, insn
)))
4765 else if (! reg_set_p (stack_pointer_rtx
, insn
)
4766 && (info
.sp_equiv_reg
== stack_pointer_rtx
4767 || !reg_set_p (info
.sp_equiv_reg
, insn
)))
4769 if (! validate_replace_rtx (stack_pointer_rtx
,
4770 plus_constant (info
.sp_equiv_reg
,
4777 else if (GET_CODE (PATTERN (insn
)) == SET
)
4778 handle_epilogue_set (PATTERN (insn
), &info
);
4779 else if (GET_CODE (PATTERN (insn
)) == PARALLEL
)
4781 for (j
= 0; j
< XVECLEN (PATTERN (insn
), 0); j
++)
4782 if (GET_CODE (XVECEXP (PATTERN (insn
), 0, j
)) == SET
)
4783 handle_epilogue_set (XVECEXP (PATTERN (insn
), 0, j
), &info
);
4788 info
.sp_equiv_reg
= info
.new_sp_equiv_reg
;
4789 info
.sp_offset
= info
.new_sp_offset
;
4791 /* Now update any constants this insn sets. */
4792 note_stores (PATTERN (insn
), update_epilogue_consts
, &info
);
4796 insns
= get_insns ();
4801 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
4802 structure that contains information about what we've seen so far. We
4803 process this SET by either updating that data or by emitting one or
4807 handle_epilogue_set (rtx set
, struct epi_info
*p
)
4809 /* First handle the case where we are setting SP. Record what it is being
4810 set from. If unknown, abort. */
4811 if (reg_set_p (stack_pointer_rtx
, set
))
4813 if (SET_DEST (set
) != stack_pointer_rtx
)
4816 if (GET_CODE (SET_SRC (set
)) == PLUS
)
4818 p
->new_sp_equiv_reg
= XEXP (SET_SRC (set
), 0);
4819 if (GET_CODE (XEXP (SET_SRC (set
), 1)) == CONST_INT
)
4820 p
->new_sp_offset
= INTVAL (XEXP (SET_SRC (set
), 1));
4821 else if (REG_P (XEXP (SET_SRC (set
), 1))
4822 && REGNO (XEXP (SET_SRC (set
), 1)) < FIRST_PSEUDO_REGISTER
4823 && p
->const_equiv
[REGNO (XEXP (SET_SRC (set
), 1))] != 0)
4825 = INTVAL (p
->const_equiv
[REGNO (XEXP (SET_SRC (set
), 1))]);
4830 p
->new_sp_equiv_reg
= SET_SRC (set
), p
->new_sp_offset
= 0;
4832 /* If we are adjusting SP, we adjust from the old data. */
4833 if (p
->new_sp_equiv_reg
== stack_pointer_rtx
)
4835 p
->new_sp_equiv_reg
= p
->sp_equiv_reg
;
4836 p
->new_sp_offset
+= p
->sp_offset
;
4839 if (p
->new_sp_equiv_reg
== 0 || !REG_P (p
->new_sp_equiv_reg
))
4845 /* Next handle the case where we are setting SP's equivalent register.
4846 If we already have a value to set it to, abort. We could update, but
4847 there seems little point in handling that case. Note that we have
4848 to allow for the case where we are setting the register set in
4849 the previous part of a PARALLEL inside a single insn. But use the
4850 old offset for any updates within this insn. We must allow for the case
4851 where the register is being set in a different (usually wider) mode than
4853 else if (p
->new_sp_equiv_reg
!= 0 && reg_set_p (p
->new_sp_equiv_reg
, set
))
4855 if (p
->equiv_reg_src
!= 0
4856 || !REG_P (p
->new_sp_equiv_reg
)
4857 || !REG_P (SET_DEST (set
))
4858 || GET_MODE_BITSIZE (GET_MODE (SET_DEST (set
))) > BITS_PER_WORD
4859 || REGNO (p
->new_sp_equiv_reg
) != REGNO (SET_DEST (set
)))
4863 = simplify_replace_rtx (SET_SRC (set
), stack_pointer_rtx
,
4864 plus_constant (p
->sp_equiv_reg
,
4868 /* Otherwise, replace any references to SP in the insn to its new value
4869 and emit the insn. */
4872 SET_SRC (set
) = simplify_replace_rtx (SET_SRC (set
), stack_pointer_rtx
,
4873 plus_constant (p
->sp_equiv_reg
,
4875 SET_DEST (set
) = simplify_replace_rtx (SET_DEST (set
), stack_pointer_rtx
,
4876 plus_constant (p
->sp_equiv_reg
,
4882 /* Update the tracking information for registers set to constants. */
4885 update_epilogue_consts (rtx dest
, rtx x
, void *data
)
4887 struct epi_info
*p
= (struct epi_info
*) data
;
4890 if (!REG_P (dest
) || REGNO (dest
) >= FIRST_PSEUDO_REGISTER
)
4893 /* If we are either clobbering a register or doing a partial set,
4894 show we don't know the value. */
4895 else if (GET_CODE (x
) == CLOBBER
|| ! rtx_equal_p (dest
, SET_DEST (x
)))
4896 p
->const_equiv
[REGNO (dest
)] = 0;
4898 /* If we are setting it to a constant, record that constant. */
4899 else if (GET_CODE (SET_SRC (x
)) == CONST_INT
)
4900 p
->const_equiv
[REGNO (dest
)] = SET_SRC (x
);
4902 /* If this is a binary operation between a register we have been tracking
4903 and a constant, see if we can compute a new constant value. */
4904 else if (ARITHMETIC_P (SET_SRC (x
))
4905 && REG_P (XEXP (SET_SRC (x
), 0))
4906 && REGNO (XEXP (SET_SRC (x
), 0)) < FIRST_PSEUDO_REGISTER
4907 && p
->const_equiv
[REGNO (XEXP (SET_SRC (x
), 0))] != 0
4908 && GET_CODE (XEXP (SET_SRC (x
), 1)) == CONST_INT
4909 && 0 != (new = simplify_binary_operation
4910 (GET_CODE (SET_SRC (x
)), GET_MODE (dest
),
4911 p
->const_equiv
[REGNO (XEXP (SET_SRC (x
), 0))],
4912 XEXP (SET_SRC (x
), 1)))
4913 && GET_CODE (new) == CONST_INT
)
4914 p
->const_equiv
[REGNO (dest
)] = new;
4916 /* Otherwise, we can't do anything with this value. */
4918 p
->const_equiv
[REGNO (dest
)] = 0;
4921 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
4924 emit_equiv_load (struct epi_info
*p
)
4926 if (p
->equiv_reg_src
!= 0)
4928 rtx dest
= p
->sp_equiv_reg
;
4930 if (GET_MODE (p
->equiv_reg_src
) != GET_MODE (dest
))
4931 dest
= gen_rtx_REG (GET_MODE (p
->equiv_reg_src
),
4932 REGNO (p
->sp_equiv_reg
));
4934 emit_move_insn (dest
, p
->equiv_reg_src
);
4935 p
->equiv_reg_src
= 0;
4940 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
4941 this into place with notes indicating where the prologue ends and where
4942 the epilogue begins. Update the basic block information when possible. */
4945 thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED
)
4949 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
4952 #ifdef HAVE_prologue
4953 rtx prologue_end
= NULL_RTX
;
4955 #if defined (HAVE_epilogue) || defined(HAVE_return)
4956 rtx epilogue_end
= NULL_RTX
;
4959 #ifdef HAVE_prologue
4963 seq
= gen_prologue ();
4966 /* Retain a map of the prologue insns. */
4967 record_insns (seq
, &prologue
);
4968 prologue_end
= emit_note (NOTE_INSN_PROLOGUE_END
);
4972 set_insn_locators (seq
, prologue_locator
);
4974 /* Can't deal with multiple successors of the entry block
4975 at the moment. Function should always have at least one
4977 if (!ENTRY_BLOCK_PTR
->succ
|| ENTRY_BLOCK_PTR
->succ
->succ_next
)
4980 insert_insn_on_edge (seq
, ENTRY_BLOCK_PTR
->succ
);
4985 /* If the exit block has no non-fake predecessors, we don't need
4987 for (e
= EXIT_BLOCK_PTR
->pred
; e
; e
= e
->pred_next
)
4988 if ((e
->flags
& EDGE_FAKE
) == 0)
4994 if (optimize
&& HAVE_return
)
4996 /* If we're allowed to generate a simple return instruction,
4997 then by definition we don't need a full epilogue. Examine
4998 the block that falls through to EXIT. If it does not
4999 contain any code, examine its predecessors and try to
5000 emit (conditional) return instructions. */
5006 for (e
= EXIT_BLOCK_PTR
->pred
; e
; e
= e
->pred_next
)
5007 if (e
->flags
& EDGE_FALLTHRU
)
5013 /* Verify that there are no active instructions in the last block. */
5014 label
= BB_END (last
);
5015 while (label
&& !LABEL_P (label
))
5017 if (active_insn_p (label
))
5019 label
= PREV_INSN (label
);
5022 if (BB_HEAD (last
) == label
&& LABEL_P (label
))
5024 rtx epilogue_line_note
= NULL_RTX
;
5026 /* Locate the line number associated with the closing brace,
5027 if we can find one. */
5028 for (seq
= get_last_insn ();
5029 seq
&& ! active_insn_p (seq
);
5030 seq
= PREV_INSN (seq
))
5031 if (NOTE_P (seq
) && NOTE_LINE_NUMBER (seq
) > 0)
5033 epilogue_line_note
= seq
;
5037 for (e
= last
->pred
; e
; e
= e_next
)
5039 basic_block bb
= e
->src
;
5042 e_next
= e
->pred_next
;
5043 if (bb
== ENTRY_BLOCK_PTR
)
5047 if (!JUMP_P (jump
) || JUMP_LABEL (jump
) != label
)
5050 /* If we have an unconditional jump, we can replace that
5051 with a simple return instruction. */
5052 if (simplejump_p (jump
))
5054 emit_return_into_block (bb
, epilogue_line_note
);
5058 /* If we have a conditional jump, we can try to replace
5059 that with a conditional return instruction. */
5060 else if (condjump_p (jump
))
5062 if (! redirect_jump (jump
, 0, 0))
5065 /* If this block has only one successor, it both jumps
5066 and falls through to the fallthru block, so we can't
5068 if (bb
->succ
->succ_next
== NULL
)
5074 /* Fix up the CFG for the successful change we just made. */
5075 redirect_edge_succ (e
, EXIT_BLOCK_PTR
);
5078 /* Emit a return insn for the exit fallthru block. Whether
5079 this is still reachable will be determined later. */
5081 emit_barrier_after (BB_END (last
));
5082 emit_return_into_block (last
, epilogue_line_note
);
5083 epilogue_end
= BB_END (last
);
5084 last
->succ
->flags
&= ~EDGE_FALLTHRU
;
5089 /* Find the edge that falls through to EXIT. Other edges may exist
5090 due to RETURN instructions, but those don't need epilogues.
5091 There really shouldn't be a mixture -- either all should have
5092 been converted or none, however... */
5094 for (e
= EXIT_BLOCK_PTR
->pred
; e
; e
= e
->pred_next
)
5095 if (e
->flags
& EDGE_FALLTHRU
)
5100 #ifdef HAVE_epilogue
5104 epilogue_end
= emit_note (NOTE_INSN_EPILOGUE_BEG
);
5106 seq
= gen_epilogue ();
5108 #ifdef INCOMING_RETURN_ADDR_RTX
5109 /* If this function returns with the stack depressed and we can support
5110 it, massage the epilogue to actually do that. */
5111 if (TREE_CODE (TREE_TYPE (current_function_decl
)) == FUNCTION_TYPE
5112 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl
)))
5113 seq
= keep_stack_depressed (seq
);
5116 emit_jump_insn (seq
);
5118 /* Retain a map of the epilogue insns. */
5119 record_insns (seq
, &epilogue
);
5120 set_insn_locators (seq
, epilogue_locator
);
5125 insert_insn_on_edge (seq
, e
);
5133 if (! next_active_insn (BB_END (e
->src
)))
5135 /* We have a fall-through edge to the exit block, the source is not
5136 at the end of the function, and there will be an assembler epilogue
5137 at the end of the function.
5138 We can't use force_nonfallthru here, because that would try to
5139 use return. Inserting a jump 'by hand' is extremely messy, so
5140 we take advantage of cfg_layout_finalize using
5141 fixup_fallthru_exit_predecessor. */
5142 cfg_layout_initialize (0);
5143 FOR_EACH_BB (cur_bb
)
5144 if (cur_bb
->index
>= 0 && cur_bb
->next_bb
->index
>= 0)
5145 cur_bb
->rbi
->next
= cur_bb
->next_bb
;
5146 cfg_layout_finalize ();
5151 commit_edge_insertions ();
5153 #ifdef HAVE_sibcall_epilogue
5154 /* Emit sibling epilogues before any sibling call sites. */
5155 for (e
= EXIT_BLOCK_PTR
->pred
; e
; e
= e
->pred_next
)
5157 basic_block bb
= e
->src
;
5158 rtx insn
= BB_END (bb
);
5163 || ! SIBLING_CALL_P (insn
))
5167 emit_insn (gen_sibcall_epilogue ());
5171 /* Retain a map of the epilogue insns. Used in life analysis to
5172 avoid getting rid of sibcall epilogue insns. Do this before we
5173 actually emit the sequence. */
5174 record_insns (seq
, &sibcall_epilogue
);
5175 set_insn_locators (seq
, epilogue_locator
);
5177 i
= PREV_INSN (insn
);
5178 newinsn
= emit_insn_before (seq
, insn
);
5182 #ifdef HAVE_prologue
5183 /* This is probably all useless now that we use locators. */
5188 /* GDB handles `break f' by setting a breakpoint on the first
5189 line note after the prologue. Which means (1) that if
5190 there are line number notes before where we inserted the
5191 prologue we should move them, and (2) we should generate a
5192 note before the end of the first basic block, if there isn't
5195 ??? This behavior is completely broken when dealing with
5196 multiple entry functions. We simply place the note always
5197 into first basic block and let alternate entry points
5201 for (insn
= prologue_end
; insn
; insn
= prev
)
5203 prev
= PREV_INSN (insn
);
5204 if (NOTE_P (insn
) && NOTE_LINE_NUMBER (insn
) > 0)
5206 /* Note that we cannot reorder the first insn in the
5207 chain, since rest_of_compilation relies on that
5208 remaining constant. */
5211 reorder_insns (insn
, insn
, prologue_end
);
5215 /* Find the last line number note in the first block. */
5216 for (insn
= BB_END (ENTRY_BLOCK_PTR
->next_bb
);
5217 insn
!= prologue_end
&& insn
;
5218 insn
= PREV_INSN (insn
))
5219 if (NOTE_P (insn
) && NOTE_LINE_NUMBER (insn
) > 0)
5222 /* If we didn't find one, make a copy of the first line number
5226 for (insn
= next_active_insn (prologue_end
);
5228 insn
= PREV_INSN (insn
))
5229 if (NOTE_P (insn
) && NOTE_LINE_NUMBER (insn
) > 0)
5231 emit_note_copy_after (insn
, prologue_end
);
5237 #ifdef HAVE_epilogue
5242 /* Similarly, move any line notes that appear after the epilogue.
5243 There is no need, however, to be quite so anal about the existence
5244 of such a note. Also move the NOTE_INSN_FUNCTION_END and (possibly)
5245 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
5247 for (insn
= epilogue_end
; insn
; insn
= next
)
5249 next
= NEXT_INSN (insn
);
5251 && (NOTE_LINE_NUMBER (insn
) > 0
5252 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_FUNCTION_BEG
5253 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_FUNCTION_END
))
5254 reorder_insns (insn
, insn
, PREV_INSN (epilogue_end
));
5260 /* Reposition the prologue-end and epilogue-begin notes after instruction
5261 scheduling and delayed branch scheduling. */
5264 reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED
)
5266 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5267 rtx insn
, last
, note
;
5270 if ((len
= VARRAY_SIZE (prologue
)) > 0)
5274 /* Scan from the beginning until we reach the last prologue insn.
5275 We apparently can't depend on basic_block_{head,end} after
5277 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
5281 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_PROLOGUE_END
)
5284 else if (contains (insn
, prologue
))
5294 /* Find the prologue-end note if we haven't already, and
5295 move it to just after the last prologue insn. */
5298 for (note
= last
; (note
= NEXT_INSN (note
));)
5300 && NOTE_LINE_NUMBER (note
) == NOTE_INSN_PROLOGUE_END
)
5304 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5306 last
= NEXT_INSN (last
);
5307 reorder_insns (note
, note
, last
);
5311 if ((len
= VARRAY_SIZE (epilogue
)) > 0)
5315 /* Scan from the end until we reach the first epilogue insn.
5316 We apparently can't depend on basic_block_{head,end} after
5318 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
5322 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EPILOGUE_BEG
)
5325 else if (contains (insn
, epilogue
))
5335 /* Find the epilogue-begin note if we haven't already, and
5336 move it to just before the first epilogue insn. */
5339 for (note
= insn
; (note
= PREV_INSN (note
));)
5341 && NOTE_LINE_NUMBER (note
) == NOTE_INSN_EPILOGUE_BEG
)
5345 if (PREV_INSN (last
) != note
)
5346 reorder_insns (note
, note
, PREV_INSN (last
));
5349 #endif /* HAVE_prologue or HAVE_epilogue */
5352 /* Called once, at initialization, to initialize function.c. */
5355 init_function_once (void)
5357 VARRAY_INT_INIT (prologue
, 0, "prologue");
5358 VARRAY_INT_INIT (epilogue
, 0, "epilogue");
5359 VARRAY_INT_INIT (sibcall_epilogue
, 0, "sibcall_epilogue");
5362 /* Resets insn_block_boundaries array. */
5365 reset_block_changes (void)
5367 VARRAY_TREE_INIT (cfun
->ib_boundaries_block
, 100, "ib_boundaries_block");
5368 VARRAY_PUSH_TREE (cfun
->ib_boundaries_block
, NULL_TREE
);
5371 /* Record the boundary for BLOCK. */
5373 record_block_change (tree block
)
5381 last_block
= VARRAY_TOP_TREE (cfun
->ib_boundaries_block
);
5382 VARRAY_POP (cfun
->ib_boundaries_block
);
5384 for (i
= VARRAY_ACTIVE_SIZE (cfun
->ib_boundaries_block
); i
< n
; i
++)
5385 VARRAY_PUSH_TREE (cfun
->ib_boundaries_block
, last_block
);
5387 VARRAY_PUSH_TREE (cfun
->ib_boundaries_block
, block
);
5390 /* Finishes record of boundaries. */
5391 void finalize_block_changes (void)
5393 record_block_change (DECL_INITIAL (current_function_decl
));
5396 /* For INSN return the BLOCK it belongs to. */
5398 check_block_change (rtx insn
, tree
*block
)
5400 unsigned uid
= INSN_UID (insn
);
5402 if (uid
>= VARRAY_ACTIVE_SIZE (cfun
->ib_boundaries_block
))
5405 *block
= VARRAY_TREE (cfun
->ib_boundaries_block
, uid
);
5408 /* Releases the ib_boundaries_block records. */
5410 free_block_changes (void)
5412 cfun
->ib_boundaries_block
= NULL
;
5415 /* Returns the name of the current function. */
5417 current_function_name (void)
5419 return lang_hooks
.decl_printable_name (cfun
->decl
, 2);
5422 #include "gt-function.h"