1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "flags.h"
43 #include "except.h"
44 #include "function.h"
45 #include "expr.h"
46 #include "optabs.h"
47 #include "libfuncs.h"
48 #include "regs.h"
49 #include "hard-reg-set.h"
50 #include "insn-config.h"
51 #include "recog.h"
52 #include "output.h"
53 #include "basic-block.h"
54 #include "toplev.h"
55 #include "hashtab.h"
56 #include "ggc.h"
57 #include "tm_p.h"
58 #include "integrate.h"
59 #include "langhooks.h"
60 #include "target.h"
61 #include "cfglayout.h"
62
63 #ifndef LOCAL_ALIGNMENT
64 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
65 #endif
66
67 #ifndef STACK_ALIGNMENT_NEEDED
68 #define STACK_ALIGNMENT_NEEDED 1
69 #endif
70
71 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
72
73 /* Some systems use __main in a way incompatible with its use in gcc; in these
74 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
75 give the same symbol without quotes for an alternative entry point. You
76 must define both, or neither. */
77 #ifndef NAME__MAIN
78 #define NAME__MAIN "__main"
79 #endif
80
81 /* Round a value down to the largest multiple of the required alignment
82 that does not exceed it.  Avoid using division in case the value is
83 negative. Assume the alignment is a power of two. */
84 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
85
86 /* Similar, but round up to the smallest multiple of the alignment that
87 is not less than the value. */
88 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
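/* Editor's illustration (not from the original sources): with ALIGN == 16,
   FLOOR_ROUND (37, 16) yields 32 and CEIL_ROUND (37, 16) yields 48, while
   a value that is already a multiple of 16, such as 32, is left unchanged
   by both macros.  */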
89
90 /* Nonzero if function being compiled doesn't contain any calls
91 (ignoring the prologue and epilogue). This is set prior to
92 local register allocation and is valid for the remaining
93 compiler passes. */
94 int current_function_is_leaf;
95
96 /* Nonzero if function being compiled doesn't modify the stack pointer
97 (ignoring the prologue and epilogue). This is only valid after
98 life_analysis has run. */
99 int current_function_sp_is_unchanging;
100
101 /* Nonzero if the function being compiled is a leaf function which only
102 uses leaf registers. This is valid after reload (specifically after
103 sched2) and is useful only if the port defines LEAF_REGISTERS. */
104 int current_function_uses_only_leaf_regs;
105
106 /* Nonzero once virtual register instantiation has been done.
107 assign_stack_local uses frame_pointer_rtx when this is nonzero.
108 calls.c:emit_library_call_value_1 uses it to set up
109 post-instantiation libcalls. */
110 int virtuals_instantiated;
111
112 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
113 static GTY(()) int funcdef_no;
114
115 /* This variable holds a pointer to a function to create target-specific,
116 per-function data structures. */
117 struct machine_function * (*init_machine_status) (void);
118
119 /* The currently compiled function. */
120 struct function *cfun = 0;
121
122 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
123 static GTY(()) varray_type prologue;
124 static GTY(()) varray_type epilogue;
125
126 /* Array to hold the INSN_UIDs of each sibcall epilogue
127 in this function. */
128 static GTY(()) varray_type sibcall_epilogue;
129 \f
130 /* In order to evaluate some expressions, such as function calls returning
131 structures in memory, we need to temporarily allocate stack locations.
132 We record each allocated temporary in the following structure.
133
134 Associated with each temporary slot is a nesting level. When we pop up
135 one level, all temporaries associated with the previous level are freed.
136 Normally, all temporaries are freed after the execution of the statement
137 in which they were created. However, if we are inside a ({...}) grouping,
138 the result may be in a temporary and hence must be preserved. If the
139 result could be in a temporary, we preserve it if we can determine which
140 one it is in. If we cannot determine which temporary may contain the
141 result, all temporaries are preserved. A temporary is preserved by
142 pretending it was allocated at the previous nesting level.
143
144 Automatic variables are also assigned temporary slots, at the nesting
145 level where they are defined. They are marked as "kept" so that
146 free_temp_slots will not free them. */
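/* One plausible calling pattern, sketched here by the editor for
   orientation (the surrounding expander code and the RESULT rtx are
   assumed, not quoted; all of the routines named are defined later in
   this file):

     push_temp_slots ();
     slot = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);
     ... emit code that stores into and reads from SLOT ...
     preserve_temp_slots (result);   ... only if RESULT may live in SLOT
     free_temp_slots ();
     pop_temp_slots ();  */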
147
148 struct temp_slot GTY(())
149 {
150 /* Points to next temporary slot. */
151 struct temp_slot *next;
152 /* Points to previous temporary slot. */
153 struct temp_slot *prev;
154
155 /* The rtx used to reference the slot. */
156 rtx slot;
157 /* The rtx used to represent the address if not the address of the
158 slot above. May be an EXPR_LIST if multiple addresses exist. */
159 rtx address;
160 /* The alignment (in bits) of the slot. */
161 unsigned int align;
162 /* The size, in units, of the slot. */
163 HOST_WIDE_INT size;
164 /* The type of the object in the slot, or zero if it doesn't correspond
165 to a type. We use this to determine whether a slot can be reused.
166 It can be reused if objects of the type of the new slot will always
167 conflict with objects of the type of the old slot. */
168 tree type;
169 /* Nonzero if this temporary is currently in use. */
170 char in_use;
171 /* Nonzero if this temporary has its address taken. */
172 char addr_taken;
173 /* Nesting level at which this slot is being used. */
174 int level;
175 /* Nonzero if this should survive a call to free_temp_slots. */
176 int keep;
177 /* The offset of the slot from the frame_pointer, including extra space
178 for alignment. This info is for combine_temp_slots. */
179 HOST_WIDE_INT base_offset;
180 /* The size of the slot, including extra space for alignment. This
181 info is for combine_temp_slots. */
182 HOST_WIDE_INT full_size;
183 };
184 \f
185 /* Forward declarations. */
186
187 static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
188 struct function *);
189 static struct temp_slot *find_temp_slot_from_address (rtx);
190 static void instantiate_decls (tree, int);
191 static void instantiate_decls_1 (tree, int);
192 static void instantiate_decl (rtx, HOST_WIDE_INT, int);
193 static rtx instantiate_new_reg (rtx, HOST_WIDE_INT *);
194 static int instantiate_virtual_regs_1 (rtx *, rtx, int);
195 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
196 static void pad_below (struct args_size *, enum machine_mode, tree);
197 static void reorder_blocks_1 (rtx, tree, varray_type *);
198 static void reorder_fix_fragments (tree);
199 static int all_blocks (tree, tree *);
200 static tree *get_block_vector (tree, int *);
201 extern tree debug_find_var_in_block_tree (tree, tree);
202 /* We always define `record_insns' even if it's not used so that we
203 can always export `prologue_epilogue_contains'. */
204 static void record_insns (rtx, varray_type *) ATTRIBUTE_UNUSED;
205 static int contains (rtx, varray_type);
206 #ifdef HAVE_return
207 static void emit_return_into_block (basic_block, rtx);
208 #endif
209 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
210 static rtx keep_stack_depressed (rtx);
211 #endif
212 static void prepare_function_start (tree);
213 static void do_clobber_return_reg (rtx, void *);
214 static void do_use_return_reg (rtx, void *);
215 static void instantiate_virtual_regs_lossage (rtx);
216 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
217 \f
218 /* Pointer to chain of `struct function' for containing functions. */
219 struct function *outer_function_chain;
220
221 /* Given a function decl for a containing function,
222 return the `struct function' for it. */
223
224 struct function *
225 find_function_data (tree decl)
226 {
227 struct function *p;
228
229 for (p = outer_function_chain; p; p = p->outer)
230 if (p->decl == decl)
231 return p;
232
233 abort ();
234 }
235
236 /* Save the current context for compilation of a nested function.
237 This is called from language-specific code. The caller should use
238 the enter_nested langhook to save any language-specific state,
239 since this function knows only about language-independent
240 variables. */
241
242 void
243 push_function_context_to (tree context)
244 {
245 struct function *p;
246
247 if (context)
248 {
249 if (context == current_function_decl)
250 cfun->contains_functions = 1;
251 else
252 {
253 struct function *containing = find_function_data (context);
254 containing->contains_functions = 1;
255 }
256 }
257
258 if (cfun == 0)
259 init_dummy_function_start ();
260 p = cfun;
261
262 p->outer = outer_function_chain;
263 outer_function_chain = p;
264
265 lang_hooks.function.enter_nested (p);
266
267 cfun = 0;
268 }
269
270 void
271 push_function_context (void)
272 {
273 push_function_context_to (current_function_decl);
274 }
275
276 /* Restore the last saved context, at the end of a nested function.
277 This function is called from language-specific code. */
278
279 void
280 pop_function_context_from (tree context ATTRIBUTE_UNUSED)
281 {
282 struct function *p = outer_function_chain;
283
284 cfun = p;
285 outer_function_chain = p->outer;
286
287 current_function_decl = p->decl;
288 reg_renumber = 0;
289
290 restore_emit_status (p);
291
292 lang_hooks.function.leave_nested (p);
293
294 /* Reset variables that have known state during rtx generation. */
295 virtuals_instantiated = 0;
296 generating_concat_p = 1;
297 }
298
299 void
300 pop_function_context (void)
301 {
302 pop_function_context_from (current_function_decl);
303 }
304
305 /* Clear out all parts of the state in F that can safely be discarded
306 after the function has been parsed, but not compiled, to let
307 garbage collection reclaim the memory. */
308
309 void
310 free_after_parsing (struct function *f)
311 {
312 /* f->expr->forced_labels is used by code generation. */
313 /* f->emit->regno_reg_rtx is used by code generation. */
314 /* f->varasm is used by code generation. */
315 /* f->eh->eh_return_stub_label is used by code generation. */
316
317 lang_hooks.function.final (f);
318 f->stmt = NULL;
319 }
320
321 /* Clear out all parts of the state in F that can safely be discarded
322 after the function has been compiled, to let garbage collection
323 reclaim the memory. */
324
325 void
326 free_after_compilation (struct function *f)
327 {
328 f->eh = NULL;
329 f->expr = NULL;
330 f->emit = NULL;
331 f->varasm = NULL;
332 f->machine = NULL;
333
334 f->x_avail_temp_slots = NULL;
335 f->x_used_temp_slots = NULL;
336 f->arg_offset_rtx = NULL;
337 f->return_rtx = NULL;
338 f->internal_arg_pointer = NULL;
339 f->x_nonlocal_goto_handler_labels = NULL;
340 f->x_return_label = NULL;
341 f->x_naked_return_label = NULL;
342 f->x_stack_slot_list = NULL;
343 f->x_tail_recursion_reentry = NULL;
344 f->x_arg_pointer_save_area = NULL;
345 f->x_parm_birth_insn = NULL;
346 f->original_arg_vector = NULL;
347 f->original_decl_initial = NULL;
348 f->epilogue_delay_list = NULL;
349 }
350 \f
351 /* Allocate fixed slots in the stack frame of the current function. */
352
353 /* Return size needed for stack frame based on slots so far allocated in
354 function F.
355 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
356 the caller may have to do that. */
357
358 HOST_WIDE_INT
359 get_func_frame_size (struct function *f)
360 {
361 #ifdef FRAME_GROWS_DOWNWARD
362 return -f->x_frame_offset;
363 #else
364 return f->x_frame_offset;
365 #endif
366 }
367
368 /* Return size needed for stack frame based on slots so far allocated.
369 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
370 the caller may have to do that. */
371 HOST_WIDE_INT
372 get_frame_size (void)
373 {
374 return get_func_frame_size (cfun);
375 }
376
377 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
378 with machine mode MODE.
379
380 ALIGN controls the amount of alignment for the address of the slot:
381 0 means according to MODE,
382 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
383 -2 means use BITS_PER_UNIT,
384 positive specifies alignment boundary in bits.
385
386 We do not round to stack_boundary here.
387
388 FUNCTION specifies the function to allocate in. */
389
390 static rtx
391 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
392 struct function *function)
393 {
394 rtx x, addr;
395 int bigend_correction = 0;
396 unsigned int alignment;
397 int frame_off, frame_alignment, frame_phase;
398
399 if (align == 0)
400 {
401 tree type;
402
403 if (mode == BLKmode)
404 alignment = BIGGEST_ALIGNMENT;
405 else
406 alignment = GET_MODE_ALIGNMENT (mode);
407
408 /* Allow the target to (possibly) increase the alignment of this
409 stack slot. */
410 type = lang_hooks.types.type_for_mode (mode, 0);
411 if (type)
412 alignment = LOCAL_ALIGNMENT (type, alignment);
413
414 alignment /= BITS_PER_UNIT;
415 }
416 else if (align == -1)
417 {
418 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
419 size = CEIL_ROUND (size, alignment);
420 }
421 else if (align == -2)
422 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
423 else
424 alignment = align / BITS_PER_UNIT;
425
426 #ifdef FRAME_GROWS_DOWNWARD
427 function->x_frame_offset -= size;
428 #endif
429
430 /* Ignore any alignment request we cannot satisfy given the preferred stack boundary. */
431 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
432 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
433
434 if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
435 function->stack_alignment_needed = alignment * BITS_PER_UNIT;
436
437 /* Calculate how many bytes the start of local variables is off from
438 stack alignment. */
439 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
440 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
441 frame_phase = frame_off ? frame_alignment - frame_off : 0;
442
443 /* Round the frame offset to the specified alignment. The default is
444 to always honor requests to align the stack but a port may choose to
445 do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. */
446 if (STACK_ALIGNMENT_NEEDED
447 || mode != BLKmode
448 || size != 0)
449 {
450 /* We must be careful here, since FRAME_OFFSET might be negative and
451 division with a negative dividend isn't as well defined as we might
452 like. So we instead assume that ALIGNMENT is a power of two and
453 use logical operations which are unambiguous. */
454 #ifdef FRAME_GROWS_DOWNWARD
455 function->x_frame_offset
456 = (FLOOR_ROUND (function->x_frame_offset - frame_phase, alignment)
457 + frame_phase);
458 #else
459 function->x_frame_offset
460 = (CEIL_ROUND (function->x_frame_offset - frame_phase, alignment)
461 + frame_phase);
462 #endif
463 }
464
465 /* On a big-endian machine, if we are allocating more space than we will use,
466 use the least significant bytes of those that are allocated. */
467 if (BYTES_BIG_ENDIAN && mode != BLKmode)
468 bigend_correction = size - GET_MODE_SIZE (mode);
469
470 /* If we have already instantiated virtual registers, return the actual
471 address relative to the frame pointer. */
472 if (function == cfun && virtuals_instantiated)
473 addr = plus_constant (frame_pointer_rtx,
474 trunc_int_for_mode
475 (frame_offset + bigend_correction
476 + STARTING_FRAME_OFFSET, Pmode));
477 else
478 addr = plus_constant (virtual_stack_vars_rtx,
479 trunc_int_for_mode
480 (function->x_frame_offset + bigend_correction,
481 Pmode));
482
483 #ifndef FRAME_GROWS_DOWNWARD
484 function->x_frame_offset += size;
485 #endif
486
487 x = gen_rtx_MEM (mode, addr);
488
489 function->x_stack_slot_list
490 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
491
492 return x;
493 }
494
495 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
496 current function. */
497
498 rtx
499 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
500 {
501 return assign_stack_local_1 (mode, size, align, cfun);
502 }
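/* Illustrative uses of the ALIGN argument (editor's sketch; the callers
   and the SIZE variable are hypothetical, not taken from this file):

     assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
	-- alignment taken from SImode;
     assign_stack_local (BLKmode, size, -1);
	-- BIGGEST_ALIGNMENT, with SIZE rounded up to a multiple of it;
     assign_stack_local (BLKmode, size, BITS_PER_WORD);
	-- explicit alignment of BITS_PER_WORD bits.  */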
503
504 \f
505 /* Removes temporary slot TEMP from LIST. */
506
507 static void
508 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
509 {
510 if (temp->next)
511 temp->next->prev = temp->prev;
512 if (temp->prev)
513 temp->prev->next = temp->next;
514 else
515 *list = temp->next;
516
517 temp->prev = temp->next = NULL;
518 }
519
520 /* Inserts temporary slot TEMP to LIST. */
521
522 static void
523 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
524 {
525 temp->next = *list;
526 if (*list)
527 (*list)->prev = temp;
528 temp->prev = NULL;
529 *list = temp;
530 }
531
532 /* Returns the list of used temp slots at LEVEL. */
533
534 static struct temp_slot **
535 temp_slots_at_level (int level)
536 {
537 level++;
538
539 if (!used_temp_slots)
540 VARRAY_GENERIC_PTR_INIT (used_temp_slots, 3, "used_temp_slots");
541
542 while (level >= (int) VARRAY_ACTIVE_SIZE (used_temp_slots))
543 VARRAY_PUSH_GENERIC_PTR (used_temp_slots, NULL);
544
545 return (struct temp_slot **) &VARRAY_GENERIC_PTR (used_temp_slots, level);
546 }
547
548 /* Returns the maximal temporary slot level. */
549
550 static int
551 max_slot_level (void)
552 {
553 if (!used_temp_slots)
554 return -1;
555
556 return VARRAY_ACTIVE_SIZE (used_temp_slots) - 1;
557 }
558
559 /* Moves temporary slot TEMP to LEVEL. */
560
561 static void
562 move_slot_to_level (struct temp_slot *temp, int level)
563 {
564 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
565 insert_slot_to_list (temp, temp_slots_at_level (level));
566 temp->level = level;
567 }
568
569 /* Make temporary slot TEMP available. */
570
571 static void
572 make_slot_available (struct temp_slot *temp)
573 {
574 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
575 insert_slot_to_list (temp, &avail_temp_slots);
576 temp->in_use = 0;
577 temp->level = -1;
578 }
579 \f
580 /* Allocate a temporary stack slot and record it for possible later
581 reuse.
582
583 MODE is the machine mode to be given to the returned rtx.
584
585 SIZE is the size in units of the space required. We do no rounding here
586 since assign_stack_local will do any required rounding.
587
588 KEEP is 1 if this slot is to be retained after a call to
589 free_temp_slots. Automatic variables for a block are allocated
590 with this flag. KEEP is 2 if we allocate a longer term temporary,
591 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
592 if we are to allocate something at an inner level to be treated as
593 a variable in the block (e.g., a SAVE_EXPR).
594
595 TYPE is the type that will be used for the stack slot. */
596
597 rtx
598 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size, int keep,
599 tree type)
600 {
601 unsigned int align;
602 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
603 rtx slot;
604
605 /* If SIZE is -1 it means that somebody tried to allocate a temporary
606 of a variable size. */
607 if (size == -1)
608 abort ();
609
610 if (mode == BLKmode)
611 align = BIGGEST_ALIGNMENT;
612 else
613 align = GET_MODE_ALIGNMENT (mode);
614
615 if (! type)
616 type = lang_hooks.types.type_for_mode (mode, 0);
617
618 if (type)
619 align = LOCAL_ALIGNMENT (type, align);
620
621 /* Try to find an available, already-allocated temporary of the proper
622 mode which meets the size and alignment requirements. Choose the
623 smallest one with the closest alignment. */
624 for (p = avail_temp_slots; p; p = p->next)
625 {
626 if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
627 && objects_must_conflict_p (p->type, type)
628 && (best_p == 0 || best_p->size > p->size
629 || (best_p->size == p->size && best_p->align > p->align)))
630 {
631 if (p->align == align && p->size == size)
632 {
633 selected = p;
634 cut_slot_from_list (selected, &avail_temp_slots);
635 best_p = 0;
636 break;
637 }
638 best_p = p;
639 }
640 }
641
642 /* Make our best, if any, the one to use. */
643 if (best_p)
644 {
645 selected = best_p;
646 cut_slot_from_list (selected, &avail_temp_slots);
647
648 /* If there are enough aligned bytes left over, make them into a new
649 temp_slot so that the extra bytes don't get wasted. Do this only
650 for BLKmode slots, so that we can be sure of the alignment. */
651 if (GET_MODE (best_p->slot) == BLKmode)
652 {
653 int alignment = best_p->align / BITS_PER_UNIT;
654 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
655
656 if (best_p->size - rounded_size >= alignment)
657 {
658 p = ggc_alloc (sizeof (struct temp_slot));
659 p->in_use = p->addr_taken = 0;
660 p->size = best_p->size - rounded_size;
661 p->base_offset = best_p->base_offset + rounded_size;
662 p->full_size = best_p->full_size - rounded_size;
663 p->slot = gen_rtx_MEM (BLKmode,
664 plus_constant (XEXP (best_p->slot, 0),
665 rounded_size));
666 p->align = best_p->align;
667 p->address = 0;
668 p->type = best_p->type;
669 insert_slot_to_list (p, &avail_temp_slots);
670
671 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
672 stack_slot_list);
673
674 best_p->size = rounded_size;
675 best_p->full_size = rounded_size;
676 }
677 }
678 }
679
680 /* If we still didn't find one, make a new temporary. */
681 if (selected == 0)
682 {
683 HOST_WIDE_INT frame_offset_old = frame_offset;
684
685 p = ggc_alloc (sizeof (struct temp_slot));
686
687 /* We are passing an explicit alignment request to assign_stack_local.
688 One side effect of that is assign_stack_local will not round SIZE
689 to ensure the frame offset remains suitably aligned.
690
691 So for requests which depended on the rounding of SIZE, we go ahead
692 and round it now. We also verify, for BLKmode, that ALIGN is at
693 least BIGGEST_ALIGNMENT. */
694 if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
695 abort ();
696 p->slot = assign_stack_local (mode,
697 (mode == BLKmode
698 ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
699 : size),
700 align);
701
702 p->align = align;
703
704 /* The following slot size computation is necessary because we don't
705 know the actual size of the temporary slot until assign_stack_local
706 has performed all the frame alignment and size rounding for the
707 requested temporary. Note that extra space added for alignment
708 can be either above or below this stack slot depending on which
709 way the frame grows. We include the extra space if and only if it
710 is above this slot. */
711 #ifdef FRAME_GROWS_DOWNWARD
712 p->size = frame_offset_old - frame_offset;
713 #else
714 p->size = size;
715 #endif
716
717 /* Now define the fields used by combine_temp_slots. */
718 #ifdef FRAME_GROWS_DOWNWARD
719 p->base_offset = frame_offset;
720 p->full_size = frame_offset_old - frame_offset;
721 #else
722 p->base_offset = frame_offset_old;
723 p->full_size = frame_offset - frame_offset_old;
724 #endif
725 p->address = 0;
726
727 selected = p;
728 }
729
730 p = selected;
731 p->in_use = 1;
732 p->addr_taken = 0;
733 p->type = type;
734
735 if (keep == 2)
736 {
737 p->level = target_temp_slot_level;
738 p->keep = 1;
739 }
740 else if (keep == 3)
741 {
742 p->level = var_temp_slot_level;
743 p->keep = 0;
744 }
745 else
746 {
747 p->level = temp_slot_level;
748 p->keep = keep;
749 }
750
751 pp = temp_slots_at_level (p->level);
752 insert_slot_to_list (p, pp);
753
754 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
755 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
756 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
757
758 /* If we know the alias set for the memory that will be used, use
759 it. If there's no TYPE, then we don't know anything about the
760 alias set for the memory. */
761 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
762 set_mem_align (slot, align);
763
764 /* If a type is specified, set the relevant flags. */
765 if (type != 0)
766 {
767 RTX_UNCHANGING_P (slot) = (lang_hooks.honor_readonly
768 && TYPE_READONLY (type));
769 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
770 MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
771 }
772
773 return slot;
774 }
775
776 /* Allocate a temporary stack slot and record it for possible later
777 reuse. The first three arguments are the same as in the preceding function. */
778
779 rtx
780 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
781 {
782 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
783 }
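/* Editor's illustration of the KEEP argument (hypothetical callers, not
   present in this file):

     assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0)
	-- freed by the next call to free_temp_slots;
     assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 1)
	-- survives free_temp_slots, as for a block's automatic variable.

   KEEP values of 2 and 3 select target_temp_slot_level and
   var_temp_slot_level respectively, as described above
   assign_stack_temp_for_type.  */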
784 \f
785 /* Assign a temporary.
786 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
787 and so the decl should be used in error messages. In either case, we
788 allocate space of the given type.
789 KEEP is as for assign_stack_temp.
790 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
791 it is 0 if a register is OK.
792 DONT_PROMOTE is 1 if we should not promote values in register
793 to wider modes. */
794
795 rtx
796 assign_temp (tree type_or_decl, int keep, int memory_required,
797 int dont_promote ATTRIBUTE_UNUSED)
798 {
799 tree type, decl;
800 enum machine_mode mode;
801 #ifdef PROMOTE_MODE
802 int unsignedp;
803 #endif
804
805 if (DECL_P (type_or_decl))
806 decl = type_or_decl, type = TREE_TYPE (decl);
807 else
808 decl = NULL, type = type_or_decl;
809
810 mode = TYPE_MODE (type);
811 #ifdef PROMOTE_MODE
812 unsignedp = TYPE_UNSIGNED (type);
813 #endif
814
815 if (mode == BLKmode || memory_required)
816 {
817 HOST_WIDE_INT size = int_size_in_bytes (type);
818 tree size_tree;
819 rtx tmp;
820
821 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
822 problems with allocating the stack space. */
823 if (size == 0)
824 size = 1;
825
826 /* Unfortunately, we don't yet know how to allocate variable-sized
827 temporaries. However, sometimes we have a fixed upper limit on
828 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
829 instead. This is the case for Chill variable-sized strings. */
830 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
831 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
832 && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
833 size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);
834
835 /* If we still haven't been able to get a size, see if the language
836 can compute a maximum size. */
837 if (size == -1
838 && (size_tree = lang_hooks.types.max_size (type)) != 0
839 && host_integerp (size_tree, 1))
840 size = tree_low_cst (size_tree, 1);
841
842 /* The size of the temporary may be too large to fit into an integer. */
843 /* ??? Not sure this should happen except for user silliness, so limit
844 this to things that aren't compiler-generated temporaries. The
845 rest of the time we'll abort in assign_stack_temp_for_type. */
846 if (decl && size == -1
847 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
848 {
849 error ("%Jsize of variable '%D' is too large", decl, decl);
850 size = 1;
851 }
852
853 tmp = assign_stack_temp_for_type (mode, size, keep, type);
854 return tmp;
855 }
856
857 #ifdef PROMOTE_MODE
858 if (! dont_promote)
859 mode = promote_mode (type, mode, &unsignedp, 0);
860 #endif
861
862 return gen_reg_rtx (mode);
863 }
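/* Editor's sketch of typical uses (the callers are assumed, not quoted):

     assign_temp (type, 0, 1, 0)
	-- always returns a stack MEM, even for scalar types;
     assign_temp (type, 0, 0, 0)
	-- may instead return a (possibly promoted) pseudo register when
	   TYPE_MODE (type) is not BLKmode.  */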
864 \f
865 /* Combine temporary stack slots which are adjacent on the stack.
866
867 This allows for better use of already allocated stack space. This is only
868 done for BLKmode slots because we can be sure that we won't have alignment
869 problems in this case. */
870
871 void
872 combine_temp_slots (void)
873 {
874 struct temp_slot *p, *q, *next, *next_q;
875 int num_slots;
876
877 /* We can't combine slots, because the information about which slot
878 is in which alias set will be lost. */
879 if (flag_strict_aliasing)
880 return;
881
882 /* If there are a lot of temp slots, don't do anything unless
883 high levels of optimization are enabled. */
884 if (! flag_expensive_optimizations)
885 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
886 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
887 return;
888
889 for (p = avail_temp_slots; p; p = next)
890 {
891 int delete_p = 0;
892
893 next = p->next;
894
895 if (GET_MODE (p->slot) != BLKmode)
896 continue;
897
898 for (q = p->next; q; q = next_q)
899 {
900 int delete_q = 0;
901
902 next_q = q->next;
903
904 if (GET_MODE (q->slot) != BLKmode)
905 continue;
906
907 if (p->base_offset + p->full_size == q->base_offset)
908 {
909 /* Q comes after P; combine Q into P. */
910 p->size += q->size;
911 p->full_size += q->full_size;
912 delete_q = 1;
913 }
914 else if (q->base_offset + q->full_size == p->base_offset)
915 {
916 /* P comes after Q; combine P into Q. */
917 q->size += p->size;
918 q->full_size += p->full_size;
919 delete_p = 1;
920 break;
921 }
922 if (delete_q)
923 cut_slot_from_list (q, &avail_temp_slots);
924 }
925
926 /* Either delete P or advance past it. */
927 if (delete_p)
928 cut_slot_from_list (p, &avail_temp_slots);
929 }
930 }
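/* Worked example (editor's illustration): two free BLKmode slots with
   P->base_offset == 0, P->full_size == 16 and Q->base_offset == 16,
   Q->full_size == 8 are adjacent on the stack, so Q is merged into P,
   leaving a single 24-byte slot that later requests can reuse.  */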
931 \f
932 /* Find the temp slot corresponding to the object at address X. */
933
934 static struct temp_slot *
935 find_temp_slot_from_address (rtx x)
936 {
937 struct temp_slot *p;
938 rtx next;
939 int i;
940
941 for (i = max_slot_level (); i >= 0; i--)
942 for (p = *temp_slots_at_level (i); p; p = p->next)
943 {
944 if (XEXP (p->slot, 0) == x
945 || p->address == x
946 || (GET_CODE (x) == PLUS
947 && XEXP (x, 0) == virtual_stack_vars_rtx
948 && GET_CODE (XEXP (x, 1)) == CONST_INT
949 && INTVAL (XEXP (x, 1)) >= p->base_offset
950 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
951 return p;
952
953 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
954 for (next = p->address; next; next = XEXP (next, 1))
955 if (XEXP (next, 0) == x)
956 return p;
957 }
958
959 /* If we have a sum involving a register, see if it points to a temp
960 slot. */
961 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
962 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
963 return p;
964 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
965 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
966 return p;
967
968 return 0;
969 }
970
971 /* Indicate that NEW is an alternate way of referring to the temp slot
972 that previously was known by OLD. */
973
974 void
975 update_temp_slot_address (rtx old, rtx new)
976 {
977 struct temp_slot *p;
978
979 if (rtx_equal_p (old, new))
980 return;
981
982 p = find_temp_slot_from_address (old);
983
984 /* If we didn't find one, see if OLD is a PLUS. If so, and NEW
985 is a register, see if one operand of the PLUS is a temporary
986 location. If so, NEW points into it. Otherwise, see if both OLD
987 and NEW are a PLUS with a register in common between them; if so,
988 try a recursive call on those values. */
989 if (p == 0)
990 {
991 if (GET_CODE (old) != PLUS)
992 return;
993
994 if (REG_P (new))
995 {
996 update_temp_slot_address (XEXP (old, 0), new);
997 update_temp_slot_address (XEXP (old, 1), new);
998 return;
999 }
1000 else if (GET_CODE (new) != PLUS)
1001 return;
1002
1003 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
1004 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
1005 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
1006 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
1007 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
1008 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
1009 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
1010 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
1011
1012 return;
1013 }
1014
1015 /* Otherwise add an alias for the temp's address. */
1016 else if (p->address == 0)
1017 p->address = new;
1018 else
1019 {
1020 if (GET_CODE (p->address) != EXPR_LIST)
1021 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1022
1023 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1024 }
1025 }
1026
1027 /* If X could be a reference to a temporary slot, mark the fact that its
1028 address was taken. */
1029
1030 void
1031 mark_temp_addr_taken (rtx x)
1032 {
1033 struct temp_slot *p;
1034
1035 if (x == 0)
1036 return;
1037
1038 /* If X is not in memory or is at a constant address, it cannot be in
1039 a temporary slot. */
1040 if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
1041 return;
1042
1043 p = find_temp_slot_from_address (XEXP (x, 0));
1044 if (p != 0)
1045 p->addr_taken = 1;
1046 }
1047
1048 /* If X could be a reference to a temporary slot, mark that slot as
1049 belonging to the level one higher than the current level. If X
1050 matched one of our slots, just mark that one. Otherwise, we can't
1051 easily predict which it is, so upgrade all of them. Kept slots
1052 need not be touched.
1053
1054 This is called when an ({...}) construct occurs and a statement
1055 returns a value in memory. */
1056
1057 void
1058 preserve_temp_slots (rtx x)
1059 {
1060 struct temp_slot *p = 0, *next;
1061
1062 /* If there is no result, we still might have some objects whose addresses
1063 were taken, so we need to make sure they stay around. */
1064 if (x == 0)
1065 {
1066 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1067 {
1068 next = p->next;
1069
1070 if (p->addr_taken)
1071 move_slot_to_level (p, temp_slot_level - 1);
1072 }
1073
1074 return;
1075 }
1076
1077 /* If X is a register that is being used as a pointer, see if we have
1078 a temporary slot we know it points to. To be consistent with
1079 the code below, we really should preserve all non-kept slots
1080 if we can't find a match, but that seems to be much too costly. */
1081 if (REG_P (x) && REG_POINTER (x))
1082 p = find_temp_slot_from_address (x);
1083
1084 /* If X is not in memory or is at a constant address, it cannot be in
1085 a temporary slot, but it can contain something whose address was
1086 taken. */
1087 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1088 {
1089 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1090 {
1091 next = p->next;
1092
1093 if (p->addr_taken)
1094 move_slot_to_level (p, temp_slot_level - 1);
1095 }
1096
1097 return;
1098 }
1099
1100 /* First see if we can find a match. */
1101 if (p == 0)
1102 p = find_temp_slot_from_address (XEXP (x, 0));
1103
1104 if (p != 0)
1105 {
1106 /* Move everything at our level whose address was taken to our new
1107 level in case we used its address. */
1108 struct temp_slot *q;
1109
1110 if (p->level == temp_slot_level)
1111 {
1112 for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1113 {
1114 next = q->next;
1115
1116 if (p != q && q->addr_taken)
1117 move_slot_to_level (q, temp_slot_level - 1);
1118 }
1119
1120 move_slot_to_level (p, temp_slot_level - 1);
1121 p->addr_taken = 0;
1122 }
1123 return;
1124 }
1125
1126 /* Otherwise, preserve all non-kept slots at this level. */
1127 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1128 {
1129 next = p->next;
1130
1131 if (!p->keep)
1132 move_slot_to_level (p, temp_slot_level - 1);
1133 }
1134 }
1135
1136 /* Free all temporaries used so far. This is normally called at the
1137 end of generating code for a statement. */
1138
1139 void
1140 free_temp_slots (void)
1141 {
1142 struct temp_slot *p, *next;
1143
1144 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1145 {
1146 next = p->next;
1147
1148 if (!p->keep)
1149 make_slot_available (p);
1150 }
1151
1152 combine_temp_slots ();
1153 }
1154
1155 /* Push deeper into the nesting level for stack temporaries. */
1156
1157 void
1158 push_temp_slots (void)
1159 {
1160 temp_slot_level++;
1161 }
1162
1163 /* Pop a temporary nesting level. All slots in use in the current level
1164 are freed. */
1165
1166 void
1167 pop_temp_slots (void)
1168 {
1169 struct temp_slot *p, *next;
1170
1171 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1172 {
1173 next = p->next;
1174 make_slot_available (p);
1175 }
1176
1177 combine_temp_slots ();
1178
1179 temp_slot_level--;
1180 }
1181
1182 /* Initialize temporary slots. */
1183
1184 void
1185 init_temp_slots (void)
1186 {
1187 /* We have not allocated any temporaries yet. */
1188 avail_temp_slots = 0;
1189 used_temp_slots = 0;
1190 temp_slot_level = 0;
1191 var_temp_slot_level = 0;
1192 target_temp_slot_level = 0;
1193 }
1194 \f
1195 /* These routines are responsible for converting virtual register references
1196 to the actual hard register references once RTL generation is complete.
1197
1198 The following four variables are used for communication between the
1199 routines. They contain the offsets of the virtual registers from their
1200 respective hard registers. */
1201
1202 static int in_arg_offset;
1203 static int var_offset;
1204 static int dynamic_offset;
1205 static int out_arg_offset;
1206 static int cfa_offset;
1207
1208 /* In most machines, the stack pointer register is equivalent to the bottom
1209 of the stack. */
1210
1211 #ifndef STACK_POINTER_OFFSET
1212 #define STACK_POINTER_OFFSET 0
1213 #endif
1214
1215 /* If not defined, pick an appropriate default for the offset of dynamically
1216 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1217 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1218
1219 #ifndef STACK_DYNAMIC_OFFSET
1220
1221 /* The bottom of the stack points to the actual arguments. If
1222 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1223 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1224 stack space for register parameters is not pushed by the caller, but
1225 is rather part of the fixed stack areas and hence not included in
1226 `current_function_outgoing_args_size'. Nevertheless, we must allow
1227 for it when allocating stack dynamic objects. */
1228
1229 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1230 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1231 ((ACCUMULATE_OUTGOING_ARGS \
1232 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
1233 + (STACK_POINTER_OFFSET)) \
1234
1235 #else
1236 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1237 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
1238 + (STACK_POINTER_OFFSET))
1239 #endif
1240 #endif
1241
1242 /* On most machines, the CFA coincides with the first incoming parm. */
1243
1244 #ifndef ARG_POINTER_CFA_OFFSET
1245 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
1246 #endif
1247
1248 \f
1249 /* Pass through the insns of the current function and convert virtual
1250 register references to hard register references. */
1251
1252 void
1253 instantiate_virtual_regs (void)
1254 {
1255 rtx insn;
1256
1257 /* Compute the offsets to use for this function. */
1258 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1259 var_offset = STARTING_FRAME_OFFSET;
1260 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1261 out_arg_offset = STACK_POINTER_OFFSET;
1262 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1263
1264 /* Scan all variables and parameters of this function. For each that is
1265 in memory, instantiate all virtual registers if the result is a valid
1266 address. If not, we do it later. That will handle most uses of virtual
1267 regs on many machines. */
1268 instantiate_decls (current_function_decl, 1);
1269
1270 /* Initialize recognition, indicating that volatile is OK. */
1271 init_recog ();
1272
1273 /* Scan through all the insns, instantiating every virtual register still
1274 present. */
1275 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1276 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1277 || GET_CODE (insn) == CALL_INSN)
1278 {
1279 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
1280 if (INSN_DELETED_P (insn))
1281 continue;
1282 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
1283 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1284 if (GET_CODE (insn) == CALL_INSN)
1285 instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
1286 NULL_RTX, 0);
1287
1288 /* Past this point all ASM statements should match. Verify that
1289 to avoid failures later in the compilation process. */
1290 if (asm_noperands (PATTERN (insn)) >= 0
1291 && ! check_asm_operands (PATTERN (insn)))
1292 instantiate_virtual_regs_lossage (insn);
1293 }
1294
1295 /* Now instantiate the remaining register equivalences for debugging info.
1296 These will not be valid addresses. */
1297 instantiate_decls (current_function_decl, 0);
1298
1299 /* Indicate that, from now on, assign_stack_local should use
1300 frame_pointer_rtx. */
1301 virtuals_instantiated = 1;
1302 }
1303
1304 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1305 all virtual registers in their DECL_RTL's.
1306
1307 If VALID_ONLY, do this only if the resulting address is still valid.
1308 Otherwise, always do it. */
1309
1310 static void
1311 instantiate_decls (tree fndecl, int valid_only)
1312 {
1313 tree decl;
1314
1315 /* Process all parameters of the function. */
1316 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1317 {
1318 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
1319 HOST_WIDE_INT size_rtl;
1320
1321 instantiate_decl (DECL_RTL (decl), size, valid_only);
1322
1323 /* If the parameter was promoted, then the incoming RTL mode may be
1324 larger than the declared type size. We must use the larger of
1325 the two sizes. */
1326 size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
1327 size = MAX (size_rtl, size);
1328 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
1329 }
1330
1331 /* Now process all variables defined in the function or its subblocks. */
1332 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
1333 }
1334
1335 /* Subroutine of instantiate_decls: Process all decls in the given
1336 BLOCK node and all its subblocks. */
1337
1338 static void
1339 instantiate_decls_1 (tree let, int valid_only)
1340 {
1341 tree t;
1342
1343 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1344 if (DECL_RTL_SET_P (t))
1345 instantiate_decl (DECL_RTL (t),
1346 int_size_in_bytes (TREE_TYPE (t)),
1347 valid_only);
1348
1349 /* Process all subblocks. */
1350 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1351 instantiate_decls_1 (t, valid_only);
1352 }
1353
1354 /* Subroutine of the preceding procedures: Given RTL representing a
1355 decl and the size of the object, do any instantiation required.
1356
1357 If VALID_ONLY is nonzero, it means that the RTL should only be
1358 changed if the new address is valid. */
1359
1360 static void
1361 instantiate_decl (rtx x, HOST_WIDE_INT size, int valid_only)
1362 {
1363 enum machine_mode mode;
1364 rtx addr;
1365
1366 /* If this is not a MEM, no need to do anything. Similarly if the
1367 address is a constant or a register that is not a virtual register. */
1368
1369 if (x == 0 || !MEM_P (x))
1370 return;
1371
1372 addr = XEXP (x, 0);
1373 if (CONSTANT_P (addr)
1374 || (REG_P (addr)
1375 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1376 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1377 return;
1378
1379 /* If we should only do this if the address is valid, copy the address.
1380 We need to do this so we can undo any changes that might make the
1381 address invalid. This copy is unfortunate, but probably can't be
1382 avoided. */
1383
1384 if (valid_only)
1385 addr = copy_rtx (addr);
1386
1387 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
1388
1389 if (valid_only && size >= 0)
1390 {
1391 unsigned HOST_WIDE_INT decl_size = size;
1392
1393 /* Now verify that the resulting address is valid for every integer or
1394 floating-point mode up to and including SIZE bytes long. We do this
1395 since the object might be accessed in any mode and frame addresses
1396 are shared. */
1397
1398 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1399 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
1400 mode = GET_MODE_WIDER_MODE (mode))
1401 if (! memory_address_p (mode, addr))
1402 return;
1403
1404 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
1405 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
1406 mode = GET_MODE_WIDER_MODE (mode))
1407 if (! memory_address_p (mode, addr))
1408 return;
1409 }
1410
1411 /* Put back the address now that we have updated it and we either know
1412 it is valid or we don't care whether it is valid. */
1413
1414 XEXP (x, 0) = addr;
1415 }
1416 \f
1417 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1418 is a virtual register, return the equivalent hard register and set the
1419 offset indirectly through the pointer. Otherwise, return 0. */
1420
1421 static rtx
1422 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1423 {
1424 rtx new;
1425 HOST_WIDE_INT offset;
1426
1427 if (x == virtual_incoming_args_rtx)
1428 new = arg_pointer_rtx, offset = in_arg_offset;
1429 else if (x == virtual_stack_vars_rtx)
1430 new = frame_pointer_rtx, offset = var_offset;
1431 else if (x == virtual_stack_dynamic_rtx)
1432 new = stack_pointer_rtx, offset = dynamic_offset;
1433 else if (x == virtual_outgoing_args_rtx)
1434 new = stack_pointer_rtx, offset = out_arg_offset;
1435 else if (x == virtual_cfa_rtx)
1436 new = arg_pointer_rtx, offset = cfa_offset;
1437 else
1438 return 0;
1439
1440 *poffset = offset;
1441 return new;
1442 }
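/* For instance (editor's illustration): since this function maps
   virtual_stack_vars_rtx to frame_pointer_rtx with offset VAR_OFFSET,
   a reference such as (plus (reg virtual-stack-vars) (const_int 8)) is
   rewritten by the code below into (plus (reg frame-pointer)
   (const_int VAR_OFFSET+8)).  */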
1443 \f
1444
1445 /* Called when instantiate_virtual_regs has failed to update the instruction.
1446 Usually this means that a non-matching instruction has been emitted;
1447 for asm statements, however, the problem may lie in the constraints. */
1448 static void
1449 instantiate_virtual_regs_lossage (rtx insn)
1450 {
1451 if (asm_noperands (PATTERN (insn)) >= 0)
1452 {
1453 error_for_asm (insn, "impossible constraint in `asm'");
1454 delete_insn (insn);
1455 }
1456 else
1457 abort ();
1458 }
1459 /* Given a pointer to a piece of rtx and an optional pointer to the
1460 containing object, instantiate any virtual registers present in it.
1461
1462 If EXTRA_INSNS, we always do the replacement and generate
1463 any extra insns before OBJECT. If it is zero, we do nothing if replacement
1464 is not valid.
1465
1466 Return 1 if we either had nothing to do or if we were able to do the
1467 needed replacement. Return 0 otherwise; we only return zero if
1468 EXTRA_INSNS is zero.
1469
1470 We first try some simple transformations to avoid the creation of extra
1471 pseudos. */
1472
1473 static int
1474 instantiate_virtual_regs_1 (rtx *loc, rtx object, int extra_insns)
1475 {
1476 rtx x;
1477 RTX_CODE code;
1478 rtx new = 0;
1479 HOST_WIDE_INT offset = 0;
1480 rtx temp;
1481 rtx seq;
1482 int i, j;
1483 const char *fmt;
1484
1485 /* Re-start here to avoid recursion in common cases. */
1486 restart:
1487
1488 x = *loc;
1489 if (x == 0)
1490 return 1;
1491
1492 /* We may have detected and deleted invalid asm statements. */
1493 if (object && INSN_P (object) && INSN_DELETED_P (object))
1494 return 1;
1495
1496 code = GET_CODE (x);
1497
1498 /* Check for some special cases. */
1499 switch (code)
1500 {
1501 case CONST_INT:
1502 case CONST_DOUBLE:
1503 case CONST_VECTOR:
1504 case CONST:
1505 case SYMBOL_REF:
1506 case CODE_LABEL:
1507 case PC:
1508 case CC0:
1509 case ASM_INPUT:
1510 case ADDR_VEC:
1511 case ADDR_DIFF_VEC:
1512 case RETURN:
1513 return 1;
1514
1515 case SET:
1516 /* We are allowed to set the virtual registers. This means that
1517 the actual register should receive the source minus the
1518 appropriate offset. This is used, for example, in the handling
1519 of non-local gotos. */
1520 if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
1521 {
1522 rtx src = SET_SRC (x);
1523
1524 /* We are setting the register, not using it, so the relevant
1525 offset is the negative of the offset to use were we using
1526 the register. */
1527 offset = - offset;
1528 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
1529
1530 /* The only valid sources here are PLUS or REG. Just do
1531 the simplest possible thing to handle them. */
1532 if (!REG_P (src) && GET_CODE (src) != PLUS)
1533 {
1534 instantiate_virtual_regs_lossage (object);
1535 return 1;
1536 }
1537
1538 start_sequence ();
1539 if (!REG_P (src))
1540 temp = force_operand (src, NULL_RTX);
1541 else
1542 temp = src;
1543 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
1544 seq = get_insns ();
1545 end_sequence ();
1546
1547 emit_insn_before (seq, object);
1548 SET_DEST (x) = new;
1549
1550 if (! validate_change (object, &SET_SRC (x), temp, 0)
1551 || ! extra_insns)
1552 instantiate_virtual_regs_lossage (object);
1553
1554 return 1;
1555 }
1556
1557 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
1558 loc = &SET_SRC (x);
1559 goto restart;
1560
1561 case PLUS:
1562 /* Handle special case of virtual register plus constant. */
1563 if (CONSTANT_P (XEXP (x, 1)))
1564 {
1565 rtx old, new_offset;
1566
1567 /* Check for (plus (plus VIRT foo) (const_int)) first. */
1568 if (GET_CODE (XEXP (x, 0)) == PLUS)
1569 {
1570 if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
1571 {
1572 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
1573 extra_insns);
1574 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
1575 }
1576 else
1577 {
1578 loc = &XEXP (x, 0);
1579 goto restart;
1580 }
1581 }
1582
1583 #ifdef POINTERS_EXTEND_UNSIGNED
1584 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1585 we can commute the PLUS and SUBREG because pointers into the
1586 frame are well-behaved. */
1587 else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
1588 && GET_CODE (XEXP (x, 1)) == CONST_INT
1589 && 0 != (new
1590 = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
1591 &offset))
1592 && validate_change (object, loc,
1593 plus_constant (gen_lowpart (ptr_mode,
1594 new),
1595 offset
1596 + INTVAL (XEXP (x, 1))),
1597 0))
1598 return 1;
1599 #endif
1600 else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
1601 {
1602 /* We know the second operand is a constant. Unless the
1603 first operand is a REG (which has already been checked),
1604 it needs to be checked. */
1605 if (!REG_P (XEXP (x, 0)))
1606 {
1607 loc = &XEXP (x, 0);
1608 goto restart;
1609 }
1610 return 1;
1611 }
1612
1613 new_offset = plus_constant (XEXP (x, 1), offset);
1614
1615 /* If the new constant is zero, try to replace the sum with just
1616 the register. */
1617 if (new_offset == const0_rtx
1618 && validate_change (object, loc, new, 0))
1619 return 1;
1620
1621 /* Next try to replace the register and new offset.
1622 There are two changes to validate here and we can't assume that,
1623 when the old offset equals the new one, just changing the register
1624 will yield a valid insn. In the interests of a little efficiency,
1625 however, we only call validate_change once (we don't queue up the
1626 changes and then call apply_change_group). */
1627
1628 old = XEXP (x, 0);
1629 if (offset == 0
1630 ? ! validate_change (object, &XEXP (x, 0), new, 0)
1631 : (XEXP (x, 0) = new,
1632 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
1633 {
1634 if (! extra_insns)
1635 {
1636 XEXP (x, 0) = old;
1637 return 0;
1638 }
1639
1640 /* Otherwise copy the new constant into a register and replace
1641 constant with that register. */
1642 temp = gen_reg_rtx (Pmode);
1643 XEXP (x, 0) = new;
1644 if (validate_change (object, &XEXP (x, 1), temp, 0))
1645 emit_insn_before (gen_move_insn (temp, new_offset), object);
1646 else
1647 {
1648 /* If that didn't work, replace this expression with a
1649 register containing the sum. */
1650
1651 XEXP (x, 0) = old;
1652 new = gen_rtx_PLUS (Pmode, new, new_offset);
1653
1654 start_sequence ();
1655 temp = force_operand (new, NULL_RTX);
1656 seq = get_insns ();
1657 end_sequence ();
1658
1659 emit_insn_before (seq, object);
1660 if (! validate_change (object, loc, temp, 0)
1661 && ! validate_replace_rtx (x, temp, object))
1662 {
1663 instantiate_virtual_regs_lossage (object);
1664 return 1;
1665 }
1666 }
1667 }
1668
1669 return 1;
1670 }
1671
1672 /* Fall through to generic two-operand expression case. */
1673 case EXPR_LIST:
1674 case CALL:
1675 case COMPARE:
1676 case MINUS:
1677 case MULT:
1678 case DIV: case UDIV:
1679 case MOD: case UMOD:
1680 case AND: case IOR: case XOR:
1681 case ROTATERT: case ROTATE:
1682 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
1683 case NE: case EQ:
1684 case GE: case GT: case GEU: case GTU:
1685 case LE: case LT: case LEU: case LTU:
1686 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
1687 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
1688 loc = &XEXP (x, 0);
1689 goto restart;
1690
1691 case MEM:
1692 /* Most cases of MEM that convert to valid addresses have already been
1693 handled by our scan of decls. The only special handling we
1694 need here is to make a copy of the rtx to ensure it isn't being
1695 shared if we have to change it to a pseudo.
1696
1697 If the rtx is a simple reference to an address via a virtual register,
1698 it can potentially be shared. In such cases, first try to make it
1699 a valid address, which can also be shared. Otherwise, copy it and
1700 proceed normally.
1701
1702 First check for common cases that need no processing. These are
1703 usually due to instantiation already being done on a previous instance
1704 of a shared rtx. */
1705
1706 temp = XEXP (x, 0);
1707 if (CONSTANT_ADDRESS_P (temp)
1708 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1709 || temp == arg_pointer_rtx
1710 #endif
1711 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1712 || temp == hard_frame_pointer_rtx
1713 #endif
1714 || temp == frame_pointer_rtx)
1715 return 1;
1716
1717 if (GET_CODE (temp) == PLUS
1718 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
1719 && (XEXP (temp, 0) == frame_pointer_rtx
1720 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1721 || XEXP (temp, 0) == hard_frame_pointer_rtx
1722 #endif
1723 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1724 || XEXP (temp, 0) == arg_pointer_rtx
1725 #endif
1726 ))
1727 return 1;
1728
1729 if (temp == virtual_stack_vars_rtx
1730 || temp == virtual_incoming_args_rtx
1731 || (GET_CODE (temp) == PLUS
1732 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
1733 && (XEXP (temp, 0) == virtual_stack_vars_rtx
1734 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
1735 {
1736 /* This MEM may be shared. If the substitution can be done without
1737 the need to generate new pseudos, we want to do it in place
1738 so all copies of the shared rtx benefit. The call below will
1739 only make substitutions if the resulting address is still
1740 valid.
1741
1742 Note that we cannot pass X as the object in the recursive call
1743 since the insn being processed may not allow all valid
1744 addresses. However, if we were not passed an object, we can
1745 only modify X without copying it if X will have a valid
1746 address.
1747
1748 ??? Also note that this can still lose if OBJECT is an insn that
1749 has fewer restrictions on an address than some other insn.
1750 In that case, we will modify the shared address. This case
1751 doesn't seem very likely, though. One case where this could
1752 happen is in the case of a USE or CLOBBER reference, but we
1753 take care of that below. */
1754
1755 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
1756 object ? object : x, 0))
1757 return 1;
1758
1759 /* Otherwise make a copy and process that copy. We copy the entire
1760 RTL expression since it might be a PLUS which could also be
1761 shared. */
1762 *loc = x = copy_rtx (x);
1763 }
1764
1765 /* Fall through to generic unary operation case. */
1766 case PREFETCH:
1767 case SUBREG:
1768 case STRICT_LOW_PART:
1769 case NEG: case NOT:
1770 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
1771 case SIGN_EXTEND: case ZERO_EXTEND:
1772 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
1773 case FLOAT: case FIX:
1774 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
1775 case ABS:
1776 case SQRT:
1777 case FFS:
1778 case CLZ: case CTZ:
1779 case POPCOUNT: case PARITY:
1780 /* These cases either have just one operand or we know that we need not
1781 check the rest of the operands. */
1782 loc = &XEXP (x, 0);
1783 goto restart;
1784
1785 case USE:
1786 case CLOBBER:
1787 /* If the operand is a MEM, see if the change is a valid MEM. If not,
1788 go ahead and make the invalid one, but do it to a copy. For a REG,
1789 just make the recursive call, since there's no chance of a problem. */
1790
1791 if ((MEM_P (XEXP (x, 0))
1792 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
1793 0))
1794 || (REG_P (XEXP (x, 0))
1795 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
1796 return 1;
1797
1798 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
1799 loc = &XEXP (x, 0);
1800 goto restart;
1801
1802 case REG:
1803 /* Try to replace with a PLUS. If that doesn't work, compute the sum
1804 in front of this insn and substitute the temporary. */
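/* Illustrative sketch (not specific to any target): a reference to
virtual_stack_vars_rtx with OFFSET typically becomes
(plus:P (reg frame_pointer) (const_int OFFSET)). If that PLUS is not a
valid operand for OBJECT, the force_operand call below computes the sum
into a fresh pseudo ahead of the insn and we substitute that instead. */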
1805 if ((new = instantiate_new_reg (x, &offset)) != 0)
1806 {
1807 temp = plus_constant (new, offset);
1808 if (!validate_change (object, loc, temp, 0))
1809 {
1810 if (! extra_insns)
1811 return 0;
1812
1813 start_sequence ();
1814 temp = force_operand (temp, NULL_RTX);
1815 seq = get_insns ();
1816 end_sequence ();
1817
1818 emit_insn_before (seq, object);
1819 if (! validate_change (object, loc, temp, 0)
1820 && ! validate_replace_rtx (x, temp, object))
1821 instantiate_virtual_regs_lossage (object);
1822 }
1823 }
1824
1825 return 1;
1826
1827 default:
1828 break;
1829 }
1830
1831 /* Scan all subexpressions. */
1832 fmt = GET_RTX_FORMAT (code);
1833 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1834 if (*fmt == 'e')
1835 {
1836 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
1837 return 0;
1838 }
1839 else if (*fmt == 'E')
1840 for (j = 0; j < XVECLEN (x, i); j++)
1841 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
1842 extra_insns))
1843 return 0;
1844
1845 return 1;
1846 }
1847 \f
1848 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1849 This means a type for which function calls must pass an address to the
1850 function or get an address back from the function.
1851 EXP may be a type node or an expression (whose type is tested). */
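/* For instance (illustrative and target-dependent): a struct that the ABI
returns via a hidden pointer counts as an aggregate value here, while an
int returned in a call-clobbered register does not. */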
1852
1853 int
1854 aggregate_value_p (tree exp, tree fntype)
1855 {
1856 int i, regno, nregs;
1857 rtx reg;
1858
1859 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1860
1861 if (fntype)
1862 switch (TREE_CODE (fntype))
1863 {
1864 case CALL_EXPR:
1865 fntype = get_callee_fndecl (fntype);
1866 fntype = fntype ? TREE_TYPE (fntype) : 0;
1867 break;
1868 case FUNCTION_DECL:
1869 fntype = TREE_TYPE (fntype);
1870 break;
1871 case FUNCTION_TYPE:
1872 case METHOD_TYPE:
1873 break;
1874 case IDENTIFIER_NODE:
1875 fntype = 0;
1876 break;
1877 default:
1878 /* We don't expect other tree codes here. */
1879 abort ();
1880 }
1881
1882 if (TREE_CODE (type) == VOID_TYPE)
1883 return 0;
1884 if (targetm.calls.return_in_memory (type, fntype))
1885 return 1;
1886 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1887 and thus can't be returned in registers. */
1888 if (TREE_ADDRESSABLE (type))
1889 return 1;
1890 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
1891 return 1;
1892 /* Make sure we have suitable call-clobbered regs to return
1893 the value in; if not, we must return it in memory. */
1894 reg = hard_function_value (type, 0, 0);
1895
1896 /* If we have something other than a REG (e.g. a PARALLEL), then assume
1897 it is OK. */
1898 if (!REG_P (reg))
1899 return 0;
1900
1901 regno = REGNO (reg);
1902 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
1903 for (i = 0; i < nregs; i++)
1904 if (! call_used_regs[regno + i])
1905 return 1;
1906 return 0;
1907 }
1908 \f
1909 /* Return true if we should assign DECL a pseudo register; false if it
1910 should live on the local stack. */
1911
1912 bool
1913 use_register_for_decl (tree decl)
1914 {
1915 /* Honor volatile. */
1916 if (TREE_SIDE_EFFECTS (decl))
1917 return false;
1918
1919 /* Honor addressability. */
1920 if (TREE_ADDRESSABLE (decl))
1921 return false;
1922
1923 /* Only register-like things go in registers. */
1924 if (DECL_MODE (decl) == BLKmode)
1925 return false;
1926
1927 /* If -ffloat-store specified, don't put explicit float variables
1928 into registers. */
1929 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1930 propagates values across these stores, and it probably shouldn't. */
1931 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1932 return false;
1933
1934 /* Compiler-generated temporaries can always go in registers. */
1935 if (DECL_ARTIFICIAL (decl))
1936 return true;
1937
1938 #ifdef NON_SAVING_SETJMP
1939 /* Protect variables not declared "register" from setjmp. */
1940 if (NON_SAVING_SETJMP
1941 && current_function_calls_setjmp
1942 && !DECL_REGISTER (decl))
1943 return false;
1944 #endif
1945
1946 return (optimize || DECL_REGISTER (decl));
1947 }
1948
1949 /* Return true if TYPE should be passed by invisible reference. */
1950
1951 bool
1952 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1953 tree type, bool named_arg)
1954 {
1955 if (type)
1956 {
1957 /* If this type contains non-trivial constructors, then it is
1958 forbidden for the middle-end to create any new copies. */
1959 if (TREE_ADDRESSABLE (type))
1960 return true;
1961
1962 /* GCC post 3.4 passes *all* variable sized types by reference. */
1963 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1964 return true;
1965 }
1966
1967 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
1968 }
1969
1970 /* Structures to communicate between the subroutines of assign_parms.
1971 The first holds data persistent across all parameters, the second
1972 is cleared out for each parameter. */
1973
1974 struct assign_parm_data_all
1975 {
1976 CUMULATIVE_ARGS args_so_far;
1977 struct args_size stack_args_size;
1978 tree function_result_decl;
1979 tree orig_fnargs;
1980 rtx conversion_insns;
1981 HOST_WIDE_INT pretend_args_size;
1982 HOST_WIDE_INT extra_pretend_bytes;
1983 int reg_parm_stack_space;
1984 };
1985
1986 struct assign_parm_data_one
1987 {
1988 tree nominal_type;
1989 tree passed_type;
1990 rtx entry_parm;
1991 rtx stack_parm;
1992 enum machine_mode nominal_mode;
1993 enum machine_mode passed_mode;
1994 enum machine_mode promoted_mode;
1995 struct locate_and_pad_arg_data locate;
1996 int partial;
1997 BOOL_BITFIELD named_arg : 1;
1998 BOOL_BITFIELD last_named : 1;
1999 BOOL_BITFIELD passed_pointer : 1;
2000 BOOL_BITFIELD on_stack : 1;
2001 BOOL_BITFIELD loaded_in_reg : 1;
2002 };
2003
2004 /* A subroutine of assign_parms. Initialize ALL. */
2005
2006 static void
2007 assign_parms_initialize_all (struct assign_parm_data_all *all)
2008 {
2009 tree fntype;
2010
2011 memset (all, 0, sizeof (*all));
2012
2013 fntype = TREE_TYPE (current_function_decl);
2014
2015 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2016 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
2017 #else
2018 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
2019 current_function_decl, -1);
2020 #endif
2021
2022 #ifdef REG_PARM_STACK_SPACE
2023 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
2024 #endif
2025 }
2026
2027 /* If ARGS contains entries with complex types, split the entry into two
2028 entries of the component type. Return a new list if substitutions are
2029 needed, else the old list. */
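/* Example (assuming the target's split_complex_arg hook requests the
split): a PARM_DECL of type _Complex double is rewritten as two
consecutive PARM_DECLs of type double, the real part followed by the
imaginary part. */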
2030
2031 static tree
2032 split_complex_args (tree args)
2033 {
2034 tree p;
2035
2036 /* Before allocating memory, check for the common case of no complex. */
2037 for (p = args; p; p = TREE_CHAIN (p))
2038 {
2039 tree type = TREE_TYPE (p);
2040 if (TREE_CODE (type) == COMPLEX_TYPE
2041 && targetm.calls.split_complex_arg (type))
2042 goto found;
2043 }
2044 return args;
2045
2046 found:
2047 args = copy_list (args);
2048
2049 for (p = args; p; p = TREE_CHAIN (p))
2050 {
2051 tree type = TREE_TYPE (p);
2052 if (TREE_CODE (type) == COMPLEX_TYPE
2053 && targetm.calls.split_complex_arg (type))
2054 {
2055 tree decl;
2056 tree subtype = TREE_TYPE (type);
2057
2058 /* Rewrite the PARM_DECL's type with its component. */
2059 TREE_TYPE (p) = subtype;
2060 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2061 DECL_MODE (p) = VOIDmode;
2062 DECL_SIZE (p) = NULL;
2063 DECL_SIZE_UNIT (p) = NULL;
2064 layout_decl (p, 0);
2065
2066 /* Build a second synthetic decl. */
2067 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
2068 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2069 layout_decl (decl, 0);
2070
2071 /* Splice it in; skip the new decl. */
2072 TREE_CHAIN (decl) = TREE_CHAIN (p);
2073 TREE_CHAIN (p) = decl;
2074 p = decl;
2075 }
2076 }
2077
2078 return args;
2079 }
2080
2081 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2082 the hidden struct return argument, and (abi willing) complex args.
2083 Return the new parameter list. */
2084
2085 static tree
2086 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2087 {
2088 tree fndecl = current_function_decl;
2089 tree fntype = TREE_TYPE (fndecl);
2090 tree fnargs = DECL_ARGUMENTS (fndecl);
2091
2092 /* If struct value address is treated as the first argument, make it so. */
2093 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2094 && ! current_function_returns_pcc_struct
2095 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2096 {
2097 tree type = build_pointer_type (TREE_TYPE (fntype));
2098 tree decl;
2099
2100 decl = build_decl (PARM_DECL, NULL_TREE, type);
2101 DECL_ARG_TYPE (decl) = type;
2102 DECL_ARTIFICIAL (decl) = 1;
2103
2104 TREE_CHAIN (decl) = fnargs;
2105 fnargs = decl;
2106 all->function_result_decl = decl;
2107 }
2108
2109 all->orig_fnargs = fnargs;
2110
2111 /* If the target wants to split complex arguments into scalars, do so. */
2112 if (targetm.calls.split_complex_arg)
2113 fnargs = split_complex_args (fnargs);
2114
2115 return fnargs;
2116 }
2117
2118 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2119 data for the parameter. Incorporate ABI specifics such as pass-by-
2120 reference and type promotion. */
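/* For example (illustrative only): on a target whose ABI promotes small
integer arguments, a 'short' parameter may have nominal and passed mode
HImode but promoted mode SImode; an aggregate passed by invisible
reference instead ends up with passed and nominal mode Pmode and
PASSED_POINTER set. */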
2121
2122 static void
2123 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2124 struct assign_parm_data_one *data)
2125 {
2126 tree nominal_type, passed_type;
2127 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2128
2129 memset (data, 0, sizeof (*data));
2130
2131 /* Set LAST_NAMED if this is last named arg before last anonymous args. */
2132 if (current_function_stdarg)
2133 {
2134 tree tem;
2135 for (tem = TREE_CHAIN (parm); tem; tem = TREE_CHAIN (tem))
2136 if (DECL_NAME (tem))
2137 break;
2138 if (tem == 0)
2139 data->last_named = true;
2140 }
2141
2142 /* Set NAMED_ARG if this arg should be treated as a named arg. For
2143 most machines, if this is a varargs/stdarg function, then we treat
2144 the last named arg as if it were anonymous too. */
2145 if (targetm.calls.strict_argument_naming (&all->args_so_far))
2146 data->named_arg = 1;
2147 else
2148 data->named_arg = !data->last_named;
2149
2150 nominal_type = TREE_TYPE (parm);
2151 passed_type = DECL_ARG_TYPE (parm);
2152
2153 /* Look out for errors propagating this far. Also, if the parameter's
2154 type is void then its value doesn't matter. */
2155 if (TREE_TYPE (parm) == error_mark_node
2156 /* This can happen after weird syntax errors
2157 or if an enum type is defined among the parms. */
2158 || TREE_CODE (parm) != PARM_DECL
2159 || passed_type == NULL
2160 || VOID_TYPE_P (nominal_type))
2161 {
2162 nominal_type = passed_type = void_type_node;
2163 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2164 goto egress;
2165 }
2166
2167 /* Find mode of arg as it is passed, and mode of arg as it should be
2168 during execution of this function. */
2169 passed_mode = TYPE_MODE (passed_type);
2170 nominal_mode = TYPE_MODE (nominal_type);
2171
2172 /* If the parm is to be passed as a transparent union, use the type of
2173 the first field for the tests below. We have already verified that
2174 the modes are the same. */
2175 if (DECL_TRANSPARENT_UNION (parm)
2176 || (TREE_CODE (passed_type) == UNION_TYPE
2177 && TYPE_TRANSPARENT_UNION (passed_type)))
2178 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2179
2180 /* See if this arg was passed by invisible reference. */
2181 if (pass_by_reference (&all->args_so_far, passed_mode,
2182 passed_type, data->named_arg))
2183 {
2184 passed_type = nominal_type = build_pointer_type (passed_type);
2185 data->passed_pointer = true;
2186 passed_mode = nominal_mode = Pmode;
2187 }
2188 /* See if the frontend wants to pass this by invisible reference. */
2189 else if (passed_type != nominal_type
2190 && POINTER_TYPE_P (passed_type)
2191 && TREE_TYPE (passed_type) == nominal_type)
2192 {
2193 nominal_type = passed_type;
2194 data->passed_pointer = 1;
2195 passed_mode = nominal_mode = Pmode;
2196 }
2197
2198 /* Find mode as it is passed by the ABI. */
2199 promoted_mode = passed_mode;
2200 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2201 {
2202 int unsignedp = TYPE_UNSIGNED (passed_type);
2203 promoted_mode = promote_mode (passed_type, promoted_mode,
2204 &unsignedp, 1);
2205 }
2206
2207 egress:
2208 data->nominal_type = nominal_type;
2209 data->passed_type = passed_type;
2210 data->nominal_mode = nominal_mode;
2211 data->passed_mode = passed_mode;
2212 data->promoted_mode = promoted_mode;
2213 }
2214
2215 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2216
2217 static void
2218 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2219 struct assign_parm_data_one *data, bool no_rtl)
2220 {
2221 int varargs_pretend_bytes = 0;
2222
2223 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2224 data->promoted_mode,
2225 data->passed_type,
2226 &varargs_pretend_bytes, no_rtl);
2227
2228 /* If the back-end has requested extra stack space, record how much is
2229 needed. Do not change pretend_args_size otherwise since it may be
2230 nonzero from an earlier partial argument. */
2231 if (varargs_pretend_bytes > 0)
2232 all->pretend_args_size = varargs_pretend_bytes;
2233 }
2234
2235 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2236 the incoming location of the current parameter. */
2237
2238 static void
2239 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2240 struct assign_parm_data_one *data)
2241 {
2242 HOST_WIDE_INT pretend_bytes = 0;
2243 rtx entry_parm;
2244 bool in_regs;
2245
2246 if (data->promoted_mode == VOIDmode)
2247 {
2248 data->entry_parm = data->stack_parm = const0_rtx;
2249 return;
2250 }
2251
2252 #ifdef FUNCTION_INCOMING_ARG
2253 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2254 data->passed_type, data->named_arg);
2255 #else
2256 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2257 data->passed_type, data->named_arg);
2258 #endif
2259
2260 if (entry_parm == 0)
2261 data->promoted_mode = data->passed_mode;
2262
2263 /* Determine parm's home in the stack, in case it arrives in the stack
2264 or we should pretend it did. Compute the stack position and rtx where
2265 the argument arrives and its size.
2266
2267 There is one complexity here: If this was a parameter that would
2268 have been passed in registers, but wasn't only because it is
2269 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2270 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2271 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2272 as it was the previous time. */
2273 in_regs = entry_parm != 0;
2274 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2275 in_regs = true;
2276 #endif
2277 if (!in_regs && !data->named_arg)
2278 {
2279 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2280 {
2281 rtx tem;
2282 #ifdef FUNCTION_INCOMING_ARG
2283 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2284 data->passed_type, true);
2285 #else
2286 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2287 data->passed_type, true);
2288 #endif
2289 in_regs = tem != NULL;
2290 }
2291 }
2292
2293 /* If this parameter was passed both in registers and in the stack, use
2294 the copy on the stack. */
2295 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2296 data->passed_type))
2297 entry_parm = 0;
2298
2299 if (entry_parm)
2300 {
2301 int partial;
2302
2303 partial = FUNCTION_ARG_PARTIAL_NREGS (all->args_so_far,
2304 data->promoted_mode,
2305 data->passed_type,
2306 data->named_arg);
2307 data->partial = partial;
2308
2309 /* The caller might already have allocated stack space for the
2310 register parameters. */
2311 if (partial != 0 && all->reg_parm_stack_space == 0)
2312 {
2313 /* Part of this argument is passed in registers and part
2314 is passed on the stack. Ask the prologue code to extend
2315 the stack part so that we can recreate the full value.
2316
2317 PRETEND_BYTES is the size of the registers we need to store.
2318 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2319 stack space that the prologue should allocate.
2320
2321 Internally, gcc assumes that the argument pointer is aligned
2322 to STACK_BOUNDARY bits. This is used both for alignment
2323 optimizations (see init_emit) and to locate arguments that are
2324 aligned to more than PARM_BOUNDARY bits. We must preserve this
2325 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2326 a stack boundary. */
2327
2328 /* We assume at most one partial arg, and it must be the first
2329 argument on the stack. */
2330 if (all->extra_pretend_bytes || all->pretend_args_size)
2331 abort ();
2332
2333 pretend_bytes = partial * UNITS_PER_WORD;
2334 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
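/* Worked example (illustrative figures, not any particular ABI): with
4-byte words and STACK_BYTES of 16, a partial count of 3 gives
PRETEND_BYTES == 12, which rounds up to a PRETEND_ARGS_SIZE of 16. */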
2335
2336 /* We want to align relative to the actual stack pointer, so
2337 don't include this in the stack size until later. */
2338 all->extra_pretend_bytes = all->pretend_args_size;
2339 }
2340 }
2341
2342 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2343 entry_parm ? data->partial : 0, current_function_decl,
2344 &all->stack_args_size, &data->locate);
2345
2346 /* Adjust offsets to include the pretend args. */
2347 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2348 data->locate.slot_offset.constant += pretend_bytes;
2349 data->locate.offset.constant += pretend_bytes;
2350
2351 data->entry_parm = entry_parm;
2352 }
2353
2354 /* A subroutine of assign_parms. If there is actually space on the stack
2355 for this parm, count it in stack_args_size and return true. */
2356
2357 static bool
2358 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2359 struct assign_parm_data_one *data)
2360 {
2361 /* Trivially true if we've no incoming register. */
2362 if (data->entry_parm == NULL)
2363 ;
2364 /* Also true if we're partially in registers and partially not,
2365 since we've arranged to drop the entire argument on the stack. */
2366 else if (data->partial != 0)
2367 ;
2368 /* Also true if the target says that it's passed in both registers
2369 and on the stack. */
2370 else if (GET_CODE (data->entry_parm) == PARALLEL
2371 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2372 ;
2373 /* Also true if the target says that there's stack allocated for
2374 all register parameters. */
2375 else if (all->reg_parm_stack_space > 0)
2376 ;
2377 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2378 else
2379 return false;
2380
2381 all->stack_args_size.constant += data->locate.size.constant;
2382 if (data->locate.size.var)
2383 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2384
2385 return true;
2386 }
2387
2388 /* A subroutine of assign_parms. Given that this parameter is allocated
2389 stack space by the ABI, find it. */
2390
2391 static void
2392 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2393 {
2394 rtx offset_rtx, stack_parm;
2395 unsigned int align, boundary;
2396
2397 /* If we're passing this arg using a reg, make its stack home the
2398 aligned stack slot. */
2399 if (data->entry_parm)
2400 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2401 else
2402 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2403
2404 stack_parm = current_function_internal_arg_pointer;
2405 if (offset_rtx != const0_rtx)
2406 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2407 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2408
2409 set_mem_attributes (stack_parm, parm, 1);
2410
2411 boundary = FUNCTION_ARG_BOUNDARY (data->promoted_mode, data->passed_type);
2412 align = 0;
2413
2414 /* If we're padding upward, we know that the alignment of the slot
2415 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2416 intentionally forcing upward padding. Otherwise we have to come
2417 up with a guess at the alignment based on OFFSET_RTX. */
2418 if (data->locate.where_pad == upward || data->entry_parm)
2419 align = boundary;
2420 else if (GET_CODE (offset_rtx) == CONST_INT)
2421 {
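/* Conservative guess at the slot's alignment: convert the constant
offset to bits, OR in the boundary, and keep only the lowest set bit
(ALIGN & -ALIGN). E.g. (illustrative figures), a 12-byte offset with a
32-bit boundary gives 96 | 32 == 96, and 96 & -96 == 32, so only 32-bit
alignment can be assumed for this slot. */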
2422 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2423 align = align & -align;
2424 }
2425 if (align > 0)
2426 set_mem_align (stack_parm, align);
2427
2428 if (data->entry_parm)
2429 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2430
2431 data->stack_parm = stack_parm;
2432 }
2433
2434 /* A subroutine of assign_parms. Adjust DATA->ENTRY_PARM such that it's
2435 always valid and contiguous. */
2436
2437 static void
2438 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2439 {
2440 rtx entry_parm = data->entry_parm;
2441 rtx stack_parm = data->stack_parm;
2442
2443 /* If this parm was passed part in regs and part in memory, pretend it
2444 arrived entirely in memory by pushing the register-part onto the stack.
2445 In the special case of a DImode or DFmode that is split, we could put
2446 it together in a pseudoreg directly, but for now that's not worth
2447 bothering with. */
2448 if (data->partial != 0)
2449 {
2450 /* Handle calls that pass values in multiple non-contiguous
2451 locations. The Irix 6 ABI has examples of this. */
2452 if (GET_CODE (entry_parm) == PARALLEL)
2453 emit_group_store (validize_mem (stack_parm), entry_parm,
2454 data->passed_type,
2455 int_size_in_bytes (data->passed_type));
2456 else
2457 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2458 data->partial);
2459
2460 entry_parm = stack_parm;
2461 }
2462
2463 /* If we didn't decide this parm came in a register, by default it came
2464 on the stack. */
2465 else if (entry_parm == NULL)
2466 entry_parm = stack_parm;
2467
2468 /* When an argument is passed in multiple locations, we can't make use
2469 of this information, but we can save some copying if the whole argument
2470 is passed in a single register. */
2471 else if (GET_CODE (entry_parm) == PARALLEL
2472 && data->nominal_mode != BLKmode
2473 && data->passed_mode != BLKmode)
2474 {
2475 size_t i, len = XVECLEN (entry_parm, 0);
2476
2477 for (i = 0; i < len; i++)
2478 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2479 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2480 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2481 == data->passed_mode)
2482 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2483 {
2484 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2485 break;
2486 }
2487 }
2488
2489 data->entry_parm = entry_parm;
2490 }
2491
2492 /* A subroutine of assign_parms. Adjust DATA->STACK_PARM such that it's
2493 always valid and properly aligned. */
2494
2495
2496 static void
2497 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2498 {
2499 rtx stack_parm = data->stack_parm;
2500
2501 /* If we can't trust the parm stack slot to be aligned enough for its
2502 ultimate type, don't use that slot after entry. We'll make another
2503 stack slot, if we need one. */
2504 if (STRICT_ALIGNMENT && stack_parm
2505 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2506 stack_parm = NULL;
2507
2508 /* If parm was passed in memory, and we need to convert it on entry,
2509 don't store it back in that same slot. */
2510 else if (data->entry_parm == stack_parm
2511 && data->nominal_mode != BLKmode
2512 && data->nominal_mode != data->passed_mode)
2513 stack_parm = NULL;
2514
2515 data->stack_parm = stack_parm;
2516 }
2517
2518 /* A subroutine of assign_parms. Return true if the current parameter
2519 should be stored as a BLKmode in the current frame. */
2520
2521 static bool
2522 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2523 {
2524 if (data->nominal_mode == BLKmode)
2525 return true;
2526 if (GET_CODE (data->entry_parm) == PARALLEL)
2527 return true;
2528
2529 #ifdef BLOCK_REG_PADDING
2530 if (data->locate.where_pad == (BYTES_BIG_ENDIAN ? upward : downward)
2531 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD)
2532 return true;
2533 #endif
2534
2535 return false;
2536 }
2537
2538 /* A subroutine of assign_parms. Arrange for the parameter to be
2539 present and valid in DATA->STACK_PARM. */
2540
2541 static void
2542 assign_parm_setup_block (tree parm, struct assign_parm_data_one *data)
2543 {
2544 rtx entry_parm = data->entry_parm;
2545 rtx stack_parm = data->stack_parm;
2546
2547 /* If we've a non-block object that's nevertheless passed in parts,
2548 reconstitute it in register operations rather than on the stack. */
2549 if (GET_CODE (entry_parm) == PARALLEL
2550 && data->nominal_mode != BLKmode
2551 && XVECLEN (entry_parm, 0) > 1
2552 && optimize)
2553 {
2554 rtx parmreg = gen_reg_rtx (data->nominal_mode);
2555
2556 emit_group_store (parmreg, entry_parm, data->nominal_type,
2557 int_size_in_bytes (data->nominal_type));
2558 SET_DECL_RTL (parm, parmreg);
2559 return;
2560 }
2561
2562 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2563 calls that pass values in multiple non-contiguous locations. */
2564 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2565 {
2566 HOST_WIDE_INT size = int_size_in_bytes (data->passed_type);
2567 HOST_WIDE_INT size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2568 rtx mem;
2569
2570 /* Note that we will be storing an integral number of words.
2571 So we have to be careful to ensure that we allocate an
2572 integral number of words. We do this below in the
2573 assign_stack_local if space was not allocated in the argument
2574 list. If it was, this will not work if PARM_BOUNDARY is not
2575 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2576 if it becomes a problem. Exception is when BLKmode arrives
2577 with arguments not conforming to word_mode. */
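/* E.g. (illustrative): with 4-byte words, a 10-byte BLKmode argument has
SIZE == 10 but SIZE_STORED == 12, so the local slot spans three full
words. */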
2578
2579 if (stack_parm == 0)
2580 {
2581 stack_parm = assign_stack_local (BLKmode, size_stored, 0);
2582 data->stack_parm = stack_parm;
2583 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2584 set_mem_attributes (stack_parm, parm, 1);
2585 }
2586 else if (GET_CODE (entry_parm) == PARALLEL)
2587 ;
2588 else if (size != 0 && PARM_BOUNDARY % BITS_PER_WORD != 0)
2589 abort ();
2590
2591 mem = validize_mem (stack_parm);
2592
2593 /* Handle values in multiple non-contiguous locations. */
2594 if (GET_CODE (entry_parm) == PARALLEL)
2595 emit_group_store (mem, entry_parm, data->passed_type, size);
2596
2597 else if (size == 0)
2598 ;
2599
2600 /* If SIZE is that of a mode no bigger than a word, just use
2601 that mode's store operation. */
2602 else if (size <= UNITS_PER_WORD)
2603 {
2604 enum machine_mode mode
2605 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2606
2607 if (mode != BLKmode
2608 #ifdef BLOCK_REG_PADDING
2609 && (size == UNITS_PER_WORD
2610 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2611 != (BYTES_BIG_ENDIAN ? upward : downward)))
2612 #endif
2613 )
2614 {
2615 rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
2616 emit_move_insn (change_address (mem, mode, 0), reg);
2617 }
2618
2619 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2620 machine must be aligned to the left before storing
2621 to memory. Note that the previous test doesn't
2622 handle all cases (e.g. SIZE == 3). */
2623 else if (size != UNITS_PER_WORD
2624 #ifdef BLOCK_REG_PADDING
2625 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2626 == downward)
2627 #else
2628 && BYTES_BIG_ENDIAN
2629 #endif
2630 )
2631 {
2632 rtx tem, x;
2633 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2634 rtx reg = gen_rtx_REG (word_mode, REGNO (data->entry_parm));
2635
2636 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2637 build_int_2 (by, 0), NULL_RTX, 1);
2638 tem = change_address (mem, word_mode, 0);
2639 emit_move_insn (tem, x);
2640 }
2641 else
2642 move_block_from_reg (REGNO (data->entry_parm), mem,
2643 size_stored / UNITS_PER_WORD);
2644 }
2645 else
2646 move_block_from_reg (REGNO (data->entry_parm), mem,
2647 size_stored / UNITS_PER_WORD);
2648 }
2649
2650 SET_DECL_RTL (parm, stack_parm);
2651 }
2652
2653 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2654 parameter. Get it there. Perform all ABI specified conversions. */
2655
2656 static void
2657 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2658 struct assign_parm_data_one *data)
2659 {
2660 rtx parmreg;
2661 enum machine_mode promoted_nominal_mode;
2662 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2663 bool did_conversion = false;
2664
2665 /* Store the parm in a pseudoregister during the function, but we may
2666 need to do it in a wider mode. */
2667
2668 promoted_nominal_mode
2669 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 0);
2670
2671 parmreg = gen_reg_rtx (promoted_nominal_mode);
2672
2673 if (!DECL_ARTIFICIAL (parm))
2674 mark_user_reg (parmreg);
2675
2676 /* If this was an item that we received a pointer to,
2677 set DECL_RTL appropriately. */
2678 if (data->passed_pointer)
2679 {
2680 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2681 set_mem_attributes (x, parm, 1);
2682 SET_DECL_RTL (parm, x);
2683 }
2684 else
2685 {
2686 SET_DECL_RTL (parm, parmreg);
2687 maybe_set_unchanging (DECL_RTL (parm), parm);
2688 }
2689
2690 /* Copy the value into the register. */
2691 if (data->nominal_mode != data->passed_mode
2692 || promoted_nominal_mode != data->promoted_mode)
2693 {
2694 int save_tree_used;
2695
2696 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2697 mode, by the caller. We now have to convert it to
2698 NOMINAL_MODE, if different. However, PARMREG may be in
2699 a different mode than NOMINAL_MODE if it is being stored
2700 promoted.
2701
2702 If ENTRY_PARM is a hard register, it might be in a register
2703 not valid for operating in its mode (e.g., an odd-numbered
2704 register for a DFmode). In that case, moves are the only
2705 thing valid, so we can't do a convert from there. This
2706 occurs when the calling sequence allow such misaligned
2707 usages.
2708
2709 In addition, the conversion may involve a call, which could
2710 clobber parameters which haven't been copied to pseudo
2711 registers yet. Therefore, we must first copy the parm to
2712 a pseudo reg here, and save the conversion until after all
2713 parameters have been moved. */
2714
2715 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2716
2717 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2718
2719 push_to_sequence (all->conversion_insns);
2720 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2721
2722 if (GET_CODE (tempreg) == SUBREG
2723 && GET_MODE (tempreg) == data->nominal_mode
2724 && REG_P (SUBREG_REG (tempreg))
2725 && data->nominal_mode == data->passed_mode
2726 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2727 && GET_MODE_SIZE (GET_MODE (tempreg))
2728 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2729 {
2730 /* The argument is already sign/zero extended, so note it
2731 into the subreg. */
2732 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2733 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2734 }
2735
2736 /* TREE_USED gets set erroneously during expand_assignment. */
2737 save_tree_used = TREE_USED (parm);
2738 expand_assignment (parm, make_tree (data->nominal_type, tempreg), 0);
2739 TREE_USED (parm) = save_tree_used;
2740 all->conversion_insns = get_insns ();
2741 end_sequence ();
2742
2743 did_conversion = true;
2744 }
2745 else
2746 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2747
2748 /* If we were passed a pointer but the actual value can safely live
2749 in a register, put it in one. */
2750 if (data->passed_pointer
2751 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2752 /* If by-reference argument was promoted, demote it. */
2753 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2754 || use_register_for_decl (parm)))
2755 {
2756 /* We can't use nominal_mode, because it will have been set to
2757 Pmode above. We must use the actual mode of the parm. */
2758 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2759 mark_user_reg (parmreg);
2760
2761 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2762 {
2763 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2764 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2765
2766 push_to_sequence (all->conversion_insns);
2767 emit_move_insn (tempreg, DECL_RTL (parm));
2768 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2769 emit_move_insn (parmreg, tempreg);
2770 all->conversion_insns = get_insns ();
2771 end_sequence ();
2772
2773 did_conversion = true;
2774 }
2775 else
2776 emit_move_insn (parmreg, DECL_RTL (parm));
2777
2778 SET_DECL_RTL (parm, parmreg);
2779
2780 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2781 now the parm. */
2782 data->stack_parm = NULL;
2783 }
2784
2785 /* If we are passed an arg by reference and it is our responsibility
2786 to make a copy, do it now.
2787 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
2788 original argument, so we must recreate them in the call to
2789 FUNCTION_ARG_CALLEE_COPIES. */
2790 /* ??? Later add code to handle the case that if the argument isn't
2791 modified, don't do the copy. */
2792
2793 else if (data->passed_pointer)
2794 {
2795 tree type = TREE_TYPE (data->passed_type);
2796
2797 if (FUNCTION_ARG_CALLEE_COPIES (all->args_so_far, TYPE_MODE (type),
2798 type, data->named_arg)
2799 && !TREE_ADDRESSABLE (type))
2800 {
2801 rtx copy;
2802
2803 /* This sequence may involve a library call perhaps clobbering
2804 registers that haven't been copied to pseudos yet. */
2805
2806 push_to_sequence (all->conversion_insns);
2807
2808 if (!COMPLETE_TYPE_P (type)
2809 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2810 {
2811 /* This is a variable sized object. */
2812 copy = allocate_dynamic_stack_space (expr_size (parm), NULL_RTX,
2813 TYPE_ALIGN (type));
2814 copy = gen_rtx_MEM (BLKmode, copy);
2815 }
2816 else
2817 copy = assign_stack_temp (TYPE_MODE (type),
2818 int_size_in_bytes (type), 1);
2819 set_mem_attributes (copy, parm, 1);
2820
2821 store_expr (parm, copy, 0);
2822 emit_move_insn (parmreg, XEXP (copy, 0));
2823 all->conversion_insns = get_insns ();
2824 end_sequence ();
2825
2826 did_conversion = true;
2827 }
2828 }
2829
2830 /* Mark the register as eliminable if we did no conversion and it was
2831 copied from memory at a fixed offset, and the arg pointer was not
2832 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2833 offset formed an invalid address, such memory-equivalences as we
2834 make here would screw up life analysis for it. */
2835 if (data->nominal_mode == data->passed_mode
2836 && !did_conversion
2837 && data->stack_parm != 0
2838 && MEM_P (data->stack_parm)
2839 && data->locate.offset.var == 0
2840 && reg_mentioned_p (virtual_incoming_args_rtx,
2841 XEXP (data->stack_parm, 0)))
2842 {
2843 rtx linsn = get_last_insn ();
2844 rtx sinsn, set;
2845
2846 /* Mark complex types separately. */
2847 if (GET_CODE (parmreg) == CONCAT)
2848 {
2849 enum machine_mode submode
2850 = GET_MODE_INNER (GET_MODE (parmreg));
2851 int regnor = REGNO (gen_realpart (submode, parmreg));
2852 int regnoi = REGNO (gen_imagpart (submode, parmreg));
2853 rtx stackr = gen_realpart (submode, data->stack_parm);
2854 rtx stacki = gen_imagpart (submode, data->stack_parm);
2855
2856 /* Scan backwards for the set of the real and
2857 imaginary parts. */
2858 for (sinsn = linsn; sinsn != 0;
2859 sinsn = prev_nonnote_insn (sinsn))
2860 {
2861 set = single_set (sinsn);
2862 if (set == 0)
2863 continue;
2864
2865 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2866 REG_NOTES (sinsn)
2867 = gen_rtx_EXPR_LIST (REG_EQUIV, stacki,
2868 REG_NOTES (sinsn));
2869 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2870 REG_NOTES (sinsn)
2871 = gen_rtx_EXPR_LIST (REG_EQUIV, stackr,
2872 REG_NOTES (sinsn));
2873 }
2874 }
2875 else if ((set = single_set (linsn)) != 0
2876 && SET_DEST (set) == parmreg)
2877 REG_NOTES (linsn)
2878 = gen_rtx_EXPR_LIST (REG_EQUIV,
2879 data->stack_parm, REG_NOTES (linsn));
2880 }
2881
2882 /* For pointer data type, suggest pointer register. */
2883 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2884 mark_reg_pointer (parmreg,
2885 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2886 }
2887
2888 /* A subroutine of assign_parms. Allocate stack space to hold the current
2889 parameter. Get it there. Perform all ABI specified conversions. */
2890
2891 static void
2892 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2893 struct assign_parm_data_one *data)
2894 {
2895 /* Value must be stored in the stack slot STACK_PARM during function
2896 execution. */
2897
2898 if (data->promoted_mode != data->nominal_mode)
2899 {
2900 /* Conversion is required. */
2901 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2902
2903 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2904
2905 push_to_sequence (all->conversion_insns);
2906 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2907 TYPE_UNSIGNED (TREE_TYPE (parm)));
2908
2909 if (data->stack_parm)
2910 /* ??? This may need a big-endian conversion on sparc64. */
2911 data->stack_parm
2912 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2913
2914 all->conversion_insns = get_insns ();
2915 end_sequence ();
2916 }
2917
2918 if (data->entry_parm != data->stack_parm)
2919 {
2920 if (data->stack_parm == 0)
2921 {
2922 data->stack_parm
2923 = assign_stack_local (GET_MODE (data->entry_parm),
2924 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2925 0);
2926 set_mem_attributes (data->stack_parm, parm, 1);
2927 }
2928
2929 if (data->promoted_mode != data->nominal_mode)
2930 {
2931 push_to_sequence (all->conversion_insns);
2932 emit_move_insn (validize_mem (data->stack_parm),
2933 validize_mem (data->entry_parm));
2934 all->conversion_insns = get_insns ();
2935 end_sequence ();
2936 }
2937 else
2938 emit_move_insn (validize_mem (data->stack_parm),
2939 validize_mem (data->entry_parm));
2940 }
2941
2942 SET_DECL_RTL (parm, data->stack_parm);
2943 }
2944
2945 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
2946 undo the frobbing that we did in assign_parms_augmented_arg_list. */
2947
2948 static void
2949 assign_parms_unsplit_complex (tree orig_fnargs, tree fnargs)
2950 {
2951 tree parm;
2952
2953 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2954 {
2955 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2956 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2957 {
2958 rtx tmp, real, imag;
2959 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2960
2961 real = DECL_RTL (fnargs);
2962 imag = DECL_RTL (TREE_CHAIN (fnargs));
2963 if (inner != GET_MODE (real))
2964 {
2965 real = gen_lowpart_SUBREG (inner, real);
2966 imag = gen_lowpart_SUBREG (inner, imag);
2967 }
2968 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2969 SET_DECL_RTL (parm, tmp);
2970
2971 real = DECL_INCOMING_RTL (fnargs);
2972 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2973 if (inner != GET_MODE (real))
2974 {
2975 real = gen_lowpart_SUBREG (inner, real);
2976 imag = gen_lowpart_SUBREG (inner, imag);
2977 }
2978 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2979 set_decl_incoming_rtl (parm, tmp);
2980 fnargs = TREE_CHAIN (fnargs);
2981 }
2982 else
2983 {
2984 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2985 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));
2986
2987 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2988 instead of the copy of decl, i.e. FNARGS. */
2989 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2990 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2991 }
2992
2993 fnargs = TREE_CHAIN (fnargs);
2994 }
2995 }
2996
2997 /* Assign RTL expressions to the function's parameters. This may involve
2998 copying them into registers and using those registers as the DECL_RTL. */
2999
3000 void
3001 assign_parms (tree fndecl)
3002 {
3003 struct assign_parm_data_all all;
3004 tree fnargs, parm;
3005 rtx internal_arg_pointer;
3006 int varargs_setup = 0;
3007
3008 /* If the reg that the virtual arg pointer will be translated into is
3009 not a fixed reg or is the stack pointer, make a copy of the virtual
3010 arg pointer, and address parms via the copy. The frame pointer is
3011 considered fixed even though it is not marked as such.
3012
3013 The second time through, simply use ap to avoid generating rtx. */
3014
3015 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3016 || ! (fixed_regs[ARG_POINTER_REGNUM]
3017 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
3018 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3019 else
3020 internal_arg_pointer = virtual_incoming_args_rtx;
3021 current_function_internal_arg_pointer = internal_arg_pointer;
3022
3023 assign_parms_initialize_all (&all);
3024 fnargs = assign_parms_augmented_arg_list (&all);
3025
3026 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3027 {
3028 struct assign_parm_data_one data;
3029
3030 /* Extract the type of PARM; adjust it according to ABI. */
3031 assign_parm_find_data_types (&all, parm, &data);
3032
3033 /* Early out for errors and void parameters. */
3034 if (data.passed_mode == VOIDmode)
3035 {
3036 SET_DECL_RTL (parm, const0_rtx);
3037 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3038 continue;
3039 }
3040
3041 /* Handle stdargs. LAST_NAMED is a slight misnomer; it's also true
3042 for the unnamed dummy argument following the last named argument.
3043 See ABI silliness wrt strict_argument_naming and NAMED_ARG. So
3044 we only want to do this when we get to the actual last named
3045 argument, which will be the first time LAST_NAMED gets set. */
3046 if (data.last_named && !varargs_setup)
3047 {
3048 varargs_setup = true;
3049 assign_parms_setup_varargs (&all, &data, false);
3050 }
3051
3052 /* Find out where the parameter arrives in this function. */
3053 assign_parm_find_entry_rtl (&all, &data);
3054
3055 /* Find out where stack space for this parameter might be. */
3056 if (assign_parm_is_stack_parm (&all, &data))
3057 {
3058 assign_parm_find_stack_rtl (parm, &data);
3059 assign_parm_adjust_entry_rtl (&data);
3060 }
3061
3062 /* Record permanently how this parm was passed. */
3063 set_decl_incoming_rtl (parm, data.entry_parm);
3064
3065 /* Update info on where next arg arrives in registers. */
3066 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3067 data.passed_type, data.named_arg);
3068
3069 assign_parm_adjust_stack_rtl (&data);
3070
3071 if (assign_parm_setup_block_p (&data))
3072 assign_parm_setup_block (parm, &data);
3073 else if (data.passed_pointer || use_register_for_decl (parm))
3074 assign_parm_setup_reg (&all, parm, &data);
3075 else
3076 assign_parm_setup_stack (&all, parm, &data);
3077 }
3078
3079 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3080 assign_parms_unsplit_complex (all.orig_fnargs, fnargs);
3081
3082 /* Output all parameter conversion instructions (possibly including calls)
3083 now that all parameters have been copied out of hard registers. */
3084 emit_insn (all.conversion_insns);
3085
3086 /* If we are receiving a struct value address as the first argument, set up
3087 the RTL for the function result. As this might require code to convert
3088 the transmitted address to Pmode, we do this here to ensure that possible
3089 preliminary conversions of the address have been emitted already. */
3090 if (all.function_result_decl)
3091 {
3092 tree result = DECL_RESULT (current_function_decl);
3093 rtx addr = DECL_RTL (all.function_result_decl);
3094 rtx x;
3095
3096 addr = convert_memory_address (Pmode, addr);
3097 x = gen_rtx_MEM (DECL_MODE (result), addr);
3098 set_mem_attributes (x, result, 1);
3099 SET_DECL_RTL (result, x);
3100 }
3101
3102 /* We have aligned all the args, so add space for the pretend args. */
3103 current_function_pretend_args_size = all.pretend_args_size;
3104 all.stack_args_size.constant += all.extra_pretend_bytes;
3105 current_function_args_size = all.stack_args_size.constant;
3106
3107 /* Adjust function incoming argument size for alignment and
3108 minimum length. */
3109
3110 #ifdef REG_PARM_STACK_SPACE
3111 current_function_args_size = MAX (current_function_args_size,
3112 REG_PARM_STACK_SPACE (fndecl));
3113 #endif
3114
3115 current_function_args_size
3116 = ((current_function_args_size + STACK_BYTES - 1)
3117 / STACK_BYTES) * STACK_BYTES;
3118
3119 #ifdef ARGS_GROW_DOWNWARD
3120 current_function_arg_offset_rtx
3121 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3122 : expand_expr (size_diffop (all.stack_args_size.var,
3123 size_int (-all.stack_args_size.constant)),
3124 NULL_RTX, VOIDmode, 0));
3125 #else
3126 current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3127 #endif
3128
3129 /* See how many bytes, if any, of its args a function should try to pop
3130 on return. */
3131
3132 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3133 current_function_args_size);
3134
3135 /* For a stdarg function, save info about
3136 regs and stack space used by the named args. */
3137
3138 current_function_args_info = all.args_so_far;
3139
3140 /* Set the rtx used for the function return value. Put this in its
3141 own variable so any optimizers that need this information don't have
3142 to include tree.h. Do this here so it gets done when an inlined
3143 function gets output. */
3144
3145 current_function_return_rtx
3146 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3147 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3148
3149 /* If scalar return value was computed in a pseudo-reg, or was a named
3150 return value that got dumped to the stack, copy that to the hard
3151 return register. */
3152 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3153 {
3154 tree decl_result = DECL_RESULT (fndecl);
3155 rtx decl_rtl = DECL_RTL (decl_result);
3156
3157 if (REG_P (decl_rtl)
3158 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3159 : DECL_REGISTER (decl_result))
3160 {
3161 rtx real_decl_rtl;
3162
3163 #ifdef FUNCTION_OUTGOING_VALUE
3164 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
3165 fndecl);
3166 #else
3167 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
3168 fndecl);
3169 #endif
3170 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3171 /* The delay slot scheduler assumes that current_function_return_rtx
3172 holds the hard register containing the return value, not a
3173 temporary pseudo. */
3174 current_function_return_rtx = real_decl_rtl;
3175 }
3176 }
3177 }
3178 \f
3179 /* Indicate whether REGNO is an incoming argument to the current function
3180 that was promoted to a wider mode. If so, return the RTX for the
3181 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
3182 that REGNO is promoted from and whether the promotion was signed or
3183 unsigned. */
3184
3185 rtx
3186 promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
3187 {
3188 tree arg;
3189
3190 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
3191 arg = TREE_CHAIN (arg))
3192 if (REG_P (DECL_INCOMING_RTL (arg))
3193 && REGNO (DECL_INCOMING_RTL (arg)) == regno
3194 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
3195 {
3196 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
3197 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));
3198
3199 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
3200 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
3201 && mode != DECL_MODE (arg))
3202 {
3203 *pmode = DECL_MODE (arg);
3204 *punsignedp = unsignedp;
3205 return DECL_INCOMING_RTL (arg);
3206 }
3207 }
3208
3209 return 0;
3210 }
3211
3212 \f
3213 /* Compute the size and offset from the start of the stacked arguments for a
3214 parm passed in mode PASSED_MODE and with type TYPE.
3215
3216 INITIAL_OFFSET_PTR points to the current offset into the stacked
3217 arguments.
3218
3219 The starting offset and size for this parm are returned in
3220 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3221 nonzero, the offset is that of the stack slot, which is returned in
3222 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3223 padding required from the initial offset ptr to the stack slot.
3224
3225 IN_REGS is nonzero if the argument will be passed in registers. It will
3226 never be set if REG_PARM_STACK_SPACE is not defined.
3227
3228 FNDECL is the function in which the argument was defined.
3229
3230 There are two types of rounding that are done. The first, controlled by
3231 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3232 list to be aligned to the specific boundary (in bits). This rounding
3233 affects the initial and starting offsets, but not the argument size.
3234
3235 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3236 optionally rounds the size of the parm to PARM_BOUNDARY. The
3237 initial offset is not affected by this rounding, while the size always
3238 is and the starting offset may be. */
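/* A small worked example (illustrative figures only): with a 64-bit
FUNCTION_ARG_BOUNDARY the starting offset of a parm is rounded up to a
multiple of 8 bytes, while with a 32-bit PARM_BOUNDARY a 6-byte parm has
its size rounded up to 8 bytes, so the following parm starts 8 bytes
later. */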
3239
3240 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3241 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3242 callers pass in the total size of args so far as
3243 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3244
3245 void
3246 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3247 int partial, tree fndecl ATTRIBUTE_UNUSED,
3248 struct args_size *initial_offset_ptr,
3249 struct locate_and_pad_arg_data *locate)
3250 {
3251 tree sizetree;
3252 enum direction where_pad;
3253 int boundary;
3254 int reg_parm_stack_space = 0;
3255 int part_size_in_regs;
3256
3257 #ifdef REG_PARM_STACK_SPACE
3258 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3259
3260 /* If we have found a stack parm before we reach the end of the
3261 area reserved for registers, skip that area. */
3262 if (! in_regs)
3263 {
3264 if (reg_parm_stack_space > 0)
3265 {
3266 if (initial_offset_ptr->var)
3267 {
3268 initial_offset_ptr->var
3269 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3270 ssize_int (reg_parm_stack_space));
3271 initial_offset_ptr->constant = 0;
3272 }
3273 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3274 initial_offset_ptr->constant = reg_parm_stack_space;
3275 }
3276 }
3277 #endif /* REG_PARM_STACK_SPACE */
3278
3279 part_size_in_regs = 0;
3280 if (reg_parm_stack_space == 0)
3281 part_size_in_regs = ((partial * UNITS_PER_WORD)
3282 / (PARM_BOUNDARY / BITS_PER_UNIT)
3283 * (PARM_BOUNDARY / BITS_PER_UNIT));
3284
3285 sizetree
3286 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3287 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3288 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3289 locate->where_pad = where_pad;
3290
3291 #ifdef ARGS_GROW_DOWNWARD
3292 locate->slot_offset.constant = -initial_offset_ptr->constant;
3293 if (initial_offset_ptr->var)
3294 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3295 initial_offset_ptr->var);
3296
3297 {
3298 tree s2 = sizetree;
3299 if (where_pad != none
3300 && (!host_integerp (sizetree, 1)
3301 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3302 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3303 SUB_PARM_SIZE (locate->slot_offset, s2);
3304 }
3305
3306 locate->slot_offset.constant += part_size_in_regs;
3307
3308 if (!in_regs
3309 #ifdef REG_PARM_STACK_SPACE
3310 || REG_PARM_STACK_SPACE (fndecl) > 0
3311 #endif
3312 )
3313 pad_to_arg_alignment (&locate->slot_offset, boundary,
3314 &locate->alignment_pad);
3315
3316 locate->size.constant = (-initial_offset_ptr->constant
3317 - locate->slot_offset.constant);
3318 if (initial_offset_ptr->var)
3319 locate->size.var = size_binop (MINUS_EXPR,
3320 size_binop (MINUS_EXPR,
3321 ssize_int (0),
3322 initial_offset_ptr->var),
3323 locate->slot_offset.var);
3324
3325 /* Pad_below needs the pre-rounded size to know how much to pad
3326 below. */
3327 locate->offset = locate->slot_offset;
3328 if (where_pad == downward)
3329 pad_below (&locate->offset, passed_mode, sizetree);
3330
3331 #else /* !ARGS_GROW_DOWNWARD */
3332 if (!in_regs
3333 #ifdef REG_PARM_STACK_SPACE
3334 || REG_PARM_STACK_SPACE (fndecl) > 0
3335 #endif
3336 )
3337 pad_to_arg_alignment (initial_offset_ptr, boundary,
3338 &locate->alignment_pad);
3339 locate->slot_offset = *initial_offset_ptr;
3340
3341 #ifdef PUSH_ROUNDING
3342 if (passed_mode != BLKmode)
3343 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3344 #endif
3345
3346 /* Pad_below needs the pre-rounded size to know how much to pad below
3347 so this must be done before rounding up. */
3348 locate->offset = locate->slot_offset;
3349 if (where_pad == downward)
3350 pad_below (&locate->offset, passed_mode, sizetree);
3351
3352 if (where_pad != none
3353 && (!host_integerp (sizetree, 1)
3354 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3355 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3356
3357 ADD_PARM_SIZE (locate->size, sizetree);
3358
3359 locate->size.constant -= part_size_in_regs;
3360 #endif /* ARGS_GROW_DOWNWARD */
3361 }
3362
3363 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3364 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
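/* E.g. (illustrative, args growing upward, zero STACK_POINTER_OFFSET): a
constant offset of 20 bytes with a 64-bit boundary is rounded up to 24;
when BOUNDARY exceeds both PARM_BOUNDARY and STACK_BOUNDARY, the 4 bytes
of padding are recorded in *ALIGNMENT_PAD. */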
3365
3366 static void
3367 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3368 struct args_size *alignment_pad)
3369 {
3370 tree save_var = NULL_TREE;
3371 HOST_WIDE_INT save_constant = 0;
3372 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3373 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3374
3375 #ifdef SPARC_STACK_BOUNDARY_HACK
3376 /* The sparc port has a bug. It sometimes claims a STACK_BOUNDARY
3377 higher than the real alignment of %sp. However, when it does this,
3378 the alignment of %sp+STACK_POINTER_OFFSET will be STACK_BOUNDARY.
3379 This is a temporary hack while the sparc port is fixed. */
3380 if (SPARC_STACK_BOUNDARY_HACK)
3381 sp_offset = 0;
3382 #endif
3383
3384 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3385 {
3386 save_var = offset_ptr->var;
3387 save_constant = offset_ptr->constant;
3388 }
3389
3390 alignment_pad->var = NULL_TREE;
3391 alignment_pad->constant = 0;
3392
3393 if (boundary > BITS_PER_UNIT)
3394 {
3395 if (offset_ptr->var)
3396 {
3397 tree sp_offset_tree = ssize_int (sp_offset);
3398 tree offset = size_binop (PLUS_EXPR,
3399 ARGS_SIZE_TREE (*offset_ptr),
3400 sp_offset_tree);
3401 #ifdef ARGS_GROW_DOWNWARD
3402 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3403 #else
3404 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3405 #endif
3406
3407 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3408 /* ARGS_SIZE_TREE includes constant term. */
3409 offset_ptr->constant = 0;
3410 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3411 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3412 save_var);
3413 }
3414 else
3415 {
3416 offset_ptr->constant = -sp_offset +
3417 #ifdef ARGS_GROW_DOWNWARD
3418 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3419 #else
3420 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3421 #endif
3422 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3423 alignment_pad->constant = offset_ptr->constant - save_constant;
3424 }
3425 }
3426 }
3427
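/* A subroutine of locate_and_pad_parm. Adjust *OFFSET_PTR for downward
padding: the value lives at the high end of its PARM_BOUNDARY-padded
slot, so advance the offset by the difference between the rounded-up
size and the argument's actual size. */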
3428 static void
3429 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3430 {
3431 if (passed_mode != BLKmode)
3432 {
3433 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3434 offset_ptr->constant
3435 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3436 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3437 - GET_MODE_SIZE (passed_mode));
3438 }
3439 else
3440 {
3441 if (TREE_CODE (sizetree) != INTEGER_CST
3442 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3443 {
3444 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3445 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3446 /* Add it in. */
3447 ADD_PARM_SIZE (*offset_ptr, s2);
3448 SUB_PARM_SIZE (*offset_ptr, sizetree);
3449 }
3450 }
3451 }
3452 \f
3453 /* Walk the tree of blocks describing the binding levels within a function
3454 and warn about variables that might be clobbered by setjmp or vfork.
3455 This is done after calling flow_analysis and before global_alloc
3456 clobbers the pseudo-regs to hard regs. */
3457
3458 void
3459 setjmp_vars_warning (tree block)
3460 {
3461 tree decl, sub;
3462
3463 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3464 {
3465 if (TREE_CODE (decl) == VAR_DECL
3466 && DECL_RTL_SET_P (decl)
3467 && REG_P (DECL_RTL (decl))
3468 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3469 warning ("%Jvariable '%D' might be clobbered by `longjmp' or `vfork'",
3470 decl, decl);
3471 }
3472
3473 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3474 setjmp_vars_warning (sub);
3475 }
3476
3477 /* Do the appropriate part of setjmp_vars_warning
3478 but for arguments instead of local variables. */
3479
3480 void
3481 setjmp_args_warning (void)
3482 {
3483 tree decl;
3484 for (decl = DECL_ARGUMENTS (current_function_decl);
3485 decl; decl = TREE_CHAIN (decl))
3486 if (DECL_RTL (decl) != 0
3487 && REG_P (DECL_RTL (decl))
3488 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3489 warning ("%Jargument '%D' might be clobbered by `longjmp' or `vfork'",
3490 decl, decl);
3491 }
3492
3493 \f
3494 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3495 and create duplicate blocks. */
3496 /* ??? Need an option to either create block fragments or to create
3497 abstract origin duplicates of a source block. It really depends
3498 on what optimization has been performed. */
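/* Rough sketch of the fragment case: if an optimization such as basic
   block reordering has split a scope so that its insns occupy two
   disjoint address ranges, the second NOTE_INSN_BLOCK_BEG naming that
   BLOCK is reached with TREE_ASM_WRITTEN already set; reorder_blocks_1
   then copies the BLOCK and links the copy to the original through
   BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN.  */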
3499
3500 void
3501 reorder_blocks (void)
3502 {
3503 tree block = DECL_INITIAL (current_function_decl);
3504 varray_type block_stack;
3505
3506 if (block == NULL_TREE)
3507 return;
3508
3509 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
3510
3511 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3512 clear_block_marks (block);
3513
3514 /* Prune the old trees away, so that they don't get in the way. */
3515 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3516 BLOCK_CHAIN (block) = NULL_TREE;
3517
3518 /* Recreate the block tree from the note nesting. */
3519 reorder_blocks_1 (get_insns (), block, &block_stack);
3520 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3521
3522 /* Remove deleted blocks from the block fragment chains. */
3523 reorder_fix_fragments (block);
3524 }
3525
3526 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3527
3528 void
3529 clear_block_marks (tree block)
3530 {
3531 while (block)
3532 {
3533 TREE_ASM_WRITTEN (block) = 0;
3534 clear_block_marks (BLOCK_SUBBLOCKS (block));
3535 block = BLOCK_CHAIN (block);
3536 }
3537 }
3538
3539 static void
3540 reorder_blocks_1 (rtx insns, tree current_block, varray_type *p_block_stack)
3541 {
3542 rtx insn;
3543
3544 for (insn = insns; insn; insn = NEXT_INSN (insn))
3545 {
3546 if (NOTE_P (insn))
3547 {
3548 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3549 {
3550 tree block = NOTE_BLOCK (insn);
3551
3552 /* If we have seen this block before, that means it now
3553 spans multiple address regions. Create a new fragment. */
3554 if (TREE_ASM_WRITTEN (block))
3555 {
3556 tree new_block = copy_node (block);
3557 tree origin;
3558
3559 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3560 ? BLOCK_FRAGMENT_ORIGIN (block)
3561 : block);
3562 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3563 BLOCK_FRAGMENT_CHAIN (new_block)
3564 = BLOCK_FRAGMENT_CHAIN (origin);
3565 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3566
3567 NOTE_BLOCK (insn) = new_block;
3568 block = new_block;
3569 }
3570
3571 BLOCK_SUBBLOCKS (block) = 0;
3572 TREE_ASM_WRITTEN (block) = 1;
3573 /* When there's only one block for the entire function,
3574 current_block == block and we mustn't do this; it
3575 would cause infinite recursion. */
3576 if (block != current_block)
3577 {
3578 BLOCK_SUPERCONTEXT (block) = current_block;
3579 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3580 BLOCK_SUBBLOCKS (current_block) = block;
3581 current_block = block;
3582 }
3583 VARRAY_PUSH_TREE (*p_block_stack, block);
3584 }
3585 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3586 {
3587 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
3588 VARRAY_POP (*p_block_stack);
3589 BLOCK_SUBBLOCKS (current_block)
3590 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3591 current_block = BLOCK_SUPERCONTEXT (current_block);
3592 }
3593 }
3594 }
3595 }
3596
3597 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
3598 appears in the block tree, select one of the fragments to become
3599 the new origin block. */
3600
3601 static void
3602 reorder_fix_fragments (tree block)
3603 {
3604 while (block)
3605 {
3606 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
3607 tree new_origin = NULL_TREE;
3608
3609 if (dup_origin)
3610 {
3611 if (! TREE_ASM_WRITTEN (dup_origin))
3612 {
3613 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
3614
3615 /* Find the first of the remaining fragments. There must
3616 be at least one -- the current block. */
3617 while (! TREE_ASM_WRITTEN (new_origin))
3618 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
3619 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
3620 }
3621 }
3622 else if (! dup_origin)
3623 new_origin = block;
3624
3625 /* Re-root the rest of the fragments to the new origin. In the
3626 case that DUP_ORIGIN was null, that means BLOCK was the origin
3627 of a chain of fragments and we want to remove those fragments
3628 that didn't make it to the output. */
3629 if (new_origin)
3630 {
3631 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
3632 tree chain = *pp;
3633
3634 while (chain)
3635 {
3636 if (TREE_ASM_WRITTEN (chain))
3637 {
3638 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
3639 *pp = chain;
3640 pp = &BLOCK_FRAGMENT_CHAIN (chain);
3641 }
3642 chain = BLOCK_FRAGMENT_CHAIN (chain);
3643 }
3644 *pp = NULL_TREE;
3645 }
3646
3647 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
3648 block = BLOCK_CHAIN (block);
3649 }
3650 }
3651
3652 /* Reverse the order of elements in the chain T of blocks,
3653 and return the new head of the chain (old last element). */
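/* For example, the chain B1 -> B2 -> B3 becomes B3 -> B2 -> B1, and B3
   is returned.  */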
3654
3655 tree
3656 blocks_nreverse (tree t)
3657 {
3658 tree prev = 0, decl, next;
3659 for (decl = t; decl; decl = next)
3660 {
3661 next = BLOCK_CHAIN (decl);
3662 BLOCK_CHAIN (decl) = prev;
3663 prev = decl;
3664 }
3665 return prev;
3666 }
3667
3668 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3669 non-NULL, list them all into VECTOR, in a depth-first preorder
3670 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
3671 blocks. */
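/* Illustrative example: for a root block R whose subblock chain is A
   followed by B, where A itself has one subblock A1, the preorder fill
   is VECTOR[0..3] = R, A, A1, B and the return value is 4.  */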
3672
3673 static int
3674 all_blocks (tree block, tree *vector)
3675 {
3676 int n_blocks = 0;
3677
3678 while (block)
3679 {
3680 TREE_ASM_WRITTEN (block) = 0;
3681
3682 /* Record this block. */
3683 if (vector)
3684 vector[n_blocks] = block;
3685
3686 ++n_blocks;
3687
3688 /* Record the subblocks, and their subblocks... */
3689 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3690 vector ? vector + n_blocks : 0);
3691 block = BLOCK_CHAIN (block);
3692 }
3693
3694 return n_blocks;
3695 }
3696
3697 /* Return a vector containing all the blocks rooted at BLOCK. The
3698 number of elements in the vector is stored in N_BLOCKS_P. The
3699 vector is dynamically allocated; it is the caller's responsibility
3700 to call `free' on the pointer returned. */
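/* Typical use, sketched with made-up variable names (number_blocks
   below follows this pattern):

     int n_blocks;
     tree *vec = get_block_vector (DECL_INITIAL (fndecl), &n_blocks);
     ... walk vec[0] through vec[n_blocks - 1] ...
     free (vec);  */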
3701
3702 static tree *
3703 get_block_vector (tree block, int *n_blocks_p)
3704 {
3705 tree *block_vector;
3706
3707 *n_blocks_p = all_blocks (block, NULL);
3708 block_vector = xmalloc (*n_blocks_p * sizeof (tree));
3709 all_blocks (block, block_vector);
3710
3711 return block_vector;
3712 }
3713
3714 static GTY(()) int next_block_index = 2;
3715
3716 /* Set BLOCK_NUMBER for all the blocks in FN. */
3717
3718 void
3719 number_blocks (tree fn)
3720 {
3721 int i;
3722 int n_blocks;
3723 tree *block_vector;
3724
3725 /* For SDB and XCOFF debugging output, we start numbering the blocks
3726 from 1 within each function, rather than keeping a running
3727 count. */
3728 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3729 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3730 next_block_index = 1;
3731 #endif
3732
3733 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3734
3735 /* The top-level BLOCK isn't numbered at all. */
3736 for (i = 1; i < n_blocks; ++i)
3737 /* We number the blocks from two. */
3738 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3739
3740 free (block_vector);
3741
3742 return;
3743 }
3744
3745 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3746
3747 tree
3748 debug_find_var_in_block_tree (tree var, tree block)
3749 {
3750 tree t;
3751
3752 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3753 if (t == var)
3754 return block;
3755
3756 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3757 {
3758 tree ret = debug_find_var_in_block_tree (var, t);
3759 if (ret)
3760 return ret;
3761 }
3762
3763 return NULL_TREE;
3764 }
3765 \f
3766 /* Allocate a function structure for FNDECL and set its contents
3767 to the defaults. */
3768
3769 void
3770 allocate_struct_function (tree fndecl)
3771 {
3772 tree result;
3773 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3774
3775 cfun = ggc_alloc_cleared (sizeof (struct function));
3776
3777 cfun->stack_alignment_needed = STACK_BOUNDARY;
3778 cfun->preferred_stack_boundary = STACK_BOUNDARY;
3779
3780 current_function_funcdef_no = funcdef_no++;
3781
3782 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3783
3784 init_stmt_for_function ();
3785 init_eh_for_function ();
3786
3787 lang_hooks.function.init (cfun);
3788 if (init_machine_status)
3789 cfun->machine = (*init_machine_status) ();
3790
3791 if (fndecl == NULL)
3792 return;
3793
3794 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3795 cfun->decl = fndecl;
3796
3797 result = DECL_RESULT (fndecl);
3798 if (aggregate_value_p (result, fndecl))
3799 {
3800 #ifdef PCC_STATIC_STRUCT_RETURN
3801 current_function_returns_pcc_struct = 1;
3802 #endif
3803 current_function_returns_struct = 1;
3804 }
3805
3806 current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));
3807
3808 current_function_stdarg
3809 = (fntype
3810 && TYPE_ARG_TYPES (fntype) != 0
3811 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3812 != void_type_node));
3813 }
3814
3815 /* Reset cfun and other non-struct-function variables to defaults as
3816 appropriate for emitting rtl at the start of a function. */
3817
3818 static void
3819 prepare_function_start (tree fndecl)
3820 {
3821 if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
3822 cfun = DECL_STRUCT_FUNCTION (fndecl);
3823 else
3824 allocate_struct_function (fndecl);
3825 init_emit ();
3826 init_varasm_status (cfun);
3827 init_expr ();
3828
3829 cse_not_expected = ! optimize;
3830
3831 /* Caller save not needed yet. */
3832 caller_save_needed = 0;
3833
3834 /* We haven't done register allocation yet. */
3835 reg_renumber = 0;
3836
3837 /* Indicate that we have not instantiated virtual registers yet. */
3838 virtuals_instantiated = 0;
3839
3840 /* Indicate that we want CONCATs now. */
3841 generating_concat_p = 1;
3842
3843 /* Indicate we have no need of a frame pointer yet. */
3844 frame_pointer_needed = 0;
3845 }
3846
3847 /* Initialize the rtl expansion mechanism so that we can do simple things
3848 like generate sequences. This is used to provide a context during global
3849 initialization of some passes. */
3850 void
3851 init_dummy_function_start (void)
3852 {
3853 prepare_function_start (NULL);
3854 }
3855
3856 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3857 and initialize static variables for generating RTL for the statements
3858 of the function. */
3859
3860 void
3861 init_function_start (tree subr)
3862 {
3863 prepare_function_start (subr);
3864
3865 /* Prevent ever trying to delete the first instruction of a
3866 function. Also tell final how to output a linenum before the
3867 function prologue. Note linenums could be missing, e.g. when
3868 compiling a Java .class file. */
3869 if (! DECL_IS_BUILTIN (subr))
3870 emit_line_note (DECL_SOURCE_LOCATION (subr));
3871
3872 /* Make sure first insn is a note even if we don't want linenums.
3873 This makes sure the first insn will never be deleted.
3874 Also, final expects a note to appear there. */
3875 emit_note (NOTE_INSN_DELETED);
3876
3877 /* Warn if the function's return value is an aggregate type,
3878 regardless of which calling convention we are using for it. */
3879 if (warn_aggregate_return
3880 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3881 warning ("function returns an aggregate");
3882 }
3883
3884 /* Make sure all values used by the optimization passes have sane
3885 defaults. */
3886 void
3887 init_function_for_compilation (void)
3888 {
3889 reg_renumber = 0;
3890
3891 /* No prologue/epilogue insns yet. */
3892 VARRAY_GROW (prologue, 0);
3893 VARRAY_GROW (epilogue, 0);
3894 VARRAY_GROW (sibcall_epilogue, 0);
3895 }
3896
3897 /* Expand a call to __main at the beginning of a possible main function. */
3898
3899 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
3900 #undef HAS_INIT_SECTION
3901 #define HAS_INIT_SECTION
3902 #endif
3903
3904 void
3905 expand_main_function (void)
3906 {
3907 #ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
3908 if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
3909 {
3910 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
3911 rtx tmp, seq;
3912
3913 start_sequence ();
3914 /* Forcibly align the stack. */
3915 #ifdef STACK_GROWS_DOWNWARD
3916 tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT(-align),
3917 stack_pointer_rtx, 1, OPTAB_WIDEN);
3918 #else
3919 tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3920 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
3921 tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
3922 stack_pointer_rtx, 1, OPTAB_WIDEN);
3923 #endif
3924 if (tmp != stack_pointer_rtx)
3925 emit_move_insn (stack_pointer_rtx, tmp);
3926
3927 /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
3928 tmp = force_reg (Pmode, const0_rtx);
3929 allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
3930 seq = get_insns ();
3931 end_sequence ();
3932
3933 for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
3934 if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
3935 break;
3936 if (tmp)
3937 emit_insn_before (seq, tmp);
3938 else
3939 emit_insn (seq);
3940 }
3941 #endif
3942
3943 #ifndef HAS_INIT_SECTION
3944 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
3945 #endif
3946 }
3947 \f
3948 /* The PENDING_SIZES represent the sizes of variable-sized types.
3949 Create RTL for the various sizes now (using temporary variables),
3950 so that we can refer to the sizes from the RTL we are generating
3951 for the current function. The PENDING_SIZES are a TREE_LIST. The
3952 TREE_VALUE of each node is a SAVE_EXPR. */
3953
3954 void
3955 expand_pending_sizes (tree pending_sizes)
3956 {
3957 tree tem;
3958
3959 /* Evaluate now the sizes of any types declared among the arguments. */
3960 for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
3961 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
3962 }
3963
3964 /* Start the RTL for a new function, and set variables used for
3965 emitting RTL.
3966 SUBR is the FUNCTION_DECL node. */
3969
3970 void
3971 expand_function_start (tree subr)
3972 {
3973 /* Make sure volatile mem refs aren't considered
3974 valid operands of arithmetic insns. */
3975 init_recog_no_volatile ();
3976
3977 current_function_profile
3978 = (profile_flag
3979 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
3980
3981 current_function_limit_stack
3982 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
3983
3984 /* Make the label for return statements to jump to. Do not special
3985 case machines with special return instructions -- they will be
3986 handled later during jump, ifcvt, or epilogue creation. */
3987 return_label = gen_label_rtx ();
3988
3989 /* Initialize rtx used to return the value. */
3990 /* Do this before assign_parms so that we copy the struct value address
3991 before any library calls that assign parms might generate. */
3992
3993 /* Decide whether to return the value in memory or in a register. */
3994 if (aggregate_value_p (DECL_RESULT (subr), subr))
3995 {
3996 /* Returning something that won't go in a register. */
3997 rtx value_address = 0;
3998
3999 #ifdef PCC_STATIC_STRUCT_RETURN
4000 if (current_function_returns_pcc_struct)
4001 {
4002 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4003 value_address = assemble_static_space (size);
4004 }
4005 else
4006 #endif
4007 {
4008 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 1);
4009 /* Expect to be passed the address of a place to store the value.
4010 If it is passed as an argument, assign_parms will take care of
4011 it. */
4012 if (sv)
4013 {
4014 value_address = gen_reg_rtx (Pmode);
4015 emit_move_insn (value_address, sv);
4016 }
4017 }
4018 if (value_address)
4019 {
4020 rtx x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
4021 set_mem_attributes (x, DECL_RESULT (subr), 1);
4022 SET_DECL_RTL (DECL_RESULT (subr), x);
4023 }
4024 }
4025 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4026 /* If return mode is void, this decl rtl should not be used. */
4027 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4028 else
4029 {
4030 /* Compute the return values into a pseudo reg, which we will copy
4031 into the true return register after the cleanups are done. */
4032
4033 /* In order to figure out what mode to use for the pseudo, we
4034 figure out what the mode of the eventual return register will
4035 actually be, and use that. */
4036 rtx hard_reg
4037 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
4038 subr, 1);
4039
4040 /* Structures that are returned in registers are not aggregate_value_p,
4041 so we may see a PARALLEL or a REG. */
4042 if (REG_P (hard_reg))
4043 SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
4044 else if (GET_CODE (hard_reg) == PARALLEL)
4045 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4046 else
4047 abort ();
4048
4049 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4050 result to the real return register(s). */
4051 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4052 }
4053
4054 /* Initialize rtx for parameters and local variables.
4055 In some cases this requires emitting insns. */
4056 assign_parms (subr);
4057
4058 /* If function gets a static chain arg, store it. */
4059 if (cfun->static_chain_decl)
4060 {
4061 tree parm = cfun->static_chain_decl;
4062 rtx local = gen_reg_rtx (Pmode);
4063
4064 set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
4065 SET_DECL_RTL (parm, local);
4066 maybe_set_unchanging (local, parm);
4067 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4068
4069 emit_move_insn (local, static_chain_incoming_rtx);
4070 }
4071
4072 /* If the function receives a non-local goto, then store the
4073 bits we need to restore the frame pointer. */
4074 if (cfun->nonlocal_goto_save_area)
4075 {
4076 tree t_save;
4077 rtx r_save;
4078
4079 /* ??? We need to do this save early. Unfortunately, this point is
4080 before the frame variable gets declared. Help out... */
4081 expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));
4082
4083 t_save = build4 (ARRAY_REF, ptr_type_node,
4084 cfun->nonlocal_goto_save_area,
4085 integer_zero_node, NULL_TREE, NULL_TREE);
4086 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4087 r_save = convert_memory_address (Pmode, r_save);
4088
4089 emit_move_insn (r_save, virtual_stack_vars_rtx);
4090 update_nonlocal_goto_save_area ();
4091 }
4092
4093 /* The following was moved from init_function_start.
4094 The move is supposed to make sdb output more accurate. */
4095 /* Indicate the beginning of the function body,
4096 as opposed to parm setup. */
4097 emit_note (NOTE_INSN_FUNCTION_BEG);
4098
4099 if (!NOTE_P (get_last_insn ()))
4100 emit_note (NOTE_INSN_DELETED);
4101 parm_birth_insn = get_last_insn ();
4102
4103 if (current_function_profile)
4104 {
4105 #ifdef PROFILE_HOOK
4106 PROFILE_HOOK (current_function_funcdef_no);
4107 #endif
4108 }
4109
4110 /* After the display initializations is where the tail-recursion label
4111 should go, if we end up needing one. Ensure we have a NOTE here
4112 since some things (like trampolines) get placed before this. */
4113 tail_recursion_reentry = emit_note (NOTE_INSN_DELETED);
4114
4115 /* Evaluate now the sizes of any types declared among the arguments. */
4116 expand_pending_sizes (nreverse (get_pending_sizes ()));
4117
4118 /* Make sure there is a line number after the function entry setup code. */
4119 force_next_line_note ();
4120 }
4121 \f
4122 /* Undo the effects of init_dummy_function_start. */
4123 void
4124 expand_dummy_function_end (void)
4125 {
4126 /* End any sequences that failed to be closed due to syntax errors. */
4127 while (in_sequence_p ())
4128 end_sequence ();
4129
4130 /* Outside function body, can't compute type's actual size
4131 until next function's body starts. */
4132
4133 free_after_parsing (cfun);
4134 free_after_compilation (cfun);
4135 cfun = 0;
4136 }
4137
4138 /* Call DOIT for each hard register used as a return value from
4139 the current function. */
4140
4141 void
4142 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4143 {
4144 rtx outgoing = current_function_return_rtx;
4145
4146 if (! outgoing)
4147 return;
4148
4149 if (REG_P (outgoing))
4150 (*doit) (outgoing, arg);
4151 else if (GET_CODE (outgoing) == PARALLEL)
4152 {
4153 int i;
4154
4155 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4156 {
4157 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4158
4159 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4160 (*doit) (x, arg);
4161 }
4162 }
4163 }
4164
4165 static void
4166 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4167 {
4168 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
4169 }
4170
4171 void
4172 clobber_return_register (void)
4173 {
4174 diddle_return_value (do_clobber_return_reg, NULL);
4175
4176 /* In case we do use a pseudo to return the value, clobber it too. */
4177 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4178 {
4179 tree decl_result = DECL_RESULT (current_function_decl);
4180 rtx decl_rtl = DECL_RTL (decl_result);
4181 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4182 {
4183 do_clobber_return_reg (decl_rtl, NULL);
4184 }
4185 }
4186 }
4187
4188 static void
4189 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4190 {
4191 emit_insn (gen_rtx_USE (VOIDmode, reg));
4192 }
4193
4194 void
4195 use_return_register (void)
4196 {
4197 diddle_return_value (do_use_return_reg, NULL);
4198 }
4199
4200 /* Possibly warn about unused parameters. */
4201 void
4202 do_warn_unused_parameter (tree fn)
4203 {
4204 tree decl;
4205
4206 for (decl = DECL_ARGUMENTS (fn);
4207 decl; decl = TREE_CHAIN (decl))
4208 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4209 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
4210 warning ("%Junused parameter '%D'", decl, decl);
4211 }
4212
4213 static GTY(()) rtx initial_trampoline;
4214
4215 /* Generate RTL for the end of the current function. */
4216
4217 void
4218 expand_function_end (void)
4219 {
4220 rtx clobber_after;
4221
4222 /* If arg_pointer_save_area was referenced only from a nested
4223 function, we will not have initialized it yet. Do that now. */
4224 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4225 get_arg_pointer_save_area (cfun);
4226
4227 /* If we are doing stack checking and this function makes calls,
4228 do a stack probe at the start of the function to ensure we have enough
4229 space for another stack frame. */
4230 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4231 {
4232 rtx insn, seq;
4233
4234 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4235 if (CALL_P (insn))
4236 {
4237 start_sequence ();
4238 probe_stack_range (STACK_CHECK_PROTECT,
4239 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4240 seq = get_insns ();
4241 end_sequence ();
4242 emit_insn_before (seq, tail_recursion_reentry);
4243 break;
4244 }
4245 }
4246
4247 /* Possibly warn about unused parameters.
4248 When the frontend does unit-at-a-time, the warning is already
4249 issued at finalization time. */
4250 if (warn_unused_parameter
4251 && !lang_hooks.callgraph.expand_function)
4252 do_warn_unused_parameter (current_function_decl);
4253
4254 /* End any sequences that failed to be closed due to syntax errors. */
4255 while (in_sequence_p ())
4256 end_sequence ();
4257
4258 clear_pending_stack_adjust ();
4259 do_pending_stack_adjust ();
4260
4261 /* @@@ This is a kludge. We want to ensure that instructions that
4262 may trap are not moved into the epilogue by scheduling, because
4263 we don't always emit unwind information for the epilogue.
4264 However, not all machine descriptions define a blockage insn, so
4265 emit an ASM_INPUT to act as one. */
4266 if (flag_non_call_exceptions)
4267 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
4268
4269 /* Mark the end of the function body.
4270 If control reaches this insn, the function can drop through
4271 without returning a value. */
4272 emit_note (NOTE_INSN_FUNCTION_END);
4273
4274 /* Must mark the last line number note in the function, so that the test
4275 coverage code can avoid counting the last line twice. This just tells
4276 the code to ignore the immediately following line note, since there
4277 already exists a copy of this note somewhere above. This line number
4278 note is still needed for debugging though, so we can't delete it. */
4279 if (flag_test_coverage)
4280 emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
4281
4282 /* Output a linenumber for the end of the function.
4283 SDB depends on this. */
4284 force_next_line_note ();
4285 emit_line_note (input_location);
4286
4287 /* Before the return label (if any), clobber the return
4288 registers so that they are not propagated live to the rest of
4289 the function. This can only happen with functions that drop
4290 through; if there had been a return statement, there would
4291 have either been a return rtx, or a jump to the return label.
4292
4293 We delay actual code generation until after the
4294 current_function_value_rtx is computed. */
4295 clobber_after = get_last_insn ();
4296
4297 /* Output the label for the actual return from the function,
4298 if one is expected. This happens either because a function epilogue
4299 is used instead of a return instruction, or because a return was done
4300 with a goto in order to run local cleanups, or because of pcc-style
4301 structure returning. */
4302 if (return_label)
4303 emit_label (return_label);
4304
4305 /* Let except.c know where it should emit the call to unregister
4306 the function context for sjlj exceptions. */
4307 if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
4308 sjlj_emit_function_exit_after (get_last_insn ());
4309
4310 /* If we had calls to alloca, and this machine needs
4311 an accurate stack pointer to exit the function,
4312 insert some code to save and restore the stack pointer. */
4313 if (! EXIT_IGNORE_STACK
4314 && current_function_calls_alloca)
4315 {
4316 rtx tem = 0;
4317
4318 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4319 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4320 }
4321
4322 /* If scalar return value was computed in a pseudo-reg, or was a named
4323 return value that got dumped to the stack, copy that to the hard
4324 return register. */
4325 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4326 {
4327 tree decl_result = DECL_RESULT (current_function_decl);
4328 rtx decl_rtl = DECL_RTL (decl_result);
4329
4330 if (REG_P (decl_rtl)
4331 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4332 : DECL_REGISTER (decl_result))
4333 {
4334 rtx real_decl_rtl = current_function_return_rtx;
4335
4336 /* This should be set in assign_parms. */
4337 if (! REG_FUNCTION_VALUE_P (real_decl_rtl))
4338 abort ();
4339
4340 /* If this is a BLKmode structure being returned in registers,
4341 then use the mode computed in expand_return. Note that if
4342 decl_rtl is memory, then its mode may have been changed,
4343 but that current_function_return_rtx has not. */
4344 if (GET_MODE (real_decl_rtl) == BLKmode)
4345 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4346
4347 /* If a named return value dumped decl_return to memory, then
4348 we may need to re-do the PROMOTE_MODE signed/unsigned
4349 extension. */
4350 if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4351 {
4352 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4353
4354 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4355 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4356 &unsignedp, 1);
4357
4358 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4359 }
4360 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4361 {
4362 /* If expand_function_start has created a PARALLEL for decl_rtl,
4363 move the result to the real return registers. Otherwise, do
4364 a group load from decl_rtl for a named return. */
4365 if (GET_CODE (decl_rtl) == PARALLEL)
4366 emit_group_move (real_decl_rtl, decl_rtl);
4367 else
4368 emit_group_load (real_decl_rtl, decl_rtl,
4369 TREE_TYPE (decl_result),
4370 int_size_in_bytes (TREE_TYPE (decl_result)));
4371 }
4372 else
4373 emit_move_insn (real_decl_rtl, decl_rtl);
4374 }
4375 }
4376
4377 /* If returning a structure, arrange to return the address of the value
4378 in a place where debuggers expect to find it.
4379
4380 If returning a structure PCC style,
4381 the caller also depends on this value.
4382 And current_function_returns_pcc_struct is not necessarily set. */
4383 if (current_function_returns_struct
4384 || current_function_returns_pcc_struct)
4385 {
4386 rtx value_address
4387 = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
4388 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4389 #ifdef FUNCTION_OUTGOING_VALUE
4390 rtx outgoing
4391 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
4392 current_function_decl);
4393 #else
4394 rtx outgoing
4395 = FUNCTION_VALUE (build_pointer_type (type), current_function_decl);
4396 #endif
4397
4398 /* Mark this as a function return value so integrate will delete the
4399 assignment and USE below when inlining this function. */
4400 REG_FUNCTION_VALUE_P (outgoing) = 1;
4401
4402 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4403 value_address = convert_memory_address (GET_MODE (outgoing),
4404 value_address);
4405
4406 emit_move_insn (outgoing, value_address);
4407
4408 /* Show the return register used to hold the result (in this case the
4409 address of the result). */
4410 current_function_return_rtx = outgoing;
4411 }
4412
4413 /* If this is an implementation of throw, do what's necessary to
4414 communicate between __builtin_eh_return and the epilogue. */
4415 expand_eh_return ();
4416
4417 /* Emit the actual code to clobber return register. */
4418 {
4419 rtx seq, after;
4420
4421 start_sequence ();
4422 clobber_return_register ();
4423 seq = get_insns ();
4424 end_sequence ();
4425
4426 after = emit_insn_after (seq, clobber_after);
4427 }
4428
4429 /* Output the label for the naked return from the function, if one is
4430 expected. This is currently used only by __builtin_return. */
4431 if (naked_return_label)
4432 emit_label (naked_return_label);
4433
4434 /* ??? This should no longer be necessary since stupid is no longer with
4435 us, but there are some parts of the compiler (e.g. reload_combine and
4436 sh mach_dep_reorg) that still try to compute their own lifetime info
4437 instead of using the general framework. */
4438 use_return_register ();
4439 }
4440
4441 rtx
4442 get_arg_pointer_save_area (struct function *f)
4443 {
4444 rtx ret = f->x_arg_pointer_save_area;
4445
4446 if (! ret)
4447 {
4448 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
4449 f->x_arg_pointer_save_area = ret;
4450 }
4451
4452 if (f == cfun && ! f->arg_pointer_save_area_init)
4453 {
4454 rtx seq;
4455
4456 /* Save the arg pointer at the beginning of the function. The
4457 generated stack slot may not be a valid memory address, so we
4458 have to check it and fix it if necessary. */
4459 start_sequence ();
4460 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
4461 seq = get_insns ();
4462 end_sequence ();
4463
4464 push_topmost_sequence ();
4465 emit_insn_after (seq, get_insns ());
4466 pop_topmost_sequence ();
4467 }
4468
4469 return ret;
4470 }
4471 \f
4472 /* Extend a vector that records the INSN_UIDs of INSNS
4473 (a list of one or more insns). */
4474
4475 static void
4476 record_insns (rtx insns, varray_type *vecp)
4477 {
4478 int i, len;
4479 rtx tmp;
4480
4481 tmp = insns;
4482 len = 0;
4483 while (tmp != NULL_RTX)
4484 {
4485 len++;
4486 tmp = NEXT_INSN (tmp);
4487 }
4488
4489 i = VARRAY_SIZE (*vecp);
4490 VARRAY_GROW (*vecp, i + len);
4491 tmp = insns;
4492 while (tmp != NULL_RTX)
4493 {
4494 VARRAY_INT (*vecp, i) = INSN_UID (tmp);
4495 i++;
4496 tmp = NEXT_INSN (tmp);
4497 }
4498 }
4499
4500 /* Set the locator of the insn chain starting at INSN to LOC. */
4501 static void
4502 set_insn_locators (rtx insn, int loc)
4503 {
4504 while (insn != NULL_RTX)
4505 {
4506 if (INSN_P (insn))
4507 INSN_LOCATOR (insn) = loc;
4508 insn = NEXT_INSN (insn);
4509 }
4510 }
4511
4512 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4513 be running after reorg, SEQUENCE rtl is possible. */
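/* For instance (UIDs invented for illustration): if INSN is a delay-slot
   SEQUENCE wrapping three insns and the UIDs of two of them appear in
   VEC, the function returns 2; for an ordinary insn the result is
   either 0 or 1.  */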
4514
4515 static int
4516 contains (rtx insn, varray_type vec)
4517 {
4518 int i, j;
4519
4520 if (NONJUMP_INSN_P (insn)
4521 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4522 {
4523 int count = 0;
4524 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4525 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
4526 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
4527 count++;
4528 return count;
4529 }
4530 else
4531 {
4532 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
4533 if (INSN_UID (insn) == VARRAY_INT (vec, j))
4534 return 1;
4535 }
4536 return 0;
4537 }
4538
4539 int
4540 prologue_epilogue_contains (rtx insn)
4541 {
4542 if (contains (insn, prologue))
4543 return 1;
4544 if (contains (insn, epilogue))
4545 return 1;
4546 return 0;
4547 }
4548
4549 int
4550 sibcall_epilogue_contains (rtx insn)
4551 {
4552 if (sibcall_epilogue)
4553 return contains (insn, sibcall_epilogue);
4554 return 0;
4555 }
4556
4557 #ifdef HAVE_return
4558 /* Insert gen_return at the end of block BB. This also means updating
4559 block_for_insn appropriately. */
4560
4561 static void
4562 emit_return_into_block (basic_block bb, rtx line_note)
4563 {
4564 emit_jump_insn_after (gen_return (), BB_END (bb));
4565 if (line_note)
4566 emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
4567 }
4568 #endif /* HAVE_return */
4569
4570 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4571
4572 /* These functions convert the epilogue into a variant that does not modify the
4573 stack pointer. This is used in cases where a function returns an object
4574 whose size is not known until it is computed. The called function leaves the
4575 object on the stack, leaves the stack depressed, and returns a pointer to
4576 the object.
4577
4578 What we need to do is track all modifications and references to the stack
4579 pointer, deleting the modifications and changing the references to point to
4580 the location the stack pointer would have pointed to had the modifications
4581 taken place.
4582
4583 These functions need to be portable so we need to make as few assumptions
4584 about the epilogue as we can. However, the epilogue basically contains
4585 three things: instructions to reset the stack pointer, instructions to
4586 reload registers, possibly including the frame pointer, and an
4587 instruction to return to the caller.
4588
4589 If we can't be sure of what a relevant epilogue insn is doing, we abort.
4590 We also make no attempt to validate the insns we make since if they are
4591 invalid, we probably can't do anything valid. The intent is that these
4592 routines get "smarter" as more and more machines start to use them and
4593 they try operating on different epilogues.
4594
4595 We use the following structure to track what the part of the epilogue that
4596 we've already processed has done. We keep two copies of the SP equivalence,
4597 one for use during the insn we are processing and one for use in the next
4598 insn. The difference is because one part of a PARALLEL may adjust SP
4599 and the other may use it. */
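/* A schematic example, with made-up register names and offsets: for an
   epilogue of the form "sp = fp; fp = mem[sp]; sp = sp + 8; return",
   keep_stack_depressed drops the assignments to SP, tracks that SP is
   equivalent to FP (later FP + 8), rewrites the remaining references to
   SP in terms of that equivalence, and, when the return address lives on
   the stack, turns the RETURN into an indirect jump through the
   rewritten address.  */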
4600
4601 struct epi_info
4602 {
4603 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
4604 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
4605 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
4606 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
4607 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
4608 should be set to once we no longer need
4609 its value. */
4610 rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
4611 for registers. */
4612 };
4613
4614 static void handle_epilogue_set (rtx, struct epi_info *);
4615 static void update_epilogue_consts (rtx, rtx, void *);
4616 static void emit_equiv_load (struct epi_info *);
4617
4618 /* Modify INSNS, a list of one or more insns that is part of the epilogue,
4619 so that it makes no modifications to the stack pointer. Return the new list of insns. */
4620
4621 static rtx
4622 keep_stack_depressed (rtx insns)
4623 {
4624 int j;
4625 struct epi_info info;
4626 rtx insn, next;
4627
4628 /* If the epilogue is just a single instruction, it must be OK as is. */
4629 if (NEXT_INSN (insns) == NULL_RTX)
4630 return insns;
4631
4632 /* Otherwise, start a sequence, initialize the information we have, and
4633 process all the insns we were given. */
4634 start_sequence ();
4635
4636 info.sp_equiv_reg = stack_pointer_rtx;
4637 info.sp_offset = 0;
4638 info.equiv_reg_src = 0;
4639
4640 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
4641 info.const_equiv[j] = 0;
4642
4643 insn = insns;
4644 next = NULL_RTX;
4645 while (insn != NULL_RTX)
4646 {
4647 next = NEXT_INSN (insn);
4648
4649 if (!INSN_P (insn))
4650 {
4651 add_insn (insn);
4652 insn = next;
4653 continue;
4654 }
4655
4656 /* If this insn references the register that SP is equivalent to and
4657 we have a pending load to that register, we must force out the load
4658 first and then indicate we no longer know what SP's equivalent is. */
4659 if (info.equiv_reg_src != 0
4660 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
4661 {
4662 emit_equiv_load (&info);
4663 info.sp_equiv_reg = 0;
4664 }
4665
4666 info.new_sp_equiv_reg = info.sp_equiv_reg;
4667 info.new_sp_offset = info.sp_offset;
4668
4669 /* If this is a (RETURN) and the return address is on the stack,
4670 update the address and change to an indirect jump. */
4671 if (GET_CODE (PATTERN (insn)) == RETURN
4672 || (GET_CODE (PATTERN (insn)) == PARALLEL
4673 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
4674 {
4675 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
4676 rtx base = 0;
4677 HOST_WIDE_INT offset = 0;
4678 rtx jump_insn, jump_set;
4679
4680 /* If the return address is in a register, we can emit the insn
4681 unchanged. Otherwise, it must be a MEM and we see what the
4682 base register and offset are. In any case, we have to emit any
4683 pending load to the equivalent reg of SP, if any. */
4684 if (REG_P (retaddr))
4685 {
4686 emit_equiv_load (&info);
4687 add_insn (insn);
4688 insn = next;
4689 continue;
4690 }
4691 else if (MEM_P (retaddr)
4692 && REG_P (XEXP (retaddr, 0)))
4693 base = gen_rtx_REG (Pmode, REGNO (XEXP (retaddr, 0))), offset = 0;
4694 else if (MEM_P (retaddr)
4695 && GET_CODE (XEXP (retaddr, 0)) == PLUS
4696 && REG_P (XEXP (XEXP (retaddr, 0), 0))
4697 && GET_CODE (XEXP (XEXP (retaddr, 0), 1)) == CONST_INT)
4698 {
4699 base = gen_rtx_REG (Pmode, REGNO (XEXP (XEXP (retaddr, 0), 0)));
4700 offset = INTVAL (XEXP (XEXP (retaddr, 0), 1));
4701 }
4702 else
4703 abort ();
4704
4705 /* If the base of the location containing the return pointer
4706 is SP, we must update it with the replacement address. Otherwise,
4707 just build the necessary MEM. */
4708 retaddr = plus_constant (base, offset);
4709 if (base == stack_pointer_rtx)
4710 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
4711 plus_constant (info.sp_equiv_reg,
4712 info.sp_offset));
4713
4714 retaddr = gen_rtx_MEM (Pmode, retaddr);
4715
4716 /* If there is a pending load to the equivalent register for SP
4717 and we reference that register, we must load our address into
4718 a scratch register and then do that load. */
4719 if (info.equiv_reg_src
4720 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
4721 {
4722 unsigned int regno;
4723 rtx reg;
4724
4725 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4726 if (HARD_REGNO_MODE_OK (regno, Pmode)
4727 && !fixed_regs[regno]
4728 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
4729 && !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
4730 regno)
4731 && !refers_to_regno_p (regno,
4732 regno + hard_regno_nregs[regno]
4733 [Pmode],
4734 info.equiv_reg_src, NULL)
4735 && info.const_equiv[regno] == 0)
4736 break;
4737
4738 if (regno == FIRST_PSEUDO_REGISTER)
4739 abort ();
4740
4741 reg = gen_rtx_REG (Pmode, regno);
4742 emit_move_insn (reg, retaddr);
4743 retaddr = reg;
4744 }
4745
4746 emit_equiv_load (&info);
4747 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
4748
4749 /* Show the SET in the above insn is a RETURN. */
4750 jump_set = single_set (jump_insn);
4751 if (jump_set == 0)
4752 abort ();
4753 else
4754 SET_IS_RETURN_P (jump_set) = 1;
4755 }
4756
4757 /* If SP is not mentioned in the pattern and its equivalent register, if
4758 any, is not modified, just emit it. Otherwise, if neither is set,
4759 replace the reference to SP and emit the insn. If none of those are
4760 true, handle each SET individually. */
4761 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
4762 && (info.sp_equiv_reg == stack_pointer_rtx
4763 || !reg_set_p (info.sp_equiv_reg, insn)))
4764 add_insn (insn);
4765 else if (! reg_set_p (stack_pointer_rtx, insn)
4766 && (info.sp_equiv_reg == stack_pointer_rtx
4767 || !reg_set_p (info.sp_equiv_reg, insn)))
4768 {
4769 if (! validate_replace_rtx (stack_pointer_rtx,
4770 plus_constant (info.sp_equiv_reg,
4771 info.sp_offset),
4772 insn))
4773 abort ();
4774
4775 add_insn (insn);
4776 }
4777 else if (GET_CODE (PATTERN (insn)) == SET)
4778 handle_epilogue_set (PATTERN (insn), &info);
4779 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
4780 {
4781 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
4782 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
4783 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
4784 }
4785 else
4786 add_insn (insn);
4787
4788 info.sp_equiv_reg = info.new_sp_equiv_reg;
4789 info.sp_offset = info.new_sp_offset;
4790
4791 /* Now update any constants this insn sets. */
4792 note_stores (PATTERN (insn), update_epilogue_consts, &info);
4793 insn = next;
4794 }
4795
4796 insns = get_insns ();
4797 end_sequence ();
4798 return insns;
4799 }
4800
4801 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
4802 structure that contains information about what we've seen so far. We
4803 process this SET by either updating that data or by emitting one or
4804 more insns. */
4805
4806 static void
4807 handle_epilogue_set (rtx set, struct epi_info *p)
4808 {
4809 /* First handle the case where we are setting SP. Record what it is being
4810 set from. If unknown, abort. */
4811 if (reg_set_p (stack_pointer_rtx, set))
4812 {
4813 if (SET_DEST (set) != stack_pointer_rtx)
4814 abort ();
4815
4816 if (GET_CODE (SET_SRC (set)) == PLUS)
4817 {
4818 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
4819 if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
4820 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
4821 else if (REG_P (XEXP (SET_SRC (set), 1))
4822 && REGNO (XEXP (SET_SRC (set), 1)) < FIRST_PSEUDO_REGISTER
4823 && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))] != 0)
4824 p->new_sp_offset
4825 = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4826 else
4827 abort ();
4828 }
4829 else
4830 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
4831
4832 /* If we are adjusting SP, we adjust from the old data. */
4833 if (p->new_sp_equiv_reg == stack_pointer_rtx)
4834 {
4835 p->new_sp_equiv_reg = p->sp_equiv_reg;
4836 p->new_sp_offset += p->sp_offset;
4837 }
4838
4839 if (p->new_sp_equiv_reg == 0 || !REG_P (p->new_sp_equiv_reg))
4840 abort ();
4841
4842 return;
4843 }
4844
4845 /* Next handle the case where we are setting SP's equivalent register.
4846 If we already have a value to set it to, abort. We could update, but
4847 there seems little point in handling that case. Note that we have
4848 to allow for the case where we are setting the register set in
4849 the previous part of a PARALLEL inside a single insn. But use the
4850 old offset for any updates within this insn. We must allow for the case
4851 where the register is being set in a different (usually wider) mode than
4852 Pmode. */
4853 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
4854 {
4855 if (p->equiv_reg_src != 0
4856 || !REG_P (p->new_sp_equiv_reg)
4857 || !REG_P (SET_DEST (set))
4858 || GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) > BITS_PER_WORD
4859 || REGNO (p->new_sp_equiv_reg) != REGNO (SET_DEST (set)))
4860 abort ();
4861 else
4862 p->equiv_reg_src
4863 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4864 plus_constant (p->sp_equiv_reg,
4865 p->sp_offset));
4866 }
4867
4868 /* Otherwise, replace any references to SP in the insn with its new value
4869 and emit the insn. */
4870 else
4871 {
4872 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4873 plus_constant (p->sp_equiv_reg,
4874 p->sp_offset));
4875 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
4876 plus_constant (p->sp_equiv_reg,
4877 p->sp_offset));
4878 emit_insn (set);
4879 }
4880 }
4881
4882 /* Update the tracking information for registers set to constants. */
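/* For example, with invented register numbers: after "r3 = 16" the code
   records const_equiv[3] == GEN_INT (16); a later "r4 = r3 + 8" is folded
   through simplify_binary_operation so const_equiv[4] becomes GEN_INT (24);
   a CLOBBER or a partial set of either register resets its entry to 0.  */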
4883
4884 static void
4885 update_epilogue_consts (rtx dest, rtx x, void *data)
4886 {
4887 struct epi_info *p = (struct epi_info *) data;
4888 rtx new;
4889
4890 if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
4891 return;
4892
4893 /* If we are either clobbering a register or doing a partial set,
4894 show we don't know the value. */
4895 else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
4896 p->const_equiv[REGNO (dest)] = 0;
4897
4898 /* If we are setting it to a constant, record that constant. */
4899 else if (GET_CODE (SET_SRC (x)) == CONST_INT)
4900 p->const_equiv[REGNO (dest)] = SET_SRC (x);
4901
4902 /* If this is a binary operation between a register we have been tracking
4903 and a constant, see if we can compute a new constant value. */
4904 else if (ARITHMETIC_P (SET_SRC (x))
4905 && REG_P (XEXP (SET_SRC (x), 0))
4906 && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
4907 && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
4908 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
4909 && 0 != (new = simplify_binary_operation
4910 (GET_CODE (SET_SRC (x)), GET_MODE (dest),
4911 p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
4912 XEXP (SET_SRC (x), 1)))
4913 && GET_CODE (new) == CONST_INT)
4914 p->const_equiv[REGNO (dest)] = new;
4915
4916 /* Otherwise, we can't do anything with this value. */
4917 else
4918 p->const_equiv[REGNO (dest)] = 0;
4919 }
4920
4921 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
4922
4923 static void
4924 emit_equiv_load (struct epi_info *p)
4925 {
4926 if (p->equiv_reg_src != 0)
4927 {
4928 rtx dest = p->sp_equiv_reg;
4929
4930 if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
4931 dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
4932 REGNO (p->sp_equiv_reg));
4933
4934 emit_move_insn (dest, p->equiv_reg_src);
4935 p->equiv_reg_src = 0;
4936 }
4937 }
4938 #endif
4939
4940 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
4941 this into place with notes indicating where the prologue ends and where
4942 the epilogue begins. Update the basic block information when possible. */
4943
4944 void
4945 thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
4946 {
4947 int inserted = 0;
4948 edge e;
4949 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
4950 rtx seq;
4951 #endif
4952 #ifdef HAVE_prologue
4953 rtx prologue_end = NULL_RTX;
4954 #endif
4955 #if defined (HAVE_epilogue) || defined(HAVE_return)
4956 rtx epilogue_end = NULL_RTX;
4957 #endif
4958
4959 #ifdef HAVE_prologue
4960 if (HAVE_prologue)
4961 {
4962 start_sequence ();
4963 seq = gen_prologue ();
4964 emit_insn (seq);
4965
4966 /* Retain a map of the prologue insns. */
4967 record_insns (seq, &prologue);
4968 prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);
4969
4970 seq = get_insns ();
4971 end_sequence ();
4972 set_insn_locators (seq, prologue_locator);
4973
4974 /* Can't deal with multiple successors of the entry block
4975 at the moment. Function should always have at least one
4976 entry point. */
4977 if (!ENTRY_BLOCK_PTR->succ || ENTRY_BLOCK_PTR->succ->succ_next)
4978 abort ();
4979
4980 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
4981 inserted = 1;
4982 }
4983 #endif
4984
4985 /* If the exit block has no non-fake predecessors, we don't need
4986 an epilogue. */
4987 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
4988 if ((e->flags & EDGE_FAKE) == 0)
4989 break;
4990 if (e == NULL)
4991 goto epilogue_done;
4992
4993 #ifdef HAVE_return
4994 if (optimize && HAVE_return)
4995 {
4996 /* If we're allowed to generate a simple return instruction,
4997 then by definition we don't need a full epilogue. Examine
4998 the block that falls through to EXIT. If it does not
4999 contain any code, examine its predecessors and try to
5000 emit (conditional) return instructions. */
5001
5002 basic_block last;
5003 edge e_next;
5004 rtx label;
5005
5006 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5007 if (e->flags & EDGE_FALLTHRU)
5008 break;
5009 if (e == NULL)
5010 goto epilogue_done;
5011 last = e->src;
5012
5013 /* Verify that there are no active instructions in the last block. */
5014 label = BB_END (last);
5015 while (label && !LABEL_P (label))
5016 {
5017 if (active_insn_p (label))
5018 break;
5019 label = PREV_INSN (label);
5020 }
5021
5022 if (BB_HEAD (last) == label && LABEL_P (label))
5023 {
5024 rtx epilogue_line_note = NULL_RTX;
5025
5026 /* Locate the line number associated with the closing brace,
5027 if we can find one. */
5028 for (seq = get_last_insn ();
5029 seq && ! active_insn_p (seq);
5030 seq = PREV_INSN (seq))
5031 if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0)
5032 {
5033 epilogue_line_note = seq;
5034 break;
5035 }
5036
5037 for (e = last->pred; e; e = e_next)
5038 {
5039 basic_block bb = e->src;
5040 rtx jump;
5041
5042 e_next = e->pred_next;
5043 if (bb == ENTRY_BLOCK_PTR)
5044 continue;
5045
5046 jump = BB_END (bb);
5047 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5048 continue;
5049
5050 /* If we have an unconditional jump, we can replace that
5051 with a simple return instruction. */
5052 if (simplejump_p (jump))
5053 {
5054 emit_return_into_block (bb, epilogue_line_note);
5055 delete_insn (jump);
5056 }
5057
5058 /* If we have a conditional jump, we can try to replace
5059 that with a conditional return instruction. */
5060 else if (condjump_p (jump))
5061 {
5062 if (! redirect_jump (jump, 0, 0))
5063 continue;
5064
5065 /* If this block has only one successor, it both jumps
5066 and falls through to the fallthru block, so we can't
5067 delete the edge. */
5068 if (bb->succ->succ_next == NULL)
5069 continue;
5070 }
5071 else
5072 continue;
5073
5074 /* Fix up the CFG for the successful change we just made. */
5075 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5076 }
5077
5078 /* Emit a return insn for the exit fallthru block. Whether
5079 this is still reachable will be determined later. */
5080
5081 emit_barrier_after (BB_END (last));
5082 emit_return_into_block (last, epilogue_line_note);
5083 epilogue_end = BB_END (last);
5084 last->succ->flags &= ~EDGE_FALLTHRU;
5085 goto epilogue_done;
5086 }
5087 }
5088 #endif
5089 /* Find the edge that falls through to EXIT. Other edges may exist
5090 due to RETURN instructions, but those don't need epilogues.
5091 There really shouldn't be a mixture -- either all should have
5092 been converted or none, however... */
5093
5094 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5095 if (e->flags & EDGE_FALLTHRU)
5096 break;
5097 if (e == NULL)
5098 goto epilogue_done;
5099
5100 #ifdef HAVE_epilogue
5101 if (HAVE_epilogue)
5102 {
5103 start_sequence ();
5104 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5105
5106 seq = gen_epilogue ();
5107
5108 #ifdef INCOMING_RETURN_ADDR_RTX
5109 /* If this function returns with the stack depressed and we can support
5110 it, massage the epilogue to actually do that. */
5111 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5112 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
5113 seq = keep_stack_depressed (seq);
5114 #endif
5115
5116 emit_jump_insn (seq);
5117
5118 /* Retain a map of the epilogue insns. */
5119 record_insns (seq, &epilogue);
5120 set_insn_locators (seq, epilogue_locator);
5121
5122 seq = get_insns ();
5123 end_sequence ();
5124
5125 insert_insn_on_edge (seq, e);
5126 inserted = 1;
5127 }
5128 else
5129 #endif
5130 {
5131 basic_block cur_bb;
5132
5133 if (! next_active_insn (BB_END (e->src)))
5134 goto epilogue_done;
5135 /* We have a fall-through edge to the exit block, the source is not
5136 at the end of the function, and there will be an assembler epilogue
5137 at the end of the function.
5138 We can't use force_nonfallthru here, because that would try to
5139 use return. Inserting a jump 'by hand' is extremely messy, so
5140 we take advantage of cfg_layout_finalize using
5141 fixup_fallthru_exit_predecessor. */
5142 cfg_layout_initialize (0);
5143 FOR_EACH_BB (cur_bb)
5144 if (cur_bb->index >= 0 && cur_bb->next_bb->index >= 0)
5145 cur_bb->rbi->next = cur_bb->next_bb;
5146 cfg_layout_finalize ();
5147 }
5148 epilogue_done:
5149
5150 if (inserted)
5151 commit_edge_insertions ();
5152
5153 #ifdef HAVE_sibcall_epilogue
5154 /* Emit sibling epilogues before any sibling call sites. */
5155 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5156 {
5157 basic_block bb = e->src;
5158 rtx insn = BB_END (bb);
5159 rtx i;
5160 rtx newinsn;
5161
5162 if (!CALL_P (insn)
5163 || ! SIBLING_CALL_P (insn))
5164 continue;
5165
5166 start_sequence ();
5167 emit_insn (gen_sibcall_epilogue ());
5168 seq = get_insns ();
5169 end_sequence ();
5170
5171 /* Retain a map of the epilogue insns. Used in life analysis to
5172 avoid getting rid of sibcall epilogue insns. Do this before we
5173 actually emit the sequence. */
5174 record_insns (seq, &sibcall_epilogue);
5175 set_insn_locators (seq, epilogue_locator);
5176
5177 i = PREV_INSN (insn);
5178 newinsn = emit_insn_before (seq, insn);
5179 }
5180 #endif
5181
5182 #ifdef HAVE_prologue
5183 /* This is probably all useless now that we use locators. */
5184 if (prologue_end)
5185 {
5186 rtx insn, prev;
5187
5188 /* GDB handles `break f' by setting a breakpoint on the first
5189 line note after the prologue, which means (1) that if
5190 there are line number notes before where we inserted the
5191 prologue we should move them, and (2) that we should generate
5192 a note before the end of the first basic block, if there isn't
5193 one already there.
5194
5195 ??? This behavior is completely broken when dealing with
5196 multiple entry functions. We simply always place the note
5197 into the first basic block and let alternate entry points
5198 be missed.
5199 */
5200
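/* Step (1) from the comment above: move any line number notes emitted
   before the end of the prologue to just after it.  */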
5201 for (insn = prologue_end; insn; insn = prev)
5202 {
5203 prev = PREV_INSN (insn);
5204 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5205 {
5206 /* Note that we cannot reorder the first insn in the
5207 chain, since rest_of_compilation relies on that
5208 remaining constant. */
5209 if (prev == NULL)
5210 break;
5211 reorder_insns (insn, insn, prologue_end);
5212 }
5213 }
5214
5215 /* Find the last line number note in the first block. */
5216 for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
5217 insn != prologue_end && insn;
5218 insn = PREV_INSN (insn))
5219 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5220 break;
5221
5222 /* If we didn't find one, make a copy of the first line number
5223 note we run across. */
5224 if (! insn)
5225 {
5226 for (insn = next_active_insn (prologue_end);
5227 insn;
5228 insn = PREV_INSN (insn))
5229 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5230 {
5231 emit_note_copy_after (insn, prologue_end);
5232 break;
5233 }
5234 }
5235 }
5236 #endif
5237 #ifdef HAVE_epilogue
5238 if (epilogue_end)
5239 {
5240 rtx insn, next;
5241
5242 /* Similarly, move any line notes that appear after the epilogue.
5243 There is no need, however, to be quite so careful about the
5244 existence of such a note. Also move the NOTE_INSN_FUNCTION_END and
5245 (possibly) NOTE_INSN_FUNCTION_BEG notes, as those can be relevant
5246 for debug info generation. */
5247 for (insn = epilogue_end; insn; insn = next)
5248 {
5249 next = NEXT_INSN (insn);
5250 if (NOTE_P (insn)
5251 && (NOTE_LINE_NUMBER (insn) > 0
5252 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
5253 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
5254 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5255 }
5256 }
5257 #endif
5258 }
5259
5260 /* Reposition the prologue-end and epilogue-begin notes after instruction
5261 scheduling and delayed branch scheduling. */
5262
5263 void
5264 reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
5265 {
5266 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5267 rtx insn, last, note;
5268 int len;
5269
5270 if ((len = VARRAY_SIZE (prologue)) > 0)
5271 {
5272 last = 0, note = 0;
5273
5274 /* Scan from the beginning until we reach the last prologue insn.
5275 We apparently can't depend on basic_block_{head,end} after
5276 reorg has run. */
5277 for (insn = f; insn; insn = NEXT_INSN (insn))
5278 {
5279 if (NOTE_P (insn))
5280 {
5281 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5282 note = insn;
5283 }
5284 else if (contains (insn, prologue))
5285 {
5286 last = insn;
5287 if (--len == 0)
5288 break;
5289 }
5290 }
5291
5292 if (last)
5293 {
5294 /* Find the prologue-end note if we haven't already, and
5295 move it to just after the last prologue insn. */
5296 if (note == 0)
5297 {
5298 for (note = last; (note = NEXT_INSN (note));)
5299 if (NOTE_P (note)
5300 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5301 break;
5302 }
5303
5304 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5305 if (LABEL_P (last))
5306 last = NEXT_INSN (last);
5307 reorder_insns (note, note, last);
5308 }
5309 }
5310
5311 if ((len = VARRAY_SIZE (epilogue)) > 0)
5312 {
5313 last = 0, note = 0;
5314
5315 /* Scan from the end until we reach the first epilogue insn.
5316 We apparently can't depend on basic_block_{head,end} after
5317 reorg has run. */
5318 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5319 {
5320 if (NOTE_P (insn))
5321 {
5322 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5323 note = insn;
5324 }
5325 else if (contains (insn, epilogue))
5326 {
5327 last = insn;
5328 if (--len == 0)
5329 break;
5330 }
5331 }
5332
5333 if (last)
5334 {
5335 /* Find the epilogue-begin note if we haven't already, and
5336 move it to just before the first epilogue insn. */
5337 if (note == 0)
5338 {
5339 for (note = insn; (note = PREV_INSN (note));)
5340 if (NOTE_P (note)
5341 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
5342 break;
5343 }
5344
5345 if (PREV_INSN (last) != note)
5346 reorder_insns (note, note, PREV_INSN (last));
5347 }
5348 }
5349 #endif /* HAVE_prologue or HAVE_epilogue */
5350 }
5351
5352 /* Called once, at initialization, to initialize function.c. */
5353
5354 void
5355 init_function_once (void)
5356 {
5357 VARRAY_INT_INIT (prologue, 0, "prologue");
5358 VARRAY_INT_INIT (epilogue, 0, "epilogue");
5359 VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
5360 }
5361
5362 /* Resets the ib_boundaries_block array. */
5363
5364 void
5365 reset_block_changes (void)
5366 {
5367 VARRAY_TREE_INIT (cfun->ib_boundaries_block, 100, "ib_boundaries_block");
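/* Seed the array with a single NULL_TREE so record_block_change always
   has a top entry to replace.  */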
5368 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, NULL_TREE);
5369 }
5370
5371 /* Record the boundary for BLOCK. */
5372 void
5373 record_block_change (tree block)
5374 {
5375 int i, n;
5376 tree last_block;
5377
5378 if (!block)
5379 return;
5380
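/* ib_boundaries_block is indexed by insn UID: fill in the entries for
   every insn emitted while LAST_BLOCK was current, then leave BLOCK on
   top as the pending entry for insns emitted from here on.  */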
5381 last_block = VARRAY_TOP_TREE (cfun->ib_boundaries_block);
5382 VARRAY_POP (cfun->ib_boundaries_block);
5383 n = get_max_uid ();
5384 for (i = VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block); i < n; i++)
5385 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, last_block);
5386
5387 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, block);
5388 }
5389
5390 /* Finishes recording the block boundaries. */
5391 void finalize_block_changes (void)
5392 {
5393 record_block_change (DECL_INITIAL (current_function_decl));
5394 }
5395
5396 /* For INSN, store in *BLOCK the block it belongs to, if recorded. */
5397 void
5398 check_block_change (rtx insn, tree *block)
5399 {
5400 unsigned uid = INSN_UID (insn);
5401
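/* Insns emitted after the last recorded boundary have no entry in the
   array; leave *BLOCK untouched for those.  */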
5402 if (uid >= VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block))
5403 return;
5404
5405 *block = VARRAY_TREE (cfun->ib_boundaries_block, uid);
5406 }
5407
5408 /* Releases the ib_boundaries_block records. */
5409 void
5410 free_block_changes (void)
5411 {
5412 cfun->ib_boundaries_block = NULL;
5413 }
5414
5415 /* Returns the name of the current function. */
5416 const char *
5417 current_function_name (void)
5418 {
5419 return lang_hooks.decl_printable_name (cfun->decl, 2);
5420 }
5421
5422 #include "gt-function.h"