1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "flags.h"
43 #include "except.h"
44 #include "function.h"
45 #include "expr.h"
46 #include "optabs.h"
47 #include "libfuncs.h"
48 #include "regs.h"
49 #include "hard-reg-set.h"
50 #include "insn-config.h"
51 #include "recog.h"
52 #include "output.h"
53 #include "basic-block.h"
54 #include "toplev.h"
55 #include "hashtab.h"
56 #include "ggc.h"
57 #include "tm_p.h"
58 #include "integrate.h"
59 #include "langhooks.h"
60 #include "target.h"
61 #include "cfglayout.h"
62 #include "tree-gimple.h"
63 #include "tree-pass.h"
64 #include "predict.h"
65 #include "df.h"
66 #include "timevar.h"
67 #include "vecprim.h"
68
69 /* So we can assign to cfun in this file. */
70 #undef cfun
71
72 #ifndef LOCAL_ALIGNMENT
73 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
74 #endif
75
76 #ifndef STACK_ALIGNMENT_NEEDED
77 #define STACK_ALIGNMENT_NEEDED 1
78 #endif
79
80 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
81
82 /* Some systems use __main in a way incompatible with its use in gcc; in these
83 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
84 give the same symbol without quotes for an alternative entry point. You
85 must define both, or neither. */
86 #ifndef NAME__MAIN
87 #define NAME__MAIN "__main"
88 #endif
89
90 /* Round a value down to the largest multiple of the required alignment
91    that does not exceed it.  Avoid using division in case the value is
92    negative.  Assume the alignment is a power of two. */
93 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
94
95 /* Similar, but round up to the smallest multiple of the alignment that
96    is not less than the value. */
97 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
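/* Worked example (illustrative): with ALIGN == 8, FLOOR_ROUND (13, 8) == 8
   and CEIL_ROUND (13, 8) == 16.  The masking also behaves sensibly for
   negative values, e.g. FLOOR_ROUND (-13, 8) == -16, which is why these
   macros avoid division.  */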
98
99 /* Nonzero if function being compiled doesn't contain any calls
100 (ignoring the prologue and epilogue). This is set prior to
101 local register allocation and is valid for the remaining
102 compiler passes. */
103 int current_function_is_leaf;
104
105 /* Nonzero if function being compiled doesn't modify the stack pointer
106 (ignoring the prologue and epilogue). This is only valid after
107 pass_stack_ptr_mod has run. */
108 int current_function_sp_is_unchanging;
109
110 /* Nonzero if the function being compiled is a leaf function which only
111 uses leaf registers. This is valid after reload (specifically after
112 sched2) and is useful only if the port defines LEAF_REGISTERS. */
113 int current_function_uses_only_leaf_regs;
114
115 /* Nonzero once virtual register instantiation has been done.
116 assign_stack_local uses frame_pointer_rtx when this is nonzero.
117 calls.c:emit_library_call_value_1 uses it to set up
118 post-instantiation libcalls. */
119 int virtuals_instantiated;
120
121 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
122 static GTY(()) int funcdef_no;
123
124 /* These variables hold pointers to functions to create and destroy
125 target specific, per-function data structures. */
126 struct machine_function * (*init_machine_status) (void);
127
128 /* The currently compiled function. */
129 struct function *cfun = 0;
130
131 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
132 static VEC(int,heap) *prologue;
133 static VEC(int,heap) *epilogue;
134
135 /* Array to hold the INSN_UIDs of each sibcall epilogue
136    in this function. */
137 static VEC(int,heap) *sibcall_epilogue;
138 \f
139 /* In order to evaluate some expressions, such as function calls returning
140 structures in memory, we need to temporarily allocate stack locations.
141 We record each allocated temporary in the following structure.
142
143 Associated with each temporary slot is a nesting level. When we pop up
144 one level, all temporaries associated with the previous level are freed.
145 Normally, all temporaries are freed after the execution of the statement
146 in which they were created. However, if we are inside a ({...}) grouping,
147 the result may be in a temporary and hence must be preserved. If the
148 result could be in a temporary, we preserve it if we can determine which
149 one it is in. If we cannot determine which temporary may contain the
150 result, all temporaries are preserved. A temporary is preserved by
151 pretending it was allocated at the previous nesting level.
152
153 Automatic variables are also assigned temporary slots, at the nesting
154    level where they are defined.  They are marked as "kept" so that
155 free_temp_slots will not free them. */
156
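/* A minimal usage sketch (illustrative; not code taken from this file).
   Code that expands a single statement typically brackets it with:

       push_temp_slots ();
       ... expand the statement, allocating temporaries with
           assign_stack_temp () or assign_temp () as needed ...
       preserve_temp_slots (result);
       free_temp_slots ();
       pop_temp_slots ();

   preserve_temp_slots is only required when the statement's result may
   itself live in a temporary; otherwise the temporaries are simply
   released at the end of the statement.  */
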
157 struct temp_slot GTY(())
158 {
159 /* Points to next temporary slot. */
160 struct temp_slot *next;
161 /* Points to previous temporary slot. */
162 struct temp_slot *prev;
163
164   /* The rtx used to reference the slot. */
165 rtx slot;
166 /* The rtx used to represent the address if not the address of the
167 slot above. May be an EXPR_LIST if multiple addresses exist. */
168 rtx address;
169 /* The alignment (in bits) of the slot. */
170 unsigned int align;
171 /* The size, in units, of the slot. */
172 HOST_WIDE_INT size;
173 /* The type of the object in the slot, or zero if it doesn't correspond
174 to a type. We use this to determine whether a slot can be reused.
175 It can be reused if objects of the type of the new slot will always
176 conflict with objects of the type of the old slot. */
177 tree type;
178 /* Nonzero if this temporary is currently in use. */
179 char in_use;
180 /* Nonzero if this temporary has its address taken. */
181 char addr_taken;
182 /* Nesting level at which this slot is being used. */
183 int level;
184 /* Nonzero if this should survive a call to free_temp_slots. */
185 int keep;
186 /* The offset of the slot from the frame_pointer, including extra space
187 for alignment. This info is for combine_temp_slots. */
188 HOST_WIDE_INT base_offset;
189 /* The size of the slot, including extra space for alignment. This
190 info is for combine_temp_slots. */
191 HOST_WIDE_INT full_size;
192 };
193 \f
194 /* Forward declarations. */
195
196 static struct temp_slot *find_temp_slot_from_address (rtx);
197 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
198 static void pad_below (struct args_size *, enum machine_mode, tree);
199 static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
200 static int all_blocks (tree, tree *);
201 static tree *get_block_vector (tree, int *);
202 extern tree debug_find_var_in_block_tree (tree, tree);
203 /* We always define `record_insns' even if it's not used so that we
204 can always export `prologue_epilogue_contains'. */
205 static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
206 static int contains (const_rtx, VEC(int,heap) **);
207 #ifdef HAVE_return
208 static void emit_return_into_block (basic_block);
209 #endif
210 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
211 static rtx keep_stack_depressed (rtx);
212 #endif
213 static void prepare_function_start (void);
214 static void do_clobber_return_reg (rtx, void *);
215 static void do_use_return_reg (rtx, void *);
216 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
217 \f
218 /* Pointer to chain of `struct function' for containing functions. */
219 struct function *outer_function_chain;
220
221 /* Given a function decl for a containing function,
222 return the `struct function' for it. */
223
224 struct function *
225 find_function_data (tree decl)
226 {
227 struct function *p;
228
229 for (p = outer_function_chain; p; p = p->outer)
230 if (p->decl == decl)
231 return p;
232
233 gcc_unreachable ();
234 }
235
236 /* Save the current context for compilation of a nested function.
237 This is called from language-specific code. */
238
239 void
240 push_function_context (void)
241 {
242 if (cfun == 0)
243 allocate_struct_function (NULL, false);
244
245 cfun->outer = outer_function_chain;
246 outer_function_chain = cfun;
247 set_cfun (NULL);
248 }
249
250 /* Restore the last saved context, at the end of a nested function.
251 This function is called from language-specific code. */
252
253 void
254 pop_function_context (void)
255 {
256 struct function *p = outer_function_chain;
257
258 set_cfun (p);
259 outer_function_chain = p->outer;
260 current_function_decl = p->decl;
261
262 /* Reset variables that have known state during rtx generation. */
263 virtuals_instantiated = 0;
264 generating_concat_p = 1;
265 }
266
267 /* Clear out all parts of the state in F that can safely be discarded
268 after the function has been parsed, but not compiled, to let
269 garbage collection reclaim the memory. */
270
271 void
272 free_after_parsing (struct function *f)
273 {
274 f->language = 0;
275 }
276
277 /* Clear out all parts of the state in F that can safely be discarded
278 after the function has been compiled, to let garbage collection
279 reclaim the memory. */
280
281 void
282 free_after_compilation (struct function *f)
283 {
284 VEC_free (int, heap, prologue);
285 VEC_free (int, heap, epilogue);
286 VEC_free (int, heap, sibcall_epilogue);
287 if (rtl.emit.regno_pointer_align)
288 free (rtl.emit.regno_pointer_align);
289
290 memset (&rtl, 0, sizeof (rtl));
291 f->eh = NULL;
292 f->machine = NULL;
293 f->cfg = NULL;
294
295 f->arg_offset_rtx = NULL;
296 f->return_rtx = NULL;
297 f->internal_arg_pointer = NULL;
298 f->epilogue_delay_list = NULL;
299 }
300 \f
301 /* Return size needed for stack frame based on slots so far allocated.
302 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
303 the caller may have to do that. */
304
305 HOST_WIDE_INT
306 get_frame_size (void)
307 {
308 if (FRAME_GROWS_DOWNWARD)
309 return -frame_offset;
310 else
311 return frame_offset;
312 }
313
314 /* Issue an error message and return TRUE if frame OFFSET overflows in
315    the signed target pointer arithmetic for function FUNC.  Otherwise
316 return FALSE. */
317
318 bool
319 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
320 {
321 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
322
323 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
324 /* Leave room for the fixed part of the frame. */
325 - 64 * UNITS_PER_WORD)
326 {
327 error ("%Jtotal size of local objects too large", func);
328 return TRUE;
329 }
330
331 return FALSE;
332 }
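/* For instance (illustrative): with a 32-bit Pmode and 4-byte words the
   check above rejects frames whose local objects exceed
   2**31 - 64*4 = 2147483392 bytes, reserving 256 bytes of head room for
   the fixed part of the frame.  */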
333
334 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
335 with machine mode MODE.
336
337 ALIGN controls the amount of alignment for the address of the slot:
338 0 means according to MODE,
339 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
340 -2 means use BITS_PER_UNIT,
341 positive specifies alignment boundary in bits.
342
343 We do not round to stack_boundary here. */
344
345 rtx
346 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
347 {
348 rtx x, addr;
349 int bigend_correction = 0;
350 unsigned int alignment;
351 int frame_off, frame_alignment, frame_phase;
352
353 if (align == 0)
354 {
355 tree type;
356
357 if (mode == BLKmode)
358 alignment = BIGGEST_ALIGNMENT;
359 else
360 alignment = GET_MODE_ALIGNMENT (mode);
361
362 /* Allow the target to (possibly) increase the alignment of this
363 stack slot. */
364 type = lang_hooks.types.type_for_mode (mode, 0);
365 if (type)
366 alignment = LOCAL_ALIGNMENT (type, alignment);
367
368 alignment /= BITS_PER_UNIT;
369 }
370 else if (align == -1)
371 {
372 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
373 size = CEIL_ROUND (size, alignment);
374 }
375 else if (align == -2)
376 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
377 else
378 alignment = align / BITS_PER_UNIT;
379
380 if (FRAME_GROWS_DOWNWARD)
381 frame_offset -= size;
382
383   /* Ignore alignment requests beyond what the preferred stack boundary allows. */
384 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
385 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
386
387 if (cfun->stack_alignment_needed < alignment * BITS_PER_UNIT)
388 cfun->stack_alignment_needed = alignment * BITS_PER_UNIT;
389
390 /* Calculate how many bytes the start of local variables is off from
391 stack alignment. */
392 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
393 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
394 frame_phase = frame_off ? frame_alignment - frame_off : 0;
395
396 /* Round the frame offset to the specified alignment. The default is
397 to always honor requests to align the stack but a port may choose to
398 do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. */
399 if (STACK_ALIGNMENT_NEEDED
400 || mode != BLKmode
401 || size != 0)
402 {
403 /* We must be careful here, since FRAME_OFFSET might be negative and
404 division with a negative dividend isn't as well defined as we might
405 like. So we instead assume that ALIGNMENT is a power of two and
406 use logical operations which are unambiguous. */
407 if (FRAME_GROWS_DOWNWARD)
408 frame_offset
409 = (FLOOR_ROUND (frame_offset - frame_phase,
410 (unsigned HOST_WIDE_INT) alignment)
411 + frame_phase);
412 else
413 frame_offset
414 = (CEIL_ROUND (frame_offset - frame_phase,
415 (unsigned HOST_WIDE_INT) alignment)
416 + frame_phase);
417 }
418
419 /* On a big-endian machine, if we are allocating more space than we will use,
420 use the least significant bytes of those that are allocated. */
421 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
422 bigend_correction = size - GET_MODE_SIZE (mode);
423
424 /* If we have already instantiated virtual registers, return the actual
425 address relative to the frame pointer. */
426 if (virtuals_instantiated)
427 addr = plus_constant (frame_pointer_rtx,
428 trunc_int_for_mode
429 (frame_offset + bigend_correction
430 + STARTING_FRAME_OFFSET, Pmode));
431 else
432 addr = plus_constant (virtual_stack_vars_rtx,
433 trunc_int_for_mode
434 (frame_offset + bigend_correction,
435 Pmode));
436
437 if (!FRAME_GROWS_DOWNWARD)
438 frame_offset += size;
439
440 x = gen_rtx_MEM (mode, addr);
441 MEM_NOTRAP_P (x) = 1;
442
443 stack_slot_list
444 = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
445
446 if (frame_offset_overflow (frame_offset, current_function_decl))
447 frame_offset = 0;
448
449 return x;
450 }
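/* Illustrative calls (a sketch, not from this file):

       assign_stack_local (SImode, 4, 0);
	 allocates a 4-byte slot aligned at least to SImode's alignment;

       assign_stack_local (BLKmode, size, -1);
	 rounds SIZE up to a multiple of BIGGEST_ALIGNMENT and aligns the
	 slot to it.

   The returned MEM is addressed off virtual_stack_vars_rtx, or directly
   off frame_pointer_rtx once virtual registers have been instantiated.  */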
451 \f
452 /* Removes temporary slot TEMP from LIST. */
453
454 static void
455 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
456 {
457 if (temp->next)
458 temp->next->prev = temp->prev;
459 if (temp->prev)
460 temp->prev->next = temp->next;
461 else
462 *list = temp->next;
463
464 temp->prev = temp->next = NULL;
465 }
466
467 /* Inserts temporary slot TEMP to LIST. */
468
469 static void
470 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
471 {
472 temp->next = *list;
473 if (*list)
474 (*list)->prev = temp;
475 temp->prev = NULL;
476 *list = temp;
477 }
478
479 /* Returns the list of used temp slots at LEVEL. */
480
481 static struct temp_slot **
482 temp_slots_at_level (int level)
483 {
484 if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
485 VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);
486
487 return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
488 }
489
490 /* Returns the maximal temporary slot level. */
491
492 static int
493 max_slot_level (void)
494 {
495 if (!used_temp_slots)
496 return -1;
497
498 return VEC_length (temp_slot_p, used_temp_slots) - 1;
499 }
500
501 /* Moves temporary slot TEMP to LEVEL. */
502
503 static void
504 move_slot_to_level (struct temp_slot *temp, int level)
505 {
506 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
507 insert_slot_to_list (temp, temp_slots_at_level (level));
508 temp->level = level;
509 }
510
511 /* Make temporary slot TEMP available. */
512
513 static void
514 make_slot_available (struct temp_slot *temp)
515 {
516 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
517 insert_slot_to_list (temp, &avail_temp_slots);
518 temp->in_use = 0;
519 temp->level = -1;
520 }
521 \f
522 /* Allocate a temporary stack slot and record it for possible later
523 reuse.
524
525 MODE is the machine mode to be given to the returned rtx.
526
527 SIZE is the size in units of the space required. We do no rounding here
528 since assign_stack_local will do any required rounding.
529
530 KEEP is 1 if this slot is to be retained after a call to
531 free_temp_slots. Automatic variables for a block are allocated
532 with this flag. KEEP values of 2 or 3 were needed respectively
533 for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
534 or for SAVE_EXPRs, but they are now unused.
535
536 TYPE is the type that will be used for the stack slot. */
537
538 rtx
539 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
540 int keep, tree type)
541 {
542 unsigned int align;
543 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
544 rtx slot;
545
546 /* If SIZE is -1 it means that somebody tried to allocate a temporary
547 of a variable size. */
548 gcc_assert (size != -1);
549
550 /* These are now unused. */
551 gcc_assert (keep <= 1);
552
553 if (mode == BLKmode)
554 align = BIGGEST_ALIGNMENT;
555 else
556 align = GET_MODE_ALIGNMENT (mode);
557
558 if (! type)
559 type = lang_hooks.types.type_for_mode (mode, 0);
560
561 if (type)
562 align = LOCAL_ALIGNMENT (type, align);
563
564 /* Try to find an available, already-allocated temporary of the proper
565 mode which meets the size and alignment requirements. Choose the
566 smallest one with the closest alignment.
567
568 If assign_stack_temp is called outside of the tree->rtl expansion,
569 we cannot reuse the stack slots (that may still refer to
570 VIRTUAL_STACK_VARS_REGNUM). */
571 if (!virtuals_instantiated)
572 {
573 for (p = avail_temp_slots; p; p = p->next)
574 {
575 if (p->align >= align && p->size >= size
576 && GET_MODE (p->slot) == mode
577 && objects_must_conflict_p (p->type, type)
578 && (best_p == 0 || best_p->size > p->size
579 || (best_p->size == p->size && best_p->align > p->align)))
580 {
581 if (p->align == align && p->size == size)
582 {
583 selected = p;
584 cut_slot_from_list (selected, &avail_temp_slots);
585 best_p = 0;
586 break;
587 }
588 best_p = p;
589 }
590 }
591 }
592
593 /* Make our best, if any, the one to use. */
594 if (best_p)
595 {
596 selected = best_p;
597 cut_slot_from_list (selected, &avail_temp_slots);
598
599 /* If there are enough aligned bytes left over, make them into a new
600 temp_slot so that the extra bytes don't get wasted. Do this only
601 for BLKmode slots, so that we can be sure of the alignment. */
602 if (GET_MODE (best_p->slot) == BLKmode)
603 {
604 int alignment = best_p->align / BITS_PER_UNIT;
605 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
606
607 if (best_p->size - rounded_size >= alignment)
608 {
609 p = ggc_alloc (sizeof (struct temp_slot));
610 p->in_use = p->addr_taken = 0;
611 p->size = best_p->size - rounded_size;
612 p->base_offset = best_p->base_offset + rounded_size;
613 p->full_size = best_p->full_size - rounded_size;
614 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
615 p->align = best_p->align;
616 p->address = 0;
617 p->type = best_p->type;
618 insert_slot_to_list (p, &avail_temp_slots);
619
620 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
621 stack_slot_list);
622
623 best_p->size = rounded_size;
624 best_p->full_size = rounded_size;
625 }
626 }
627 }
628
629 /* If we still didn't find one, make a new temporary. */
630 if (selected == 0)
631 {
632 HOST_WIDE_INT frame_offset_old = frame_offset;
633
634 p = ggc_alloc (sizeof (struct temp_slot));
635
636 /* We are passing an explicit alignment request to assign_stack_local.
637 One side effect of that is assign_stack_local will not round SIZE
638 to ensure the frame offset remains suitably aligned.
639
640 So for requests which depended on the rounding of SIZE, we go ahead
641 and round it now. We also make sure ALIGNMENT is at least
642 BIGGEST_ALIGNMENT. */
643 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
644 p->slot = assign_stack_local (mode,
645 (mode == BLKmode
646 ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
647 : size),
648 align);
649
650 p->align = align;
651
652 /* The following slot size computation is necessary because we don't
653 know the actual size of the temporary slot until assign_stack_local
654 has performed all the frame alignment and size rounding for the
655 requested temporary. Note that extra space added for alignment
656 can be either above or below this stack slot depending on which
657 way the frame grows. We include the extra space if and only if it
658 is above this slot. */
659 if (FRAME_GROWS_DOWNWARD)
660 p->size = frame_offset_old - frame_offset;
661 else
662 p->size = size;
663
664 /* Now define the fields used by combine_temp_slots. */
665 if (FRAME_GROWS_DOWNWARD)
666 {
667 p->base_offset = frame_offset;
668 p->full_size = frame_offset_old - frame_offset;
669 }
670 else
671 {
672 p->base_offset = frame_offset_old;
673 p->full_size = frame_offset - frame_offset_old;
674 }
675 p->address = 0;
676
677 selected = p;
678 }
679
680 p = selected;
681 p->in_use = 1;
682 p->addr_taken = 0;
683 p->type = type;
684 p->level = temp_slot_level;
685 p->keep = keep;
686
687 pp = temp_slots_at_level (p->level);
688 insert_slot_to_list (p, pp);
689
690 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
691 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
692 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
693
694 /* If we know the alias set for the memory that will be used, use
695 it. If there's no TYPE, then we don't know anything about the
696 alias set for the memory. */
697 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
698 set_mem_align (slot, align);
699
700 /* If a type is specified, set the relevant flags. */
701 if (type != 0)
702 {
703 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
704 MEM_SET_IN_STRUCT_P (slot, (AGGREGATE_TYPE_P (type)
705 || TREE_CODE (type) == COMPLEX_TYPE));
706 }
707 MEM_NOTRAP_P (slot) = 1;
708
709 return slot;
710 }
711
712 /* Allocate a temporary stack slot and record it for possible later
713 reuse. First three arguments are same as in preceding function. */
714
715 rtx
716 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
717 {
718 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
719 }
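/* Illustrative use (a sketch; TYPE here stands for any tree type node):
   expanding a call whose aggregate return value must live in memory
   might allocate

       rtx slot = assign_stack_temp (BLKmode, int_size_in_bytes (type), 0);

   and rely on a later free_temp_slots () call, at the end of the
   enclosing statement, to make the slot reusable.  */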
720 \f
721 /* Assign a temporary.
722 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
723    and so that should be used in error messages.  In either case, we
724    allocate a temporary of the given type.
725 KEEP is as for assign_stack_temp.
726 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
727 it is 0 if a register is OK.
728 DONT_PROMOTE is 1 if we should not promote values in register
729 to wider modes. */
730
731 rtx
732 assign_temp (tree type_or_decl, int keep, int memory_required,
733 int dont_promote ATTRIBUTE_UNUSED)
734 {
735 tree type, decl;
736 enum machine_mode mode;
737 #ifdef PROMOTE_MODE
738 int unsignedp;
739 #endif
740
741 if (DECL_P (type_or_decl))
742 decl = type_or_decl, type = TREE_TYPE (decl);
743 else
744 decl = NULL, type = type_or_decl;
745
746 mode = TYPE_MODE (type);
747 #ifdef PROMOTE_MODE
748 unsignedp = TYPE_UNSIGNED (type);
749 #endif
750
751 if (mode == BLKmode || memory_required)
752 {
753 HOST_WIDE_INT size = int_size_in_bytes (type);
754 rtx tmp;
755
756 /* Zero sized arrays are GNU C extension. Set size to 1 to avoid
757 problems with allocating the stack space. */
758 if (size == 0)
759 size = 1;
760
761 /* Unfortunately, we don't yet know how to allocate variable-sized
762 temporaries. However, sometimes we can find a fixed upper limit on
763 the size, so try that instead. */
764 else if (size == -1)
765 size = max_int_size_in_bytes (type);
766
767 /* The size of the temporary may be too large to fit into an integer. */
768 /* ??? Not sure this should happen except for user silliness, so limit
769 this to things that aren't compiler-generated temporaries. The
770 rest of the time we'll die in assign_stack_temp_for_type. */
771 if (decl && size == -1
772 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
773 {
774 error ("size of variable %q+D is too large", decl);
775 size = 1;
776 }
777
778 tmp = assign_stack_temp_for_type (mode, size, keep, type);
779 return tmp;
780 }
781
782 #ifdef PROMOTE_MODE
783 if (! dont_promote)
784 mode = promote_mode (type, mode, &unsignedp, 0);
785 #endif
786
787 return gen_reg_rtx (mode);
788 }
789 \f
790 /* Combine temporary stack slots which are adjacent on the stack.
791
792 This allows for better use of already allocated stack space. This is only
793 done for BLKmode slots because we can be sure that we won't have alignment
794 problems in this case. */
795
796 static void
797 combine_temp_slots (void)
798 {
799 struct temp_slot *p, *q, *next, *next_q;
800 int num_slots;
801
802 /* We can't combine slots, because the information about which slot
803 is in which alias set will be lost. */
804 if (flag_strict_aliasing)
805 return;
806
807   /* If there are a lot of temp slots, don't do anything unless
808      expensive optimizations are enabled. */
809 if (! flag_expensive_optimizations)
810 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
811 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
812 return;
813
814 for (p = avail_temp_slots; p; p = next)
815 {
816 int delete_p = 0;
817
818 next = p->next;
819
820 if (GET_MODE (p->slot) != BLKmode)
821 continue;
822
823 for (q = p->next; q; q = next_q)
824 {
825 int delete_q = 0;
826
827 next_q = q->next;
828
829 if (GET_MODE (q->slot) != BLKmode)
830 continue;
831
832 if (p->base_offset + p->full_size == q->base_offset)
833 {
834 /* Q comes after P; combine Q into P. */
835 p->size += q->size;
836 p->full_size += q->full_size;
837 delete_q = 1;
838 }
839 else if (q->base_offset + q->full_size == p->base_offset)
840 {
841 /* P comes after Q; combine P into Q. */
842 q->size += p->size;
843 q->full_size += p->full_size;
844 delete_p = 1;
845 break;
846 }
847 if (delete_q)
848 cut_slot_from_list (q, &avail_temp_slots);
849 }
850
851 /* Either delete P or advance past it. */
852 if (delete_p)
853 cut_slot_from_list (p, &avail_temp_slots);
854 }
855 }
856 \f
857 /* Find the temp slot corresponding to the object at address X. */
858
859 static struct temp_slot *
860 find_temp_slot_from_address (rtx x)
861 {
862 struct temp_slot *p;
863 rtx next;
864 int i;
865
866 for (i = max_slot_level (); i >= 0; i--)
867 for (p = *temp_slots_at_level (i); p; p = p->next)
868 {
869 if (XEXP (p->slot, 0) == x
870 || p->address == x
871 || (GET_CODE (x) == PLUS
872 && XEXP (x, 0) == virtual_stack_vars_rtx
873 && GET_CODE (XEXP (x, 1)) == CONST_INT
874 && INTVAL (XEXP (x, 1)) >= p->base_offset
875 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
876 return p;
877
878 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
879 for (next = p->address; next; next = XEXP (next, 1))
880 if (XEXP (next, 0) == x)
881 return p;
882 }
883
884 /* If we have a sum involving a register, see if it points to a temp
885 slot. */
886 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
887 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
888 return p;
889 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
890 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
891 return p;
892
893 return 0;
894 }
895
896 /* Indicate that NEW is an alternate way of referring to the temp slot
897 that previously was known by OLD. */
898
899 void
900 update_temp_slot_address (rtx old, rtx new)
901 {
902 struct temp_slot *p;
903
904 if (rtx_equal_p (old, new))
905 return;
906
907 p = find_temp_slot_from_address (old);
908
909   /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW
910      is a register, see if one operand of the PLUS is a temporary
911      location.  If so, NEW points into it.  Otherwise, if both OLD and
912      NEW are a PLUS and there is a register in common between them,
913      try a recursive call on those values. */
914 if (p == 0)
915 {
916 if (GET_CODE (old) != PLUS)
917 return;
918
919 if (REG_P (new))
920 {
921 update_temp_slot_address (XEXP (old, 0), new);
922 update_temp_slot_address (XEXP (old, 1), new);
923 return;
924 }
925 else if (GET_CODE (new) != PLUS)
926 return;
927
928 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
929 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
930 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
931 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
932 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
933 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
934 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
935 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
936
937 return;
938 }
939
940 /* Otherwise add an alias for the temp's address. */
941 else if (p->address == 0)
942 p->address = new;
943 else
944 {
945 if (GET_CODE (p->address) != EXPR_LIST)
946 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
947
948 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
949 }
950 }
951
952 /* If X could be a reference to a temporary slot, mark the fact that its
953 address was taken. */
954
955 void
956 mark_temp_addr_taken (rtx x)
957 {
958 struct temp_slot *p;
959
960 if (x == 0)
961 return;
962
963 /* If X is not in memory or is at a constant address, it cannot be in
964 a temporary slot. */
965 if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
966 return;
967
968 p = find_temp_slot_from_address (XEXP (x, 0));
969 if (p != 0)
970 p->addr_taken = 1;
971 }
972
973 /* If X could be a reference to a temporary slot, mark that slot as
974    belonging to the level one higher than the current level.  If X
975 matched one of our slots, just mark that one. Otherwise, we can't
976 easily predict which it is, so upgrade all of them. Kept slots
977 need not be touched.
978
979 This is called when an ({...}) construct occurs and a statement
980 returns a value in memory. */
981
982 void
983 preserve_temp_slots (rtx x)
984 {
985 struct temp_slot *p = 0, *next;
986
987   /* If there is no result, we still might have some objects whose addresses
988      were taken, so we need to make sure they stay around.  */
989 if (x == 0)
990 {
991 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
992 {
993 next = p->next;
994
995 if (p->addr_taken)
996 move_slot_to_level (p, temp_slot_level - 1);
997 }
998
999 return;
1000 }
1001
1002 /* If X is a register that is being used as a pointer, see if we have
1003 a temporary slot we know it points to. To be consistent with
1004 the code below, we really should preserve all non-kept slots
1005 if we can't find a match, but that seems to be much too costly. */
1006 if (REG_P (x) && REG_POINTER (x))
1007 p = find_temp_slot_from_address (x);
1008
1009 /* If X is not in memory or is at a constant address, it cannot be in
1010 a temporary slot, but it can contain something whose address was
1011 taken. */
1012 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1013 {
1014 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1015 {
1016 next = p->next;
1017
1018 if (p->addr_taken)
1019 move_slot_to_level (p, temp_slot_level - 1);
1020 }
1021
1022 return;
1023 }
1024
1025 /* First see if we can find a match. */
1026 if (p == 0)
1027 p = find_temp_slot_from_address (XEXP (x, 0));
1028
1029 if (p != 0)
1030 {
1031 /* Move everything at our level whose address was taken to our new
1032 level in case we used its address. */
1033 struct temp_slot *q;
1034
1035 if (p->level == temp_slot_level)
1036 {
1037 for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1038 {
1039 next = q->next;
1040
1041 if (p != q && q->addr_taken)
1042 move_slot_to_level (q, temp_slot_level - 1);
1043 }
1044
1045 move_slot_to_level (p, temp_slot_level - 1);
1046 p->addr_taken = 0;
1047 }
1048 return;
1049 }
1050
1051 /* Otherwise, preserve all non-kept slots at this level. */
1052 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1053 {
1054 next = p->next;
1055
1056 if (!p->keep)
1057 move_slot_to_level (p, temp_slot_level - 1);
1058 }
1059 }
1060
1061 /* Free all temporaries used so far. This is normally called at the
1062 end of generating code for a statement. */
1063
1064 void
1065 free_temp_slots (void)
1066 {
1067 struct temp_slot *p, *next;
1068
1069 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1070 {
1071 next = p->next;
1072
1073 if (!p->keep)
1074 make_slot_available (p);
1075 }
1076
1077 combine_temp_slots ();
1078 }
1079
1080 /* Push deeper into the nesting level for stack temporaries. */
1081
1082 void
1083 push_temp_slots (void)
1084 {
1085 temp_slot_level++;
1086 }
1087
1088 /* Pop a temporary nesting level. All slots in use in the current level
1089 are freed. */
1090
1091 void
1092 pop_temp_slots (void)
1093 {
1094 struct temp_slot *p, *next;
1095
1096 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1097 {
1098 next = p->next;
1099 make_slot_available (p);
1100 }
1101
1102 combine_temp_slots ();
1103
1104 temp_slot_level--;
1105 }
1106
1107 /* Initialize temporary slots. */
1108
1109 void
1110 init_temp_slots (void)
1111 {
1112 /* We have not allocated any temporaries yet. */
1113 avail_temp_slots = 0;
1114 used_temp_slots = 0;
1115 temp_slot_level = 0;
1116 }
1117 \f
1118 /* These routines are responsible for converting virtual register references
1119 to the actual hard register references once RTL generation is complete.
1120
1121 The following four variables are used for communication between the
1122 routines. They contain the offsets of the virtual registers from their
1123 respective hard registers. */
1124
1125 static int in_arg_offset;
1126 static int var_offset;
1127 static int dynamic_offset;
1128 static int out_arg_offset;
1129 static int cfa_offset;
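/* For example (illustrative): a reference such as

       (plus:SI (reg:SI virtual-stack-vars) (const_int 8))

   is rewritten by the routines below into

       (plus:SI (reg:SI frame-pointer) (const_int (8 + var_offset)))

   where var_offset is STARTING_FRAME_OFFSET for this function.  */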
1130
1131 /* In most machines, the stack pointer register is equivalent to the bottom
1132 of the stack. */
1133
1134 #ifndef STACK_POINTER_OFFSET
1135 #define STACK_POINTER_OFFSET 0
1136 #endif
1137
1138 /* If not defined, pick an appropriate default for the offset of dynamically
1139 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1140 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1141
1142 #ifndef STACK_DYNAMIC_OFFSET
1143
1144 /* The bottom of the stack points to the actual arguments. If
1145 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1146    parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1147 stack space for register parameters is not pushed by the caller, but
1148 rather part of the fixed stack areas and hence not included in
1149 `current_function_outgoing_args_size'. Nevertheless, we must allow
1150 for it when allocating stack dynamic objects. */
1151
1152 #if defined(REG_PARM_STACK_SPACE)
1153 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1154 ((ACCUMULATE_OUTGOING_ARGS \
1155 ? (current_function_outgoing_args_size \
1156 + (OUTGOING_REG_PARM_STACK_SPACE ? 0 : REG_PARM_STACK_SPACE (FNDECL))) \
1157 : 0) + (STACK_POINTER_OFFSET))
1158 #else
1159 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1160 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
1161 + (STACK_POINTER_OFFSET))
1162 #endif
1163 #endif
1164
1165 \f
1166 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1167 is a virtual register, return the equivalent hard register and set the
1168 offset indirectly through the pointer. Otherwise, return 0. */
1169
1170 static rtx
1171 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1172 {
1173 rtx new;
1174 HOST_WIDE_INT offset;
1175
1176 if (x == virtual_incoming_args_rtx)
1177 new = arg_pointer_rtx, offset = in_arg_offset;
1178 else if (x == virtual_stack_vars_rtx)
1179 new = frame_pointer_rtx, offset = var_offset;
1180 else if (x == virtual_stack_dynamic_rtx)
1181 new = stack_pointer_rtx, offset = dynamic_offset;
1182 else if (x == virtual_outgoing_args_rtx)
1183 new = stack_pointer_rtx, offset = out_arg_offset;
1184 else if (x == virtual_cfa_rtx)
1185 {
1186 #ifdef FRAME_POINTER_CFA_OFFSET
1187 new = frame_pointer_rtx;
1188 #else
1189 new = arg_pointer_rtx;
1190 #endif
1191 offset = cfa_offset;
1192 }
1193 else
1194 return NULL_RTX;
1195
1196 *poffset = offset;
1197 return new;
1198 }
1199
1200 /* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1201 Instantiate any virtual registers present inside of *LOC. The expression
1202 is simplified, as much as possible, but is not to be considered "valid"
1203 in any sense implied by the target. If any change is made, set CHANGED
1204 to true. */
1205
1206 static int
1207 instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
1208 {
1209 HOST_WIDE_INT offset;
1210 bool *changed = (bool *) data;
1211 rtx x, new;
1212
1213 x = *loc;
1214 if (x == 0)
1215 return 0;
1216
1217 switch (GET_CODE (x))
1218 {
1219 case REG:
1220 new = instantiate_new_reg (x, &offset);
1221 if (new)
1222 {
1223 *loc = plus_constant (new, offset);
1224 if (changed)
1225 *changed = true;
1226 }
1227 return -1;
1228
1229 case PLUS:
1230 new = instantiate_new_reg (XEXP (x, 0), &offset);
1231 if (new)
1232 {
1233 new = plus_constant (new, offset);
1234 *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
1235 if (changed)
1236 *changed = true;
1237 return -1;
1238 }
1239
1240 /* FIXME -- from old code */
1241 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1242 we can commute the PLUS and SUBREG because pointers into the
1243 frame are well-behaved. */
1244 break;
1245
1246 default:
1247 break;
1248 }
1249
1250 return 0;
1251 }
1252
1253 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1254 matches the predicate for insn CODE operand OPERAND. */
1255
1256 static int
1257 safe_insn_predicate (int code, int operand, rtx x)
1258 {
1259 const struct insn_operand_data *op_data;
1260
1261 if (code < 0)
1262 return true;
1263
1264 op_data = &insn_data[code].operand[operand];
1265 if (op_data->predicate == NULL)
1266 return true;
1267
1268 return op_data->predicate (x, op_data->mode);
1269 }
1270
1271 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1272 registers present inside of insn. The result will be a valid insn. */
1273
1274 static void
1275 instantiate_virtual_regs_in_insn (rtx insn)
1276 {
1277 HOST_WIDE_INT offset;
1278 int insn_code, i;
1279 bool any_change = false;
1280 rtx set, new, x, seq;
1281
1282 /* There are some special cases to be handled first. */
1283 set = single_set (insn);
1284 if (set)
1285 {
1286 /* We're allowed to assign to a virtual register. This is interpreted
1287 to mean that the underlying register gets assigned the inverse
1288 transformation. This is used, for example, in the handling of
1289 non-local gotos. */
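      /* Editor's note (illustrative): with var_offset the offset of
	 virtual-stack-vars from the frame pointer, an insn

	     (set (reg virtual-stack-vars) (reg R))

	 is replaced below by code that computes R - var_offset into the
	 frame pointer, i.e. the inverse of the usual virtual-to-hard
	 substitution.  */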
1290 new = instantiate_new_reg (SET_DEST (set), &offset);
1291 if (new)
1292 {
1293 start_sequence ();
1294
1295 for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
1296 x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
1297 GEN_INT (-offset));
1298 x = force_operand (x, new);
1299 if (x != new)
1300 emit_move_insn (new, x);
1301
1302 seq = get_insns ();
1303 end_sequence ();
1304
1305 emit_insn_before (seq, insn);
1306 delete_insn (insn);
1307 return;
1308 }
1309
1310 /* Handle a straight copy from a virtual register by generating a
1311 new add insn. The difference between this and falling through
1312 to the generic case is avoiding a new pseudo and eliminating a
1313 move insn in the initial rtl stream. */
1314 new = instantiate_new_reg (SET_SRC (set), &offset);
1315 if (new && offset != 0
1316 && REG_P (SET_DEST (set))
1317 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1318 {
1319 start_sequence ();
1320
1321 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
1322 new, GEN_INT (offset), SET_DEST (set),
1323 1, OPTAB_LIB_WIDEN);
1324 if (x != SET_DEST (set))
1325 emit_move_insn (SET_DEST (set), x);
1326
1327 seq = get_insns ();
1328 end_sequence ();
1329
1330 emit_insn_before (seq, insn);
1331 delete_insn (insn);
1332 return;
1333 }
1334
1335 extract_insn (insn);
1336 insn_code = INSN_CODE (insn);
1337
1338 /* Handle a plus involving a virtual register by determining if the
1339 operands remain valid if they're modified in place. */
1340 if (GET_CODE (SET_SRC (set)) == PLUS
1341 && recog_data.n_operands >= 3
1342 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1343 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1344 && GET_CODE (recog_data.operand[2]) == CONST_INT
1345 && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
1346 {
1347 offset += INTVAL (recog_data.operand[2]);
1348
1349 /* If the sum is zero, then replace with a plain move. */
1350 if (offset == 0
1351 && REG_P (SET_DEST (set))
1352 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1353 {
1354 start_sequence ();
1355 emit_move_insn (SET_DEST (set), new);
1356 seq = get_insns ();
1357 end_sequence ();
1358
1359 emit_insn_before (seq, insn);
1360 delete_insn (insn);
1361 return;
1362 }
1363
1364 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1365
1366 /* Using validate_change and apply_change_group here leaves
1367 recog_data in an invalid state. Since we know exactly what
1368 we want to check, do those two by hand. */
1369 if (safe_insn_predicate (insn_code, 1, new)
1370 && safe_insn_predicate (insn_code, 2, x))
1371 {
1372 *recog_data.operand_loc[1] = recog_data.operand[1] = new;
1373 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1374 any_change = true;
1375
1376 /* Fall through into the regular operand fixup loop in
1377 order to take care of operands other than 1 and 2. */
1378 }
1379 }
1380 }
1381 else
1382 {
1383 extract_insn (insn);
1384 insn_code = INSN_CODE (insn);
1385 }
1386
1387 /* In the general case, we expect virtual registers to appear only in
1388 operands, and then only as either bare registers or inside memories. */
1389 for (i = 0; i < recog_data.n_operands; ++i)
1390 {
1391 x = recog_data.operand[i];
1392 switch (GET_CODE (x))
1393 {
1394 case MEM:
1395 {
1396 rtx addr = XEXP (x, 0);
1397 bool changed = false;
1398
1399 for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1400 if (!changed)
1401 continue;
1402
1403 start_sequence ();
1404 x = replace_equiv_address (x, addr);
1405 /* It may happen that the address with the virtual reg
1406 was valid (e.g. based on the virtual stack reg, which might
1407 be acceptable to the predicates with all offsets), whereas
1408 the address now isn't anymore, for instance when the address
1409 is still offsetted, but the base reg isn't virtual-stack-reg
1410 anymore. Below we would do a force_reg on the whole operand,
1411 but this insn might actually only accept memory. Hence,
1412 before doing that last resort, try to reload the address into
1413 a register, so this operand stays a MEM. */
1414 if (!safe_insn_predicate (insn_code, i, x))
1415 {
1416 addr = force_reg (GET_MODE (addr), addr);
1417 x = replace_equiv_address (x, addr);
1418 }
1419 seq = get_insns ();
1420 end_sequence ();
1421 if (seq)
1422 emit_insn_before (seq, insn);
1423 }
1424 break;
1425
1426 case REG:
1427 new = instantiate_new_reg (x, &offset);
1428 if (new == NULL)
1429 continue;
1430 if (offset == 0)
1431 x = new;
1432 else
1433 {
1434 start_sequence ();
1435
1436 /* Careful, special mode predicates may have stuff in
1437 insn_data[insn_code].operand[i].mode that isn't useful
1438 to us for computing a new value. */
1439 /* ??? Recognize address_operand and/or "p" constraints
1440 		 to see if (plus new offset) is a valid address before we put
1441 this through expand_simple_binop. */
1442 x = expand_simple_binop (GET_MODE (x), PLUS, new,
1443 GEN_INT (offset), NULL_RTX,
1444 1, OPTAB_LIB_WIDEN);
1445 seq = get_insns ();
1446 end_sequence ();
1447 emit_insn_before (seq, insn);
1448 }
1449 break;
1450
1451 case SUBREG:
1452 new = instantiate_new_reg (SUBREG_REG (x), &offset);
1453 if (new == NULL)
1454 continue;
1455 if (offset != 0)
1456 {
1457 start_sequence ();
1458 new = expand_simple_binop (GET_MODE (new), PLUS, new,
1459 GEN_INT (offset), NULL_RTX,
1460 1, OPTAB_LIB_WIDEN);
1461 seq = get_insns ();
1462 end_sequence ();
1463 emit_insn_before (seq, insn);
1464 }
1465 x = simplify_gen_subreg (recog_data.operand_mode[i], new,
1466 GET_MODE (new), SUBREG_BYTE (x));
1467 break;
1468
1469 default:
1470 continue;
1471 }
1472
1473 /* At this point, X contains the new value for the operand.
1474 Validate the new value vs the insn predicate. Note that
1475 asm insns will have insn_code -1 here. */
1476 if (!safe_insn_predicate (insn_code, i, x))
1477 {
1478 start_sequence ();
1479 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1480 seq = get_insns ();
1481 end_sequence ();
1482 if (seq)
1483 emit_insn_before (seq, insn);
1484 }
1485
1486 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1487 any_change = true;
1488 }
1489
1490 if (any_change)
1491 {
1492 /* Propagate operand changes into the duplicates. */
1493 for (i = 0; i < recog_data.n_dups; ++i)
1494 *recog_data.dup_loc[i]
1495 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1496
1497 /* Force re-recognition of the instruction for validation. */
1498 INSN_CODE (insn) = -1;
1499 }
1500
1501 if (asm_noperands (PATTERN (insn)) >= 0)
1502 {
1503 if (!check_asm_operands (PATTERN (insn)))
1504 {
1505 error_for_asm (insn, "impossible constraint in %<asm%>");
1506 delete_insn (insn);
1507 }
1508 }
1509 else
1510 {
1511 if (recog_memoized (insn) < 0)
1512 fatal_insn_not_found (insn);
1513 }
1514 }
1515
1516 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1517 do any instantiation required. */
1518
1519 void
1520 instantiate_decl_rtl (rtx x)
1521 {
1522 rtx addr;
1523
1524 if (x == 0)
1525 return;
1526
1527 /* If this is a CONCAT, recurse for the pieces. */
1528 if (GET_CODE (x) == CONCAT)
1529 {
1530 instantiate_decl_rtl (XEXP (x, 0));
1531 instantiate_decl_rtl (XEXP (x, 1));
1532 return;
1533 }
1534
1535 /* If this is not a MEM, no need to do anything. Similarly if the
1536 address is a constant or a register that is not a virtual register. */
1537 if (!MEM_P (x))
1538 return;
1539
1540 addr = XEXP (x, 0);
1541 if (CONSTANT_P (addr)
1542 || (REG_P (addr)
1543 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1544 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1545 return;
1546
1547 for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1548 }
1549
1550 /* Helper for instantiate_decls called via walk_tree: Process all decls
1551 in the given DECL_VALUE_EXPR. */
1552
1553 static tree
1554 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1555 {
1556 tree t = *tp;
1557 if (! EXPR_P (t) && ! GIMPLE_STMT_P (t))
1558 {
1559 *walk_subtrees = 0;
1560 if (DECL_P (t) && DECL_RTL_SET_P (t))
1561 instantiate_decl_rtl (DECL_RTL (t));
1562 }
1563 return NULL;
1564 }
1565
1566 /* Subroutine of instantiate_decls: Process all decls in the given
1567 BLOCK node and all its subblocks. */
1568
1569 static void
1570 instantiate_decls_1 (tree let)
1571 {
1572 tree t;
1573
1574 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1575 {
1576 if (DECL_RTL_SET_P (t))
1577 instantiate_decl_rtl (DECL_RTL (t));
1578 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1579 {
1580 tree v = DECL_VALUE_EXPR (t);
1581 walk_tree (&v, instantiate_expr, NULL, NULL);
1582 }
1583 }
1584
1585 /* Process all subblocks. */
1586 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1587 instantiate_decls_1 (t);
1588 }
1589
1590 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1591 all virtual registers in their DECL_RTL's. */
1592
1593 static void
1594 instantiate_decls (tree fndecl)
1595 {
1596 tree decl;
1597
1598 /* Process all parameters of the function. */
1599 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1600 {
1601 instantiate_decl_rtl (DECL_RTL (decl));
1602 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1603 if (DECL_HAS_VALUE_EXPR_P (decl))
1604 {
1605 tree v = DECL_VALUE_EXPR (decl);
1606 walk_tree (&v, instantiate_expr, NULL, NULL);
1607 }
1608 }
1609
1610 /* Now process all variables defined in the function or its subblocks. */
1611 instantiate_decls_1 (DECL_INITIAL (fndecl));
1612 }
1613
1614 /* Pass through the INSNS of function FNDECL and convert virtual register
1615 references to hard register references. */
1616
1617 static unsigned int
1618 instantiate_virtual_regs (void)
1619 {
1620 rtx insn;
1621
1622 /* Compute the offsets to use for this function. */
1623 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1624 var_offset = STARTING_FRAME_OFFSET;
1625 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1626 out_arg_offset = STACK_POINTER_OFFSET;
1627 #ifdef FRAME_POINTER_CFA_OFFSET
1628 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1629 #else
1630 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1631 #endif
1632
1633 /* Initialize recognition, indicating that volatile is OK. */
1634 init_recog ();
1635
1636 /* Scan through all the insns, instantiating every virtual register still
1637 present. */
1638 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1639 if (INSN_P (insn))
1640 {
1641 /* These patterns in the instruction stream can never be recognized.
1642 Fortunately, they shouldn't contain virtual registers either. */
1643 if (GET_CODE (PATTERN (insn)) == USE
1644 || GET_CODE (PATTERN (insn)) == CLOBBER
1645 || GET_CODE (PATTERN (insn)) == ADDR_VEC
1646 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1647 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1648 continue;
1649
1650 instantiate_virtual_regs_in_insn (insn);
1651
1652 if (INSN_DELETED_P (insn))
1653 continue;
1654
1655 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1656
1657 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1658 if (GET_CODE (insn) == CALL_INSN)
1659 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1660 instantiate_virtual_regs_in_rtx, NULL);
1661 }
1662
1663 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1664 instantiate_decls (current_function_decl);
1665
1666 targetm.instantiate_decls ();
1667
1668 /* Indicate that, from now on, assign_stack_local should use
1669 frame_pointer_rtx. */
1670 virtuals_instantiated = 1;
1671 return 0;
1672 }
1673
1674 struct rtl_opt_pass pass_instantiate_virtual_regs =
1675 {
1676 {
1677 RTL_PASS,
1678 "vregs", /* name */
1679 NULL, /* gate */
1680 instantiate_virtual_regs, /* execute */
1681 NULL, /* sub */
1682 NULL, /* next */
1683 0, /* static_pass_number */
1684 0, /* tv_id */
1685 0, /* properties_required */
1686 0, /* properties_provided */
1687 0, /* properties_destroyed */
1688 0, /* todo_flags_start */
1689 TODO_dump_func /* todo_flags_finish */
1690 }
1691 };
1692
1693 \f
1694 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1695 This means a type for which function calls must pass an address to the
1696 function or get an address back from the function.
1697 EXP may be a type node or an expression (whose type is tested). */
1698
1699 int
1700 aggregate_value_p (const_tree exp, const_tree fntype)
1701 {
1702 int i, regno, nregs;
1703 rtx reg;
1704
1705 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1706
1707 /* DECL node associated with FNTYPE when relevant, which we might need to
1708 check for by-invisible-reference returns, typically for CALL_EXPR input
1709 EXPressions. */
1710 const_tree fndecl = NULL_TREE;
1711
1712 if (fntype)
1713 switch (TREE_CODE (fntype))
1714 {
1715 case CALL_EXPR:
1716 fndecl = get_callee_fndecl (fntype);
1717 fntype = fndecl ? TREE_TYPE (fndecl) : 0;
1718 break;
1719 case FUNCTION_DECL:
1720 fndecl = fntype;
1721 fntype = TREE_TYPE (fndecl);
1722 break;
1723 case FUNCTION_TYPE:
1724 case METHOD_TYPE:
1725 break;
1726 case IDENTIFIER_NODE:
1727 fntype = 0;
1728 break;
1729 default:
1730 	/* We don't expect other tree codes here.  */
1731 gcc_unreachable ();
1732 }
1733
1734 if (TREE_CODE (type) == VOID_TYPE)
1735 return 0;
1736
1737 /* If the front end has decided that this needs to be passed by
1738 reference, do so. */
1739 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
1740 && DECL_BY_REFERENCE (exp))
1741 return 1;
1742
1743 /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
1744 called function RESULT_DECL, meaning the function returns in memory by
1745 invisible reference. This check lets front-ends not set TREE_ADDRESSABLE
1746 on the function type, which used to be the way to request such a return
1747 mechanism but might now be causing troubles at gimplification time if
1748 temporaries with the function type need to be created. */
1749 if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
1750 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
1751 return 1;
1752
1753 if (targetm.calls.return_in_memory (type, fntype))
1754 return 1;
1755 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1756 and thus can't be returned in registers. */
1757 if (TREE_ADDRESSABLE (type))
1758 return 1;
1759 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
1760 return 1;
1761 /* Make sure we have suitable call-clobbered regs to return
1762 the value in; if not, we must return it in memory. */
1763 reg = hard_function_value (type, 0, fntype, 0);
1764
1765 /* If we have something other than a REG (e.g. a PARALLEL), then assume
1766 it is OK. */
1767 if (!REG_P (reg))
1768 return 0;
1769
1770 regno = REGNO (reg);
1771 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
1772 for (i = 0; i < nregs; i++)
1773 if (! call_used_regs[regno + i])
1774 return 1;
1775 return 0;
1776 }
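/* Illustrative outcomes (a sketch): a plain int normally yields 0 and is
   returned in a register, while a struct that the target's
   return_in_memory hook refuses (for instance one larger than the
   available return registers) yields 1 and is returned through memory.  */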
1777 \f
1778 /* Return true if we should assign DECL a pseudo register; false if it
1779 should live on the local stack. */
1780
1781 bool
1782 use_register_for_decl (const_tree decl)
1783 {
1784 /* Honor volatile. */
1785 if (TREE_SIDE_EFFECTS (decl))
1786 return false;
1787
1788 /* Honor addressability. */
1789 if (TREE_ADDRESSABLE (decl))
1790 return false;
1791
1792 /* Only register-like things go in registers. */
1793 if (DECL_MODE (decl) == BLKmode)
1794 return false;
1795
1796 /* If -ffloat-store specified, don't put explicit float variables
1797 into registers. */
1798 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1799 propagates values across these stores, and it probably shouldn't. */
1800 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1801 return false;
1802
1803 /* If we're not interested in tracking debugging information for
1804 this decl, then we can certainly put it in a register. */
1805 if (DECL_IGNORED_P (decl))
1806 return true;
1807
1808 return (optimize || DECL_REGISTER (decl));
1809 }
1810
1811 /* Return true if TYPE should be passed by invisible reference. */
1812
1813 bool
1814 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1815 tree type, bool named_arg)
1816 {
1817 if (type)
1818 {
1819 /* If this type contains non-trivial constructors, then it is
1820 forbidden for the middle-end to create any new copies. */
1821 if (TREE_ADDRESSABLE (type))
1822 return true;
1823
1824 /* GCC post 3.4 passes *all* variable sized types by reference. */
1825 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1826 return true;
1827 }
1828
1829 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
1830 }
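
/* For example, a parameter whose type is TREE_ADDRESSABLE (e.g. a C++
   class with a non-trivial copy constructor) or whose TYPE_SIZE is not an
   INTEGER_CST (a variable-sized type) is forced by the checks above to be
   passed by invisible reference no matter what the target says; only for
   other types is the decision delegated to
   targetm.calls.pass_by_reference.  */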
1831
1832 /* Return true if TYPE, which is passed by reference, should be callee
1833 copied instead of caller copied. */
1834
1835 bool
1836 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1837 tree type, bool named_arg)
1838 {
1839 if (type && TREE_ADDRESSABLE (type))
1840 return false;
1841 return targetm.calls.callee_copies (ca, mode, type, named_arg);
1842 }
1843
1844 /* Structures to communicate between the subroutines of assign_parms.
1845 The first holds data persistent across all parameters, the second
1846 is cleared out for each parameter. */
1847
1848 struct assign_parm_data_all
1849 {
1850 CUMULATIVE_ARGS args_so_far;
1851 struct args_size stack_args_size;
1852 tree function_result_decl;
1853 tree orig_fnargs;
1854 rtx first_conversion_insn;
1855 rtx last_conversion_insn;
1856 HOST_WIDE_INT pretend_args_size;
1857 HOST_WIDE_INT extra_pretend_bytes;
1858 int reg_parm_stack_space;
1859 };
1860
1861 struct assign_parm_data_one
1862 {
1863 tree nominal_type;
1864 tree passed_type;
1865 rtx entry_parm;
1866 rtx stack_parm;
1867 enum machine_mode nominal_mode;
1868 enum machine_mode passed_mode;
1869 enum machine_mode promoted_mode;
1870 struct locate_and_pad_arg_data locate;
1871 int partial;
1872 BOOL_BITFIELD named_arg : 1;
1873 BOOL_BITFIELD passed_pointer : 1;
1874 BOOL_BITFIELD on_stack : 1;
1875 BOOL_BITFIELD loaded_in_reg : 1;
1876 };
1877
1878 /* A subroutine of assign_parms. Initialize ALL. */
1879
1880 static void
1881 assign_parms_initialize_all (struct assign_parm_data_all *all)
1882 {
1883 tree fntype;
1884
1885 memset (all, 0, sizeof (*all));
1886
1887 fntype = TREE_TYPE (current_function_decl);
1888
1889 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
1890 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
1891 #else
1892 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
1893 current_function_decl, -1);
1894 #endif
1895
1896 #ifdef REG_PARM_STACK_SPACE
1897 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
1898 #endif
1899 }
1900
1901 /* If ARGS contains entries with complex types, split the entry into two
1902 entries of the component type. Return a new list if substitutions are
1903 needed, else the old list. */
1904
1905 static tree
1906 split_complex_args (tree args)
1907 {
1908 tree p;
1909
1910 /* Before allocating memory, check for the common case of no complex args. */
1911 for (p = args; p; p = TREE_CHAIN (p))
1912 {
1913 tree type = TREE_TYPE (p);
1914 if (TREE_CODE (type) == COMPLEX_TYPE
1915 && targetm.calls.split_complex_arg (type))
1916 goto found;
1917 }
1918 return args;
1919
1920 found:
1921 args = copy_list (args);
1922
1923 for (p = args; p; p = TREE_CHAIN (p))
1924 {
1925 tree type = TREE_TYPE (p);
1926 if (TREE_CODE (type) == COMPLEX_TYPE
1927 && targetm.calls.split_complex_arg (type))
1928 {
1929 tree decl;
1930 tree subtype = TREE_TYPE (type);
1931 bool addressable = TREE_ADDRESSABLE (p);
1932
1933 /* Rewrite the PARM_DECL's type with its component. */
1934 TREE_TYPE (p) = subtype;
1935 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
1936 DECL_MODE (p) = VOIDmode;
1937 DECL_SIZE (p) = NULL;
1938 DECL_SIZE_UNIT (p) = NULL;
1939 /* If this arg must go in memory, put it in a pseudo here.
1940 We can't allow it to go in memory as per normal parms,
1941 because the usual place might not have the imag part
1942 adjacent to the real part. */
1943 DECL_ARTIFICIAL (p) = addressable;
1944 DECL_IGNORED_P (p) = addressable;
1945 TREE_ADDRESSABLE (p) = 0;
1946 layout_decl (p, 0);
1947
1948 /* Build a second synthetic decl. */
1949 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
1950 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
1951 DECL_ARTIFICIAL (decl) = addressable;
1952 DECL_IGNORED_P (decl) = addressable;
1953 layout_decl (decl, 0);
1954
1955 /* Splice it in; skip the new decl. */
1956 TREE_CHAIN (decl) = TREE_CHAIN (p);
1957 TREE_CHAIN (p) = decl;
1958 p = decl;
1959 }
1960 }
1961
1962 return args;
1963 }
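
/* Sketch of the effect: given a PARM_DECL P of type _Complex double (and a
   target whose split_complex_arg hook accepts that type), P is rewritten in
   place to type double and re-laid-out to hold the real part, and a second
   artificial PARM_DECL of type double for the imaginary part is spliced
   into the chain immediately after it.  */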
1964
1965 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
1966 the hidden struct return argument, and (abi willing) complex args.
1967 Return the new parameter list. */
1968
1969 static tree
1970 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
1971 {
1972 tree fndecl = current_function_decl;
1973 tree fntype = TREE_TYPE (fndecl);
1974 tree fnargs = DECL_ARGUMENTS (fndecl);
1975
1976 /* If struct value address is treated as the first argument, make it so. */
1977 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
1978 && ! current_function_returns_pcc_struct
1979 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
1980 {
1981 tree type = build_pointer_type (TREE_TYPE (fntype));
1982 tree decl;
1983
1984 decl = build_decl (PARM_DECL, NULL_TREE, type);
1985 DECL_ARG_TYPE (decl) = type;
1986 DECL_ARTIFICIAL (decl) = 1;
1987 DECL_IGNORED_P (decl) = 1;
1988
1989 TREE_CHAIN (decl) = fnargs;
1990 fnargs = decl;
1991 all->function_result_decl = decl;
1992 }
1993
1994 all->orig_fnargs = fnargs;
1995
1996 /* If the target wants to split complex arguments into scalars, do so. */
1997 if (targetm.calls.split_complex_arg)
1998 fnargs = split_complex_args (fnargs);
1999
2000 return fnargs;
2001 }
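
/* For illustration: when a function returns its aggregate value in memory
   and the target provides no struct_value_rtx register, the code above
   behaves roughly as if 'struct S f (int x)' had been declared
   'struct S *f (struct S *<retval>, int x)': a synthetic pointer PARM_DECL
   is prepended to the argument list and remembered in
   all->function_result_decl so assign_parms can wire up DECL_RESULT
   later.  */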
2002
2003 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2004 data for the parameter. Incorporate ABI specifics such as pass-by-
2005 reference and type promotion. */
2006
2007 static void
2008 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2009 struct assign_parm_data_one *data)
2010 {
2011 tree nominal_type, passed_type;
2012 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2013
2014 memset (data, 0, sizeof (*data));
2015
2016 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2017 if (!current_function_stdarg)
2018 data->named_arg = 1; /* No variadic parms. */
2019 else if (TREE_CHAIN (parm))
2020 data->named_arg = 1; /* Not the last non-variadic parm. */
2021 else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2022 data->named_arg = 1; /* Only variadic ones are unnamed. */
2023 else
2024 data->named_arg = 0; /* Treat as variadic. */
2025
2026 nominal_type = TREE_TYPE (parm);
2027 passed_type = DECL_ARG_TYPE (parm);
2028
2029 /* Look out for errors propagating this far. Also, if the parameter's
2030 type is void then its value doesn't matter. */
2031 if (TREE_TYPE (parm) == error_mark_node
2032 /* This can happen after weird syntax errors
2033 or if an enum type is defined among the parms. */
2034 || TREE_CODE (parm) != PARM_DECL
2035 || passed_type == NULL
2036 || VOID_TYPE_P (nominal_type))
2037 {
2038 nominal_type = passed_type = void_type_node;
2039 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2040 goto egress;
2041 }
2042
2043 /* Find mode of arg as it is passed, and mode of arg as it should be
2044 during execution of this function. */
2045 passed_mode = TYPE_MODE (passed_type);
2046 nominal_mode = TYPE_MODE (nominal_type);
2047
2048 /* If the parm is to be passed as a transparent union, use the type of
2049 the first field for the tests below. We have already verified that
2050 the modes are the same. */
2051 if (TREE_CODE (passed_type) == UNION_TYPE
2052 && TYPE_TRANSPARENT_UNION (passed_type))
2053 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2054
2055 /* See if this arg was passed by invisible reference. */
2056 if (pass_by_reference (&all->args_so_far, passed_mode,
2057 passed_type, data->named_arg))
2058 {
2059 passed_type = nominal_type = build_pointer_type (passed_type);
2060 data->passed_pointer = true;
2061 passed_mode = nominal_mode = Pmode;
2062 }
2063
2064 /* Find mode as it is passed by the ABI. */
2065 promoted_mode = passed_mode;
2066 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2067 {
2068 int unsignedp = TYPE_UNSIGNED (passed_type);
2069 promoted_mode = promote_mode (passed_type, promoted_mode,
2070 &unsignedp, 1);
2071 }
2072
2073 egress:
2074 data->nominal_type = nominal_type;
2075 data->passed_type = passed_type;
2076 data->nominal_mode = nominal_mode;
2077 data->passed_mode = passed_mode;
2078 data->promoted_mode = promoted_mode;
2079 }
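
/* Two illustrative outcomes of the above (target-dependent, so only a
   sketch): if pass_by_reference is true for an aggregate parameter, both
   passed_type and nominal_type become the corresponding pointer type and
   both modes become Pmode, with data->passed_pointer set; if instead the
   target promotes function arguments, a 'short' parameter may end up with
   nominal_mode == HImode but promoted_mode == SImode.  */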
2080
2081 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2082
2083 static void
2084 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2085 struct assign_parm_data_one *data, bool no_rtl)
2086 {
2087 int varargs_pretend_bytes = 0;
2088
2089 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2090 data->promoted_mode,
2091 data->passed_type,
2092 &varargs_pretend_bytes, no_rtl);
2093
2094 /* If the back-end has requested extra stack space, record how much is
2095 needed. Do not change pretend_args_size otherwise since it may be
2096 nonzero from an earlier partial argument. */
2097 if (varargs_pretend_bytes > 0)
2098 all->pretend_args_size = varargs_pretend_bytes;
2099 }
2100
2101 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2102 the incoming location of the current parameter. */
2103
2104 static void
2105 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2106 struct assign_parm_data_one *data)
2107 {
2108 HOST_WIDE_INT pretend_bytes = 0;
2109 rtx entry_parm;
2110 bool in_regs;
2111
2112 if (data->promoted_mode == VOIDmode)
2113 {
2114 data->entry_parm = data->stack_parm = const0_rtx;
2115 return;
2116 }
2117
2118 #ifdef FUNCTION_INCOMING_ARG
2119 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2120 data->passed_type, data->named_arg);
2121 #else
2122 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2123 data->passed_type, data->named_arg);
2124 #endif
2125
2126 if (entry_parm == 0)
2127 data->promoted_mode = data->passed_mode;
2128
2129 /* Determine parm's home in the stack, in case it arrives in the stack
2130 or we should pretend it did. Compute the stack position and rtx where
2131 the argument arrives and its size.
2132
2133 There is one complexity here: If this was a parameter that would
2134 have been passed in registers, but wasn't only because it is
2135 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2136 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2137 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2138 as it was the previous time. */
2139 in_regs = entry_parm != 0;
2140 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2141 in_regs = true;
2142 #endif
2143 if (!in_regs && !data->named_arg)
2144 {
2145 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2146 {
2147 rtx tem;
2148 #ifdef FUNCTION_INCOMING_ARG
2149 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2150 data->passed_type, true);
2151 #else
2152 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2153 data->passed_type, true);
2154 #endif
2155 in_regs = tem != NULL;
2156 }
2157 }
2158
2159 /* If this parameter was passed both in registers and in the stack, use
2160 the copy on the stack. */
2161 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2162 data->passed_type))
2163 entry_parm = 0;
2164
2165 if (entry_parm)
2166 {
2167 int partial;
2168
2169 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2170 data->promoted_mode,
2171 data->passed_type,
2172 data->named_arg);
2173 data->partial = partial;
2174
2175 /* The caller might already have allocated stack space for the
2176 register parameters. */
2177 if (partial != 0 && all->reg_parm_stack_space == 0)
2178 {
2179 /* Part of this argument is passed in registers and part
2180 is passed on the stack. Ask the prologue code to extend
2181 the stack part so that we can recreate the full value.
2182
2183 PRETEND_BYTES is the size of the registers we need to store.
2184 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2185 stack space that the prologue should allocate.
2186
2187 Internally, gcc assumes that the argument pointer is aligned
2188 to STACK_BOUNDARY bits. This is used both for alignment
2189 optimizations (see init_emit) and to locate arguments that are
2190 aligned to more than PARM_BOUNDARY bits. We must preserve this
2191 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2192 a stack boundary. */
2193
2194 /* We assume at most one partial arg, and it must be the first
2195 argument on the stack. */
2196 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2197
2198 pretend_bytes = partial;
2199 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2200
2201 /* We want to align relative to the actual stack pointer, so
2202 don't include this in the stack size until later. */
2203 all->extra_pretend_bytes = all->pretend_args_size;
2204 }
2205 }
2206
2207 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2208 entry_parm ? data->partial : 0, current_function_decl,
2209 &all->stack_args_size, &data->locate);
2210
2211 /* Adjust offsets to include the pretend args. */
2212 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2213 data->locate.slot_offset.constant += pretend_bytes;
2214 data->locate.offset.constant += pretend_bytes;
2215
2216 data->entry_parm = entry_parm;
2217 }
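
/* Worked example of the pretend-args accounting above, under assumed
   values: with STACK_BYTES == 8, no reserved reg_parm_stack_space, and
   arg_partial_bytes returning 4, we get pretend_bytes = 4,
   all->pretend_args_size = CEIL_ROUND (4, 8) = 8 and
   all->extra_pretend_bytes = 8; after locate_and_pad_parm the slot and
   offset constants are then shifted by 8 - 4 = 4 bytes.  */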
2218
2219 /* A subroutine of assign_parms. If there is actually space on the stack
2220 for this parm, count it in stack_args_size and return true. */
2221
2222 static bool
2223 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2224 struct assign_parm_data_one *data)
2225 {
2226 /* Trivially true if we've no incoming register. */
2227 if (data->entry_parm == NULL)
2228 ;
2229 /* Also true if we're partially in registers and partially not,
2230 since we've arranged to drop the entire argument on the stack. */
2231 else if (data->partial != 0)
2232 ;
2233 /* Also true if the target says that it's passed in both registers
2234 and on the stack. */
2235 else if (GET_CODE (data->entry_parm) == PARALLEL
2236 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2237 ;
2238 /* Also true if the target says that there's stack allocated for
2239 all register parameters. */
2240 else if (all->reg_parm_stack_space > 0)
2241 ;
2242 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2243 else
2244 return false;
2245
2246 all->stack_args_size.constant += data->locate.size.constant;
2247 if (data->locate.size.var)
2248 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2249
2250 return true;
2251 }
2252
2253 /* A subroutine of assign_parms. Given that this parameter is allocated
2254 stack space by the ABI, find it. */
2255
2256 static void
2257 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2258 {
2259 rtx offset_rtx, stack_parm;
2260 unsigned int align, boundary;
2261
2262 /* If we're passing this arg using a reg, make its stack home the
2263 aligned stack slot. */
2264 if (data->entry_parm)
2265 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2266 else
2267 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2268
2269 stack_parm = current_function_internal_arg_pointer;
2270 if (offset_rtx != const0_rtx)
2271 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2272 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2273
2274 set_mem_attributes (stack_parm, parm, 1);
2275
2276 boundary = data->locate.boundary;
2277 align = BITS_PER_UNIT;
2278
2279 /* If we're padding upward, we know that the alignment of the slot
2280 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2281 intentionally forcing upward padding. Otherwise we have to come
2282 up with a guess at the alignment based on OFFSET_RTX. */
2283 if (data->locate.where_pad != downward || data->entry_parm)
2284 align = boundary;
2285 else if (GET_CODE (offset_rtx) == CONST_INT)
2286 {
2287 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2288 align = align & -align;
2289 }
2290 set_mem_align (stack_parm, align);
2291
2292 if (data->entry_parm)
2293 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2294
2295 data->stack_parm = stack_parm;
2296 }
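
/* Worked example of the alignment guess above: for a downward-padded stack
   parm with no incoming register, offset_rtx == (const_int 4) and
   boundary == 64, align becomes (4 * BITS_PER_UNIT) | 64 == 96, and
   96 & -96 == 32, so the MEM is recorded as only 32-bit aligned.  */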
2297
2298 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2299 always valid and contiguous. */
2300
2301 static void
2302 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2303 {
2304 rtx entry_parm = data->entry_parm;
2305 rtx stack_parm = data->stack_parm;
2306
2307 /* If this parm was passed part in regs and part in memory, pretend it
2308 arrived entirely in memory by pushing the register-part onto the stack.
2309 In the special case of a DImode or DFmode that is split, we could put
2310 it together in a pseudoreg directly, but for now that's not worth
2311 bothering with. */
2312 if (data->partial != 0)
2313 {
2314 /* Handle calls that pass values in multiple non-contiguous
2315 locations. The Irix 6 ABI has examples of this. */
2316 if (GET_CODE (entry_parm) == PARALLEL)
2317 emit_group_store (validize_mem (stack_parm), entry_parm,
2318 data->passed_type,
2319 int_size_in_bytes (data->passed_type));
2320 else
2321 {
2322 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2323 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2324 data->partial / UNITS_PER_WORD);
2325 }
2326
2327 entry_parm = stack_parm;
2328 }
2329
2330 /* If we didn't decide this parm came in a register, by default it came
2331 on the stack. */
2332 else if (entry_parm == NULL)
2333 entry_parm = stack_parm;
2334
2335 /* When an argument is passed in multiple locations, we can't make use
2336 of this information, but we can save some copying if the whole argument
2337 is passed in a single register. */
2338 else if (GET_CODE (entry_parm) == PARALLEL
2339 && data->nominal_mode != BLKmode
2340 && data->passed_mode != BLKmode)
2341 {
2342 size_t i, len = XVECLEN (entry_parm, 0);
2343
2344 for (i = 0; i < len; i++)
2345 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2346 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2347 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2348 == data->passed_mode)
2349 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2350 {
2351 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2352 break;
2353 }
2354 }
2355
2356 data->entry_parm = entry_parm;
2357 }
2358
2359 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2360 always valid and properly aligned. */
2361
2362 static void
2363 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2364 {
2365 rtx stack_parm = data->stack_parm;
2366
2367 /* If we can't trust the parm stack slot to be aligned enough for its
2368 ultimate type, don't use that slot after entry. We'll make another
2369 stack slot, if we need one. */
2370 if (stack_parm
2371 && ((STRICT_ALIGNMENT
2372 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2373 || (data->nominal_type
2374 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2375 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2376 stack_parm = NULL;
2377
2378 /* If parm was passed in memory, and we need to convert it on entry,
2379 don't store it back in that same slot. */
2380 else if (data->entry_parm == stack_parm
2381 && data->nominal_mode != BLKmode
2382 && data->nominal_mode != data->passed_mode)
2383 stack_parm = NULL;
2384
2385 /* If stack protection is in effect for this function, don't leave any
2386 pointers in their passed stack slots. */
2387 else if (cfun->stack_protect_guard
2388 && (flag_stack_protect == 2
2389 || data->passed_pointer
2390 || POINTER_TYPE_P (data->nominal_type)))
2391 stack_parm = NULL;
2392
2393 data->stack_parm = stack_parm;
2394 }
2395
2396 /* A subroutine of assign_parms. Return true if the current parameter
2397 should be stored as a BLKmode in the current frame. */
2398
2399 static bool
2400 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2401 {
2402 if (data->nominal_mode == BLKmode)
2403 return true;
2404 if (GET_CODE (data->entry_parm) == PARALLEL)
2405 return true;
2406
2407 #ifdef BLOCK_REG_PADDING
2408 /* Only assign_parm_setup_block knows how to deal with register arguments
2409 that are padded at the least significant end. */
2410 if (REG_P (data->entry_parm)
2411 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2412 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2413 == (BYTES_BIG_ENDIAN ? upward : downward)))
2414 return true;
2415 #endif
2416
2417 return false;
2418 }
2419
2420 /* A subroutine of assign_parms. Arrange for the parameter to be
2421 present and valid in DATA->STACK_RTL. */
2422
2423 static void
2424 assign_parm_setup_block (struct assign_parm_data_all *all,
2425 tree parm, struct assign_parm_data_one *data)
2426 {
2427 rtx entry_parm = data->entry_parm;
2428 rtx stack_parm = data->stack_parm;
2429 HOST_WIDE_INT size;
2430 HOST_WIDE_INT size_stored;
2431 rtx orig_entry_parm = entry_parm;
2432
2433 if (GET_CODE (entry_parm) == PARALLEL)
2434 entry_parm = emit_group_move_into_temps (entry_parm);
2435
2436 /* If we've a non-block object that's nevertheless passed in parts,
2437 reconstitute it in register operations rather than on the stack. */
2438 if (GET_CODE (entry_parm) == PARALLEL
2439 && data->nominal_mode != BLKmode)
2440 {
2441 rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);
2442
2443 if ((XVECLEN (entry_parm, 0) > 1
2444 || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
2445 && use_register_for_decl (parm))
2446 {
2447 rtx parmreg = gen_reg_rtx (data->nominal_mode);
2448
2449 push_to_sequence2 (all->first_conversion_insn,
2450 all->last_conversion_insn);
2451
2452 /* For values returned in multiple registers, handle possible
2453 incompatible calls to emit_group_store.
2454
2455 For example, the following would be invalid, and would have to
2456 be fixed by the conditional below:
2457
2458 emit_group_store ((reg:SF), (parallel:DF))
2459 emit_group_store ((reg:SI), (parallel:DI))
2460
2461 An example of this are doubles in e500 v2:
2462 (parallel:DF (expr_list (reg:SI) (const_int 0))
2463 (expr_list (reg:SI) (const_int 4))). */
2464 if (data->nominal_mode != data->passed_mode)
2465 {
2466 rtx t = gen_reg_rtx (GET_MODE (entry_parm));
2467 emit_group_store (t, entry_parm, NULL_TREE,
2468 GET_MODE_SIZE (GET_MODE (entry_parm)));
2469 convert_move (parmreg, t, 0);
2470 }
2471 else
2472 emit_group_store (parmreg, entry_parm, data->nominal_type,
2473 int_size_in_bytes (data->nominal_type));
2474
2475 all->first_conversion_insn = get_insns ();
2476 all->last_conversion_insn = get_last_insn ();
2477 end_sequence ();
2478
2479 SET_DECL_RTL (parm, parmreg);
2480 return;
2481 }
2482 }
2483
2484 size = int_size_in_bytes (data->passed_type);
2485 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2486 if (stack_parm == 0)
2487 {
2488 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2489 stack_parm = assign_stack_local (BLKmode, size_stored,
2490 DECL_ALIGN (parm));
2491 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2492 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2493 set_mem_attributes (stack_parm, parm, 1);
2494 }
2495
2496 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2497 calls that pass values in multiple non-contiguous locations. */
2498 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2499 {
2500 rtx mem;
2501
2502 /* Note that we will be storing an integral number of words.
2503 So we have to be careful to ensure that we allocate an
2504 integral number of words. We do this above when we call
2505 assign_stack_local if space was not allocated in the argument
2506 list. If it was, this will not work if PARM_BOUNDARY is not
2507 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2508 if it becomes a problem. Exception is when BLKmode arrives
2509 with arguments not conforming to word_mode. */
2510
2511 if (data->stack_parm == 0)
2512 ;
2513 else if (GET_CODE (entry_parm) == PARALLEL)
2514 ;
2515 else
2516 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2517
2518 mem = validize_mem (stack_parm);
2519
2520 /* Handle values in multiple non-contiguous locations. */
2521 if (GET_CODE (entry_parm) == PARALLEL)
2522 {
2523 push_to_sequence2 (all->first_conversion_insn,
2524 all->last_conversion_insn);
2525 emit_group_store (mem, entry_parm, data->passed_type, size);
2526 all->first_conversion_insn = get_insns ();
2527 all->last_conversion_insn = get_last_insn ();
2528 end_sequence ();
2529 }
2530
2531 else if (size == 0)
2532 ;
2533
2534 /* If SIZE is that of a mode no bigger than a word, just use
2535 that mode's store operation. */
2536 else if (size <= UNITS_PER_WORD)
2537 {
2538 enum machine_mode mode
2539 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2540
2541 if (mode != BLKmode
2542 #ifdef BLOCK_REG_PADDING
2543 && (size == UNITS_PER_WORD
2544 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2545 != (BYTES_BIG_ENDIAN ? upward : downward)))
2546 #endif
2547 )
2548 {
2549 rtx reg;
2550
2551 /* We are really truncating a word_mode value containing
2552 SIZE bytes into a value of mode MODE. If such an
2553 operation requires no actual instructions, we can refer
2554 to the value directly in mode MODE, otherwise we must
2555 start with the register in word_mode and explicitly
2556 convert it. */
2557 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2558 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2559 else
2560 {
2561 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2562 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2563 }
2564 emit_move_insn (change_address (mem, mode, 0), reg);
2565 }
2566
2567 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2568 machine must be aligned to the left before storing
2569 to memory. Note that the previous test doesn't
2570 handle all cases (e.g. SIZE == 3). */
2571 else if (size != UNITS_PER_WORD
2572 #ifdef BLOCK_REG_PADDING
2573 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2574 == downward)
2575 #else
2576 && BYTES_BIG_ENDIAN
2577 #endif
2578 )
2579 {
2580 rtx tem, x;
2581 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2582 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2583
2584 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2585 build_int_cst (NULL_TREE, by),
2586 NULL_RTX, 1);
2587 tem = change_address (mem, word_mode, 0);
2588 emit_move_insn (tem, x);
2589 }
2590 else
2591 move_block_from_reg (REGNO (entry_parm), mem,
2592 size_stored / UNITS_PER_WORD);
2593 }
2594 else
2595 move_block_from_reg (REGNO (entry_parm), mem,
2596 size_stored / UNITS_PER_WORD);
2597 }
2598 else if (data->stack_parm == 0)
2599 {
2600 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2601 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2602 BLOCK_OP_NORMAL);
2603 all->first_conversion_insn = get_insns ();
2604 all->last_conversion_insn = get_last_insn ();
2605 end_sequence ();
2606 }
2607
2608 data->stack_parm = stack_parm;
2609 SET_DECL_RTL (parm, stack_parm);
2610 }
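
/* Worked example for the register-to-stack copy above, assuming
   UNITS_PER_WORD == 4: a 10-byte BLKmode argument arriving in registers
   with no preallocated slot gives size_stored = CEIL_ROUND (10, 4) = 12,
   a 12-byte BLKmode stack slot is created, and move_block_from_reg copies
   12 / 4 = 3 whole words out of the entry registers.  */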
2611
2612 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2613 parameter. Get it there. Perform all ABI specified conversions. */
2614
2615 static void
2616 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2617 struct assign_parm_data_one *data)
2618 {
2619 rtx parmreg;
2620 enum machine_mode promoted_nominal_mode;
2621 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2622 bool did_conversion = false;
2623
2624 /* Store the parm in a pseudoregister during the function, but we may
2625 need to do it in a wider mode. */
2626
2627 /* This is not really promoting for a call. However we need to be
2628 consistent with assign_parm_find_data_types and expand_expr_real_1. */
2629 promoted_nominal_mode
2630 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
2631
2632 parmreg = gen_reg_rtx (promoted_nominal_mode);
2633
2634 if (!DECL_ARTIFICIAL (parm))
2635 mark_user_reg (parmreg);
2636
2637 /* If this was an item that we received a pointer to,
2638 set DECL_RTL appropriately. */
2639 if (data->passed_pointer)
2640 {
2641 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2642 set_mem_attributes (x, parm, 1);
2643 SET_DECL_RTL (parm, x);
2644 }
2645 else
2646 SET_DECL_RTL (parm, parmreg);
2647
2648 /* Copy the value into the register. */
2649 if (data->nominal_mode != data->passed_mode
2650 || promoted_nominal_mode != data->promoted_mode)
2651 {
2652 int save_tree_used;
2653
2654 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2655 mode, by the caller. We now have to convert it to
2656 NOMINAL_MODE, if different. However, PARMREG may be in
2657 a different mode than NOMINAL_MODE if it is being stored
2658 promoted.
2659
2660 If ENTRY_PARM is a hard register, it might be in a register
2661 not valid for operating in its mode (e.g., an odd-numbered
2662 register for a DFmode). In that case, moves are the only
2663 thing valid, so we can't do a convert from there. This
2664 occurs when the calling sequence allow such misaligned
2665 usages.
2666
2667 In addition, the conversion may involve a call, which could
2668 clobber parameters which haven't been copied to pseudo
2669 registers yet. Therefore, we must first copy the parm to
2670 a pseudo reg here, and save the conversion until after all
2671 parameters have been moved. */
2672
2673 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2674
2675 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2676
2677 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2678 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2679
2680 if (GET_CODE (tempreg) == SUBREG
2681 && GET_MODE (tempreg) == data->nominal_mode
2682 && REG_P (SUBREG_REG (tempreg))
2683 && data->nominal_mode == data->passed_mode
2684 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2685 && GET_MODE_SIZE (GET_MODE (tempreg))
2686 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2687 {
2688 /* The argument is already sign/zero extended, so note it
2689 into the subreg. */
2690 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2691 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2692 }
2693
2694 /* TREE_USED gets set erroneously during expand_assignment. */
2695 save_tree_used = TREE_USED (parm);
2696 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
2697 TREE_USED (parm) = save_tree_used;
2698 all->first_conversion_insn = get_insns ();
2699 all->last_conversion_insn = get_last_insn ();
2700 end_sequence ();
2701
2702 did_conversion = true;
2703 }
2704 else
2705 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2706
2707 /* If we were passed a pointer but the actual value can safely live
2708 in a register, put it in one. */
2709 if (data->passed_pointer
2710 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2711 /* If by-reference argument was promoted, demote it. */
2712 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2713 || use_register_for_decl (parm)))
2714 {
2715 /* We can't use nominal_mode, because it will have been set to
2716 Pmode above. We must use the actual mode of the parm. */
2717 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2718 mark_user_reg (parmreg);
2719
2720 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2721 {
2722 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2723 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2724
2725 push_to_sequence2 (all->first_conversion_insn,
2726 all->last_conversion_insn);
2727 emit_move_insn (tempreg, DECL_RTL (parm));
2728 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2729 emit_move_insn (parmreg, tempreg);
2730 all->first_conversion_insn = get_insns ();
2731 all->last_conversion_insn = get_last_insn ();
2732 end_sequence ();
2733
2734 did_conversion = true;
2735 }
2736 else
2737 emit_move_insn (parmreg, DECL_RTL (parm));
2738
2739 SET_DECL_RTL (parm, parmreg);
2740
2741 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2742 now the parm. */
2743 data->stack_parm = NULL;
2744 }
2745
2746 /* Mark the register as eliminable if we did no conversion and it was
2747 copied from memory at a fixed offset, and the arg pointer was not
2748 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2749 offset formed an invalid address, such memory-equivalences as we
2750 make here would screw up life analysis for it. */
2751 if (data->nominal_mode == data->passed_mode
2752 && !did_conversion
2753 && data->stack_parm != 0
2754 && MEM_P (data->stack_parm)
2755 && data->locate.offset.var == 0
2756 && reg_mentioned_p (virtual_incoming_args_rtx,
2757 XEXP (data->stack_parm, 0)))
2758 {
2759 rtx linsn = get_last_insn ();
2760 rtx sinsn, set;
2761
2762 /* Mark complex types separately. */
2763 if (GET_CODE (parmreg) == CONCAT)
2764 {
2765 enum machine_mode submode
2766 = GET_MODE_INNER (GET_MODE (parmreg));
2767 int regnor = REGNO (XEXP (parmreg, 0));
2768 int regnoi = REGNO (XEXP (parmreg, 1));
2769 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2770 rtx stacki = adjust_address_nv (data->stack_parm, submode,
2771 GET_MODE_SIZE (submode));
2772
2773 /* Scan backwards for the set of the real and
2774 imaginary parts. */
2775 for (sinsn = linsn; sinsn != 0;
2776 sinsn = prev_nonnote_insn (sinsn))
2777 {
2778 set = single_set (sinsn);
2779 if (set == 0)
2780 continue;
2781
2782 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2783 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
2784 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2785 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
2786 }
2787 }
2788 else if ((set = single_set (linsn)) != 0
2789 && SET_DEST (set) == parmreg)
2790 set_unique_reg_note (linsn, REG_EQUIV, data->stack_parm);
2791 }
2792
2793 /* For pointer data type, suggest pointer register. */
2794 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2795 mark_reg_pointer (parmreg,
2796 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2797 }
2798
2799 /* A subroutine of assign_parms. Allocate stack space to hold the current
2800 parameter. Get it there. Perform all ABI specified conversions. */
2801
2802 static void
2803 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2804 struct assign_parm_data_one *data)
2805 {
2806 /* Value must be stored in the stack slot STACK_PARM during function
2807 execution. */
2808 bool to_conversion = false;
2809
2810 if (data->promoted_mode != data->nominal_mode)
2811 {
2812 /* Conversion is required. */
2813 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2814
2815 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2816
2817 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2818 to_conversion = true;
2819
2820 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2821 TYPE_UNSIGNED (TREE_TYPE (parm)));
2822
2823 if (data->stack_parm)
2824 /* ??? This may need a big-endian conversion on sparc64. */
2825 data->stack_parm
2826 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2827 }
2828
2829 if (data->entry_parm != data->stack_parm)
2830 {
2831 rtx src, dest;
2832
2833 if (data->stack_parm == 0)
2834 {
2835 data->stack_parm
2836 = assign_stack_local (GET_MODE (data->entry_parm),
2837 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2838 TYPE_ALIGN (data->passed_type));
2839 set_mem_attributes (data->stack_parm, parm, 1);
2840 }
2841
2842 dest = validize_mem (data->stack_parm);
2843 src = validize_mem (data->entry_parm);
2844
2845 if (MEM_P (src))
2846 {
2847 /* Use a block move to handle potentially misaligned entry_parm. */
2848 if (!to_conversion)
2849 push_to_sequence2 (all->first_conversion_insn,
2850 all->last_conversion_insn);
2851 to_conversion = true;
2852
2853 emit_block_move (dest, src,
2854 GEN_INT (int_size_in_bytes (data->passed_type)),
2855 BLOCK_OP_NORMAL);
2856 }
2857 else
2858 emit_move_insn (dest, src);
2859 }
2860
2861 if (to_conversion)
2862 {
2863 all->first_conversion_insn = get_insns ();
2864 all->last_conversion_insn = get_last_insn ();
2865 end_sequence ();
2866 }
2867
2868 SET_DECL_RTL (parm, data->stack_parm);
2869 }
2870
2871 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
2872 undo the frobbing that we did in assign_parms_augmented_arg_list. */
2873
2874 static void
2875 assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
2876 {
2877 tree parm;
2878 tree orig_fnargs = all->orig_fnargs;
2879
2880 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2881 {
2882 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2883 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2884 {
2885 rtx tmp, real, imag;
2886 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2887
2888 real = DECL_RTL (fnargs);
2889 imag = DECL_RTL (TREE_CHAIN (fnargs));
2890 if (inner != GET_MODE (real))
2891 {
2892 real = gen_lowpart_SUBREG (inner, real);
2893 imag = gen_lowpart_SUBREG (inner, imag);
2894 }
2895
2896 if (TREE_ADDRESSABLE (parm))
2897 {
2898 rtx rmem, imem;
2899 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
2900
2901 /* split_complex_arg put the real and imag parts in
2902 pseudos. Move them to memory. */
2903 tmp = assign_stack_local (DECL_MODE (parm), size,
2904 TYPE_ALIGN (TREE_TYPE (parm)));
2905 set_mem_attributes (tmp, parm, 1);
2906 rmem = adjust_address_nv (tmp, inner, 0);
2907 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
2908 push_to_sequence2 (all->first_conversion_insn,
2909 all->last_conversion_insn);
2910 emit_move_insn (rmem, real);
2911 emit_move_insn (imem, imag);
2912 all->first_conversion_insn = get_insns ();
2913 all->last_conversion_insn = get_last_insn ();
2914 end_sequence ();
2915 }
2916 else
2917 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2918 SET_DECL_RTL (parm, tmp);
2919
2920 real = DECL_INCOMING_RTL (fnargs);
2921 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2922 if (inner != GET_MODE (real))
2923 {
2924 real = gen_lowpart_SUBREG (inner, real);
2925 imag = gen_lowpart_SUBREG (inner, imag);
2926 }
2927 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2928 set_decl_incoming_rtl (parm, tmp, false);
2929 fnargs = TREE_CHAIN (fnargs);
2930 }
2931 else
2932 {
2933 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2934 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs), false);
2935
2936 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2937 instead of the copy of decl, i.e. FNARGS. */
2938 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2939 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2940 }
2941
2942 fnargs = TREE_CHAIN (fnargs);
2943 }
2944 }
2945
2946 /* Assign RTL expressions to the function's parameters. This may involve
2947 copying them into registers and using those registers as the DECL_RTL. */
2948
2949 static void
2950 assign_parms (tree fndecl)
2951 {
2952 struct assign_parm_data_all all;
2953 tree fnargs, parm;
2954
2955 current_function_internal_arg_pointer
2956 = targetm.calls.internal_arg_pointer ();
2957
2958 assign_parms_initialize_all (&all);
2959 fnargs = assign_parms_augmented_arg_list (&all);
2960
2961 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2962 {
2963 struct assign_parm_data_one data;
2964
2965 /* Extract the type of PARM; adjust it according to ABI. */
2966 assign_parm_find_data_types (&all, parm, &data);
2967
2968 /* Early out for errors and void parameters. */
2969 if (data.passed_mode == VOIDmode)
2970 {
2971 SET_DECL_RTL (parm, const0_rtx);
2972 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
2973 continue;
2974 }
2975
2976 if (current_function_stdarg && !TREE_CHAIN (parm))
2977 assign_parms_setup_varargs (&all, &data, false);
2978
2979 /* Find out where the parameter arrives in this function. */
2980 assign_parm_find_entry_rtl (&all, &data);
2981
2982 /* Find out where stack space for this parameter might be. */
2983 if (assign_parm_is_stack_parm (&all, &data))
2984 {
2985 assign_parm_find_stack_rtl (parm, &data);
2986 assign_parm_adjust_entry_rtl (&data);
2987 }
2988
2989 /* Record permanently how this parm was passed. */
2990 set_decl_incoming_rtl (parm, data.entry_parm, data.passed_pointer);
2991
2992 /* Update info on where next arg arrives in registers. */
2993 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
2994 data.passed_type, data.named_arg);
2995
2996 assign_parm_adjust_stack_rtl (&data);
2997
2998 if (assign_parm_setup_block_p (&data))
2999 assign_parm_setup_block (&all, parm, &data);
3000 else if (data.passed_pointer || use_register_for_decl (parm))
3001 assign_parm_setup_reg (&all, parm, &data);
3002 else
3003 assign_parm_setup_stack (&all, parm, &data);
3004 }
3005
3006 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3007 assign_parms_unsplit_complex (&all, fnargs);
3008
3009 /* Output all parameter conversion instructions (possibly including calls)
3010 now that all parameters have been copied out of hard registers. */
3011 emit_insn (all.first_conversion_insn);
3012
3013 /* If we are receiving a struct value address as the first argument, set up
3014 the RTL for the function result. As this might require code to convert
3015 the transmitted address to Pmode, we do this here to ensure that possible
3016 preliminary conversions of the address have been emitted already. */
3017 if (all.function_result_decl)
3018 {
3019 tree result = DECL_RESULT (current_function_decl);
3020 rtx addr = DECL_RTL (all.function_result_decl);
3021 rtx x;
3022
3023 if (DECL_BY_REFERENCE (result))
3024 x = addr;
3025 else
3026 {
3027 addr = convert_memory_address (Pmode, addr);
3028 x = gen_rtx_MEM (DECL_MODE (result), addr);
3029 set_mem_attributes (x, result, 1);
3030 }
3031 SET_DECL_RTL (result, x);
3032 }
3033
3034 /* We have aligned all the args, so add space for the pretend args. */
3035 current_function_pretend_args_size = all.pretend_args_size;
3036 all.stack_args_size.constant += all.extra_pretend_bytes;
3037 current_function_args_size = all.stack_args_size.constant;
3038
3039 /* Adjust function incoming argument size for alignment and
3040 minimum length. */
3041
3042 #ifdef REG_PARM_STACK_SPACE
3043 current_function_args_size = MAX (current_function_args_size,
3044 REG_PARM_STACK_SPACE (fndecl));
3045 #endif
3046
3047 current_function_args_size = CEIL_ROUND (current_function_args_size,
3048 PARM_BOUNDARY / BITS_PER_UNIT);
3049
3050 #ifdef ARGS_GROW_DOWNWARD
3051 current_function_arg_offset_rtx
3052 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3053 : expand_expr (size_diffop (all.stack_args_size.var,
3054 size_int (-all.stack_args_size.constant)),
3055 NULL_RTX, VOIDmode, 0));
3056 #else
3057 current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3058 #endif
3059
3060 /* See how many bytes, if any, of its args a function should try to pop
3061 on return. */
3062
3063 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3064 current_function_args_size);
3065
3066 /* For a stdarg.h function, save info about
3067 the regs and stack space used by the named args. */
3068
3069 current_function_args_info = all.args_so_far;
3070
3071 /* Set the rtx used for the function return value. Put this in its
3072 own variable so any optimizers that need this information don't have
3073 to include tree.h. Do this here so it gets done when an inlined
3074 function gets output. */
3075
3076 current_function_return_rtx
3077 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3078 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3079
3080 /* If scalar return value was computed in a pseudo-reg, or was a named
3081 return value that got dumped to the stack, copy that to the hard
3082 return register. */
3083 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3084 {
3085 tree decl_result = DECL_RESULT (fndecl);
3086 rtx decl_rtl = DECL_RTL (decl_result);
3087
3088 if (REG_P (decl_rtl)
3089 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3090 : DECL_REGISTER (decl_result))
3091 {
3092 rtx real_decl_rtl;
3093
3094 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3095 fndecl, true);
3096 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3097 /* The delay slot scheduler assumes that current_function_return_rtx
3098 holds the hard register containing the return value, not a
3099 temporary pseudo. */
3100 current_function_return_rtx = real_decl_rtl;
3101 }
3102 }
3103 }
3104
3105 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3106 For all seen types, gimplify their sizes. */
3107
3108 static tree
3109 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3110 {
3111 tree t = *tp;
3112
3113 *walk_subtrees = 0;
3114 if (TYPE_P (t))
3115 {
3116 if (POINTER_TYPE_P (t))
3117 *walk_subtrees = 1;
3118 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3119 && !TYPE_SIZES_GIMPLIFIED (t))
3120 {
3121 gimplify_type_sizes (t, (tree *) data);
3122 *walk_subtrees = 1;
3123 }
3124 }
3125
3126 return NULL;
3127 }
3128
3129 /* Gimplify the parameter list for current_function_decl. This involves
3130 evaluating SAVE_EXPRs of variable sized parameters and generating code
3131 to implement callee-copies reference parameters. Returns a list of
3132 statements to add to the beginning of the function, or NULL if nothing
3133 to do. */
3134
3135 tree
3136 gimplify_parameters (void)
3137 {
3138 struct assign_parm_data_all all;
3139 tree fnargs, parm, stmts = NULL;
3140
3141 assign_parms_initialize_all (&all);
3142 fnargs = assign_parms_augmented_arg_list (&all);
3143
3144 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3145 {
3146 struct assign_parm_data_one data;
3147
3148 /* Extract the type of PARM; adjust it according to ABI. */
3149 assign_parm_find_data_types (&all, parm, &data);
3150
3151 /* Early out for errors and void parameters. */
3152 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3153 continue;
3154
3155 /* Update info on where next arg arrives in registers. */
3156 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3157 data.passed_type, data.named_arg);
3158
3159 /* ??? Once upon a time variable_size stuffed parameter list
3160 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3161 turned out to be less than manageable in the gimple world.
3162 Now we have to hunt them down ourselves. */
3163 walk_tree_without_duplicates (&data.passed_type,
3164 gimplify_parm_type, &stmts);
3165
3166 if (!TREE_CONSTANT (DECL_SIZE (parm)))
3167 {
3168 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3169 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3170 }
3171
3172 if (data.passed_pointer)
3173 {
3174 tree type = TREE_TYPE (data.passed_type);
3175 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3176 type, data.named_arg))
3177 {
3178 tree local, t;
3179
3180 /* For constant sized objects, this is trivial; for
3181 variable-sized objects, we have to play games. */
3182 if (TREE_CONSTANT (DECL_SIZE (parm)))
3183 {
3184 local = create_tmp_var (type, get_name (parm));
3185 DECL_IGNORED_P (local) = 0;
3186 }
3187 else
3188 {
3189 tree ptr_type, addr;
3190
3191 ptr_type = build_pointer_type (type);
3192 addr = create_tmp_var (ptr_type, get_name (parm));
3193 DECL_IGNORED_P (addr) = 0;
3194 local = build_fold_indirect_ref (addr);
3195
3196 t = built_in_decls[BUILT_IN_ALLOCA];
3197 t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
3198 t = fold_convert (ptr_type, t);
3199 t = build_gimple_modify_stmt (addr, t);
3200 gimplify_and_add (t, &stmts);
3201 }
3202
3203 t = build_gimple_modify_stmt (local, parm);
3204 gimplify_and_add (t, &stmts);
3205
3206 SET_DECL_VALUE_EXPR (parm, local);
3207 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3208 }
3209 }
3210 }
3211
3212 return stmts;
3213 }
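
/* Sketch of the callee-copy case above for a variable-sized reference
   parameter PARM (names here are only illustrative): the emitted
   statements amount to
       addr = __builtin_alloca (DECL_SIZE_UNIT (PARM));
       *addr = PARM;
   after which '*addr' becomes the DECL_VALUE_EXPR of PARM, so the body of
   the function operates on the local copy rather than the caller's
   object.  */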
3214 \f
3215 /* Compute the size and offset from the start of the stacked arguments for a
3216 parm passed in mode PASSED_MODE and with type TYPE.
3217
3218 INITIAL_OFFSET_PTR points to the current offset into the stacked
3219 arguments.
3220
3221 The starting offset and size for this parm are returned in
3222 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3223 nonzero, the offset is that of the stack slot, which is returned in
3224 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3225 padding required from the initial offset ptr to the stack slot.
3226
3227 IN_REGS is nonzero if the argument will be passed in registers. It will
3228 never be set if REG_PARM_STACK_SPACE is not defined.
3229
3230 FNDECL is the function in which the argument was defined.
3231
3232 There are two types of rounding that are done. The first, controlled by
3233 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3234 list to be aligned to the specific boundary (in bits). This rounding
3235 affects the initial and starting offsets, but not the argument size.
3236
3237 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3238 optionally rounds the size of the parm to PARM_BOUNDARY. The
3239 initial offset is not affected by this rounding, while the size always
3240 is and the starting offset may be. */
3241
3242 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3243 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3244 callers pass in the total size of args so far as
3245 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3246
3247 void
3248 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3249 int partial, tree fndecl ATTRIBUTE_UNUSED,
3250 struct args_size *initial_offset_ptr,
3251 struct locate_and_pad_arg_data *locate)
3252 {
3253 tree sizetree;
3254 enum direction where_pad;
3255 unsigned int boundary;
3256 int reg_parm_stack_space = 0;
3257 int part_size_in_regs;
3258
3259 #ifdef REG_PARM_STACK_SPACE
3260 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3261
3262 /* If we have found a stack parm before we reach the end of the
3263 area reserved for registers, skip that area. */
3264 if (! in_regs)
3265 {
3266 if (reg_parm_stack_space > 0)
3267 {
3268 if (initial_offset_ptr->var)
3269 {
3270 initial_offset_ptr->var
3271 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3272 ssize_int (reg_parm_stack_space));
3273 initial_offset_ptr->constant = 0;
3274 }
3275 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3276 initial_offset_ptr->constant = reg_parm_stack_space;
3277 }
3278 }
3279 #endif /* REG_PARM_STACK_SPACE */
3280
3281 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3282
3283 sizetree
3284 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3285 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3286 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3287 locate->where_pad = where_pad;
3288 locate->boundary = boundary;
3289
3290 /* Remember if the outgoing parameter requires extra alignment on the
3291 calling function side. */
3292 if (boundary > PREFERRED_STACK_BOUNDARY)
3293 boundary = PREFERRED_STACK_BOUNDARY;
3294 if (cfun->stack_alignment_needed < boundary)
3295 cfun->stack_alignment_needed = boundary;
3296
3297 #ifdef ARGS_GROW_DOWNWARD
3298 locate->slot_offset.constant = -initial_offset_ptr->constant;
3299 if (initial_offset_ptr->var)
3300 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3301 initial_offset_ptr->var);
3302
3303 {
3304 tree s2 = sizetree;
3305 if (where_pad != none
3306 && (!host_integerp (sizetree, 1)
3307 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3308 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3309 SUB_PARM_SIZE (locate->slot_offset, s2);
3310 }
3311
3312 locate->slot_offset.constant += part_size_in_regs;
3313
3314 if (!in_regs
3315 #ifdef REG_PARM_STACK_SPACE
3316 || REG_PARM_STACK_SPACE (fndecl) > 0
3317 #endif
3318 )
3319 pad_to_arg_alignment (&locate->slot_offset, boundary,
3320 &locate->alignment_pad);
3321
3322 locate->size.constant = (-initial_offset_ptr->constant
3323 - locate->slot_offset.constant);
3324 if (initial_offset_ptr->var)
3325 locate->size.var = size_binop (MINUS_EXPR,
3326 size_binop (MINUS_EXPR,
3327 ssize_int (0),
3328 initial_offset_ptr->var),
3329 locate->slot_offset.var);
3330
3331 /* Pad_below needs the pre-rounded size to know how much to pad
3332 below. */
3333 locate->offset = locate->slot_offset;
3334 if (where_pad == downward)
3335 pad_below (&locate->offset, passed_mode, sizetree);
3336
3337 #else /* !ARGS_GROW_DOWNWARD */
3338 if (!in_regs
3339 #ifdef REG_PARM_STACK_SPACE
3340 || REG_PARM_STACK_SPACE (fndecl) > 0
3341 #endif
3342 )
3343 pad_to_arg_alignment (initial_offset_ptr, boundary,
3344 &locate->alignment_pad);
3345 locate->slot_offset = *initial_offset_ptr;
3346
3347 #ifdef PUSH_ROUNDING
3348 if (passed_mode != BLKmode)
3349 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3350 #endif
3351
3352 /* Pad_below needs the pre-rounded size to know how much to pad below
3353 so this must be done before rounding up. */
3354 locate->offset = locate->slot_offset;
3355 if (where_pad == downward)
3356 pad_below (&locate->offset, passed_mode, sizetree);
3357
3358 if (where_pad != none
3359 && (!host_integerp (sizetree, 1)
3360 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3361 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3362
3363 ADD_PARM_SIZE (locate->size, sizetree);
3364
3365 locate->size.constant -= part_size_in_regs;
3366 #endif /* ARGS_GROW_DOWNWARD */
3367 }
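
/* Worked example of the args-grow-upward path, with assumed values
   STACK_POINTER_OFFSET == 0, PARM_BOUNDARY == 32 and no
   REG_PARM_STACK_SPACE: an 8-byte upward-padded argument arriving at an
   initial offset of 4 bytes with boundary == 64 is first aligned, giving
   slot_offset = offset = 8 (alignment_pad = 4), and ADD_PARM_SIZE leaves
   locate->size.constant == 8, so the slot occupies bytes 8..15.  */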
3368
3369 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3370 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3371
3372 static void
3373 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3374 struct args_size *alignment_pad)
3375 {
3376 tree save_var = NULL_TREE;
3377 HOST_WIDE_INT save_constant = 0;
3378 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3379 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3380
3381 #ifdef SPARC_STACK_BOUNDARY_HACK
3382 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3383 the real alignment of %sp. However, when it does this, the
3384 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3385 if (SPARC_STACK_BOUNDARY_HACK)
3386 sp_offset = 0;
3387 #endif
3388
3389 if (boundary > PARM_BOUNDARY)
3390 {
3391 save_var = offset_ptr->var;
3392 save_constant = offset_ptr->constant;
3393 }
3394
3395 alignment_pad->var = NULL_TREE;
3396 alignment_pad->constant = 0;
3397
3398 if (boundary > BITS_PER_UNIT)
3399 {
3400 if (offset_ptr->var)
3401 {
3402 tree sp_offset_tree = ssize_int (sp_offset);
3403 tree offset = size_binop (PLUS_EXPR,
3404 ARGS_SIZE_TREE (*offset_ptr),
3405 sp_offset_tree);
3406 #ifdef ARGS_GROW_DOWNWARD
3407 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3408 #else
3409 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3410 #endif
3411
3412 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3413 /* ARGS_SIZE_TREE includes constant term. */
3414 offset_ptr->constant = 0;
3415 if (boundary > PARM_BOUNDARY)
3416 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3417 save_var);
3418 }
3419 else
3420 {
3421 offset_ptr->constant = -sp_offset +
3422 #ifdef ARGS_GROW_DOWNWARD
3423 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3424 #else
3425 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3426 #endif
3427 if (boundary > PARM_BOUNDARY)
3428 alignment_pad->constant = offset_ptr->constant - save_constant;
3429 }
3430 }
3431 }
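
/* Worked example (args growing upward, assuming STACK_POINTER_OFFSET == 0
   and PARM_BOUNDARY == 32): a constant offset of 20 with boundary == 128
   gives boundary_in_bytes = 16, offset_ptr->constant = CEIL_ROUND (20, 16)
   = 32, and, because 128 > PARM_BOUNDARY, alignment_pad->constant =
   32 - 20 = 12.  */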
3432
3433 static void
3434 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3435 {
3436 if (passed_mode != BLKmode)
3437 {
3438 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3439 offset_ptr->constant
3440 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3441 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3442 - GET_MODE_SIZE (passed_mode));
3443 }
3444 else
3445 {
3446 if (TREE_CODE (sizetree) != INTEGER_CST
3447 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3448 {
3449 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3450 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3451 /* Add it in. */
3452 ADD_PARM_SIZE (*offset_ptr, s2);
3453 SUB_PARM_SIZE (*offset_ptr, sizetree);
3454 }
3455 }
3456 }
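
/* Worked example, assuming PARM_BOUNDARY == 32: an HImode argument
   (16 bits) is not a multiple of PARM_BOUNDARY, so offset_ptr->constant is
   advanced by 4 - 2 = 2 bytes (the mode's size rounded up to a 4-byte
   slot, minus its actual 2 bytes), stepping over the padding that sits
   below the value in its slot.  */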
3457 \f
3458
3459 /* True if register REGNO was alive at a place where `setjmp' was
3460 called and was set more than once or is an argument. Such regs may
3461 be clobbered by `longjmp'. */
3462
3463 static bool
3464 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3465 {
3466 /* There appear to be cases where some local vars never reach the
3467 backend but have bogus regnos. */
3468 if (regno >= max_reg_num ())
3469 return false;
3470
3471 return ((REG_N_SETS (regno) > 1
3472 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3473 && REGNO_REG_SET_P (setjmp_crosses, regno));
3474 }
3475
3476 /* Walk the tree of blocks describing the binding levels within a
3477 function and warn about variables that might be killed by setjmp or
3478 vfork.  This is done after flow analysis, but before register
3479 allocation, since register allocation replaces the pseudo-regs with
3480 hard regs.  */
3481
3482 static void
3483 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
3484 {
3485 tree decl, sub;
3486
3487 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3488 {
3489 if (TREE_CODE (decl) == VAR_DECL
3490 && DECL_RTL_SET_P (decl)
3491 && REG_P (DECL_RTL (decl))
3492 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3493 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
3494 " %<longjmp%> or %<vfork%>", decl);
3495 }
3496
3497 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3498 setjmp_vars_warning (setjmp_crosses, sub);
3499 }
3500
3501 /* Do the appropriate part of setjmp_vars_warning
3502 but for arguments instead of local variables. */
3503
3504 static void
3505 setjmp_args_warning (bitmap setjmp_crosses)
3506 {
3507 tree decl;
3508 for (decl = DECL_ARGUMENTS (current_function_decl);
3509 decl; decl = TREE_CHAIN (decl))
3510 if (DECL_RTL (decl) != 0
3511 && REG_P (DECL_RTL (decl))
3512 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3513 warning (OPT_Wclobbered,
3514 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3515 decl);
3516 }
3517
3518 /* Generate warning messages for variables live across setjmp. */
3519
3520 void
3521 generate_setjmp_warnings (void)
3522 {
3523 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
3524
3525 if (n_basic_blocks == NUM_FIXED_BLOCKS
3526 || bitmap_empty_p (setjmp_crosses))
3527 return;
3528
3529 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
3530 setjmp_args_warning (setjmp_crosses);
3531 }
3532
3533 \f
3534 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3535 and create duplicate blocks. */
3536 /* ??? Need an option to either create block fragments or to create
3537 abstract origin duplicates of a source block. It really depends
3538 on what optimization has been performed. */
3539
3540 void
3541 reorder_blocks (void)
3542 {
3543 tree block = DECL_INITIAL (current_function_decl);
3544 VEC(tree,heap) *block_stack;
3545
3546 if (block == NULL_TREE)
3547 return;
3548
3549 block_stack = VEC_alloc (tree, heap, 10);
3550
3551 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3552 clear_block_marks (block);
3553
3554 /* Prune the old trees away, so that they don't get in the way. */
3555 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3556 BLOCK_CHAIN (block) = NULL_TREE;
3557
3558 /* Recreate the block tree from the note nesting. */
3559 reorder_blocks_1 (get_insns (), block, &block_stack);
3560 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3561
3562 VEC_free (tree, heap, block_stack);
3563 }
3564
3565 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3566
3567 void
3568 clear_block_marks (tree block)
3569 {
3570 while (block)
3571 {
3572 TREE_ASM_WRITTEN (block) = 0;
3573 clear_block_marks (BLOCK_SUBBLOCKS (block));
3574 block = BLOCK_CHAIN (block);
3575 }
3576 }
3577
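/* Helper for reorder_blocks.  Walk the insn chain INSNS and rebuild the
   BLOCK tree below CURRENT_BLOCK from the NOTE_INSN_BLOCK_{BEG,END} notes:
   each BLOCK_BEG note links its block (or a freshly created fragment, if
   the block has already been written) beneath the enclosing block and
   pushes it on *P_BLOCK_STACK; the matching BLOCK_END note pops it and
   steps back out to the superblock.  */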
3578 static void
3579 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
3580 {
3581 rtx insn;
3582
3583 for (insn = insns; insn; insn = NEXT_INSN (insn))
3584 {
3585 if (NOTE_P (insn))
3586 {
3587 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
3588 {
3589 tree block = NOTE_BLOCK (insn);
3590 tree origin;
3591
3592 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3593 ? BLOCK_FRAGMENT_ORIGIN (block)
3594 : block);
3595
3596 /* If we have seen this block before, that means it now
3597 spans multiple address regions. Create a new fragment. */
3598 if (TREE_ASM_WRITTEN (block))
3599 {
3600 tree new_block = copy_node (block);
3601
3602 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3603 BLOCK_FRAGMENT_CHAIN (new_block)
3604 = BLOCK_FRAGMENT_CHAIN (origin);
3605 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3606
3607 NOTE_BLOCK (insn) = new_block;
3608 block = new_block;
3609 }
3610
3611 BLOCK_SUBBLOCKS (block) = 0;
3612 TREE_ASM_WRITTEN (block) = 1;
3613 /* When there's only one block for the entire function,
3614 current_block == block and we mustn't do this; it
3615 would cause infinite recursion.  */
3616 if (block != current_block)
3617 {
3618 if (block != origin)
3619 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
3620
3621 BLOCK_SUPERCONTEXT (block) = current_block;
3622 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3623 BLOCK_SUBBLOCKS (current_block) = block;
3624 current_block = origin;
3625 }
3626 VEC_safe_push (tree, heap, *p_block_stack, block);
3627 }
3628 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
3629 {
3630 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3631 BLOCK_SUBBLOCKS (current_block)
3632 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3633 current_block = BLOCK_SUPERCONTEXT (current_block);
3634 }
3635 }
3636 }
3637 }
3638
3639 /* Reverse the order of elements in the chain T of blocks,
3640 and return the new head of the chain (old last element). */
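/* E.g. a chain A -> B -> C comes back as C -> B -> A, with A's
   BLOCK_CHAIN now NULL.  The reversal is done in place.  */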
3641
3642 tree
3643 blocks_nreverse (tree t)
3644 {
3645 tree prev = 0, decl, next;
3646 for (decl = t; decl; decl = next)
3647 {
3648 next = BLOCK_CHAIN (decl);
3649 BLOCK_CHAIN (decl) = prev;
3650 prev = decl;
3651 }
3652 return prev;
3653 }
3654
3655 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3656 non-NULL, list them all into VECTOR, in a depth-first preorder
3657 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
3658 blocks. */
3659
3660 static int
3661 all_blocks (tree block, tree *vector)
3662 {
3663 int n_blocks = 0;
3664
3665 while (block)
3666 {
3667 TREE_ASM_WRITTEN (block) = 0;
3668
3669 /* Record this block. */
3670 if (vector)
3671 vector[n_blocks] = block;
3672
3673 ++n_blocks;
3674
3675 /* Record the subblocks, and their subblocks... */
3676 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3677 vector ? vector + n_blocks : 0);
3678 block = BLOCK_CHAIN (block);
3679 }
3680
3681 return n_blocks;
3682 }
3683
3684 /* Return a vector containing all the blocks rooted at BLOCK. The
3685 number of elements in the vector is stored in N_BLOCKS_P. The
3686 vector is dynamically allocated; it is the caller's responsibility
3687 to call `free' on the pointer returned. */
3688
3689 static tree *
3690 get_block_vector (tree block, int *n_blocks_p)
3691 {
3692 tree *block_vector;
3693
3694 *n_blocks_p = all_blocks (block, NULL);
3695 block_vector = XNEWVEC (tree, *n_blocks_p);
3696 all_blocks (block, block_vector);
3697
3698 return block_vector;
3699 }
3700
3701 static GTY(()) int next_block_index = 2;
3702
3703 /* Set BLOCK_NUMBER for all the blocks in FN. */
3704
3705 void
3706 number_blocks (tree fn)
3707 {
3708 int i;
3709 int n_blocks;
3710 tree *block_vector;
3711
3712 /* For SDB and XCOFF debugging output, we start numbering the blocks
3713 from 1 within each function, rather than keeping a running
3714 count. */
3715 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3716 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3717 next_block_index = 1;
3718 #endif
3719
3720 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3721
3722 /* The top-level BLOCK isn't numbered at all. */
3723 for (i = 1; i < n_blocks; ++i)
3724 /* We number the blocks from two. */
3725 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3726
3727 free (block_vector);
3728
3729 return;
3730 }
3731
3732 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3733
3734 tree
3735 debug_find_var_in_block_tree (tree var, tree block)
3736 {
3737 tree t;
3738
3739 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3740 if (t == var)
3741 return block;
3742
3743 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3744 {
3745 tree ret = debug_find_var_in_block_tree (var, t);
3746 if (ret)
3747 return ret;
3748 }
3749
3750 return NULL_TREE;
3751 }
3752 \f
3753 /* Keep track of whether we're in a dummy function context. If we are,
3754 we don't want to invoke the set_current_function hook, because we'll
3755 get into trouble if the hook calls target_reinit () recursively or
3756 when the initial initialization is not yet complete. */
3757
3758 static bool in_dummy_function;
3759
3760 /* Invoke the target hook when setting cfun. */
3761
3762 static void
3763 invoke_set_current_function_hook (tree fndecl)
3764 {
3765 if (!in_dummy_function)
3766 targetm.set_current_function (fndecl);
3767 }
3768
3769 /* cfun should never be set directly; use this function. */
3770
3771 void
3772 set_cfun (struct function *new_cfun)
3773 {
3774 if (cfun != new_cfun)
3775 {
3776 cfun = new_cfun;
3777 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
3778 }
3779 }
3780
3781 /* Keep track of the cfun stack. */
3782
3783 typedef struct function *function_p;
3784
3785 DEF_VEC_P(function_p);
3786 DEF_VEC_ALLOC_P(function_p,heap);
3787
3788 /* Initialized with NOGC, making this poisonous to the garbage collector. */
3789
3790 static VEC(function_p,heap) *cfun_stack;
3791
3792 /* We save the value of in_system_header here when pushing the first
3793 function on the cfun stack, and we restore it from here when
3794 popping the last function. */
3795
3796 static bool saved_in_system_header;
3797
3798 /* Push the current cfun onto the stack, and set cfun to new_cfun. */
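/* Calls to push_cfun must be paired with pop_cfun, e.g.
     push_cfun (DECL_STRUCT_FUNCTION (fndecl));
     ... emit or examine RTL for fndecl ...
     pop_cfun ();
   so that cfun, the cfun stack and in_system_header stay consistent.  */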
3799
3800 void
3801 push_cfun (struct function *new_cfun)
3802 {
3803 if (cfun == NULL)
3804 saved_in_system_header = in_system_header;
3805 VEC_safe_push (function_p, heap, cfun_stack, cfun);
3806 if (new_cfun)
3807 in_system_header = DECL_IN_SYSTEM_HEADER (new_cfun->decl);
3808 set_cfun (new_cfun);
3809 }
3810
3811 /* Pop cfun from the stack. */
3812
3813 void
3814 pop_cfun (void)
3815 {
3816 struct function *new_cfun = VEC_pop (function_p, cfun_stack);
3817 in_system_header = ((new_cfun == NULL) ? saved_in_system_header
3818 : DECL_IN_SYSTEM_HEADER (new_cfun->decl));
3819 set_cfun (new_cfun);
3820 }
3821
3822 /* Return the current value of funcdef_no and increment it. */
3823 int
3824 get_next_funcdef_no (void)
3825 {
3826 return funcdef_no++;
3827 }
3828
3829 /* Allocate a function structure for FNDECL and set its contents
3830 to the defaults. Set cfun to the newly-allocated object.
3831 Some of the helper functions invoked during initialization assume
3832 that cfun has already been set. Therefore, assign the new object
3833 directly into cfun and invoke the back end hook explicitly at the
3834 very end, rather than initializing a temporary and calling set_cfun
3835 on it.
3836
3837 ABSTRACT_P is true if this is a function that will never be seen by
3838 the middle-end. Such functions are front-end concepts (like C++
3839 function templates) that do not correspond directly to functions
3840 placed in object files. */
3841
3842 void
3843 allocate_struct_function (tree fndecl, bool abstract_p)
3844 {
3845 tree result;
3846 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3847
3848 cfun = ggc_alloc_cleared (sizeof (struct function));
3849
3850 cfun->stack_alignment_needed = STACK_BOUNDARY;
3851 cfun->preferred_stack_boundary = STACK_BOUNDARY;
3852
3853 current_function_funcdef_no = get_next_funcdef_no ();
3854
3855 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3856
3857 init_eh_for_function ();
3858
3859 if (init_machine_status)
3860 cfun->machine = (*init_machine_status) ();
3861
3862 if (fndecl != NULL)
3863 {
3864 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3865 cfun->decl = fndecl;
3866
3867 result = DECL_RESULT (fndecl);
3868 if (!abstract_p && aggregate_value_p (result, fndecl))
3869 {
3870 #ifdef PCC_STATIC_STRUCT_RETURN
3871 current_function_returns_pcc_struct = 1;
3872 #endif
3873 current_function_returns_struct = 1;
3874 }
3875
3876 current_function_stdarg
3877 = (fntype
3878 && TYPE_ARG_TYPES (fntype) != 0
3879 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3880 != void_type_node));
3881
3882 /* Assume all registers in stdarg functions need to be saved. */
3883 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
3884 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
3885 }
3886
3887 invoke_set_current_function_hook (fndecl);
3888 }
3889
3890 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
3891 instead of just setting it. */
3892
3893 void
3894 push_struct_function (tree fndecl)
3895 {
3896 if (cfun == NULL)
3897 saved_in_system_header = in_system_header;
3898 VEC_safe_push (function_p, heap, cfun_stack, cfun);
3899 if (fndecl)
3900 in_system_header = DECL_IN_SYSTEM_HEADER (fndecl);
3901 allocate_struct_function (fndecl, false);
3902 }
3903
3904 /* Reset cfun and other non-struct-function variables to defaults as
3905 appropriate for emitting rtl at the start of a function. */
3906
3907 static void
3908 prepare_function_start (void)
3909 {
3910 gcc_assert (!rtl.emit.x_last_insn);
3911 init_emit ();
3912 init_varasm_status ();
3913 init_expr ();
3914
3915 cse_not_expected = ! optimize;
3916
3917 /* Caller save not needed yet. */
3918 caller_save_needed = 0;
3919
3920 /* We haven't done register allocation yet. */
3921 reg_renumber = 0;
3922
3923 /* Indicate that we have not instantiated virtual registers yet. */
3924 virtuals_instantiated = 0;
3925
3926 /* Indicate that we want CONCATs now. */
3927 generating_concat_p = 1;
3928
3929 /* Indicate we have no need of a frame pointer yet. */
3930 frame_pointer_needed = 0;
3931 }
3932
3933 /* Initialize the rtl expansion mechanism so that we can do simple things
3934 like generate sequences. This is used to provide a context during global
3935 initialization of some passes. You must call expand_dummy_function_end
3936 to exit this context. */
3937
3938 void
3939 init_dummy_function_start (void)
3940 {
3941 gcc_assert (!in_dummy_function);
3942 in_dummy_function = true;
3943 push_struct_function (NULL_TREE);
3944 prepare_function_start ();
3945 }
3946
3947 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3948 and initialize static variables for generating RTL for the statements
3949 of the function. */
3950
3951 void
3952 init_function_start (tree subr)
3953 {
3954 if (subr && DECL_STRUCT_FUNCTION (subr))
3955 set_cfun (DECL_STRUCT_FUNCTION (subr));
3956 else
3957 allocate_struct_function (subr, false);
3958 prepare_function_start ();
3959
3960 /* Warn if this value is an aggregate type,
3961 regardless of which calling convention we are using for it. */
3962 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3963 warning (OPT_Waggregate_return, "function returns an aggregate");
3964 }
3965
3966 /* Make sure all values used by the optimization passes have sane
3967 defaults. */
3968 unsigned int
3969 init_function_for_compilation (void)
3970 {
3971 reg_renumber = 0;
3972
3973 /* No prologue/epilogue insns yet. Make sure that these vectors are
3974 empty. */
3975 gcc_assert (VEC_length (int, prologue) == 0);
3976 gcc_assert (VEC_length (int, epilogue) == 0);
3977 gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
3978 return 0;
3979 }
3980
3981 struct rtl_opt_pass pass_init_function =
3982 {
3983 {
3984 RTL_PASS,
3985 NULL, /* name */
3986 NULL, /* gate */
3987 init_function_for_compilation, /* execute */
3988 NULL, /* sub */
3989 NULL, /* next */
3990 0, /* static_pass_number */
3991 0, /* tv_id */
3992 0, /* properties_required */
3993 0, /* properties_provided */
3994 0, /* properties_destroyed */
3995 0, /* todo_flags_start */
3996 0 /* todo_flags_finish */
3997 }
3998 };
3999
4000
4001 void
4002 expand_main_function (void)
4003 {
4004 #if (defined(INVOKE__main) \
4005 || (!defined(HAS_INIT_SECTION) \
4006 && !defined(INIT_SECTION_ASM_OP) \
4007 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
4008 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
4009 #endif
4010 }
4011 \f
4012 /* Expand code to initialize the stack_protect_guard. This is invoked at
4013 the beginning of a function to be protected. */
4014
4015 #ifndef HAVE_stack_protect_set
4016 # define HAVE_stack_protect_set 0
4017 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
4018 #endif
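/* The fallback definitions above only exist so that this file compiles on
   targets without a stack_protect_set pattern; the gen_stack_protect_set
   stub is never reached because HAVE_stack_protect_set is then 0.  */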
4019
4020 void
4021 stack_protect_prologue (void)
4022 {
4023 tree guard_decl = targetm.stack_protect_guard ();
4024 rtx x, y;
4025
4026 /* Avoid expand_expr here, because we don't want guard_decl pulled
4027 into registers unless absolutely necessary. And we know that
4028 cfun->stack_protect_guard is a local stack slot, so this skips
4029 all the fluff. */
4030 x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
4031 y = validize_mem (DECL_RTL (guard_decl));
4032
4033 /* Allow the target to copy from Y to X without leaking Y into a
4034 register. */
4035 if (HAVE_stack_protect_set)
4036 {
4037 rtx insn = gen_stack_protect_set (x, y);
4038 if (insn)
4039 {
4040 emit_insn (insn);
4041 return;
4042 }
4043 }
4044
4045 /* Otherwise do a straight move. */
4046 emit_move_insn (x, y);
4047 }
4048
4049 /* Expand code to verify the stack_protect_guard. This is invoked at
4050 the end of a function to be protected. */
4051
4052 #ifndef HAVE_stack_protect_test
4053 # define HAVE_stack_protect_test 0
4054 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4055 #endif
4056
4057 void
4058 stack_protect_epilogue (void)
4059 {
4060 tree guard_decl = targetm.stack_protect_guard ();
4061 rtx label = gen_label_rtx ();
4062 rtx x, y, tmp;
4063
4064 /* Avoid expand_expr here, because we don't want guard_decl pulled
4065 into registers unless absolutely necessary. And we know that
4066 cfun->stack_protect_guard is a local stack slot, so this skips
4067 all the fluff. */
4068 x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
4069 y = validize_mem (DECL_RTL (guard_decl));
4070
4071 /* Allow the target to compare Y with X without leaking either into
4072 a register. */
4073 switch (HAVE_stack_protect_test != 0)
4074 {
4075 case 1:
4076 tmp = gen_stack_protect_test (x, y, label);
4077 if (tmp)
4078 {
4079 emit_insn (tmp);
4080 break;
4081 }
4082 /* FALLTHRU */
4083
4084 default:
4085 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4086 break;
4087 }
4088
4089 /* The noreturn predictor has been moved to the tree level. The rtl-level
4090 predictors estimate this branch about 20%, which isn't enough to get
4091 things moved out of line. Since this is the only extant case of adding
4092 a noreturn function at the rtl level, it doesn't seem worth doing anything
4093 except adding the prediction by hand. */
4094 tmp = get_last_insn ();
4095 if (JUMP_P (tmp))
4096 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4097
4098 expand_expr_stmt (targetm.stack_protect_fail ());
4099 emit_label (label);
4100 }
4101 \f
4102 /* Start the RTL for a new function, and set variables used for
4103 emitting RTL.
4104 SUBR is the FUNCTION_DECL node. */
4107
4108 void
4109 expand_function_start (tree subr)
4110 {
4111 /* Make sure volatile mem refs aren't considered
4112 valid operands of arithmetic insns. */
4113 init_recog_no_volatile ();
4114
4115 current_function_profile
4116 = (profile_flag
4117 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4118
4119 current_function_limit_stack
4120 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4121
4122 /* Make the label for return statements to jump to. Do not special
4123 case machines with special return instructions -- they will be
4124 handled later during jump, ifcvt, or epilogue creation. */
4125 return_label = gen_label_rtx ();
4126
4127 /* Initialize rtx used to return the value. */
4128 /* Do this before assign_parms so that we copy the struct value address
4129 before any library calls that assign parms might generate. */
4130
4131 /* Decide whether to return the value in memory or in a register. */
4132 if (aggregate_value_p (DECL_RESULT (subr), subr))
4133 {
4134 /* Returning something that won't go in a register. */
4135 rtx value_address = 0;
4136
4137 #ifdef PCC_STATIC_STRUCT_RETURN
4138 if (current_function_returns_pcc_struct)
4139 {
4140 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4141 value_address = assemble_static_space (size);
4142 }
4143 else
4144 #endif
4145 {
4146 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4147 /* Expect to be passed the address of a place to store the value.
4148 If it is passed as an argument, assign_parms will take care of
4149 it. */
4150 if (sv)
4151 {
4152 value_address = gen_reg_rtx (Pmode);
4153 emit_move_insn (value_address, sv);
4154 }
4155 }
4156 if (value_address)
4157 {
4158 rtx x = value_address;
4159 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4160 {
4161 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4162 set_mem_attributes (x, DECL_RESULT (subr), 1);
4163 }
4164 SET_DECL_RTL (DECL_RESULT (subr), x);
4165 }
4166 }
4167 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4168 /* If return mode is void, this decl rtl should not be used. */
4169 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4170 else
4171 {
4172 /* Compute the return values into a pseudo reg, which we will copy
4173 into the true return register after the cleanups are done. */
4174 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4175 if (TYPE_MODE (return_type) != BLKmode
4176 && targetm.calls.return_in_msb (return_type))
4177 /* expand_function_end will insert the appropriate padding in
4178 this case. Use the return value's natural (unpadded) mode
4179 within the function proper. */
4180 SET_DECL_RTL (DECL_RESULT (subr),
4181 gen_reg_rtx (TYPE_MODE (return_type)));
4182 else
4183 {
4184 /* In order to figure out what mode to use for the pseudo, we
4185 figure out what the mode of the eventual return register will
4186 actually be, and use that. */
4187 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4188
4189 /* Structures that are returned in registers are not
4190 aggregate_value_p, so we may see a PARALLEL or a REG. */
4191 if (REG_P (hard_reg))
4192 SET_DECL_RTL (DECL_RESULT (subr),
4193 gen_reg_rtx (GET_MODE (hard_reg)));
4194 else
4195 {
4196 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4197 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4198 }
4199 }
4200
4201 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4202 result to the real return register(s). */
4203 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4204 }
4205
4206 /* Initialize rtx for parameters and local variables.
4207 In some cases this requires emitting insns. */
4208 assign_parms (subr);
4209
4210 /* If function gets a static chain arg, store it. */
4211 if (cfun->static_chain_decl)
4212 {
4213 tree parm = cfun->static_chain_decl;
4214 rtx local = gen_reg_rtx (Pmode);
4215
4216 set_decl_incoming_rtl (parm, static_chain_incoming_rtx, false);
4217 SET_DECL_RTL (parm, local);
4218 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4219
4220 emit_move_insn (local, static_chain_incoming_rtx);
4221 }
4222
4223 /* If the function receives a non-local goto, then store the
4224 bits we need to restore the frame pointer. */
4225 if (cfun->nonlocal_goto_save_area)
4226 {
4227 tree t_save;
4228 rtx r_save;
4229
4230 /* ??? We need to do this save early. Unfortunately here is
4231 before the frame variable gets declared. Help out... */
4232 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
4233 if (!DECL_RTL_SET_P (var))
4234 expand_decl (var);
4235
4236 t_save = build4 (ARRAY_REF, ptr_type_node,
4237 cfun->nonlocal_goto_save_area,
4238 integer_zero_node, NULL_TREE, NULL_TREE);
4239 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4240 r_save = convert_memory_address (Pmode, r_save);
4241
4242 emit_move_insn (r_save, virtual_stack_vars_rtx);
4243 update_nonlocal_goto_save_area ();
4244 }
4245
4246 /* The following was moved from init_function_start.
4247 The move is supposed to make sdb output more accurate. */
4248 /* Indicate the beginning of the function body,
4249 as opposed to parm setup. */
4250 emit_note (NOTE_INSN_FUNCTION_BEG);
4251
4252 gcc_assert (NOTE_P (get_last_insn ()));
4253
4254 parm_birth_insn = get_last_insn ();
4255
4256 if (current_function_profile)
4257 {
4258 #ifdef PROFILE_HOOK
4259 PROFILE_HOOK (current_function_funcdef_no);
4260 #endif
4261 }
4262
4263 /* The stack checking probe should go after the display
4264 initializations. */
4265 if (flag_stack_check)
4266 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4267
4268 /* Make sure there is a line number after the function entry setup code. */
4269 force_next_line_note ();
4270 }
4271 \f
4272 /* Undo the effects of init_dummy_function_start. */
4273 void
4274 expand_dummy_function_end (void)
4275 {
4276 gcc_assert (in_dummy_function);
4277
4278 /* End any sequences that failed to be closed due to syntax errors. */
4279 while (in_sequence_p ())
4280 end_sequence ();
4281
4282 /* Outside function body, can't compute type's actual size
4283 until next function's body starts. */
4284
4285 free_after_parsing (cfun);
4286 free_after_compilation (cfun);
4287 pop_cfun ();
4288 in_dummy_function = false;
4289 }
4290
4291 /* Call DOIT for each hard register used as a return value from
4292 the current function. */
4293
4294 void
4295 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4296 {
4297 rtx outgoing = current_function_return_rtx;
4298
4299 if (! outgoing)
4300 return;
4301
4302 if (REG_P (outgoing))
4303 (*doit) (outgoing, arg);
4304 else if (GET_CODE (outgoing) == PARALLEL)
4305 {
4306 int i;
4307
4308 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4309 {
4310 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4311
4312 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4313 (*doit) (x, arg);
4314 }
4315 }
4316 }
4317
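/* Emit a CLOBBER of REG; used as a diddle_return_value callback.  */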
4318 static void
4319 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4320 {
4321 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
4322 }
4323
4324 void
4325 clobber_return_register (void)
4326 {
4327 diddle_return_value (do_clobber_return_reg, NULL);
4328
4329 /* In case we do use pseudo to return value, clobber it too. */
4330 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4331 {
4332 tree decl_result = DECL_RESULT (current_function_decl);
4333 rtx decl_rtl = DECL_RTL (decl_result);
4334 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4335 {
4336 do_clobber_return_reg (decl_rtl, NULL);
4337 }
4338 }
4339 }
4340
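/* Emit a USE of REG; used as a diddle_return_value callback.  */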
4341 static void
4342 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4343 {
4344 emit_insn (gen_rtx_USE (VOIDmode, reg));
4345 }
4346
4347 static void
4348 use_return_register (void)
4349 {
4350 diddle_return_value (do_use_return_reg, NULL);
4351 }
4352
4353 /* Possibly warn about unused parameters. */
4354 void
4355 do_warn_unused_parameter (tree fn)
4356 {
4357 tree decl;
4358
4359 for (decl = DECL_ARGUMENTS (fn);
4360 decl; decl = TREE_CHAIN (decl))
4361 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4362 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4363 && !TREE_NO_WARNING (decl))
4364 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4365 }
4366
4367 static GTY(()) rtx initial_trampoline;
4368
4369 /* Generate RTL for the end of the current function. */
4370
4371 void
4372 expand_function_end (void)
4373 {
4374 rtx clobber_after;
4375
4376 /* If arg_pointer_save_area was referenced only from a nested
4377 function, we will not have initialized it yet. Do that now. */
4378 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4379 get_arg_pointer_save_area ();
4380
4381 /* If we are doing stack checking and this function makes calls,
4382 do a stack probe at the start of the function to ensure we have enough
4383 space for another stack frame. */
4384 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4385 {
4386 rtx insn, seq;
4387
4388 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4389 if (CALL_P (insn))
4390 {
4391 start_sequence ();
4392 probe_stack_range (STACK_CHECK_PROTECT,
4393 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4394 seq = get_insns ();
4395 end_sequence ();
4396 emit_insn_before (seq, stack_check_probe_note);
4397 break;
4398 }
4399 }
4400
4401 /* End any sequences that failed to be closed due to syntax errors. */
4402 while (in_sequence_p ())
4403 end_sequence ();
4404
4405 clear_pending_stack_adjust ();
4406 do_pending_stack_adjust ();
4407
4408 /* Output a linenumber for the end of the function.
4409 SDB depends on this. */
4410 force_next_line_note ();
4411 set_curr_insn_source_location (input_location);
4412
4413 /* Before the return label (if any), clobber the return
4414 registers so that they are not propagated live to the rest of
4415 the function. This can only happen with functions that drop
4416 through; if there had been a return statement, there would
4417 have either been a return rtx, or a jump to the return label.
4418
4419 We delay actual code generation until after the current_function_value_rtx
4420 is computed. */
4421 clobber_after = get_last_insn ();
4422
4423 /* Output the label for the actual return from the function. */
4424 emit_label (return_label);
4425
4426 if (USING_SJLJ_EXCEPTIONS)
4427 {
4428 /* Let except.c know where it should emit the call to unregister
4429 the function context for sjlj exceptions. */
4430 if (flag_exceptions)
4431 sjlj_emit_function_exit_after (get_last_insn ());
4432 }
4433 else
4434 {
4435 /* We want to ensure that instructions that may trap are not
4436 moved into the epilogue by scheduling, because we don't
4437 always emit unwind information for the epilogue. */
4438 if (flag_non_call_exceptions)
4439 emit_insn (gen_blockage ());
4440 }
4441
4442 /* If this is an implementation of throw, do what's necessary to
4443 communicate between __builtin_eh_return and the epilogue. */
4444 expand_eh_return ();
4445
4446 /* If scalar return value was computed in a pseudo-reg, or was a named
4447 return value that got dumped to the stack, copy that to the hard
4448 return register. */
4449 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4450 {
4451 tree decl_result = DECL_RESULT (current_function_decl);
4452 rtx decl_rtl = DECL_RTL (decl_result);
4453
4454 if (REG_P (decl_rtl)
4455 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4456 : DECL_REGISTER (decl_result))
4457 {
4458 rtx real_decl_rtl = current_function_return_rtx;
4459
4460 /* This should be set in assign_parms. */
4461 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4462
4463 /* If this is a BLKmode structure being returned in registers,
4464 then use the mode computed in expand_return. Note that if
4465 decl_rtl is memory, then its mode may have been changed,
4466 but that current_function_return_rtx has not. */
4467 if (GET_MODE (real_decl_rtl) == BLKmode)
4468 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4469
4470 /* If a non-BLKmode return value should be padded at the least
4471 significant end of the register, shift it left by the appropriate
4472 amount. BLKmode results are handled using the group load/store
4473 machinery. */
4474 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4475 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4476 {
4477 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4478 REGNO (real_decl_rtl)),
4479 decl_rtl);
4480 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4481 }
4482 /* If a named return value dumped decl_return to memory, then
4483 we may need to re-do the PROMOTE_MODE signed/unsigned
4484 extension. */
4485 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4486 {
4487 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4488
4489 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4490 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4491 &unsignedp, 1);
4492
4493 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4494 }
4495 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4496 {
4497 /* If expand_function_start has created a PARALLEL for decl_rtl,
4498 move the result to the real return registers. Otherwise, do
4499 a group load from decl_rtl for a named return. */
4500 if (GET_CODE (decl_rtl) == PARALLEL)
4501 emit_group_move (real_decl_rtl, decl_rtl);
4502 else
4503 emit_group_load (real_decl_rtl, decl_rtl,
4504 TREE_TYPE (decl_result),
4505 int_size_in_bytes (TREE_TYPE (decl_result)));
4506 }
4507 /* In the case of complex integer modes smaller than a word, we'll
4508 need to generate some non-trivial bitfield insertions. Do that
4509 on a pseudo and not the hard register. */
4510 else if (GET_CODE (decl_rtl) == CONCAT
4511 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
4512 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
4513 {
4514 int old_generating_concat_p;
4515 rtx tmp;
4516
4517 old_generating_concat_p = generating_concat_p;
4518 generating_concat_p = 0;
4519 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
4520 generating_concat_p = old_generating_concat_p;
4521
4522 emit_move_insn (tmp, decl_rtl);
4523 emit_move_insn (real_decl_rtl, tmp);
4524 }
4525 else
4526 emit_move_insn (real_decl_rtl, decl_rtl);
4527 }
4528 }
4529
4530 /* If returning a structure, arrange to return the address of the value
4531 in a place where debuggers expect to find it.
4532
4533 If returning a structure PCC style,
4534 the caller also depends on this value.
4535 And current_function_returns_pcc_struct is not necessarily set. */
4536 if (current_function_returns_struct
4537 || current_function_returns_pcc_struct)
4538 {
4539 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4540 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4541 rtx outgoing;
4542
4543 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4544 type = TREE_TYPE (type);
4545 else
4546 value_address = XEXP (value_address, 0);
4547
4548 outgoing = targetm.calls.function_value (build_pointer_type (type),
4549 current_function_decl, true);
4550
4551 /* Mark this as a function return value so integrate will delete the
4552 assignment and USE below when inlining this function. */
4553 REG_FUNCTION_VALUE_P (outgoing) = 1;
4554
4555 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4556 value_address = convert_memory_address (GET_MODE (outgoing),
4557 value_address);
4558
4559 emit_move_insn (outgoing, value_address);
4560
4561 /* Show return register used to hold result (in this case the address
4562 of the result). */
4563 current_function_return_rtx = outgoing;
4564 }
4565
4566 /* Emit the actual code to clobber return register. */
4567 {
4568 rtx seq;
4569
4570 start_sequence ();
4571 clobber_return_register ();
4572 expand_naked_return ();
4573 seq = get_insns ();
4574 end_sequence ();
4575
4576 emit_insn_after (seq, clobber_after);
4577 }
4578
4579 /* Output the label for the naked return from the function. */
4580 emit_label (naked_return_label);
4581
4582 /* @@@ This is a kludge. We want to ensure that instructions that
4583 may trap are not moved into the epilogue by scheduling, because
4584 we don't always emit unwind information for the epilogue. */
4585 if (! USING_SJLJ_EXCEPTIONS && flag_non_call_exceptions)
4586 emit_insn (gen_blockage ());
4587
4588 /* If stack protection is enabled for this function, check the guard. */
4589 if (cfun->stack_protect_guard)
4590 stack_protect_epilogue ();
4591
4592 /* If we had calls to alloca, and this machine needs
4593 an accurate stack pointer to exit the function,
4594 insert some code to save and restore the stack pointer. */
4595 if (! EXIT_IGNORE_STACK
4596 && current_function_calls_alloca)
4597 {
4598 rtx tem = 0;
4599
4600 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4601 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4602 }
4603
4604 /* ??? This should no longer be necessary since stupid is no longer with
4605 us, but there are some parts of the compiler (e.g. reload_combine and
4606 sh mach_dep_reorg) that still try to compute their own lifetime info
4607 instead of using the general framework. */
4608 use_return_register ();
4609 }
4610
4611 rtx
4612 get_arg_pointer_save_area (void)
4613 {
4614 rtx ret = arg_pointer_save_area;
4615
4616 if (! ret)
4617 {
4618 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
4619 arg_pointer_save_area = ret;
4620 }
4621
4622 if (! cfun->arg_pointer_save_area_init)
4623 {
4624 rtx seq;
4625
4626 /* Save the arg pointer at the beginning of the function. The
4627 generated stack slot may not be a valid memory address, so we
4628 have to check it and fix it if necessary. */
4629 start_sequence ();
4630 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
4631 seq = get_insns ();
4632 end_sequence ();
4633
4634 push_topmost_sequence ();
4635 emit_insn_after (seq, entry_of_function ());
4636 pop_topmost_sequence ();
4637 }
4638
4639 return ret;
4640 }
4641 \f
4642 /* Extend a vector that records the INSN_UIDs of INSNS
4643 (a list of one or more insns). */
4644
4645 static void
4646 record_insns (rtx insns, VEC(int,heap) **vecp)
4647 {
4648 rtx tmp;
4649
4650 for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
4651 VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
4652 }
4653
4654 /* Set the locator of the insn chain starting at INSN to LOC. */
4655 static void
4656 set_insn_locators (rtx insn, int loc)
4657 {
4658 while (insn != NULL_RTX)
4659 {
4660 if (INSN_P (insn))
4661 INSN_LOCATOR (insn) = loc;
4662 insn = NEXT_INSN (insn);
4663 }
4664 }
4665
4666 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4667 be running after reorg, SEQUENCE rtl is possible. */
4668
4669 static int
4670 contains (const_rtx insn, VEC(int,heap) **vec)
4671 {
4672 int i, j;
4673
4674 if (NONJUMP_INSN_P (insn)
4675 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4676 {
4677 int count = 0;
4678 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4679 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4680 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
4681 == VEC_index (int, *vec, j))
4682 count++;
4683 return count;
4684 }
4685 else
4686 {
4687 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4688 if (INSN_UID (insn) == VEC_index (int, *vec, j))
4689 return 1;
4690 }
4691 return 0;
4692 }
4693
4694 int
4695 prologue_epilogue_contains (const_rtx insn)
4696 {
4697 if (contains (insn, &prologue))
4698 return 1;
4699 if (contains (insn, &epilogue))
4700 return 1;
4701 return 0;
4702 }
4703
4704 int
4705 sibcall_epilogue_contains (const_rtx insn)
4706 {
4707 if (sibcall_epilogue)
4708 return contains (insn, &sibcall_epilogue);
4709 return 0;
4710 }
4711
4712 #ifdef HAVE_return
4713 /* Insert gen_return at the end of block BB. This also means updating
4714 block_for_insn appropriately. */
4715
4716 static void
4717 emit_return_into_block (basic_block bb)
4718 {
4719 emit_jump_insn_after (gen_return (), BB_END (bb));
4720 }
4721 #endif /* HAVE_return */
4722
4723 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4724
4725 /* These functions convert the epilogue into a variant that does not
4726 modify the stack pointer. This is used in cases where a function
4727 returns an object whose size is not known until it is computed.
4728 The called function leaves the object on the stack, leaves the
4729 stack depressed, and returns a pointer to the object.
4730
4731 What we need to do is track all modifications and references to the
4732 stack pointer, deleting the modifications and changing the
4733 references to point to the location the stack pointer would have
4734 pointed to had the modifications taken place.
4735
4736 These functions need to be portable so we need to make as few
4737 assumptions about the epilogue as we can. However, the epilogue
4738 basically contains three things: instructions to reset the stack
4739 pointer, instructions to reload registers, possibly including the
4740 frame pointer, and an instruction to return to the caller.
4741
4742 We must be sure of what a relevant epilogue insn is doing. We also
4743 make no attempt to validate the insns we make since if they are
4744 invalid, we probably can't do anything valid. The intent is that
4745 these routines get "smarter" as more and more machines start to use
4746 them and they try operating on different epilogues.
4747
4748 We use the following structure to track what the part of the
4749 epilogue that we've already processed has done. We keep two copies
4750 of the SP equivalence, one for use during the insn we are
4751 processing and one for use in the next insn. The difference is
4752 because one part of a PARALLEL may adjust SP and the other may use
4753 it. */
4754
4755 struct epi_info
4756 {
4757 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
4758 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
4759 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
4760 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
4761 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
4762 should be set to once we no longer need
4763 its value. */
4764 rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
4765 for registers. */
4766 };
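/* For example, when the epilogue adjusts the stack with
   (set (reg sp) (plus (reg sp) (const_int 16))), handle_epilogue_set
   leaves SP_EQUIV_REG as the stack pointer and adds 16 to SP_OFFSET;
   later references to SP in the epilogue are then rewritten as
   (plus sp 16) instead of the adjustment being emitted.  */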
4767
4768 static void handle_epilogue_set (rtx, struct epi_info *);
4769 static void update_epilogue_consts (rtx, const_rtx, void *);
4770 static void emit_equiv_load (struct epi_info *);
4771
4772 /* Modify INSNS, a list of one or more insns that is part of the epilogue,
4773 so that it makes no modifications to the stack pointer.  Return the new list of insns. */
4774
4775 static rtx
4776 keep_stack_depressed (rtx insns)
4777 {
4778 int j;
4779 struct epi_info info;
4780 rtx insn, next;
4781
4782 /* If the epilogue is just a single instruction, it must be OK as is. */
4783 if (NEXT_INSN (insns) == NULL_RTX)
4784 return insns;
4785
4786 /* Otherwise, start a sequence, initialize the information we have, and
4787 process all the insns we were given. */
4788 start_sequence ();
4789
4790 info.sp_equiv_reg = stack_pointer_rtx;
4791 info.sp_offset = 0;
4792 info.equiv_reg_src = 0;
4793
4794 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
4795 info.const_equiv[j] = 0;
4796
4797 insn = insns;
4798 next = NULL_RTX;
4799 while (insn != NULL_RTX)
4800 {
4801 next = NEXT_INSN (insn);
4802
4803 if (!INSN_P (insn))
4804 {
4805 add_insn (insn);
4806 insn = next;
4807 continue;
4808 }
4809
4810 /* If this insn references the register that SP is equivalent to and
4811 we have a pending load to that register, we must force out the load
4812 first and then indicate we no longer know what SP's equivalent is. */
4813 if (info.equiv_reg_src != 0
4814 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
4815 {
4816 emit_equiv_load (&info);
4817 info.sp_equiv_reg = 0;
4818 }
4819
4820 info.new_sp_equiv_reg = info.sp_equiv_reg;
4821 info.new_sp_offset = info.sp_offset;
4822
4823 /* If this is a (RETURN) and the return address is on the stack,
4824 update the address and change to an indirect jump. */
4825 if (GET_CODE (PATTERN (insn)) == RETURN
4826 || (GET_CODE (PATTERN (insn)) == PARALLEL
4827 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
4828 {
4829 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
4830 rtx base = 0;
4831 HOST_WIDE_INT offset = 0;
4832 rtx jump_insn, jump_set;
4833
4834 /* If the return address is in a register, we can emit the insn
4835 unchanged. Otherwise, it must be a MEM and we see what the
4836 base register and offset are. In any case, we have to emit any
4837 pending load to the equivalent reg of SP, if any. */
4838 if (REG_P (retaddr))
4839 {
4840 emit_equiv_load (&info);
4841 add_insn (insn);
4842 insn = next;
4843 continue;
4844 }
4845 else
4846 {
4847 rtx ret_ptr;
4848 gcc_assert (MEM_P (retaddr));
4849
4850 ret_ptr = XEXP (retaddr, 0);
4851
4852 if (REG_P (ret_ptr))
4853 {
4854 base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
4855 offset = 0;
4856 }
4857 else
4858 {
4859 gcc_assert (GET_CODE (ret_ptr) == PLUS
4860 && REG_P (XEXP (ret_ptr, 0))
4861 && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
4862 base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
4863 offset = INTVAL (XEXP (ret_ptr, 1));
4864 }
4865 }
4866
4867 /* If the base of the location containing the return pointer
4868 is SP, we must update it with the replacement address. Otherwise,
4869 just build the necessary MEM. */
4870 retaddr = plus_constant (base, offset);
4871 if (base == stack_pointer_rtx)
4872 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
4873 plus_constant (info.sp_equiv_reg,
4874 info.sp_offset));
4875
4876 retaddr = gen_rtx_MEM (Pmode, retaddr);
4877 MEM_NOTRAP_P (retaddr) = 1;
4878
4879 /* If there is a pending load to the equivalent register for SP
4880 and we reference that register, we must load our address into
4881 a scratch register and then do that load. */
4882 if (info.equiv_reg_src
4883 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
4884 {
4885 unsigned int regno;
4886 rtx reg;
4887
4888 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4889 if (HARD_REGNO_MODE_OK (regno, Pmode)
4890 && !fixed_regs[regno]
4891 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
4892 && !REGNO_REG_SET_P
4893 (DF_LR_IN (EXIT_BLOCK_PTR), regno)
4894 && !refers_to_regno_p (regno,
4895 end_hard_regno (Pmode, regno),
4896 info.equiv_reg_src, NULL)
4897 && info.const_equiv[regno] == 0)
4898 break;
4899
4900 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
4901
4902 reg = gen_rtx_REG (Pmode, regno);
4903 emit_move_insn (reg, retaddr);
4904 retaddr = reg;
4905 }
4906
4907 emit_equiv_load (&info);
4908 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
4909
4910 /* Show the SET in the above insn is a RETURN. */
4911 jump_set = single_set (jump_insn);
4912 gcc_assert (jump_set);
4913 SET_IS_RETURN_P (jump_set) = 1;
4914 }
4915
4916 /* If SP is not mentioned in the pattern and its equivalent register, if
4917 any, is not modified, just emit it. Otherwise, if neither is set,
4918 replace the reference to SP and emit the insn. If none of those are
4919 true, handle each SET individually. */
4920 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
4921 && (info.sp_equiv_reg == stack_pointer_rtx
4922 || !reg_set_p (info.sp_equiv_reg, insn)))
4923 add_insn (insn);
4924 else if (! reg_set_p (stack_pointer_rtx, insn)
4925 && (info.sp_equiv_reg == stack_pointer_rtx
4926 || !reg_set_p (info.sp_equiv_reg, insn)))
4927 {
4928 int changed;
4929
4930 changed = validate_replace_rtx (stack_pointer_rtx,
4931 plus_constant (info.sp_equiv_reg,
4932 info.sp_offset),
4933 insn);
4934 gcc_assert (changed);
4935
4936 add_insn (insn);
4937 }
4938 else if (GET_CODE (PATTERN (insn)) == SET)
4939 handle_epilogue_set (PATTERN (insn), &info);
4940 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
4941 {
4942 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
4943 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
4944 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
4945 }
4946 else
4947 add_insn (insn);
4948
4949 info.sp_equiv_reg = info.new_sp_equiv_reg;
4950 info.sp_offset = info.new_sp_offset;
4951
4952 /* Now update any constants this insn sets. */
4953 note_stores (PATTERN (insn), update_epilogue_consts, &info);
4954 insn = next;
4955 }
4956
4957 insns = get_insns ();
4958 end_sequence ();
4959 return insns;
4960 }
4961
4962 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
4963 structure that contains information about what we've seen so far. We
4964 process this SET by either updating that data or by emitting one or
4965 more insns. */
4966
4967 static void
4968 handle_epilogue_set (rtx set, struct epi_info *p)
4969 {
4970 /* First handle the case where we are setting SP. Record what it is being
4971 set from, which we must be able to determine.  */
4972 if (reg_set_p (stack_pointer_rtx, set))
4973 {
4974 gcc_assert (SET_DEST (set) == stack_pointer_rtx);
4975
4976 if (GET_CODE (SET_SRC (set)) == PLUS)
4977 {
4978 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
4979 if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
4980 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
4981 else
4982 {
4983 gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
4984 && (REGNO (XEXP (SET_SRC (set), 1))
4985 < FIRST_PSEUDO_REGISTER)
4986 && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4987 p->new_sp_offset
4988 = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4989 }
4990 }
4991 else
4992 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
4993
4994 /* If we are adjusting SP, we adjust from the old data. */
4995 if (p->new_sp_equiv_reg == stack_pointer_rtx)
4996 {
4997 p->new_sp_equiv_reg = p->sp_equiv_reg;
4998 p->new_sp_offset += p->sp_offset;
4999 }
5000
5001 gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));
5002
5003 return;
5004 }
5005
5006 /* Next handle the case where we are setting SP's equivalent
5007 register. We must not already have a value to set it to. We
5008 could update, but there seems little point in handling that case.
5009 Note that we have to allow for the case where we are setting the
5010 register set in the previous part of a PARALLEL inside a single
5011 insn. But use the old offset for any updates within this insn.
5012 We must allow for the case where the register is being set in a
5013 different (usually wider) mode than Pmode.  */
5014 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
5015 {
5016 gcc_assert (!p->equiv_reg_src
5017 && REG_P (p->new_sp_equiv_reg)
5018 && REG_P (SET_DEST (set))
5019 && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
5020 <= BITS_PER_WORD)
5021 && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
5022 p->equiv_reg_src
5023 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
5024 plus_constant (p->sp_equiv_reg,
5025 p->sp_offset));
5026 }
5027
5028 /* Otherwise, replace any references to SP in the insn to its new value
5029 and emit the insn. */
5030 else
5031 {
5032 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
5033 plus_constant (p->sp_equiv_reg,
5034 p->sp_offset));
5035 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
5036 plus_constant (p->sp_equiv_reg,
5037 p->sp_offset));
5038 emit_insn (set);
5039 }
5040 }
5041
5042 /* Update the tracking information for registers set to constants. */
5043
5044 static void
5045 update_epilogue_consts (rtx dest, const_rtx x, void *data)
5046 {
5047 struct epi_info *p = (struct epi_info *) data;
5048 rtx new;
5049
5050 if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5051 return;
5052
5053 /* If we are either clobbering a register or doing a partial set,
5054 show we don't know the value. */
5055 else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
5056 p->const_equiv[REGNO (dest)] = 0;
5057
5058 /* If we are setting it to a constant, record that constant. */
5059 else if (GET_CODE (SET_SRC (x)) == CONST_INT)
5060 p->const_equiv[REGNO (dest)] = SET_SRC (x);
5061
5062 /* If this is a binary operation between a register we have been tracking
5063 and a constant, see if we can compute a new constant value. */
5064 else if (ARITHMETIC_P (SET_SRC (x))
5065 && REG_P (XEXP (SET_SRC (x), 0))
5066 && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
5067 && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
5068 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5069 && 0 != (new = simplify_binary_operation
5070 (GET_CODE (SET_SRC (x)), GET_MODE (dest),
5071 p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
5072 XEXP (SET_SRC (x), 1)))
5073 && GET_CODE (new) == CONST_INT)
5074 p->const_equiv[REGNO (dest)] = new;
5075
5076 /* Otherwise, we can't do anything with this value. */
5077 else
5078 p->const_equiv[REGNO (dest)] = 0;
5079 }
5080
5081 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
5082
5083 static void
5084 emit_equiv_load (struct epi_info *p)
5085 {
5086 if (p->equiv_reg_src != 0)
5087 {
5088 rtx dest = p->sp_equiv_reg;
5089
5090 if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
5091 dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
5092 REGNO (p->sp_equiv_reg));
5093
5094 emit_move_insn (dest, p->equiv_reg_src);
5095 p->equiv_reg_src = 0;
5096 }
5097 }
5098 #endif
5099
5100 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5101 this into place with notes indicating where the prologue ends and where
5102 the epilogue begins. Update the basic block information when possible. */
5103
5104 static void
5105 thread_prologue_and_epilogue_insns (void)
5106 {
5107 int inserted = 0;
5108 edge e;
5109 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
5110 rtx seq;
5111 #endif
5112 #if defined (HAVE_epilogue) || defined(HAVE_return)
5113 rtx epilogue_end = NULL_RTX;
5114 #endif
5115 edge_iterator ei;
5116
5117 #ifdef HAVE_prologue
5118 if (HAVE_prologue)
5119 {
5120 start_sequence ();
5121 seq = gen_prologue ();
5122 emit_insn (seq);
5123
5124 /* Insert an explicit USE for the frame pointer
5125 if the profiling is on and the frame pointer is required. */
5126 if (current_function_profile && frame_pointer_needed)
5127 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
5128
5129 /* Retain a map of the prologue insns. */
5130 record_insns (seq, &prologue);
5131 emit_note (NOTE_INSN_PROLOGUE_END);
5132
5133 #ifndef PROFILE_BEFORE_PROLOGUE
5134 /* Ensure that instructions are not moved into the prologue when
5135 profiling is on. The call to the profiling routine can be
5136 emitted within the live range of a call-clobbered register. */
5137 if (current_function_profile)
5138 emit_insn (gen_blockage ());
5139 #endif
5140
5141 seq = get_insns ();
5142 end_sequence ();
5143 set_insn_locators (seq, prologue_locator);
5144
5145 /* Can't deal with multiple successors of the entry block
5146 at the moment. Function should always have at least one
5147 entry point. */
5148 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5149
5150 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
5151 inserted = 1;
5152 }
5153 #endif
5154
5155 /* If the exit block has no non-fake predecessors, we don't need
5156 an epilogue. */
5157 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5158 if ((e->flags & EDGE_FAKE) == 0)
5159 break;
5160 if (e == NULL)
5161 goto epilogue_done;
5162
5163 #ifdef HAVE_return
5164 if (optimize && HAVE_return)
5165 {
5166 /* If we're allowed to generate a simple return instruction,
5167 then by definition we don't need a full epilogue. Examine
5168 the block that falls through to EXIT. If it does not
5169 contain any code, examine its predecessors and try to
5170 emit (conditional) return instructions. */
5171
5172 basic_block last;
5173 rtx label;
5174
5175 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5176 if (e->flags & EDGE_FALLTHRU)
5177 break;
5178 if (e == NULL)
5179 goto epilogue_done;
5180 last = e->src;
5181
5182 /* Verify that there are no active instructions in the last block. */
5183 label = BB_END (last);
5184 while (label && !LABEL_P (label))
5185 {
5186 if (active_insn_p (label))
5187 break;
5188 label = PREV_INSN (label);
5189 }
5190
5191 if (BB_HEAD (last) == label && LABEL_P (label))
5192 {
5193 edge_iterator ei2;
5194
5195 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
5196 {
5197 basic_block bb = e->src;
5198 rtx jump;
5199
5200 if (bb == ENTRY_BLOCK_PTR)
5201 {
5202 ei_next (&ei2);
5203 continue;
5204 }
5205
5206 jump = BB_END (bb);
5207 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5208 {
5209 ei_next (&ei2);
5210 continue;
5211 }
5212
5213 /* If we have an unconditional jump, we can replace that
5214 with a simple return instruction. */
5215 if (simplejump_p (jump))
5216 {
5217 emit_return_into_block (bb);
5218 delete_insn (jump);
5219 }
5220
5221 /* If we have a conditional jump, we can try to replace
5222 that with a conditional return instruction. */
5223 else if (condjump_p (jump))
5224 {
5225 if (! redirect_jump (jump, 0, 0))
5226 {
5227 ei_next (&ei2);
5228 continue;
5229 }
5230
5231 /* If this block has only one successor, it both jumps
5232 and falls through to the fallthru block, so we can't
5233 delete the edge. */
5234 if (single_succ_p (bb))
5235 {
5236 ei_next (&ei2);
5237 continue;
5238 }
5239 }
5240 else
5241 {
5242 ei_next (&ei2);
5243 continue;
5244 }
5245
5246 /* Fix up the CFG for the successful change we just made. */
5247 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5248 }
5249
5250 /* Emit a return insn for the exit fallthru block. Whether
5251 this is still reachable will be determined later. */
5252
5253 emit_barrier_after (BB_END (last));
5254 emit_return_into_block (last);
5255 epilogue_end = BB_END (last);
5256 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
5257 goto epilogue_done;
5258 }
5259 }
5260 #endif
5261 /* Find the edge that falls through to EXIT. Other edges may exist
5262 due to RETURN instructions, but those don't need epilogues.
5263 There really shouldn't be a mixture -- either all should have
5264 been converted or none, however... */
5265
5266 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5267 if (e->flags & EDGE_FALLTHRU)
5268 break;
5269 if (e == NULL)
5270 goto epilogue_done;
5271
5272 #ifdef HAVE_epilogue
5273 if (HAVE_epilogue)
5274 {
5275 start_sequence ();
5276 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5277
5278 seq = gen_epilogue ();
5279
5280 #ifdef INCOMING_RETURN_ADDR_RTX
5281 /* If this function returns with the stack depressed and we can support
5282 it, massage the epilogue to actually do that. */
5283 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5284 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
5285 seq = keep_stack_depressed (seq);
5286 #endif
5287
5288 emit_jump_insn (seq);
5289
5290 /* Retain a map of the epilogue insns. */
5291 record_insns (seq, &epilogue);
5292 set_insn_locators (seq, epilogue_locator);
5293
5294 seq = get_insns ();
5295 end_sequence ();
5296
5297 insert_insn_on_edge (seq, e);
5298 inserted = 1;
5299 }
5300 else
5301 #endif
5302 {
5303 basic_block cur_bb;
5304
5305 if (! next_active_insn (BB_END (e->src)))
5306 goto epilogue_done;
5307 /* We have a fall-through edge to the exit block, the source is not
5308 at the end of the function, and there will be an assembler epilogue
5309 at the end of the function.
5310 We can't use force_nonfallthru here, because that would try to
5311 use return. Inserting a jump 'by hand' is extremely messy, so
5312 we take advantage of cfg_layout_finalize using
5313 fixup_fallthru_exit_predecessor. */
5314 cfg_layout_initialize (0);
5315 FOR_EACH_BB (cur_bb)
5316 if (cur_bb->index >= NUM_FIXED_BLOCKS
5317 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5318 cur_bb->aux = cur_bb->next_bb;
5319 cfg_layout_finalize ();
5320 }
5321 epilogue_done:
5322
5323 if (inserted)
5324 {
5325 commit_edge_insertions ();
5326
5327 /* The epilogue insns we inserted may cause the exit edge to no longer
5328 be fallthru. */
5329 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5330 {
5331 if (((e->flags & EDGE_FALLTHRU) != 0)
5332 && returnjump_p (BB_END (e->src)))
5333 e->flags &= ~EDGE_FALLTHRU;
5334 }
5335 }
5336
5337 #ifdef HAVE_sibcall_epilogue
5338 /* Emit sibling epilogues before any sibling call sites. */
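/* Editorial note (assumed, not part of the original sources): a tail
   call such as

       int f (int x) { return g (x); }

   may be emitted as a sibling call, i.e. as a jump to g rather than a
   call followed by a return.  Control never comes back to f on that
   path, so the epilogue at the end of f is never reached; a copy of
   the epilogue therefore has to be placed immediately before the
   sibcall itself, which is what the loop below does.  */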
5339 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5340 {
5341 basic_block bb = e->src;
5342 rtx insn = BB_END (bb);
5343
5344 if (!CALL_P (insn)
5345 || ! SIBLING_CALL_P (insn))
5346 {
5347 ei_next (&ei);
5348 continue;
5349 }
5350
5351 start_sequence ();
5352 emit_insn (gen_sibcall_epilogue ());
5353 seq = get_insns ();
5354 end_sequence ();
5355
5356 /* Retain a map of the epilogue insns. Used in life analysis to
5357 avoid getting rid of sibcall epilogue insns. Do this before we
5358 actually emit the sequence. */
5359 record_insns (seq, &sibcall_epilogue);
5360 set_insn_locators (seq, epilogue_locator);
5361
5362 emit_insn_before (seq, insn);
5363 ei_next (&ei);
5364 }
5365 #endif
5366
5367 #ifdef HAVE_epilogue
5368 if (epilogue_end)
5369 {
5370 rtx insn, next;
5371
5372 /* Move any NOTE_INSN_FUNCTION_BEG notes that ended up after the
5373 start of the epilogue back in front of it, as those notes can
5374 be relevant for debug info generation.  There is no need,
5375 however, to be strict about whether such a note actually
5376 exists.  */
5377 for (insn = epilogue_end; insn; insn = next)
5378 {
5379 next = NEXT_INSN (insn);
5380 if (NOTE_P (insn)
5381 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
5382 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5383 }
5384 }
5385 #endif
5386
5387 /* Threading the prologue and epilogue changes the artificial refs
5388 in the entry and exit blocks. */
5389 epilogue_completed = 1;
5390 df_update_entry_exit_and_calls ();
5391 }
5392
5393 /* Reposition the prologue-end and epilogue-begin notes after instruction
5394 scheduling and delayed branch scheduling. */
5395
5396 void
5397 reposition_prologue_and_epilogue_notes (void)
5398 {
5399 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5400 rtx insn, last, note;
5401 int len;
5402
5403 if ((len = VEC_length (int, prologue)) > 0)
5404 {
5405 last = 0, note = 0;
5406
5407 /* Scan from the beginning until we reach the last prologue insn.
5408 We apparently can't depend on basic_block_{head,end} after
5409 reorg has run. */
5410 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5411 {
5412 if (NOTE_P (insn))
5413 {
5414 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
5415 note = insn;
5416 }
5417 else if (contains (insn, &prologue))
5418 {
5419 last = insn;
5420 if (--len == 0)
5421 break;
5422 }
5423 }
5424
5425 if (last)
5426 {
5427 /* Find the prologue-end note if we haven't already, and
5428 move it to just after the last prologue insn. */
5429 if (note == 0)
5430 {
5431 for (note = last; (note = NEXT_INSN (note));)
5432 if (NOTE_P (note)
5433 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
5434 break;
5435 }
5436
5437 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5438 if (LABEL_P (last))
5439 last = NEXT_INSN (last);
5440 reorder_insns (note, note, last);
5441 }
5442 }
5443
5444 if ((len = VEC_length (int, epilogue)) > 0)
5445 {
5446 last = 0, note = 0;
5447
5448 /* Scan from the end until we reach the first epilogue insn.
5449 We apparently can't depend on basic_block_{head,end} after
5450 reorg has run. */
5451 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5452 {
5453 if (NOTE_P (insn))
5454 {
5455 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
5456 note = insn;
5457 }
5458 else if (contains (insn, &epilogue))
5459 {
5460 last = insn;
5461 if (--len == 0)
5462 break;
5463 }
5464 }
5465
5466 if (last)
5467 {
5468 /* Find the epilogue-begin note if we haven't already, and
5469 move it to just before the first epilogue insn. */
5470 if (note == 0)
5471 {
5472 for (note = insn; (note = PREV_INSN (note));)
5473 if (NOTE_P (note)
5474 && NOTE_KIND (note) == NOTE_INSN_EPILOGUE_BEG)
5475 break;
5476 }
5477
5478 if (PREV_INSN (last) != note)
5479 reorder_insns (note, note, PREV_INSN (last));
5480 }
5481 }
5482 #endif /* HAVE_prologue or HAVE_epilogue */
5483 }
5484
5485 /* Returns the name of the current function. */
5486 const char *
5487 current_function_name (void)
5488 {
5489 return lang_hooks.decl_printable_name (cfun->decl, 2);
5490 }
5491
5492 /* Returns the raw (mangled) name of the current function. */
5493 const char *
5494 current_function_assembler_name (void)
5495 {
5496 return IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (cfun->decl));
5497 }
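/* Editorial example (assumed, not part of the original sources): for a
   C++ function `void foo (int)', current_function_name yields a
   printable source-level name derived from the FUNCTION_DECL, whereas
   current_function_assembler_name yields the symbol actually emitted
   into the assembly output, e.g. the Itanium-ABI mangled name
   "_Z3fooi" (possibly with a target-specific leading underscore).  */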
5498 \f
5499
5500 static unsigned int
5501 rest_of_handle_check_leaf_regs (void)
5502 {
5503 #ifdef LEAF_REGISTERS
5504 current_function_uses_only_leaf_regs
5505 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5506 #endif
5507 return 0;
5508 }
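/* Editorial note (assumed, not part of the original sources): a leaf
   function is one that makes no calls, e.g.

       static int add2 (int a, int b) { return a + b; }

   On targets that define LEAF_REGISTERS (for instance SPARC, where a
   leaf routine can run in its caller's register window), the flag set
   above lets the backend avoid setting up a new frame or window when
   the function has also restricted itself to the permitted leaf
   registers.  */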
5509
5510 /* Insert a TYPE into the used types hash table of CFUN. */
5511 static void
5512 used_types_insert_helper (tree type, struct function *func)
5513 {
5514 if (type != NULL && func != NULL)
5515 {
5516 void **slot;
5517
5518 if (func->used_types_hash == NULL)
5519 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
5520 htab_eq_pointer, NULL);
5521 slot = htab_find_slot (func->used_types_hash, type, INSERT);
5522 if (*slot == NULL)
5523 *slot = type;
5524 }
5525 }
5526
5527 /* Given a type, insert it into the used types hash table of cfun. */
5528 void
5529 used_types_insert (tree t)
5530 {
5531 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
5532 t = TREE_TYPE (t);
5533 t = TYPE_MAIN_VARIANT (t);
5534 if (debug_info_level > DINFO_LEVEL_NONE)
5535 used_types_insert_helper (t, cfun);
5536 }
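/* Editorial example (assumed, not part of the original sources): for a
   declaration such as

       const int **a[4];

   the loop above strips the array and pointer layers and
   TYPE_MAIN_VARIANT then drops the const qualifier, so the type
   recorded in the used-types hash is plain `int'.  Recording only the
   innermost main variant keeps the table small while still telling
   later debug info generation which base types the function used.  */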
5537
5538 struct rtl_opt_pass pass_leaf_regs =
5539 {
5540 {
5541 RTL_PASS,
5542 NULL, /* name */
5543 NULL, /* gate */
5544 rest_of_handle_check_leaf_regs, /* execute */
5545 NULL, /* sub */
5546 NULL, /* next */
5547 0, /* static_pass_number */
5548 0, /* tv_id */
5549 0, /* properties_required */
5550 0, /* properties_provided */
5551 0, /* properties_destroyed */
5552 0, /* todo_flags_start */
5553 0 /* todo_flags_finish */
5554 }
5555 };
5556
5557 static unsigned int
5558 rest_of_handle_thread_prologue_and_epilogue (void)
5559 {
5560 if (optimize)
5561 cleanup_cfg (CLEANUP_EXPENSIVE);
5562 /* On some machines, the prologue and epilogue code, or parts thereof,
5563 can be represented as RTL. Doing so lets us schedule insns between
5564 it and the rest of the code and also allows delayed branch
5565 scheduling to operate in the epilogue. */
5566
5567 thread_prologue_and_epilogue_insns ();
5568 return 0;
5569 }
5570
5571 struct rtl_opt_pass pass_thread_prologue_and_epilogue =
5572 {
5573 {
5574 RTL_PASS,
5575 "pro_and_epilogue", /* name */
5576 NULL, /* gate */
5577 rest_of_handle_thread_prologue_and_epilogue, /* execute */
5578 NULL, /* sub */
5579 NULL, /* next */
5580 0, /* static_pass_number */
5581 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
5582 0, /* properties_required */
5583 0, /* properties_provided */
5584 0, /* properties_destroyed */
5585 TODO_verify_flow, /* todo_flags_start */
5586 TODO_dump_func |
5587 TODO_df_verify |
5588 TODO_df_finish | TODO_verify_rtl_sharing |
5589 TODO_ggc_collect /* todo_flags_finish */
5590 }
5591 };
5592 \f
5593
5594 /* This mini-pass fixes fall-out from SSA in asm statements that have
5595 in-out constraints. Say you start with
5596
5597 orig = inout;
5598 asm ("": "+mr" (inout));
5599 use (orig);
5600
5601 which is transformed very early to use explicit output and match operands:
5602
5603 orig = inout;
5604 asm ("": "=mr" (inout) : "0" (inout));
5605 use (orig);
5606
5607 Or, after SSA and copyprop,
5608
5609 asm ("": "=mr" (inout_2) : "0" (inout_1));
5610 use (inout_1);
5611
5612 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
5613 they represent two separate values, so they will get different pseudo
5614 registers during expansion. Then, since the two operands need to match
5615 per the constraints, but use different pseudo registers, reload can
5616 only register a reload for these operands. But reloads can only be
5617 satisfied by hardregs, not by memory, so we need a register for this
5618 reload, just because we are presented with non-matching operands.
5619 So, even though we allow memory for this operand, no memory can be
5620 used for it, just because the two operands don't match. This can
5621 cause reload failures on register-starved targets.
5622
5623 So it's a symptom of reload not being able to use memory for reloads
5624 or, alternatively it's also a symptom of both operands not coming into
5625 reload as matching (in which case the pseudo could go to memory just
5626 fine, as the alternative allows it, and no reload would be necessary).
5627 We fix the latter problem here, by transforming
5628
5629 asm ("": "=mr" (inout_2) : "0" (inout_1));
5630
5631 back to
5632
5633 inout_2 = inout_1;
5634 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
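/* Editorial sketch (assumed, not part of the original sources): at the
   source level the problematic construct is an asm with an in-out
   operand whose old value stays live afterwards, e.g.

       int f (int x)
       {
         int orig = x;
         asm ("" : "+mr" (x));
         return orig + x;
       }

   Because `orig' keeps the pre-asm value of `x' live past the asm, the
   "+mr" operand ends up as distinct output and matching-input pseudos
   during expansion, and the pass below re-links them with an explicit
   copy so that the "m" alternative remains usable for reload.  */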
5635
5636 static void
5637 match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
5638 {
5639 int i;
5640 bool changed = false;
5641 rtx op = SET_SRC (p_sets[0]);
5642 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
5643 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
5644 bool *output_matched = alloca (noutputs * sizeof (bool));
5645
5646 memset (output_matched, 0, noutputs * sizeof (bool));
5647 for (i = 0; i < ninputs; i++)
5648 {
5649 rtx input, output, insns;
5650 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
5651 char *end;
5652 int match, j;
5653
5654 match = strtoul (constraint, &end, 10);
5655 if (end == constraint)
5656 continue;
5657
5658 gcc_assert (match < noutputs);
5659 output = SET_DEST (p_sets[match]);
5660 input = RTVEC_ELT (inputs, i);
5661 /* Only do the transformation for pseudos. */
5662 if (! REG_P (output)
5663 || rtx_equal_p (output, input)
5664 || (GET_MODE (input) != VOIDmode
5665 && GET_MODE (input) != GET_MODE (output)))
5666 continue;
5667
5668 /* We can't do anything if the output is also used as input,
5669 as we're going to overwrite it. */
5670 for (j = 0; j < ninputs; j++)
5671 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
5672 break;
5673 if (j != ninputs)
5674 continue;
5675
5676 /* Avoid changing the same input several times. For
5677 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
5678 only change IN once (to out1), rather than changing it
5679 first to out1 and afterwards to out2. */
5680 if (i > 0)
5681 {
5682 for (j = 0; j < noutputs; j++)
5683 if (output_matched[j] && input == SET_DEST (p_sets[j]))
5684 break;
5685 if (j != noutputs)
5686 continue;
5687 }
5688 output_matched[match] = true;
5689
5690 start_sequence ();
5691 emit_move_insn (output, input);
5692 insns = get_insns ();
5693 end_sequence ();
5694 emit_insn_before (insns, insn);
5695
5696 /* Now replace all mentions of the input with output. We can't
5697 just replace the occurrence in inputs[i], as the register might
5698 also be used in some other input (or even in an address of an
5699 output), which would mean possibly increasing the number of
5700 inputs by one (namely 'output' in addition), which might pose
5701 too complicated a problem for reload to solve.  E.g. this situation:
5702
5703 asm ("" : "=r" (output), "=m" (input) : "0" (input))
5704
5705 Here 'input' is used in two occurrences as input (once for the
5706 input operand, once for the address in the second output operand).
5707 If we replaced only the occurrence of the input operand (to
5708 make the matching) we would be left with this:
5709
5710 output = input
5711 asm ("" : "=r" (output), "=m" (input) : "0" (output))
5712
5713 Now we suddenly have two different input values (containing the same
5714 value, but different pseudos) where we formerly had only one.
5715 With more complicated asms this might lead to reload failures
5716 which wouldn't have happened without this pass.  So, iterate over
5717 all operands and replace all occurrences of the register used. */
5718 for (j = 0; j < noutputs; j++)
5719 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
5720 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
5721 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
5722 input, output);
5723 for (j = 0; j < ninputs; j++)
5724 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
5725 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
5726 input, output);
5727
5728 changed = true;
5729 }
5730
5731 if (changed)
5732 df_insn_rescan (insn);
5733 }
5734
5735 static unsigned
5736 rest_of_match_asm_constraints (void)
5737 {
5738 basic_block bb;
5739 rtx insn, pat, *p_sets;
5740 int noutputs;
5741
5742 if (!cfun->has_asm_statement)
5743 return 0;
5744
5745 df_set_flags (DF_DEFER_INSN_RESCAN);
5746 FOR_EACH_BB (bb)
5747 {
5748 FOR_BB_INSNS (bb, insn)
5749 {
5750 if (!INSN_P (insn))
5751 continue;
5752
5753 pat = PATTERN (insn);
5754 if (GET_CODE (pat) == PARALLEL)
5755 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
5756 else if (GET_CODE (pat) == SET)
5757 p_sets = &PATTERN (insn), noutputs = 1;
5758 else
5759 continue;
5760
5761 if (GET_CODE (*p_sets) == SET
5762 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
5763 match_asm_constraints_1 (insn, p_sets, noutputs);
5764 }
5765 }
5766
5767 return TODO_df_finish;
5768 }
5769
5770 struct rtl_opt_pass pass_match_asm_constraints =
5771 {
5772 {
5773 RTL_PASS,
5774 "asmcons", /* name */
5775 NULL, /* gate */
5776 rest_of_match_asm_constraints, /* execute */
5777 NULL, /* sub */
5778 NULL, /* next */
5779 0, /* static_pass_number */
5780 0, /* tv_id */
5781 0, /* properties_required */
5782 0, /* properties_provided */
5783 0, /* properties_destroyed */
5784 0, /* todo_flags_start */
5785 TODO_dump_func /* todo_flags_finish */
5786 }
5787 };
5788
5789
5790 #include "gt-function.h"