1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register.
35
36 Call `put_var_into_stack' when you learn, belatedly, that a variable
37 previously given a pseudo-register must in fact go in the stack.
38 This function changes the DECL_RTL to be a stack slot instead of a reg
39 then scans all the RTL instructions so far generated to correct them. */
40
41 #include "config.h"
42 #include "system.h"
43 #include "coretypes.h"
44 #include "tm.h"
45 #include "rtl.h"
46 #include "tree.h"
47 #include "flags.h"
48 #include "except.h"
49 #include "function.h"
50 #include "expr.h"
51 #include "optabs.h"
52 #include "libfuncs.h"
53 #include "regs.h"
54 #include "hard-reg-set.h"
55 #include "insn-config.h"
56 #include "recog.h"
57 #include "output.h"
58 #include "basic-block.h"
59 #include "toplev.h"
60 #include "hashtab.h"
61 #include "ggc.h"
62 #include "tm_p.h"
63 #include "integrate.h"
64 #include "langhooks.h"
65 #include "target.h"
66
67 #ifndef TRAMPOLINE_ALIGNMENT
68 #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
69 #endif
70
71 #ifndef LOCAL_ALIGNMENT
72 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
73 #endif
74
75 #ifndef STACK_ALIGNMENT_NEEDED
76 #define STACK_ALIGNMENT_NEEDED 1
77 #endif
78
79 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
80
81 /* Some systems use __main in a way incompatible with its use in gcc; in these
82 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
83 give the same symbol without quotes for an alternative entry point. You
84 must define both, or neither. */
85 #ifndef NAME__MAIN
86 #define NAME__MAIN "__main"
87 #endif
88
89 /* Round a value down to the largest multiple of the required alignment
90 that does not exceed it. Avoid using division in case the value is
91 negative. Assume the alignment is a power of two. */
92 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
93
94 /* Similar, but round up to the smallest multiple of the alignment that
95 is not less than the value. */
96 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
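/* For example, with ALIGN == 8: FLOOR_ROUND (13, 8) == 8 and
   CEIL_ROUND (13, 8) == 16; a value that is already a multiple of the
   alignment is returned unchanged by both macros. */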
97
98 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
99 during rtl generation. If they are different register numbers, this is
100 always true. It may also be true if
101 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
102 generation. See fix_lexical_addr for details. */
103
104 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
105 #define NEED_SEPARATE_AP
106 #endif
107
108 /* Nonzero if function being compiled doesn't contain any calls
109 (ignoring the prologue and epilogue). This is set prior to
110 local register allocation and is valid for the remaining
111 compiler passes. */
112 int current_function_is_leaf;
113
114 /* Nonzero if function being compiled doesn't contain any instructions
115 that can throw an exception. This is set prior to final. */
116
117 int current_function_nothrow;
118
119 /* Nonzero if function being compiled doesn't modify the stack pointer
120 (ignoring the prologue and epilogue). This is only valid after
121 life_analysis has run. */
122 int current_function_sp_is_unchanging;
123
124 /* Nonzero if the function being compiled is a leaf function which only
125 uses leaf registers. This is valid after reload (specifically after
126 sched2) and is useful only if the port defines LEAF_REGISTERS. */
127 int current_function_uses_only_leaf_regs;
128
129 /* Nonzero once virtual register instantiation has been done.
130 assign_stack_local uses frame_pointer_rtx when this is nonzero.
131 calls.c:emit_library_call_value_1 uses it to set up
132 post-instantiation libcalls. */
133 int virtuals_instantiated;
134
135 /* Nonzero if at least one trampoline has been created. */
136 int trampolines_created;
137
138 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
139 static GTY(()) int funcdef_no;
140
141 /* These variables hold pointers to functions to create and destroy
142 target specific, per-function data structures. */
143 struct machine_function * (*init_machine_status) (void);
144
145 /* The FUNCTION_DECL for an inline function currently being expanded. */
146 tree inline_function_decl;
147
148 /* The currently compiled function. */
149 struct function *cfun = 0;
150
151 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
152 static GTY(()) varray_type prologue;
153 static GTY(()) varray_type epilogue;
154
155 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
156 in this function. */
157 static GTY(()) varray_type sibcall_epilogue;
158 \f
159 /* In order to evaluate some expressions, such as function calls returning
160 structures in memory, we need to temporarily allocate stack locations.
161 We record each allocated temporary in the following structure.
162
163 Associated with each temporary slot is a nesting level. When we pop up
164 one level, all temporaries associated with the previous level are freed.
165 Normally, all temporaries are freed after the execution of the statement
166 in which they were created. However, if we are inside a ({...}) grouping,
167 the result may be in a temporary and hence must be preserved. If the
168 result could be in a temporary, we preserve it if we can determine which
169 one it is in. If we cannot determine which temporary may contain the
170 result, all temporaries are preserved. A temporary is preserved by
171 pretending it was allocated at the previous nesting level.
172
173 Automatic variables are also assigned temporary slots, at the nesting
174 level where they are defined. They are marked as "kept" so that
175 free_temp_slots will not free them. */
176
177 struct temp_slot GTY(())
178 {
179 /* Points to next temporary slot. */
180 struct temp_slot *next;
181 /* The rtx used to reference the slot. */
182 rtx slot;
183 /* The rtx used to represent the address if not the address of the
184 slot above. May be an EXPR_LIST if multiple addresses exist. */
185 rtx address;
186 /* The alignment (in bits) of the slot. */
187 unsigned int align;
188 /* The size, in units, of the slot. */
189 HOST_WIDE_INT size;
190 /* The type of the object in the slot, or zero if it doesn't correspond
191 to a type. We use this to determine whether a slot can be reused.
192 It can be reused if objects of the type of the new slot will always
193 conflict with objects of the type of the old slot. */
194 tree type;
195 /* The value of `sequence_rtl_expr' when this temporary is allocated. */
196 tree rtl_expr;
197 /* Nonzero if this temporary is currently in use. */
198 char in_use;
199 /* Nonzero if this temporary has its address taken. */
200 char addr_taken;
201 /* Nesting level at which this slot is being used. */
202 int level;
203 /* Nonzero if this should survive a call to free_temp_slots. */
204 int keep;
205 /* The offset of the slot from the frame_pointer, including extra space
206 for alignment. This info is for combine_temp_slots. */
207 HOST_WIDE_INT base_offset;
208 /* The size of the slot, including extra space for alignment. This
209 info is for combine_temp_slots. */
210 HOST_WIDE_INT full_size;
211 };
212 \f
213 /* This structure is used to record MEMs or pseudos used to replace VAR, any
214 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
215 maintain this list in case two operands of an insn were required to match;
216 in that case we must ensure we use the same replacement. */
217
218 struct fixup_replacement GTY(())
219 {
220 rtx old;
221 rtx new;
222 struct fixup_replacement *next;
223 };
224
225 struct insns_for_mem_entry
226 {
227 /* A MEM. */
228 rtx key;
229 /* These are the INSNs which reference the MEM. */
230 rtx insns;
231 };
232
233 /* Forward declarations. */
234
235 static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
236 struct function *);
237 static struct temp_slot *find_temp_slot_from_address (rtx);
238 static void put_reg_into_stack (struct function *, rtx, tree, enum machine_mode,
239 enum machine_mode, int, unsigned int, int, htab_t);
240 static void schedule_fixup_var_refs (struct function *, rtx, tree, enum machine_mode,
241 htab_t);
242 static void fixup_var_refs (rtx, enum machine_mode, int, rtx, htab_t);
243 static struct fixup_replacement
244 *find_fixup_replacement (struct fixup_replacement **, rtx);
245 static void fixup_var_refs_insns (rtx, rtx, enum machine_mode, int, int, rtx);
246 static void fixup_var_refs_insns_with_hash (htab_t, rtx, enum machine_mode, int, rtx);
247 static void fixup_var_refs_insn (rtx, rtx, enum machine_mode, int, int, rtx);
248 static void fixup_var_refs_1 (rtx, enum machine_mode, rtx *, rtx,
249 struct fixup_replacement **, rtx);
250 static rtx fixup_memory_subreg (rtx, rtx, enum machine_mode, int);
251 static rtx walk_fixup_memory_subreg (rtx, rtx, enum machine_mode, int);
252 static rtx fixup_stack_1 (rtx, rtx);
253 static void optimize_bit_field (rtx, rtx, rtx *);
254 static void instantiate_decls (tree, int);
255 static void instantiate_decls_1 (tree, int);
256 static void instantiate_decl (rtx, HOST_WIDE_INT, int);
257 static rtx instantiate_new_reg (rtx, HOST_WIDE_INT *);
258 static int instantiate_virtual_regs_1 (rtx *, rtx, int);
259 static void delete_handlers (void);
260 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
261 static void pad_below (struct args_size *, enum machine_mode, tree);
262 static rtx round_trampoline_addr (rtx);
263 static rtx adjust_trampoline_addr (rtx);
264 static tree *identify_blocks_1 (rtx, tree *, tree *, tree *);
265 static void reorder_blocks_0 (tree);
266 static void reorder_blocks_1 (rtx, tree, varray_type *);
267 static void reorder_fix_fragments (tree);
268 static tree blocks_nreverse (tree);
269 static int all_blocks (tree, tree *);
270 static tree *get_block_vector (tree, int *);
271 extern tree debug_find_var_in_block_tree (tree, tree);
272 /* We always define `record_insns' even if it's not used so that we
273 can always export `prologue_epilogue_contains'. */
274 static void record_insns (rtx, varray_type *) ATTRIBUTE_UNUSED;
275 static int contains (rtx, varray_type);
276 #ifdef HAVE_return
277 static void emit_return_into_block (basic_block, rtx);
278 #endif
279 static void put_addressof_into_stack (rtx, htab_t);
280 static bool purge_addressof_1 (rtx *, rtx, int, int, int, htab_t);
281 static void purge_single_hard_subreg_set (rtx);
282 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
283 static rtx keep_stack_depressed (rtx);
284 #endif
285 static int is_addressof (rtx *, void *);
286 static hashval_t insns_for_mem_hash (const void *);
287 static int insns_for_mem_comp (const void *, const void *);
288 static int insns_for_mem_walk (rtx *, void *);
289 static void compute_insns_for_mem (rtx, rtx, htab_t);
290 static void prepare_function_start (tree);
291 static void do_clobber_return_reg (rtx, void *);
292 static void do_use_return_reg (rtx, void *);
293 static void instantiate_virtual_regs_lossage (rtx);
294 static tree split_complex_args (tree);
295 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
296 \f
297 /* Pointer to chain of `struct function' for containing functions. */
298 struct function *outer_function_chain;
299
300 /* List of insns that were postponed by purge_addressof_1. */
301 static rtx postponed_insns;
302
303 /* Given a function decl for a containing function,
304 return the `struct function' for it. */
305
306 struct function *
307 find_function_data (tree decl)
308 {
309 struct function *p;
310
311 for (p = outer_function_chain; p; p = p->outer)
312 if (p->decl == decl)
313 return p;
314
315 abort ();
316 }
317
318 /* Save the current context for compilation of a nested function.
319 This is called from language-specific code. The caller should use
320 the enter_nested langhook to save any language-specific state,
321 since this function knows only about language-independent
322 variables. */
323
324 void
325 push_function_context_to (tree context)
326 {
327 struct function *p;
328
329 if (context)
330 {
331 if (context == current_function_decl)
332 cfun->contains_functions = 1;
333 else
334 {
335 struct function *containing = find_function_data (context);
336 containing->contains_functions = 1;
337 }
338 }
339
340 if (cfun == 0)
341 init_dummy_function_start ();
342 p = cfun;
343
344 p->outer = outer_function_chain;
345 outer_function_chain = p;
346 p->fixup_var_refs_queue = 0;
347
348 (*lang_hooks.function.enter_nested) (p);
349
350 cfun = 0;
351 }
352
353 void
354 push_function_context (void)
355 {
356 push_function_context_to (current_function_decl);
357 }
358
359 /* Restore the last saved context, at the end of a nested function.
360 This function is called from language-specific code. */
361
362 void
363 pop_function_context_from (tree context ATTRIBUTE_UNUSED)
364 {
365 struct function *p = outer_function_chain;
366 struct var_refs_queue *queue;
367
368 cfun = p;
369 outer_function_chain = p->outer;
370
371 current_function_decl = p->decl;
372 reg_renumber = 0;
373
374 restore_emit_status (p);
375
376 (*lang_hooks.function.leave_nested) (p);
377
378 /* Finish doing put_var_into_stack for any of our variables which became
379 addressable during the nested function. If only one entry has to be
380 fixed up, just do that one. Otherwise, first make a list of MEMs that
381 are not to be unshared. */
382 if (p->fixup_var_refs_queue == 0)
383 ;
384 else if (p->fixup_var_refs_queue->next == 0)
385 fixup_var_refs (p->fixup_var_refs_queue->modified,
386 p->fixup_var_refs_queue->promoted_mode,
387 p->fixup_var_refs_queue->unsignedp,
388 p->fixup_var_refs_queue->modified, 0);
389 else
390 {
391 rtx list = 0;
392
393 for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
394 list = gen_rtx_EXPR_LIST (VOIDmode, queue->modified, list);
395
396 for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
397 fixup_var_refs (queue->modified, queue->promoted_mode,
398 queue->unsignedp, list, 0);
399
400 }
401
402 p->fixup_var_refs_queue = 0;
403
404 /* Reset variables that have known state during rtx generation. */
405 rtx_equal_function_value_matters = 1;
406 virtuals_instantiated = 0;
407 generating_concat_p = 1;
408 }
409
410 void
411 pop_function_context (void)
412 {
413 pop_function_context_from (current_function_decl);
414 }
415
416 /* Clear out all parts of the state in F that can safely be discarded
417 after the function has been parsed, but not compiled, to let
418 garbage collection reclaim the memory. */
419
420 void
421 free_after_parsing (struct function *f)
422 {
423 /* f->expr->forced_labels is used by code generation. */
424 /* f->emit->regno_reg_rtx is used by code generation. */
425 /* f->varasm is used by code generation. */
426 /* f->eh->eh_return_stub_label is used by code generation. */
427
428 (*lang_hooks.function.final) (f);
429 f->stmt = NULL;
430 }
431
432 /* Clear out all parts of the state in F that can safely be discarded
433 after the function has been compiled, to let garbage collection
434 reclaim the memory. */
435
436 void
437 free_after_compilation (struct function *f)
438 {
439 f->eh = NULL;
440 f->expr = NULL;
441 f->emit = NULL;
442 f->varasm = NULL;
443 f->machine = NULL;
444
445 f->x_temp_slots = NULL;
446 f->arg_offset_rtx = NULL;
447 f->return_rtx = NULL;
448 f->internal_arg_pointer = NULL;
449 f->x_nonlocal_labels = NULL;
450 f->x_nonlocal_goto_handler_slots = NULL;
451 f->x_nonlocal_goto_handler_labels = NULL;
452 f->x_nonlocal_goto_stack_level = NULL;
453 f->x_cleanup_label = NULL;
454 f->x_return_label = NULL;
455 f->x_naked_return_label = NULL;
456 f->computed_goto_common_label = NULL;
457 f->computed_goto_common_reg = NULL;
458 f->x_save_expr_regs = NULL;
459 f->x_stack_slot_list = NULL;
460 f->x_rtl_expr_chain = NULL;
461 f->x_tail_recursion_label = NULL;
462 f->x_tail_recursion_reentry = NULL;
463 f->x_arg_pointer_save_area = NULL;
464 f->x_clobber_return_insn = NULL;
465 f->x_context_display = NULL;
466 f->x_trampoline_list = NULL;
467 f->x_parm_birth_insn = NULL;
468 f->x_last_parm_insn = NULL;
469 f->x_parm_reg_stack_loc = NULL;
470 f->fixup_var_refs_queue = NULL;
471 f->original_arg_vector = NULL;
472 f->original_decl_initial = NULL;
473 f->inl_last_parm_insn = NULL;
474 f->epilogue_delay_list = NULL;
475 }
476 \f
477 /* Allocate fixed slots in the stack frame of the current function. */
478
479 /* Return size needed for stack frame based on slots so far allocated in
480 function F.
481 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
482 the caller may have to do that. */
483
484 HOST_WIDE_INT
485 get_func_frame_size (struct function *f)
486 {
487 #ifdef FRAME_GROWS_DOWNWARD
488 return -f->x_frame_offset;
489 #else
490 return f->x_frame_offset;
491 #endif
492 }
493
494 /* Return size needed for stack frame based on slots so far allocated.
495 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
496 the caller may have to do that. */
497 HOST_WIDE_INT
498 get_frame_size (void)
499 {
500 return get_func_frame_size (cfun);
501 }
502
503 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
504 with machine mode MODE.
505
506 ALIGN controls the amount of alignment for the address of the slot:
507 0 means according to MODE,
508 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
509 positive specifies alignment boundary in bits.
510
511 We do not round to stack_boundary here.
512
513 FUNCTION specifies the function to allocate in. */
514
515 static rtx
516 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
517 struct function *function)
518 {
519 rtx x, addr;
520 int bigend_correction = 0;
521 int alignment;
522 int frame_off, frame_alignment, frame_phase;
523
524 if (align == 0)
525 {
526 tree type;
527
528 if (mode == BLKmode)
529 alignment = BIGGEST_ALIGNMENT;
530 else
531 alignment = GET_MODE_ALIGNMENT (mode);
532
533 /* Allow the target to (possibly) increase the alignment of this
534 stack slot. */
535 type = (*lang_hooks.types.type_for_mode) (mode, 0);
536 if (type)
537 alignment = LOCAL_ALIGNMENT (type, alignment);
538
539 alignment /= BITS_PER_UNIT;
540 }
541 else if (align == -1)
542 {
543 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
544 size = CEIL_ROUND (size, alignment);
545 }
546 else
547 alignment = align / BITS_PER_UNIT;
548
549 #ifdef FRAME_GROWS_DOWNWARD
550 function->x_frame_offset -= size;
551 #endif
552
553 /* Ignore alignment requests beyond what the preferred stack boundary can provide. */
554 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
555 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
556
557 if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
558 function->stack_alignment_needed = alignment * BITS_PER_UNIT;
559
560 /* Calculate how many bytes the start of local variables is off from
561 stack alignment. */
562 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
563 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
564 frame_phase = frame_off ? frame_alignment - frame_off : 0;
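/* For instance, on a hypothetical port with a 128-bit PREFERRED_STACK_BOUNDARY
   and a STARTING_FRAME_OFFSET of 8, frame_alignment is 16, frame_off is 8 and
   frame_phase is 8; the rounding below is then done relative to that phase. */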
565
566 /* Round the frame offset to the specified alignment. The default is
567 to always honor requests to align the stack but a port may choose to
568 do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. */
569 if (STACK_ALIGNMENT_NEEDED
570 || mode != BLKmode
571 || size != 0)
572 {
573 /* We must be careful here, since FRAME_OFFSET might be negative and
574 division with a negative dividend isn't as well defined as we might
575 like. So we instead assume that ALIGNMENT is a power of two and
576 use logical operations which are unambiguous. */
577 #ifdef FRAME_GROWS_DOWNWARD
578 function->x_frame_offset
579 = (FLOOR_ROUND (function->x_frame_offset - frame_phase, alignment)
580 + frame_phase);
581 #else
582 function->x_frame_offset
583 = (CEIL_ROUND (function->x_frame_offset - frame_phase, alignment)
584 + frame_phase);
585 #endif
586 }
587
588 /* On a big-endian machine, if we are allocating more space than we will use,
589 use the least significant bytes of those that are allocated. */
590 if (BYTES_BIG_ENDIAN && mode != BLKmode)
591 bigend_correction = size - GET_MODE_SIZE (mode);
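/* E.g., allocating 8 bytes for a 4-byte SImode value on a big-endian target
   gives bigend_correction == 4, so the MEM refers to the last 4 bytes of the
   slot rather than the first 4. */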
592
593 /* If we have already instantiated virtual registers, return the actual
594 address relative to the frame pointer. */
595 if (function == cfun && virtuals_instantiated)
596 addr = plus_constant (frame_pointer_rtx,
597 trunc_int_for_mode
598 (frame_offset + bigend_correction
599 + STARTING_FRAME_OFFSET, Pmode));
600 else
601 addr = plus_constant (virtual_stack_vars_rtx,
602 trunc_int_for_mode
603 (function->x_frame_offset + bigend_correction,
604 Pmode));
605
606 #ifndef FRAME_GROWS_DOWNWARD
607 function->x_frame_offset += size;
608 #endif
609
610 x = gen_rtx_MEM (mode, addr);
611
612 function->x_stack_slot_list
613 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
614
615 return x;
616 }
617
618 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
619 current function. */
620
621 rtx
622 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
623 {
624 return assign_stack_local_1 (mode, size, align, cfun);
625 }
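/* Usage sketch (illustrative, not taken from a specific caller): a pass that
   needs a word-sized slot with the mode's natural alignment might write

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   where an ALIGN argument of 0 means "align according to MODE" as described
   above. */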
626 \f
627 /* Allocate a temporary stack slot and record it for possible later
628 reuse.
629
630 MODE is the machine mode to be given to the returned rtx.
631
632 SIZE is the size in units of the space required. We do no rounding here
633 since assign_stack_local will do any required rounding.
634
635 KEEP is 1 if this slot is to be retained after a call to
636 free_temp_slots. Automatic variables for a block are allocated
637 with this flag. KEEP is 2 if we allocate a longer term temporary,
638 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
639 if we are to allocate something at an inner level to be treated as
640 a variable in the block (e.g., a SAVE_EXPR).
641
642 TYPE is the type that will be used for the stack slot. */
643
644 rtx
645 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size, int keep,
646 tree type)
647 {
648 unsigned int align;
649 struct temp_slot *p, *best_p = 0;
650 rtx slot;
651
652 /* If SIZE is -1 it means that somebody tried to allocate a temporary
653 of a variable size. */
654 if (size == -1)
655 abort ();
656
657 if (mode == BLKmode)
658 align = BIGGEST_ALIGNMENT;
659 else
660 align = GET_MODE_ALIGNMENT (mode);
661
662 if (! type)
663 type = (*lang_hooks.types.type_for_mode) (mode, 0);
664
665 if (type)
666 align = LOCAL_ALIGNMENT (type, align);
667
668 /* Try to find an available, already-allocated temporary of the proper
669 mode which meets the size and alignment requirements. Choose the
670 smallest one with the closest alignment. */
671 for (p = temp_slots; p; p = p->next)
672 if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
673 && ! p->in_use
674 && objects_must_conflict_p (p->type, type)
675 && (best_p == 0 || best_p->size > p->size
676 || (best_p->size == p->size && best_p->align > p->align)))
677 {
678 if (p->align == align && p->size == size)
679 {
680 best_p = 0;
681 break;
682 }
683 best_p = p;
684 }
685
686 /* Make our best, if any, the one to use. */
687 if (best_p)
688 {
689 /* If there are enough aligned bytes left over, make them into a new
690 temp_slot so that the extra bytes don't get wasted. Do this only
691 for BLKmode slots, so that we can be sure of the alignment. */
692 if (GET_MODE (best_p->slot) == BLKmode)
693 {
694 int alignment = best_p->align / BITS_PER_UNIT;
695 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
696
697 if (best_p->size - rounded_size >= alignment)
698 {
699 p = ggc_alloc (sizeof (struct temp_slot));
700 p->in_use = p->addr_taken = 0;
701 p->size = best_p->size - rounded_size;
702 p->base_offset = best_p->base_offset + rounded_size;
703 p->full_size = best_p->full_size - rounded_size;
704 p->slot = gen_rtx_MEM (BLKmode,
705 plus_constant (XEXP (best_p->slot, 0),
706 rounded_size));
707 p->align = best_p->align;
708 p->address = 0;
709 p->rtl_expr = 0;
710 p->type = best_p->type;
711 p->next = temp_slots;
712 temp_slots = p;
713
714 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
715 stack_slot_list);
716
717 best_p->size = rounded_size;
718 best_p->full_size = rounded_size;
719 }
720 }
721
722 p = best_p;
723 }
724
725 /* If we still didn't find one, make a new temporary. */
726 if (p == 0)
727 {
728 HOST_WIDE_INT frame_offset_old = frame_offset;
729
730 p = ggc_alloc (sizeof (struct temp_slot));
731
732 /* We are passing an explicit alignment request to assign_stack_local.
733 One side effect of that is assign_stack_local will not round SIZE
734 to ensure the frame offset remains suitably aligned.
735
736 So for requests which depended on the rounding of SIZE, we go ahead
737 and round it now. We also make sure ALIGNMENT is at least
738 BIGGEST_ALIGNMENT. */
739 if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
740 abort ();
741 p->slot = assign_stack_local (mode,
742 (mode == BLKmode
743 ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
744 : size),
745 align);
746
747 p->align = align;
748
749 /* The following slot size computation is necessary because we don't
750 know the actual size of the temporary slot until assign_stack_local
751 has performed all the frame alignment and size rounding for the
752 requested temporary. Note that extra space added for alignment
753 can be either above or below this stack slot depending on which
754 way the frame grows. We include the extra space if and only if it
755 is above this slot. */
756 #ifdef FRAME_GROWS_DOWNWARD
757 p->size = frame_offset_old - frame_offset;
758 #else
759 p->size = size;
760 #endif
761
762 /* Now define the fields used by combine_temp_slots. */
763 #ifdef FRAME_GROWS_DOWNWARD
764 p->base_offset = frame_offset;
765 p->full_size = frame_offset_old - frame_offset;
766 #else
767 p->base_offset = frame_offset_old;
768 p->full_size = frame_offset - frame_offset_old;
769 #endif
770 p->address = 0;
771 p->next = temp_slots;
772 temp_slots = p;
773 }
774
775 p->in_use = 1;
776 p->addr_taken = 0;
777 p->rtl_expr = seq_rtl_expr;
778 p->type = type;
779
780 if (keep == 2)
781 {
782 p->level = target_temp_slot_level;
783 p->keep = 0;
784 }
785 else if (keep == 3)
786 {
787 p->level = var_temp_slot_level;
788 p->keep = 0;
789 }
790 else
791 {
792 p->level = temp_slot_level;
793 p->keep = keep;
794 }
795
796
797 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
798 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
799 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
800
801 /* If we know the alias set for the memory that will be used, use
802 it. If there's no TYPE, then we don't know anything about the
803 alias set for the memory. */
804 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
805 set_mem_align (slot, align);
806
807 /* If a type is specified, set the relevant flags. */
808 if (type != 0)
809 {
810 RTX_UNCHANGING_P (slot) = (lang_hooks.honor_readonly
811 && TYPE_READONLY (type));
812 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
813 MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
814 }
815
816 return slot;
817 }
818
819 /* Allocate a temporary stack slot and record it for possible later
820 reuse. First three arguments are same as in preceding function. */
821
822 rtx
823 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
824 {
825 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
826 }
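/* Usage sketch: code that wants a scratch word on the stack only for the
   current statement might call

     rtx scratch = assign_stack_temp (word_mode, UNITS_PER_WORD, 0);

   with KEEP == 0 so that free_temp_slots reclaims the slot afterwards. */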
827 \f
828 /* Assign a temporary.
829 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
830 and so that should be used in error messages. In either case, we
831 allocate an object of the given type.
832 KEEP is as for assign_stack_temp.
833 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
834 it is 0 if a register is OK.
835 DONT_PROMOTE is 1 if we should not promote values in register
836 to wider modes. */
837
838 rtx
839 assign_temp (tree type_or_decl, int keep, int memory_required,
840 int dont_promote ATTRIBUTE_UNUSED)
841 {
842 tree type, decl;
843 enum machine_mode mode;
844 #ifndef PROMOTE_FOR_CALL_ONLY
845 int unsignedp;
846 #endif
847
848 if (DECL_P (type_or_decl))
849 decl = type_or_decl, type = TREE_TYPE (decl);
850 else
851 decl = NULL, type = type_or_decl;
852
853 mode = TYPE_MODE (type);
854 #ifndef PROMOTE_FOR_CALL_ONLY
855 unsignedp = TREE_UNSIGNED (type);
856 #endif
857
858 if (mode == BLKmode || memory_required)
859 {
860 HOST_WIDE_INT size = int_size_in_bytes (type);
861 rtx tmp;
862
863 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
864 problems with allocating the stack space. */
865 if (size == 0)
866 size = 1;
867
868 /* Unfortunately, we don't yet know how to allocate variable-sized
869 temporaries. However, sometimes we have a fixed upper limit on
870 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
871 instead. This is the case for Chill variable-sized strings. */
872 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
873 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
874 && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
875 size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);
876
877 /* The size of the temporary may be too large to fit into an integer. */
878 /* ??? Not sure this should happen except for user silliness, so limit
879 this to things that aren't compiler-generated temporaries. The
880 rest of the time we'll abort in assign_stack_temp_for_type. */
881 if (decl && size == -1
882 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
883 {
884 error ("%Jsize of variable '%D' is too large", decl, decl);
885 size = 1;
886 }
887
888 tmp = assign_stack_temp_for_type (mode, size, keep, type);
889 return tmp;
890 }
891
892 #ifndef PROMOTE_FOR_CALL_ONLY
893 if (! dont_promote)
894 mode = promote_mode (type, mode, &unsignedp, 0);
895 #endif
896
897 return gen_reg_rtx (mode);
898 }
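/* Usage sketch: an expander that needs addressable memory matching the type
   of some expression (here a hypothetical tree node EXP) typically writes

     rtx target = assign_temp (TREE_TYPE (exp), 0, 1, 1);

   i.e. KEEP == 0, MEMORY_REQUIRED == 1 and DONT_PROMOTE == 1. */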
899 \f
900 /* Combine temporary stack slots which are adjacent on the stack.
901
902 This allows for better use of already allocated stack space. This is only
903 done for BLKmode slots because we can be sure that we won't have alignment
904 problems in this case. */
905
906 void
907 combine_temp_slots (void)
908 {
909 struct temp_slot *p, *q;
910 struct temp_slot *prev_p, *prev_q;
911 int num_slots;
912
913 /* We can't combine slots, because the information about which slot
914 is in which alias set will be lost. */
915 if (flag_strict_aliasing)
916 return;
917
918 /* If there are a lot of temp slots, don't do anything unless
919 high levels of optimization. */
920 if (! flag_expensive_optimizations)
921 for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
922 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
923 return;
924
925 for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
926 {
927 int delete_p = 0;
928
929 if (! p->in_use && GET_MODE (p->slot) == BLKmode)
930 for (q = p->next, prev_q = p; q; q = prev_q->next)
931 {
932 int delete_q = 0;
933 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
934 {
935 if (p->base_offset + p->full_size == q->base_offset)
936 {
937 /* Q comes after P; combine Q into P. */
938 p->size += q->size;
939 p->full_size += q->full_size;
940 delete_q = 1;
941 }
942 else if (q->base_offset + q->full_size == p->base_offset)
943 {
944 /* P comes after Q; combine P into Q. */
945 q->size += p->size;
946 q->full_size += p->full_size;
947 delete_p = 1;
948 break;
949 }
950 }
951 /* Either delete Q or advance past it. */
952 if (delete_q)
953 prev_q->next = q->next;
954 else
955 prev_q = q;
956 }
957 /* Either delete P or advance past it. */
958 if (delete_p)
959 {
960 if (prev_p)
961 prev_p->next = p->next;
962 else
963 temp_slots = p->next;
964 }
965 else
966 prev_p = p;
967 }
968 }
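/* Example: a free BLKmode slot with base_offset 32 and full_size 16, and a
   free slot with base_offset 48 and full_size 8, satisfy the adjacency test
   above (32 + 16 == 48), so the second is merged into the first, which then
   covers 24 bytes. */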
969 \f
970 /* Find the temp slot corresponding to the object at address X. */
971
972 static struct temp_slot *
973 find_temp_slot_from_address (rtx x)
974 {
975 struct temp_slot *p;
976 rtx next;
977
978 for (p = temp_slots; p; p = p->next)
979 {
980 if (! p->in_use)
981 continue;
982
983 else if (XEXP (p->slot, 0) == x
984 || p->address == x
985 || (GET_CODE (x) == PLUS
986 && XEXP (x, 0) == virtual_stack_vars_rtx
987 && GET_CODE (XEXP (x, 1)) == CONST_INT
988 && INTVAL (XEXP (x, 1)) >= p->base_offset
989 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
990 return p;
991
992 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
993 for (next = p->address; next; next = XEXP (next, 1))
994 if (XEXP (next, 0) == x)
995 return p;
996 }
997
998 /* If we have a sum involving a register, see if it points to a temp
999 slot. */
1000 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
1001 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
1002 return p;
1003 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
1004 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
1005 return p;
1006
1007 return 0;
1008 }
1009
1010 /* Indicate that NEW is an alternate way of referring to the temp slot
1011 that previously was known by OLD. */
1012
1013 void
1014 update_temp_slot_address (rtx old, rtx new)
1015 {
1016 struct temp_slot *p;
1017
1018 if (rtx_equal_p (old, new))
1019 return;
1020
1021 p = find_temp_slot_from_address (old);
1022
1023 /* If we didn't find one, see if OLD is a PLUS. If so, and NEW
1024 is a register, see if one operand of the PLUS is a temporary
1025 location. If so, NEW points into it. Otherwise, if both OLD and
1026 NEW are a PLUS, check whether there is a register in common between
1027 them; if so, try a recursive call on the other operands. */
1028 if (p == 0)
1029 {
1030 if (GET_CODE (old) != PLUS)
1031 return;
1032
1033 if (GET_CODE (new) == REG)
1034 {
1035 update_temp_slot_address (XEXP (old, 0), new);
1036 update_temp_slot_address (XEXP (old, 1), new);
1037 return;
1038 }
1039 else if (GET_CODE (new) != PLUS)
1040 return;
1041
1042 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
1043 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
1044 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
1045 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
1046 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
1047 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
1048 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
1049 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
1050
1051 return;
1052 }
1053
1054 /* Otherwise add an alias for the temp's address. */
1055 else if (p->address == 0)
1056 p->address = new;
1057 else
1058 {
1059 if (GET_CODE (p->address) != EXPR_LIST)
1060 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1061
1062 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1063 }
1064 }
1065
1066 /* If X could be a reference to a temporary slot, mark the fact that its
1067 address was taken. */
1068
1069 void
1070 mark_temp_addr_taken (rtx x)
1071 {
1072 struct temp_slot *p;
1073
1074 if (x == 0)
1075 return;
1076
1077 /* If X is not in memory or is at a constant address, it cannot be in
1078 a temporary slot. */
1079 if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1080 return;
1081
1082 p = find_temp_slot_from_address (XEXP (x, 0));
1083 if (p != 0)
1084 p->addr_taken = 1;
1085 }
1086
1087 /* If X could be a reference to a temporary slot, mark that slot as
1088 belonging to the level one higher than the current level. If X
1089 matched one of our slots, just mark that one. Otherwise, we can't
1090 easily predict which it is, so upgrade all of them. Kept slots
1091 need not be touched.
1092
1093 This is called when an ({...}) construct occurs and a statement
1094 returns a value in memory. */
1095
1096 void
1097 preserve_temp_slots (rtx x)
1098 {
1099 struct temp_slot *p = 0;
1100
1101 /* If there is no result, we still might have some objects whose addresses
1102 were taken, so we need to make sure they stay around. */
1103 if (x == 0)
1104 {
1105 for (p = temp_slots; p; p = p->next)
1106 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1107 p->level--;
1108
1109 return;
1110 }
1111
1112 /* If X is a register that is being used as a pointer, see if we have
1113 a temporary slot we know it points to. To be consistent with
1114 the code below, we really should preserve all non-kept slots
1115 if we can't find a match, but that seems to be much too costly. */
1116 if (GET_CODE (x) == REG && REG_POINTER (x))
1117 p = find_temp_slot_from_address (x);
1118
1119 /* If X is not in memory or is at a constant address, it cannot be in
1120 a temporary slot, but it can contain something whose address was
1121 taken. */
1122 if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
1123 {
1124 for (p = temp_slots; p; p = p->next)
1125 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1126 p->level--;
1127
1128 return;
1129 }
1130
1131 /* First see if we can find a match. */
1132 if (p == 0)
1133 p = find_temp_slot_from_address (XEXP (x, 0));
1134
1135 if (p != 0)
1136 {
1137 /* Move everything at our level whose address was taken to our new
1138 level in case we used its address. */
1139 struct temp_slot *q;
1140
1141 if (p->level == temp_slot_level)
1142 {
1143 for (q = temp_slots; q; q = q->next)
1144 if (q != p && q->addr_taken && q->level == p->level)
1145 q->level--;
1146
1147 p->level--;
1148 p->addr_taken = 0;
1149 }
1150 return;
1151 }
1152
1153 /* Otherwise, preserve all non-kept slots at this level. */
1154 for (p = temp_slots; p; p = p->next)
1155 if (p->in_use && p->level == temp_slot_level && ! p->keep)
1156 p->level--;
1157 }
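/* For example, when a GNU statement expression ({ ... }) leaves its value in
   a stack temporary, this is how that temporary survives the end of the
   statement that created it: it is treated as belonging to the enclosing
   nesting level instead. */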
1158
1159 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1160 with that RTL_EXPR, promote it into a temporary slot at the present
1161 level so it will not be freed when we free slots made in the
1162 RTL_EXPR. */
1163
1164 void
1165 preserve_rtl_expr_result (rtx x)
1166 {
1167 struct temp_slot *p;
1168
1169 /* If X is not in memory or is at a constant address, it cannot be in
1170 a temporary slot. */
1171 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1172 return;
1173
1174 /* If we can find a match, move it to our level unless it is already at
1175 an upper level. */
1176 p = find_temp_slot_from_address (XEXP (x, 0));
1177 if (p != 0)
1178 {
1179 p->level = MIN (p->level, temp_slot_level);
1180 p->rtl_expr = 0;
1181 }
1182
1183 return;
1184 }
1185
1186 /* Free all temporaries used so far. This is normally called at the end
1187 of generating code for a statement. Don't free any temporaries
1188 currently in use for an RTL_EXPR that hasn't yet been emitted.
1189 We could eventually do better than this since it can be reused while
1190 generating the same RTL_EXPR, but this is complex and probably not
1191 worthwhile. */
1192
1193 void
1194 free_temp_slots (void)
1195 {
1196 struct temp_slot *p;
1197
1198 for (p = temp_slots; p; p = p->next)
1199 if (p->in_use && p->level == temp_slot_level && ! p->keep
1200 && p->rtl_expr == 0)
1201 p->in_use = 0;
1202
1203 combine_temp_slots ();
1204 }
1205
1206 /* Free all temporary slots used in T, an RTL_EXPR node. */
1207
1208 void
1209 free_temps_for_rtl_expr (tree t)
1210 {
1211 struct temp_slot *p;
1212
1213 for (p = temp_slots; p; p = p->next)
1214 if (p->rtl_expr == t)
1215 {
1216 /* If this slot is below the current TEMP_SLOT_LEVEL, then it
1217 needs to be preserved. This can happen if a temporary in
1218 the RTL_EXPR was addressed; preserve_temp_slots will move
1219 the temporary into a higher level. */
1220 if (temp_slot_level <= p->level)
1221 p->in_use = 0;
1222 else
1223 p->rtl_expr = NULL_TREE;
1224 }
1225
1226 combine_temp_slots ();
1227 }
1228
1229 /* Mark all temporaries ever allocated in this function as not suitable
1230 for reuse until the current level is exited. */
1231
1232 void
1233 mark_all_temps_used (void)
1234 {
1235 struct temp_slot *p;
1236
1237 for (p = temp_slots; p; p = p->next)
1238 {
1239 p->in_use = p->keep = 1;
1240 p->level = MIN (p->level, temp_slot_level);
1241 }
1242 }
1243
1244 /* Push deeper into the nesting level for stack temporaries. */
1245
1246 void
1247 push_temp_slots (void)
1248 {
1249 temp_slot_level++;
1250 }
1251
1252 /* Pop a temporary nesting level. All slots in use in the current level
1253 are freed. */
1254
1255 void
1256 pop_temp_slots (void)
1257 {
1258 struct temp_slot *p;
1259
1260 for (p = temp_slots; p; p = p->next)
1261 if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
1262 p->in_use = 0;
1263
1264 combine_temp_slots ();
1265
1266 temp_slot_level--;
1267 }
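/* Usage sketch: callers typically bracket the expansion of one statement with

     push_temp_slots ();
     ... expand the statement, calling preserve_temp_slots on any result
         that must outlive it ...
     pop_temp_slots ();

   so temporaries made at the inner level are freed on the way back out. */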
1268
1269 /* Initialize temporary slots. */
1270
1271 void
1272 init_temp_slots (void)
1273 {
1274 /* We have not allocated any temporaries yet. */
1275 temp_slots = 0;
1276 temp_slot_level = 0;
1277 var_temp_slot_level = 0;
1278 target_temp_slot_level = 0;
1279 }
1280 \f
1281 /* Retroactively move an auto variable from a register to a stack
1282 slot. This is done when an address-reference to the variable is
1283 seen. If RESCAN is true, all previously emitted instructions are
1284 examined and modified to handle the fact that DECL is now
1285 addressable. */
1286
1287 void
1288 put_var_into_stack (tree decl, int rescan)
1289 {
1290 rtx reg;
1291 enum machine_mode promoted_mode, decl_mode;
1292 struct function *function = 0;
1293 tree context;
1294 int can_use_addressof;
1295 int volatilep = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
1296 int usedp = (TREE_USED (decl)
1297 || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));
1298
1299 context = decl_function_context (decl);
1300
1301 /* Get the current rtl used for this object and its original mode. */
1302 reg = (TREE_CODE (decl) == SAVE_EXPR
1303 ? SAVE_EXPR_RTL (decl)
1304 : DECL_RTL_IF_SET (decl));
1305
1306 /* No need to do anything if decl has no rtx yet
1307 since in that case caller is setting TREE_ADDRESSABLE
1308 and a stack slot will be assigned when the rtl is made. */
1309 if (reg == 0)
1310 return;
1311
1312 /* Get the declared mode for this object. */
1313 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
1314 : DECL_MODE (decl));
1315 /* Get the mode it's actually stored in. */
1316 promoted_mode = GET_MODE (reg);
1317
1318 /* If this variable comes from an outer function, find that
1319 function's saved context. Don't use find_function_data here,
1320 because it might not be in any active function.
1321 FIXME: Is that really supposed to happen?
1322 It does in ObjC at least. */
1323 if (context != current_function_decl && context != inline_function_decl)
1324 for (function = outer_function_chain; function; function = function->outer)
1325 if (function->decl == context)
1326 break;
1327
1328 /* If this is a variable-sized object or a structure passed by invisible
1329 reference, with a pseudo to address it, put that pseudo into the stack
1330 if the var is non-local. */
1331 if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
1332 && GET_CODE (reg) == MEM
1333 && GET_CODE (XEXP (reg, 0)) == REG
1334 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
1335 {
1336 reg = XEXP (reg, 0);
1337 decl_mode = promoted_mode = GET_MODE (reg);
1338 }
1339
1340 /* If this variable lives in the current function and we don't need to put it
1341 in the stack for the sake of setjmp or the non-locality, try to keep it in
1342 a register until we know we actually need the address. */
1343 can_use_addressof
1344 = (function == 0
1345 && ! (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl))
1346 && optimize > 0
1347 /* FIXME make it work for promoted modes too */
1348 && decl_mode == promoted_mode
1349 #ifdef NON_SAVING_SETJMP
1350 && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
1351 #endif
1352 );
1353
1354 /* If we can't use ADDRESSOF, make sure we see through one we already
1355 generated. */
1356 if (! can_use_addressof && GET_CODE (reg) == MEM
1357 && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
1358 reg = XEXP (XEXP (reg, 0), 0);
1359
1360 /* Now we should have a value that resides in one or more pseudo regs. */
1361
1362 if (GET_CODE (reg) == REG)
1363 {
1364 if (can_use_addressof)
1365 gen_mem_addressof (reg, decl, rescan);
1366 else
1367 put_reg_into_stack (function, reg, TREE_TYPE (decl), promoted_mode,
1368 decl_mode, volatilep, 0, usedp, 0);
1369 }
1370 else if (GET_CODE (reg) == CONCAT)
1371 {
1372 /* A CONCAT contains two pseudos; put them both in the stack.
1373 We do it so they end up consecutive.
1374 We fixup references to the parts only after we fixup references
1375 to the whole CONCAT, lest we do double fixups for the latter
1376 references. */
1377 enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
1378 tree part_type = (*lang_hooks.types.type_for_mode) (part_mode, 0);
1379 rtx lopart = XEXP (reg, 0);
1380 rtx hipart = XEXP (reg, 1);
1381 #ifdef FRAME_GROWS_DOWNWARD
1382 /* Since part 0 should have a lower address, do it second. */
1383 put_reg_into_stack (function, hipart, part_type, part_mode,
1384 part_mode, volatilep, 0, 0, 0);
1385 put_reg_into_stack (function, lopart, part_type, part_mode,
1386 part_mode, volatilep, 0, 0, 0);
1387 #else
1388 put_reg_into_stack (function, lopart, part_type, part_mode,
1389 part_mode, volatilep, 0, 0, 0);
1390 put_reg_into_stack (function, hipart, part_type, part_mode,
1391 part_mode, volatilep, 0, 0, 0);
1392 #endif
1393
1394 /* Change the CONCAT into a combined MEM for both parts. */
1395 PUT_CODE (reg, MEM);
1396 MEM_ATTRS (reg) = 0;
1397
1398 /* set_mem_attributes uses DECL_RTL to avoid re-generating of
1399 already computed alias sets. Here we want to re-generate. */
1400 if (DECL_P (decl))
1401 SET_DECL_RTL (decl, NULL);
1402 set_mem_attributes (reg, decl, 1);
1403 if (DECL_P (decl))
1404 SET_DECL_RTL (decl, reg);
1405
1406 /* The two parts are in memory order already.
1407 Use the lower part's address as ours. */
1408 XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1409 /* Prevent sharing of rtl that might lose. */
1410 if (GET_CODE (XEXP (reg, 0)) == PLUS)
1411 XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1412 if (usedp && rescan)
1413 {
1414 schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
1415 promoted_mode, 0);
1416 schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0);
1417 schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0);
1418 }
1419 }
1420 else
1421 return;
1422 }
1423
1424 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1425 into the stack frame of FUNCTION (0 means the current function).
1426 DECL_MODE is the machine mode of the user-level data type.
1427 PROMOTED_MODE is the machine mode of the register.
1428 VOLATILE_P is nonzero if this is for a "volatile" decl.
1429 USED_P is nonzero if this reg might have already been used in an insn. */
1430
1431 static void
1432 put_reg_into_stack (struct function *function, rtx reg, tree type,
1433 enum machine_mode promoted_mode, enum machine_mode decl_mode,
1434 int volatile_p, unsigned int original_regno, int used_p, htab_t ht)
1435 {
1436 struct function *func = function ? function : cfun;
1437 rtx new = 0;
1438 unsigned int regno = original_regno;
1439
1440 if (regno == 0)
1441 regno = REGNO (reg);
1442
1443 if (regno < func->x_max_parm_reg)
1444 new = func->x_parm_reg_stack_loc[regno];
1445
1446 if (new == 0)
1447 new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);
1448
1449 PUT_CODE (reg, MEM);
1450 PUT_MODE (reg, decl_mode);
1451 XEXP (reg, 0) = XEXP (new, 0);
1452 MEM_ATTRS (reg) = 0;
1453 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1454 MEM_VOLATILE_P (reg) = volatile_p;
1455
1456 /* If this is a memory ref that contains aggregate components,
1457 mark it as such for cse and loop optimize. If we are reusing a
1458 previously generated stack slot, then we need to copy the bit in
1459 case it was set for other reasons. For instance, it is set for
1460 __builtin_va_alist. */
1461 if (type)
1462 {
1463 MEM_SET_IN_STRUCT_P (reg,
1464 AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
1465 set_mem_alias_set (reg, get_alias_set (type));
1466 }
1467
1468 if (used_p)
1469 schedule_fixup_var_refs (function, reg, type, promoted_mode, ht);
1470 }
1471
1472 /* Make sure that all refs to the variable, previously made
1473 when it was a register, are fixed up to be valid again.
1474 See function above for meaning of arguments. */
1475
1476 static void
1477 schedule_fixup_var_refs (struct function *function, rtx reg, tree type,
1478 enum machine_mode promoted_mode, htab_t ht)
1479 {
1480 int unsigned_p = type ? TREE_UNSIGNED (type) : 0;
1481
1482 if (function != 0)
1483 {
1484 struct var_refs_queue *temp;
1485
1486 temp = ggc_alloc (sizeof (struct var_refs_queue));
1487 temp->modified = reg;
1488 temp->promoted_mode = promoted_mode;
1489 temp->unsignedp = unsigned_p;
1490 temp->next = function->fixup_var_refs_queue;
1491 function->fixup_var_refs_queue = temp;
1492 }
1493 else
1494 /* Variable is local; fix it up now. */
1495 fixup_var_refs (reg, promoted_mode, unsigned_p, reg, ht);
1496 }
1497 \f
1498 static void
1499 fixup_var_refs (rtx var, enum machine_mode promoted_mode, int unsignedp,
1500 rtx may_share, htab_t ht)
1501 {
1502 tree pending;
1503 rtx first_insn = get_insns ();
1504 struct sequence_stack *stack = seq_stack;
1505 tree rtl_exps = rtl_expr_chain;
1506 int save_volatile_ok = volatile_ok;
1507
1508 /* If there's a hash table, it must record all uses of VAR. */
1509 if (ht)
1510 {
1511 if (stack != 0)
1512 abort ();
1513 fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp,
1514 may_share);
1515 return;
1516 }
1517
1518 /* Volatile is valid in MEMs because all we're doing is changing the
1519 address inside. */
1520 volatile_ok = 1;
1521 fixup_var_refs_insns (first_insn, var, promoted_mode, unsignedp,
1522 stack == 0, may_share);
1523
1524 /* Scan all pending sequences too. */
1525 for (; stack; stack = stack->next)
1526 {
1527 push_to_full_sequence (stack->first, stack->last);
1528 fixup_var_refs_insns (stack->first, var, promoted_mode, unsignedp,
1529 stack->next != 0, may_share);
1530 /* Update remembered end of sequence
1531 in case we added an insn at the end. */
1532 stack->last = get_last_insn ();
1533 end_sequence ();
1534 }
1535
1536 /* Scan all waiting RTL_EXPRs too. */
1537 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1538 {
1539 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1540 if (seq != const0_rtx && seq != 0)
1541 {
1542 push_to_sequence (seq);
1543 fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
1544 may_share);
1545 end_sequence ();
1546 }
1547 }
1548
1549 volatile_ok = save_volatile_ok;
1550 }
1551 \f
1552 /* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries and X is
1553 some part of an insn. Return a struct fixup_replacement whose OLD
1554 value is equal to X. Allocate a new structure if no such entry exists. */
1555
1556 static struct fixup_replacement *
1557 find_fixup_replacement (struct fixup_replacement **replacements, rtx x)
1558 {
1559 struct fixup_replacement *p;
1560
1561 /* See if we have already replaced this. */
1562 for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
1563 ;
1564
1565 if (p == 0)
1566 {
1567 p = xmalloc (sizeof (struct fixup_replacement));
1568 p->old = x;
1569 p->new = 0;
1570 p->next = *replacements;
1571 *replacements = p;
1572 }
1573
1574 return p;
1575 }
1576
1577 /* Scan the insn-chain starting with INSN for refs to VAR and fix them
1578 up. TOPLEVEL is nonzero if this chain is the main chain of insns
1579 for the current function. MAY_SHARE is either a MEM that is not
1580 to be unshared or a list of them. */
1581
1582 static void
1583 fixup_var_refs_insns (rtx insn, rtx var, enum machine_mode promoted_mode,
1584 int unsignedp, int toplevel, rtx may_share)
1585 {
1586 while (insn)
1587 {
1588 /* fixup_var_refs_insn might modify insn, so save its next
1589 pointer now. */
1590 rtx next = NEXT_INSN (insn);
1591
1592 /* CALL_PLACEHOLDERs are special; we have to switch into each of
1593 the three sequences they (potentially) contain, and process
1594 them recursively. The CALL_INSN itself is not interesting. */
1595
1596 if (GET_CODE (insn) == CALL_INSN
1597 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1598 {
1599 int i;
1600
1601 /* Look at the Normal call, sibling call and tail recursion
1602 sequences attached to the CALL_PLACEHOLDER. */
1603 for (i = 0; i < 3; i++)
1604 {
1605 rtx seq = XEXP (PATTERN (insn), i);
1606 if (seq)
1607 {
1608 push_to_sequence (seq);
1609 fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
1610 may_share);
1611 XEXP (PATTERN (insn), i) = get_insns ();
1612 end_sequence ();
1613 }
1614 }
1615 }
1616
1617 else if (INSN_P (insn))
1618 fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel,
1619 may_share);
1620
1621 insn = next;
1622 }
1623 }
1624
1625 /* Look up the insns which reference VAR in HT and fix them up. Other
1626 arguments are the same as fixup_var_refs_insns.
1627
1628 N.B. No need for special processing of CALL_PLACEHOLDERs here,
1629 because the hash table will point straight to the interesting insn
1630 (inside the CALL_PLACEHOLDER). */
1631
1632 static void
1633 fixup_var_refs_insns_with_hash (htab_t ht, rtx var, enum machine_mode promoted_mode,
1634 int unsignedp, rtx may_share)
1635 {
1636 struct insns_for_mem_entry tmp;
1637 struct insns_for_mem_entry *ime;
1638 rtx insn_list;
1639
1640 tmp.key = var;
1641 ime = htab_find (ht, &tmp);
1642 for (insn_list = ime->insns; insn_list != 0; insn_list = XEXP (insn_list, 1))
1643 if (INSN_P (XEXP (insn_list, 0)))
1644 fixup_var_refs_insn (XEXP (insn_list, 0), var, promoted_mode,
1645 unsignedp, 1, may_share);
1646 }
1647
1648
1649 /* Per-insn processing by fixup_var_refs_insns(_with_hash). INSN is
1650 the insn under examination, VAR is the variable to fix up
1651 references to, PROMOTED_MODE and UNSIGNEDP describe VAR, and
1652 TOPLEVEL is nonzero if this is the main insn chain for this
1653 function. */
1654
1655 static void
1656 fixup_var_refs_insn (rtx insn, rtx var, enum machine_mode promoted_mode,
1657 int unsignedp, int toplevel, rtx no_share)
1658 {
1659 rtx call_dest = 0;
1660 rtx set, prev, prev_set;
1661 rtx note;
1662
1663 /* Remember the notes in case we delete the insn. */
1664 note = REG_NOTES (insn);
1665
1666 /* If this is a CLOBBER of VAR, delete it.
1667
1668 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1669 and REG_RETVAL notes too. */
1670 if (GET_CODE (PATTERN (insn)) == CLOBBER
1671 && (XEXP (PATTERN (insn), 0) == var
1672 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1673 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1674 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1675 {
1676 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1677 /* The REG_LIBCALL note will go away since we are going to
1678 turn INSN into a NOTE, so just delete the
1679 corresponding REG_RETVAL note. */
1680 remove_note (XEXP (note, 0),
1681 find_reg_note (XEXP (note, 0), REG_RETVAL,
1682 NULL_RTX));
1683
1684 delete_insn (insn);
1685 }
1686
1687 /* The insn to load VAR from a home in the arglist
1688 is now a no-op. When we see it, just delete it.
1689 Similarly if this is storing VAR from a register from which
1690 it was loaded in the previous insn. This will occur
1691 when an ADDRESSOF was made for an arglist slot. */
1692 else if (toplevel
1693 && (set = single_set (insn)) != 0
1694 && SET_DEST (set) == var
1695 /* If this represents the result of an insn group,
1696 don't delete the insn. */
1697 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1698 && (rtx_equal_p (SET_SRC (set), var)
1699 || (GET_CODE (SET_SRC (set)) == REG
1700 && (prev = prev_nonnote_insn (insn)) != 0
1701 && (prev_set = single_set (prev)) != 0
1702 && SET_DEST (prev_set) == SET_SRC (set)
1703 && rtx_equal_p (SET_SRC (prev_set), var))))
1704 {
1705 delete_insn (insn);
1706 }
1707 else
1708 {
1709 struct fixup_replacement *replacements = 0;
1710 rtx next_insn = NEXT_INSN (insn);
1711
1712 if (SMALL_REGISTER_CLASSES)
1713 {
1714 /* If the insn that copies the results of a CALL_INSN
1715 into a pseudo now references VAR, we have to use an
1716 intermediate pseudo since we want the life of the
1717 return value register to be only a single insn.
1718
1719 If we don't use an intermediate pseudo, things such as the
1720 address computations needed to make the address of VAR valid
1721 (if it is not already) could be placed between the CALL_INSN and INSN.
1722
1723 To make sure this doesn't happen, we record the destination
1724 of the CALL_INSN and see if the next insn uses both that
1725 and VAR. */
1726
1727 if (call_dest != 0 && GET_CODE (insn) == INSN
1728 && reg_mentioned_p (var, PATTERN (insn))
1729 && reg_mentioned_p (call_dest, PATTERN (insn)))
1730 {
1731 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1732
1733 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1734
1735 PATTERN (insn) = replace_rtx (PATTERN (insn),
1736 call_dest, temp);
1737 }
1738
1739 if (GET_CODE (insn) == CALL_INSN
1740 && GET_CODE (PATTERN (insn)) == SET)
1741 call_dest = SET_DEST (PATTERN (insn));
1742 else if (GET_CODE (insn) == CALL_INSN
1743 && GET_CODE (PATTERN (insn)) == PARALLEL
1744 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1745 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1746 else
1747 call_dest = 0;
1748 }
1749
1750 /* See if we have to do anything to INSN now that VAR is in
1751 memory. If it needs to be loaded into a pseudo, use a single
1752 pseudo for the entire insn in case there is a MATCH_DUP
1753 between two operands. We pass a pointer to the head of
1754 a list of struct fixup_replacements. If fixup_var_refs_1
1755 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1756 it will record them in this list.
1757
1758 If it allocated a pseudo for any replacement, we copy into
1759 it here. */
1760
1761 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1762 &replacements, no_share);
1763
1764 /* If this is last_parm_insn, and any instructions were output
1765 after it to fix it up, then we must set last_parm_insn to
1766 the last such instruction emitted. */
1767 if (insn == last_parm_insn)
1768 last_parm_insn = PREV_INSN (next_insn);
1769
1770 while (replacements)
1771 {
1772 struct fixup_replacement *next;
1773
1774 if (GET_CODE (replacements->new) == REG)
1775 {
1776 rtx insert_before;
1777 rtx seq;
1778
1779 /* OLD might be a (subreg (mem)). */
1780 if (GET_CODE (replacements->old) == SUBREG)
1781 replacements->old
1782 = fixup_memory_subreg (replacements->old, insn,
1783 promoted_mode, 0);
1784 else
1785 replacements->old
1786 = fixup_stack_1 (replacements->old, insn);
1787
1788 insert_before = insn;
1789
1790 /* If we are changing the mode, do a conversion.
1791 This might be wasteful, but combine.c will
1792 eliminate much of the waste. */
1793
1794 if (GET_MODE (replacements->new)
1795 != GET_MODE (replacements->old))
1796 {
1797 start_sequence ();
1798 convert_move (replacements->new,
1799 replacements->old, unsignedp);
1800 seq = get_insns ();
1801 end_sequence ();
1802 }
1803 else
1804 seq = gen_move_insn (replacements->new,
1805 replacements->old);
1806
1807 emit_insn_before (seq, insert_before);
1808 }
1809
1810 next = replacements->next;
1811 free (replacements);
1812 replacements = next;
1813 }
1814 }
1815
1816 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1817 But don't touch other insns referred to by reg-notes;
1818 we will get them elsewhere. */
1819 while (note)
1820 {
1821 if (GET_CODE (note) != INSN_LIST)
1822 XEXP (note, 0)
1823 = walk_fixup_memory_subreg (XEXP (note, 0), insn,
1824 promoted_mode, 1);
1825 note = XEXP (note, 1);
1826 }
1827 }
1828 \f
1829 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1830 See if the rtx expression at *LOC in INSN needs to be changed.
1831
1832 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1833 contain a list of original rtx's and replacements. If we find that we need
1834 to modify this insn by replacing a memory reference with a pseudo or by
1835 making a new MEM to implement a SUBREG, we consult that list to see if
1836 we have already chosen a replacement. If none has already been allocated,
1837 we allocate it and update the list. fixup_var_refs_insn will copy VAR
1838 or the SUBREG, as appropriate, to the pseudo. */
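/* A minimal usage sketch (illustration only, never compiled): this is
   roughly how the REPLACEMENTS list is consumed.  One pseudo is
   allocated per original rtx, so MATCH_DUP operands keep matching, and
   the copy from VAR into that pseudo is emitted afterwards.  The names
   below stand for the values in scope in fixup_var_refs_insn.  */
#if 0
struct fixup_replacement *r = find_fixup_replacement (&replacements, var);
if (r->new == 0)
  /* Allocate the pseudo only once; later hits on the same rtx reuse it.  */
  r->new = gen_reg_rtx (promoted_mode);
emit_insn_before (gen_move_insn (r->new, var), insn);
#endif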
1839
1840 static void
1841 fixup_var_refs_1 (rtx var, enum machine_mode promoted_mode, rtx *loc, rtx insn,
1842 struct fixup_replacement **replacements, rtx no_share)
1843 {
1844 int i;
1845 rtx x = *loc;
1846 RTX_CODE code = GET_CODE (x);
1847 const char *fmt;
1848 rtx tem, tem1;
1849 struct fixup_replacement *replacement;
1850
1851 switch (code)
1852 {
1853 case ADDRESSOF:
1854 if (XEXP (x, 0) == var)
1855 {
1856 /* Prevent sharing of rtl that might lose. */
1857 rtx sub = copy_rtx (XEXP (var, 0));
1858
1859 if (! validate_change (insn, loc, sub, 0))
1860 {
1861 rtx y = gen_reg_rtx (GET_MODE (sub));
1862 rtx seq, new_insn;
1863
1864 /* We should be able to replace with a register or all is lost.
1865 Note that we can't use validate_change to verify this, since
1866 it would not take care of replacing all duplicates simultaneously. */
1867 if (! validate_replace_rtx (*loc, y, insn))
1868 abort ();
1869
1870 /* Careful! First try to recognize a direct move of the
1871 value, mimicking how things are done in gen_reload wrt
1872 PLUS. Consider what happens when insn is a conditional
1873 move instruction and addsi3 clobbers flags. */
1874
1875 start_sequence ();
1876 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1877 seq = get_insns ();
1878 end_sequence ();
1879
1880 if (recog_memoized (new_insn) < 0)
1881 {
1882 /* That failed. Fall back on force_operand and hope. */
1883
1884 start_sequence ();
1885 sub = force_operand (sub, y);
1886 if (sub != y)
1887 emit_insn (gen_move_insn (y, sub));
1888 seq = get_insns ();
1889 end_sequence ();
1890 }
1891
1892 #ifdef HAVE_cc0
1893 /* Don't separate setter from user. */
1894 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1895 insn = PREV_INSN (insn);
1896 #endif
1897
1898 emit_insn_before (seq, insn);
1899 }
1900 }
1901 return;
1902
1903 case MEM:
1904 if (var == x)
1905 {
1906 /* If we already have a replacement, use it. Otherwise,
1907 try to fix up this address in case it is invalid. */
1908
1909 replacement = find_fixup_replacement (replacements, var);
1910 if (replacement->new)
1911 {
1912 *loc = replacement->new;
1913 return;
1914 }
1915
1916 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1917
1918 /* Unless we are forcing memory to register or we changed the mode,
1919 we can leave things the way they are if the insn is valid. */
1920
1921 INSN_CODE (insn) = -1;
1922 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1923 && recog_memoized (insn) >= 0)
1924 return;
1925
1926 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1927 return;
1928 }
1929
1930 /* If X contains VAR, we need to unshare it here so that we update
1931 each occurrence separately. But all identical MEMs in one insn
1932 must be replaced with the same rtx because of the possibility of
1933 MATCH_DUPs. */
1934
1935 if (reg_mentioned_p (var, x))
1936 {
1937 replacement = find_fixup_replacement (replacements, x);
1938 if (replacement->new == 0)
1939 replacement->new = copy_most_rtx (x, no_share);
1940
1941 *loc = x = replacement->new;
1942 code = GET_CODE (x);
1943 }
1944 break;
1945
1946 case REG:
1947 case CC0:
1948 case PC:
1949 case CONST_INT:
1950 case CONST:
1951 case SYMBOL_REF:
1952 case LABEL_REF:
1953 case CONST_DOUBLE:
1954 case CONST_VECTOR:
1955 return;
1956
1957 case SIGN_EXTRACT:
1958 case ZERO_EXTRACT:
1959 /* Note that in some cases those types of expressions are altered
1960 by optimize_bit_field, and do not survive to get here. */
1961 if (XEXP (x, 0) == var
1962 || (GET_CODE (XEXP (x, 0)) == SUBREG
1963 && SUBREG_REG (XEXP (x, 0)) == var))
1964 {
1965 /* Get TEM as a valid MEM in the mode presently in the insn.
1966
1967 We don't worry about the possibility of MATCH_DUP here; it
1968 is highly unlikely and would be tricky to handle. */
1969
1970 tem = XEXP (x, 0);
1971 if (GET_CODE (tem) == SUBREG)
1972 {
1973 if (GET_MODE_BITSIZE (GET_MODE (tem))
1974 > GET_MODE_BITSIZE (GET_MODE (var)))
1975 {
1976 replacement = find_fixup_replacement (replacements, var);
1977 if (replacement->new == 0)
1978 replacement->new = gen_reg_rtx (GET_MODE (var));
1979 SUBREG_REG (tem) = replacement->new;
1980
1981 /* The following code works only if we have a MEM, so we
1982 need to handle the subreg here. We directly substitute
1983 it assuming that a subreg must be OK here. We already
1984 scheduled a replacement to copy the mem into the
1985 subreg. */
1986 XEXP (x, 0) = tem;
1987 return;
1988 }
1989 else
1990 tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
1991 }
1992 else
1993 tem = fixup_stack_1 (tem, insn);
1994
1995 /* Unless we want to load from memory, get TEM into the proper mode
1996 for an extract from memory. This can only be done if the
1997 extract is at a constant position and length. */
1998
1999 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2000 && GET_CODE (XEXP (x, 2)) == CONST_INT
2001 && ! mode_dependent_address_p (XEXP (tem, 0))
2002 && ! MEM_VOLATILE_P (tem))
2003 {
2004 enum machine_mode wanted_mode = VOIDmode;
2005 enum machine_mode is_mode = GET_MODE (tem);
2006 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2007
2008 if (GET_CODE (x) == ZERO_EXTRACT)
2009 {
2010 enum machine_mode new_mode
2011 = mode_for_extraction (EP_extzv, 1);
2012 if (new_mode != MAX_MACHINE_MODE)
2013 wanted_mode = new_mode;
2014 }
2015 else if (GET_CODE (x) == SIGN_EXTRACT)
2016 {
2017 enum machine_mode new_mode
2018 = mode_for_extraction (EP_extv, 1);
2019 if (new_mode != MAX_MACHINE_MODE)
2020 wanted_mode = new_mode;
2021 }
2022
2023 /* If we have a narrower mode, we can do something. */
2024 if (wanted_mode != VOIDmode
2025 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2026 {
2027 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2028 rtx old_pos = XEXP (x, 2);
2029 rtx newmem;
2030
2031 /* If the bytes and bits are counted differently, we
2032 must adjust the offset. */
2033 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2034 offset = (GET_MODE_SIZE (is_mode)
2035 - GET_MODE_SIZE (wanted_mode) - offset);
2036
2037 pos %= GET_MODE_BITSIZE (wanted_mode);
2038
2039 newmem = adjust_address_nv (tem, wanted_mode, offset);
2040
2041 /* Make the change and see if the insn remains valid. */
2042 INSN_CODE (insn) = -1;
2043 XEXP (x, 0) = newmem;
2044 XEXP (x, 2) = GEN_INT (pos);
2045
2046 if (recog_memoized (insn) >= 0)
2047 return;
2048
2049 /* Otherwise, restore old position. XEXP (x, 0) will be
2050 restored later. */
2051 XEXP (x, 2) = old_pos;
2052 }
2053 }
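/* Worked example (illustrative numbers): with is_mode SImode (4 bytes),
   wanted_mode QImode (1 byte) and pos 17, the code above computes
   offset = 17 / 8 = 2; if BYTES_BIG_ENDIAN differs from BITS_BIG_ENDIAN
   the offset becomes 4 - 1 - 2 = 1; finally pos %= 8 leaves pos = 1,
   the bit position within the byte now addressed.  */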
2054
2055 /* If we get here, the bitfield extract insn can't accept a memory
2056 reference. Copy the input into a register. */
2057
2058 tem1 = gen_reg_rtx (GET_MODE (tem));
2059 emit_insn_before (gen_move_insn (tem1, tem), insn);
2060 XEXP (x, 0) = tem1;
2061 return;
2062 }
2063 break;
2064
2065 case SUBREG:
2066 if (SUBREG_REG (x) == var)
2067 {
2068 /* If this is a special SUBREG made because VAR was promoted
2069 from a wider mode, replace it with VAR and call ourself
2070 recursively, this time saying that the object previously
2071 had its current mode (by virtue of the SUBREG). */
2072
2073 if (SUBREG_PROMOTED_VAR_P (x))
2074 {
2075 *loc = var;
2076 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements,
2077 no_share);
2078 return;
2079 }
2080
2081 /* If this SUBREG makes VAR wider, it has become a paradoxical
2082 SUBREG with VAR in memory, but these aren't allowed at this
2083 stage of the compilation. So load VAR into a pseudo and take
2084 a SUBREG of that pseudo. */
2085 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2086 {
2087 replacement = find_fixup_replacement (replacements, var);
2088 if (replacement->new == 0)
2089 replacement->new = gen_reg_rtx (promoted_mode);
2090 SUBREG_REG (x) = replacement->new;
2091 return;
2092 }
2093
2094 /* See if we have already found a replacement for this SUBREG.
2095 If so, use it. Otherwise, make a MEM and see if the insn
2096 is recognized. If not, or if we should force MEM into a register,
2097 make a pseudo for this SUBREG. */
2098 replacement = find_fixup_replacement (replacements, x);
2099 if (replacement->new)
2100 {
2101 enum machine_mode mode = GET_MODE (x);
2102 *loc = replacement->new;
2103
2104 /* Careful! We may have just replaced a SUBREG by a MEM, which
2105 means that the insn may have become invalid again. We can't
2106 in this case make a new replacement since we already have one
2107 and we must deal with MATCH_DUPs. */
2108 if (GET_CODE (replacement->new) == MEM)
2109 {
2110 INSN_CODE (insn) = -1;
2111 if (recog_memoized (insn) >= 0)
2112 return;
2113
2114 fixup_var_refs_1 (replacement->new, mode, &PATTERN (insn),
2115 insn, replacements, no_share);
2116 }
2117
2118 return;
2119 }
2120
2121 replacement->new = *loc = fixup_memory_subreg (x, insn,
2122 promoted_mode, 0);
2123
2124 INSN_CODE (insn) = -1;
2125 if (! flag_force_mem && recog_memoized (insn) >= 0)
2126 return;
2127
2128 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2129 return;
2130 }
2131 break;
2132
2133 case SET:
2134 /* First do special simplification of bit-field references. */
2135 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2136 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2137 optimize_bit_field (x, insn, 0);
2138 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2139 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2140 optimize_bit_field (x, insn, 0);
2141
2142 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2143 into a register and then store it back out. */
2144 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2145 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2146 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2147 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2148 > GET_MODE_SIZE (GET_MODE (var))))
2149 {
2150 replacement = find_fixup_replacement (replacements, var);
2151 if (replacement->new == 0)
2152 replacement->new = gen_reg_rtx (GET_MODE (var));
2153
2154 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2155 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2156 }
2157
2158 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2159 insn into a pseudo and store the low part of the pseudo into VAR. */
2160 if (GET_CODE (SET_DEST (x)) == SUBREG
2161 && SUBREG_REG (SET_DEST (x)) == var
2162 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2163 > GET_MODE_SIZE (GET_MODE (var))))
2164 {
2165 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2166 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2167 tem)),
2168 insn);
2169 break;
2170 }
2171
2172 {
2173 rtx dest = SET_DEST (x);
2174 rtx src = SET_SRC (x);
2175 rtx outerdest = dest;
2176
2177 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2178 || GET_CODE (dest) == SIGN_EXTRACT
2179 || GET_CODE (dest) == ZERO_EXTRACT)
2180 dest = XEXP (dest, 0);
2181
2182 if (GET_CODE (src) == SUBREG)
2183 src = SUBREG_REG (src);
2184
2185 /* If VAR does not appear at the top level of the SET
2186 just scan the lower levels of the tree. */
2187
2188 if (src != var && dest != var)
2189 break;
2190
2191 /* We will need to rerecognize this insn. */
2192 INSN_CODE (insn) = -1;
2193
2194 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var
2195 && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
2196 {
2197 /* Since this case will return, ensure we fixup all the
2198 operands here. */
2199 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2200 insn, replacements, no_share);
2201 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2202 insn, replacements, no_share);
2203 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2204 insn, replacements, no_share);
2205
2206 tem = XEXP (outerdest, 0);
2207
2208 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2209 that may appear inside a ZERO_EXTRACT.
2210 This was legitimate when the MEM was a REG. */
2211 if (GET_CODE (tem) == SUBREG
2212 && SUBREG_REG (tem) == var)
2213 tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
2214 else
2215 tem = fixup_stack_1 (tem, insn);
2216
2217 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2218 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2219 && ! mode_dependent_address_p (XEXP (tem, 0))
2220 && ! MEM_VOLATILE_P (tem))
2221 {
2222 enum machine_mode wanted_mode;
2223 enum machine_mode is_mode = GET_MODE (tem);
2224 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2225
2226 wanted_mode = mode_for_extraction (EP_insv, 0);
2227
2228 /* If we have a narrower mode, we can do something. */
2229 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2230 {
2231 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2232 rtx old_pos = XEXP (outerdest, 2);
2233 rtx newmem;
2234
2235 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2236 offset = (GET_MODE_SIZE (is_mode)
2237 - GET_MODE_SIZE (wanted_mode) - offset);
2238
2239 pos %= GET_MODE_BITSIZE (wanted_mode);
2240
2241 newmem = adjust_address_nv (tem, wanted_mode, offset);
2242
2243 /* Make the change and see if the insn remains valid. */
2244 INSN_CODE (insn) = -1;
2245 XEXP (outerdest, 0) = newmem;
2246 XEXP (outerdest, 2) = GEN_INT (pos);
2247
2248 if (recog_memoized (insn) >= 0)
2249 return;
2250
2251 /* Otherwise, restore old position. XEXP (x, 0) will be
2252 restored later. */
2253 XEXP (outerdest, 2) = old_pos;
2254 }
2255 }
2256
2257 /* If we get here, the bit-field store doesn't allow memory
2258 or isn't located at a constant position. Load the value into
2259 a register, do the store, and put it back into memory. */
2260
2261 tem1 = gen_reg_rtx (GET_MODE (tem));
2262 emit_insn_before (gen_move_insn (tem1, tem), insn);
2263 emit_insn_after (gen_move_insn (tem, tem1), insn);
2264 XEXP (outerdest, 0) = tem1;
2265 return;
2266 }
2267
2268 /* STRICT_LOW_PART is a no-op on memory references
2269 and it can cause combinations to be unrecognizable,
2270 so eliminate it. */
2271
2272 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2273 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2274
2275 /* A valid insn to copy VAR into or out of a register
2276 must be left alone, to avoid an infinite loop here.
2277 If the reference to VAR is by a subreg, fix that up,
2278 since SUBREG is not valid for a memref.
2279 Also fix up the address of the stack slot.
2280
2281 Note that we must not try to recognize the insn until
2282 after we know that we have valid addresses and no
2283 (subreg (mem ...) ...) constructs, since these interfere
2284 with determining the validity of the insn. */
2285
2286 if ((SET_SRC (x) == var
2287 || (GET_CODE (SET_SRC (x)) == SUBREG
2288 && SUBREG_REG (SET_SRC (x)) == var))
2289 && (GET_CODE (SET_DEST (x)) == REG
2290 || (GET_CODE (SET_DEST (x)) == SUBREG
2291 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2292 && GET_MODE (var) == promoted_mode
2293 && x == single_set (insn))
2294 {
2295 rtx pat, last;
2296
2297 if (GET_CODE (SET_SRC (x)) == SUBREG
2298 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
2299 > GET_MODE_SIZE (GET_MODE (var))))
2300 {
2301 /* This (subreg VAR) is now a paradoxical subreg. We need
2302 to replace VAR instead of the subreg. */
2303 replacement = find_fixup_replacement (replacements, var);
2304 if (replacement->new == NULL_RTX)
2305 replacement->new = gen_reg_rtx (GET_MODE (var));
2306 SUBREG_REG (SET_SRC (x)) = replacement->new;
2307 }
2308 else
2309 {
2310 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2311 if (replacement->new)
2312 SET_SRC (x) = replacement->new;
2313 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2314 SET_SRC (x) = replacement->new
2315 = fixup_memory_subreg (SET_SRC (x), insn, promoted_mode,
2316 0);
2317 else
2318 SET_SRC (x) = replacement->new
2319 = fixup_stack_1 (SET_SRC (x), insn);
2320 }
2321
2322 if (recog_memoized (insn) >= 0)
2323 return;
2324
2325 /* INSN is not valid, but we know that we want to
2326 copy SET_SRC (x) to SET_DEST (x) in some way. So
2327 we generate the move and see whether it requires more
2328 than one insn. If it does, we emit those insns and
2329 delete INSN. Otherwise, we can just replace the pattern
2330 of INSN; we have already verified above that INSN has
2331 no other function than to do X. */
2332
2333 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2334 if (NEXT_INSN (pat) != NULL_RTX)
2335 {
2336 last = emit_insn_before (pat, insn);
2337
2338 /* INSN might have REG_RETVAL or other important notes, so
2339 we need to store the pattern of the last insn in the
2340 sequence into INSN similarly to the normal case. LAST
2341 should not have REG_NOTES, but we allow them if INSN has
2342 no REG_NOTES. */
2343 if (REG_NOTES (last) && REG_NOTES (insn))
2344 abort ();
2345 if (REG_NOTES (last))
2346 REG_NOTES (insn) = REG_NOTES (last);
2347 PATTERN (insn) = PATTERN (last);
2348
2349 delete_insn (last);
2350 }
2351 else
2352 PATTERN (insn) = PATTERN (pat);
2353
2354 return;
2355 }
2356
2357 if ((SET_DEST (x) == var
2358 || (GET_CODE (SET_DEST (x)) == SUBREG
2359 && SUBREG_REG (SET_DEST (x)) == var))
2360 && (GET_CODE (SET_SRC (x)) == REG
2361 || (GET_CODE (SET_SRC (x)) == SUBREG
2362 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2363 && GET_MODE (var) == promoted_mode
2364 && x == single_set (insn))
2365 {
2366 rtx pat, last;
2367
2368 if (GET_CODE (SET_DEST (x)) == SUBREG)
2369 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn,
2370 promoted_mode, 0);
2371 else
2372 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2373
2374 if (recog_memoized (insn) >= 0)
2375 return;
2376
2377 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2378 if (NEXT_INSN (pat) != NULL_RTX)
2379 {
2380 last = emit_insn_before (pat, insn);
2381
2382 /* INSN might have REG_RETVAL or other important notes, so
2383 we need to store the pattern of the last insn in the
2384 sequence into INSN similarly to the normal case. LAST
2385 should not have REG_NOTES, but we allow them if INSN has
2386 no REG_NOTES. */
2387 if (REG_NOTES (last) && REG_NOTES (insn))
2388 abort ();
2389 if (REG_NOTES (last))
2390 REG_NOTES (insn) = REG_NOTES (last);
2391 PATTERN (insn) = PATTERN (last);
2392
2393 delete_insn (last);
2394 }
2395 else
2396 PATTERN (insn) = PATTERN (pat);
2397
2398 return;
2399 }
2400
2401 /* Otherwise, storing into VAR must be handled specially
2402 by storing into a temporary and copying that into VAR
2403 with a new insn after this one. Note that this case
2404 will be used when storing into a promoted scalar since
2405 the insn will now have different modes on the input
2406 and output and hence will be invalid (except for the case
2407 of setting it to a constant, which does not need any
2408 change if it is valid). We generate extra code in that case,
2409 but combine.c will eliminate it. */
2410
2411 if (dest == var)
2412 {
2413 rtx temp;
2414 rtx fixeddest = SET_DEST (x);
2415 enum machine_mode temp_mode;
2416
2417 /* A STRICT_LOW_PART around a MEM can be discarded. */
2418 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2419 fixeddest = XEXP (fixeddest, 0);
2420 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2421 if (GET_CODE (fixeddest) == SUBREG)
2422 {
2423 fixeddest = fixup_memory_subreg (fixeddest, insn,
2424 promoted_mode, 0);
2425 temp_mode = GET_MODE (fixeddest);
2426 }
2427 else
2428 {
2429 fixeddest = fixup_stack_1 (fixeddest, insn);
2430 temp_mode = promoted_mode;
2431 }
2432
2433 temp = gen_reg_rtx (temp_mode);
2434
2435 emit_insn_after (gen_move_insn (fixeddest,
2436 gen_lowpart (GET_MODE (fixeddest),
2437 temp)),
2438 insn);
2439
2440 SET_DEST (x) = temp;
2441 }
2442 }
2443
2444 default:
2445 break;
2446 }
2447
2448 /* Nothing special about this RTX; fix its operands. */
2449
2450 fmt = GET_RTX_FORMAT (code);
2451 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2452 {
2453 if (fmt[i] == 'e')
2454 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements,
2455 no_share);
2456 else if (fmt[i] == 'E')
2457 {
2458 int j;
2459 for (j = 0; j < XVECLEN (x, i); j++)
2460 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2461 insn, replacements, no_share);
2462 }
2463 }
2464 }
2465 \f
2466 /* Previously, X had the form (SUBREG:m1 (REG:PROMOTED_MODE ...)).
2467 The REG was placed on the stack, so X now has the form (SUBREG:m1
2468 (MEM:m2 ...)).
2469
2470 Return an rtx (MEM:m1 newaddr) which is equivalent. If any insns
2471 must be emitted to compute NEWADDR, put them before INSN.
2472
2473 UNCRITICAL nonzero means accept paradoxical subregs.
2474 This is used for subregs found inside REG_NOTES. */
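/* Illustrative example (hypothetical operands): on a little-endian
   target, with no promoted-mode adjustment needed,

   (subreg:SI (mem:DI (reg A)) 4)

   is rewritten as

   (mem:SI (plus (reg A) (const_int 4)))

   and, if that address is not valid for SImode, the insns that compute
   it into a temporary are emitted before INSN.  */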
2475
2476 static rtx
2477 fixup_memory_subreg (rtx x, rtx insn, enum machine_mode promoted_mode, int uncritical)
2478 {
2479 int offset;
2480 rtx mem = SUBREG_REG (x);
2481 rtx addr = XEXP (mem, 0);
2482 enum machine_mode mode = GET_MODE (x);
2483 rtx result, seq;
2484
2485 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2486 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (mem)) && ! uncritical)
2487 abort ();
2488
2489 offset = SUBREG_BYTE (x);
2490 if (BYTES_BIG_ENDIAN)
2491 /* If the PROMOTED_MODE is wider than the mode of the MEM, adjust
2492 the offset so that it points to the right location within the
2493 MEM. */
2494 offset -= (GET_MODE_SIZE (promoted_mode) - GET_MODE_SIZE (GET_MODE (mem)));
2495
2496 if (!flag_force_addr
2497 && memory_address_p (mode, plus_constant (addr, offset)))
2498 /* Shortcut if no insns need be emitted. */
2499 return adjust_address (mem, mode, offset);
2500
2501 start_sequence ();
2502 result = adjust_address (mem, mode, offset);
2503 seq = get_insns ();
2504 end_sequence ();
2505
2506 emit_insn_before (seq, insn);
2507 return result;
2508 }
2509
2510 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2511 Replace subexpressions of X in place.
2512 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2513 Otherwise return X, with its contents possibly altered.
2514
2515 INSN, PROMOTED_MODE and UNCRITICAL are as for
2516 fixup_memory_subreg. */
2517
2518 static rtx
2519 walk_fixup_memory_subreg (rtx x, rtx insn, enum machine_mode promoted_mode,
2520 int uncritical)
2521 {
2522 enum rtx_code code;
2523 const char *fmt;
2524 int i;
2525
2526 if (x == 0)
2527 return 0;
2528
2529 code = GET_CODE (x);
2530
2531 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2532 return fixup_memory_subreg (x, insn, promoted_mode, uncritical);
2533
2534 /* Nothing special about this RTX; fix its operands. */
2535
2536 fmt = GET_RTX_FORMAT (code);
2537 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2538 {
2539 if (fmt[i] == 'e')
2540 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn,
2541 promoted_mode, uncritical);
2542 else if (fmt[i] == 'E')
2543 {
2544 int j;
2545 for (j = 0; j < XVECLEN (x, i); j++)
2546 XVECEXP (x, i, j)
2547 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn,
2548 promoted_mode, uncritical);
2549 }
2550 }
2551 return x;
2552 }
2553 \f
2554 /* For each memory ref within X, if it refers to a stack slot
2555 with an out of range displacement, put the address in a temp register
2556 (emitting new insns before INSN to load these registers)
2557 and alter the memory ref to use that register.
2558 Replace each such MEM rtx with a copy, to avoid clobberage. */
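/* Illustrative example (hypothetical values): if INSN contains

   (mem:SI (plus (reg frame-pointer) (const_int 100000)))

   and that displacement is out of range for the machine's addressing
   modes, insns computing the sum into a fresh pseudo T are emitted
   before INSN, and the reference becomes (mem:SI (reg T)).  */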
2559
2560 static rtx
2561 fixup_stack_1 (rtx x, rtx insn)
2562 {
2563 int i;
2564 RTX_CODE code = GET_CODE (x);
2565 const char *fmt;
2566
2567 if (code == MEM)
2568 {
2569 rtx ad = XEXP (x, 0);
2570 /* If we have the address of a stack slot but it's not valid
2571 (displacement is too large), compute the sum in a register. */
2572 if (GET_CODE (ad) == PLUS
2573 && GET_CODE (XEXP (ad, 0)) == REG
2574 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2575 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2576 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2577 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2578 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2579 #endif
2580 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2581 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2582 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2583 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2584 {
2585 rtx temp, seq;
2586 if (memory_address_p (GET_MODE (x), ad))
2587 return x;
2588
2589 start_sequence ();
2590 temp = copy_to_reg (ad);
2591 seq = get_insns ();
2592 end_sequence ();
2593 emit_insn_before (seq, insn);
2594 return replace_equiv_address (x, temp);
2595 }
2596 return x;
2597 }
2598
2599 fmt = GET_RTX_FORMAT (code);
2600 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2601 {
2602 if (fmt[i] == 'e')
2603 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2604 else if (fmt[i] == 'E')
2605 {
2606 int j;
2607 for (j = 0; j < XVECLEN (x, i); j++)
2608 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2609 }
2610 }
2611 return x;
2612 }
2613 \f
2614 /* Optimization: a bit-field instruction whose field
2615 happens to be a byte or halfword in memory
2616 can be changed to a move instruction.
2617
2618 We call here when INSN is an insn to examine or store into a bit-field.
2619 BODY is the SET-rtx to be altered.
2620
2621 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2622 (Currently this is called only from function.c, and EQUIV_MEM
2623 is always 0.) */
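/* Illustrative example (hypothetical operands): on a little-endian
   target, a byte-aligned 8-bit insertion such as

   (set (zero_extract:SI (mem:SI M) (const_int 8) (const_int 8)) (reg R))

   can become a plain byte move,

   (set (mem:QI (plus M (const_int 1))) (low part of (reg R) in QImode))

   provided the address is not mode-dependent and the MEM is not
   volatile.  */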
2624
2625 static void
2626 optimize_bit_field (rtx body, rtx insn, rtx *equiv_mem)
2627 {
2628 rtx bitfield;
2629 int destflag;
2630 rtx seq = 0;
2631 enum machine_mode mode;
2632
2633 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2634 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2635 bitfield = SET_DEST (body), destflag = 1;
2636 else
2637 bitfield = SET_SRC (body), destflag = 0;
2638
2639 /* First check that the field being stored has constant size and position
2640 and is in fact a byte or halfword suitably aligned. */
2641
2642 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2643 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2644 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2645 != BLKmode)
2646 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2647 {
2648 rtx memref = 0;
2649
2650 /* Now check that the containing word is memory, not a register,
2651 and that it is safe to change the machine mode. */
2652
2653 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2654 memref = XEXP (bitfield, 0);
2655 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2656 && equiv_mem != 0)
2657 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2658 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2659 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2660 memref = SUBREG_REG (XEXP (bitfield, 0));
2661 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2662 && equiv_mem != 0
2663 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2664 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2665
2666 if (memref
2667 && ! mode_dependent_address_p (XEXP (memref, 0))
2668 && ! MEM_VOLATILE_P (memref))
2669 {
2670 /* Now adjust the address, first for any subreg'ing
2671 that we are now getting rid of,
2672 and then for which byte of the word is wanted. */
2673
2674 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2675 rtx insns;
2676
2677 /* Adjust OFFSET to count bits from low-address byte. */
2678 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2679 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2680 - offset - INTVAL (XEXP (bitfield, 1)));
2681
2682 /* Adjust OFFSET to count bytes from low-address byte. */
2683 offset /= BITS_PER_UNIT;
2684 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2685 {
2686 offset += (SUBREG_BYTE (XEXP (bitfield, 0))
2687 / UNITS_PER_WORD) * UNITS_PER_WORD;
2688 if (BYTES_BIG_ENDIAN)
2689 offset -= (MIN (UNITS_PER_WORD,
2690 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2691 - MIN (UNITS_PER_WORD,
2692 GET_MODE_SIZE (GET_MODE (memref))));
2693 }
2694
2695 start_sequence ();
2696 memref = adjust_address (memref, mode, offset);
2697 insns = get_insns ();
2698 end_sequence ();
2699 emit_insn_before (insns, insn);
2700
2701 /* Store this memory reference where
2702 we found the bit field reference. */
2703
2704 if (destflag)
2705 {
2706 validate_change (insn, &SET_DEST (body), memref, 1);
2707 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2708 {
2709 rtx src = SET_SRC (body);
2710 while (GET_CODE (src) == SUBREG
2711 && SUBREG_BYTE (src) == 0)
2712 src = SUBREG_REG (src);
2713 if (GET_MODE (src) != GET_MODE (memref))
2714 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2715 validate_change (insn, &SET_SRC (body), src, 1);
2716 }
2717 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2718 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2719 /* This shouldn't happen because anything that didn't have
2720 one of these modes should have got converted explicitly
2721 and then referenced through a subreg.
2722 This is so because the original bit-field was
2723 handled by agg_mode and so its tree structure had
2724 the same mode that memref now has. */
2725 abort ();
2726 }
2727 else
2728 {
2729 rtx dest = SET_DEST (body);
2730
2731 while (GET_CODE (dest) == SUBREG
2732 && SUBREG_BYTE (dest) == 0
2733 && (GET_MODE_CLASS (GET_MODE (dest))
2734 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2735 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2736 <= UNITS_PER_WORD))
2737 dest = SUBREG_REG (dest);
2738
2739 validate_change (insn, &SET_DEST (body), dest, 1);
2740
2741 if (GET_MODE (dest) == GET_MODE (memref))
2742 validate_change (insn, &SET_SRC (body), memref, 1);
2743 else
2744 {
2745 /* Convert the mem ref to the destination mode. */
2746 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2747
2748 start_sequence ();
2749 convert_move (newreg, memref,
2750 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2751 seq = get_insns ();
2752 end_sequence ();
2753
2754 validate_change (insn, &SET_SRC (body), newreg, 1);
2755 }
2756 }
2757
2758 /* See if we can convert this extraction or insertion into
2759 a simple move insn. We might not be able to do so if this
2760 was, for example, part of a PARALLEL.
2761
2762 If we succeed, write out any needed conversions. If we fail,
2763 it is hard to guess why we failed, so don't do anything
2764 special; just let the optimization be suppressed. */
2765
2766 if (apply_change_group () && seq)
2767 emit_insn_before (seq, insn);
2768 }
2769 }
2770 }
2771 \f
2772 /* These routines are responsible for converting virtual register references
2773 to the actual hard register references once RTL generation is complete.
2774
2775 The following five variables are used for communication between the
2776 routines. They contain the offsets of the virtual registers from their
2777 respective hard registers. */
2778
2779 static int in_arg_offset;
2780 static int var_offset;
2781 static int dynamic_offset;
2782 static int out_arg_offset;
2783 static int cfa_offset;
2784
2785 /* In most machines, the stack pointer register is equivalent to the bottom
2786 of the stack. */
2787
2788 #ifndef STACK_POINTER_OFFSET
2789 #define STACK_POINTER_OFFSET 0
2790 #endif
2791
2792 /* If not defined, pick an appropriate default for the offset of dynamically
2793 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2794 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2795
2796 #ifndef STACK_DYNAMIC_OFFSET
2797
2798 /* The bottom of the stack points to the actual arguments. If
2799 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2800 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2801 stack space for register parameters is not pushed by the caller, but
2802 rather is part of the fixed stack areas and hence not included in
2803 `current_function_outgoing_args_size'. Nevertheless, we must allow
2804 for it when allocating stack dynamic objects. */
2805
2806 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2807 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2808 ((ACCUMULATE_OUTGOING_ARGS \
2809 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
2810 + (STACK_POINTER_OFFSET)) \
2811
2812 #else
2813 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2814 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
2815 + (STACK_POINTER_OFFSET))
2816 #endif
2817 #endif
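/* Worked example (hypothetical numbers): with ACCUMULATE_OUTGOING_ARGS
   nonzero, current_function_outgoing_args_size of 32 bytes,
   REG_PARM_STACK_SPACE of 16 bytes and STACK_POINTER_OFFSET of 0, the
   first definition above gives a dynamic offset of 32 + 16 + 0 = 48
   bytes from the stack pointer.  */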
2818
2819 /* On most machines, the CFA coincides with the first incoming parm. */
2820
2821 #ifndef ARG_POINTER_CFA_OFFSET
2822 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
2823 #endif
2824
2825 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just
2826 had its address taken. DECL is the decl or SAVE_EXPR for the
2827 object stored in the register, for later use if we do need to force
2828 REG into the stack. REG is overwritten by the MEM, as in
2829 put_reg_into_stack. RESCAN is true if previously emitted
2830 instructions must be rescanned and modified now that the REG has
2831 been transformed. */
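/* Illustrative example (hypothetical register numbers): if REG is
   pseudo 100 in SImode and DECL is D, the new rtx R is

   (addressof:Pmode (reg:SI 101) 100 D)

   where 101 is a fresh pseudo, and pseudo 100 itself is rewritten in
   place into

   (mem:SI (addressof:Pmode (reg:SI 101) 100 D))

   so every existing reference to pseudo 100 now sees the MEM.  */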
2832
2833 rtx
2834 gen_mem_addressof (rtx reg, tree decl, int rescan)
2835 {
2836 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2837 REGNO (reg), decl);
2838
2839 /* Calculate this before we start messing with decl's RTL. */
2840 HOST_WIDE_INT set = decl ? get_alias_set (decl) : 0;
2841
2842 /* If the original REG was a user-variable, then so is the REG whose
2843 address is being taken. Likewise for unchanging. */
2844 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2845 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2846
2847 PUT_CODE (reg, MEM);
2848 MEM_ATTRS (reg) = 0;
2849 XEXP (reg, 0) = r;
2850
2851 if (decl)
2852 {
2853 tree type = TREE_TYPE (decl);
2854 enum machine_mode decl_mode
2855 = (DECL_P (decl) ? DECL_MODE (decl) : TYPE_MODE (TREE_TYPE (decl)));
2856 rtx decl_rtl = (TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl)
2857 : DECL_RTL_IF_SET (decl));
2858
2859 PUT_MODE (reg, decl_mode);
2860
2861 /* Clear DECL_RTL momentarily so functions below will work
2862 properly, then set it again. */
2863 if (DECL_P (decl) && decl_rtl == reg)
2864 SET_DECL_RTL (decl, 0);
2865
2866 set_mem_attributes (reg, decl, 1);
2867 set_mem_alias_set (reg, set);
2868
2869 if (DECL_P (decl) && decl_rtl == reg)
2870 SET_DECL_RTL (decl, reg);
2871
2872 if (rescan
2873 && (TREE_USED (decl) || (DECL_P (decl) && DECL_INITIAL (decl) != 0)))
2874 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), reg, 0);
2875 }
2876 else if (rescan)
2877 {
2878 /* This can only happen during reload. Clear the same flag bits as
2879 reload. */
2880 MEM_VOLATILE_P (reg) = 0;
2881 RTX_UNCHANGING_P (reg) = 0;
2882 MEM_IN_STRUCT_P (reg) = 0;
2883 MEM_SCALAR_P (reg) = 0;
2884 MEM_ATTRS (reg) = 0;
2885
2886 fixup_var_refs (reg, GET_MODE (reg), 0, reg, 0);
2887 }
2888
2889 return reg;
2890 }
2891
2892 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2893
2894 void
2895 flush_addressof (tree decl)
2896 {
2897 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2898 && DECL_RTL (decl) != 0
2899 && GET_CODE (DECL_RTL (decl)) == MEM
2900 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2901 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2902 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2903 }
2904
2905 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2906
2907 static void
2908 put_addressof_into_stack (rtx r, htab_t ht)
2909 {
2910 tree decl, type;
2911 int volatile_p, used_p;
2912
2913 rtx reg = XEXP (r, 0);
2914
2915 if (GET_CODE (reg) != REG)
2916 abort ();
2917
2918 decl = ADDRESSOF_DECL (r);
2919 if (decl)
2920 {
2921 type = TREE_TYPE (decl);
2922 volatile_p = (TREE_CODE (decl) != SAVE_EXPR
2923 && TREE_THIS_VOLATILE (decl));
2924 used_p = (TREE_USED (decl)
2925 || (DECL_P (decl) && DECL_INITIAL (decl) != 0));
2926 }
2927 else
2928 {
2929 type = NULL_TREE;
2930 volatile_p = 0;
2931 used_p = 1;
2932 }
2933
2934 put_reg_into_stack (0, reg, type, GET_MODE (reg), GET_MODE (reg),
2935 volatile_p, ADDRESSOF_REGNO (r), used_p, ht);
2936 }
2937
2938 /* List of replacements made below in purge_addressof_1 when creating
2939 bitfield insertions. */
2940 static rtx purge_bitfield_addressof_replacements;
2941
2942 /* List of replacements made below in purge_addressof_1 for patterns
2943 (MEM (ADDRESSOF (REG ...))). The key of each list entry is the
2944 corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
2945 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
2946 enough in complex cases, e.g. when some field values can be
2947 extracted by using a MEM with a narrower mode. */
2948 static rtx purge_addressof_replacements;
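/* Sketch of the list layout (hypothetical entry): both lists are chains
   of EXPR_LIST nodes of the form

   (expr_list KEY (expr_list VALUE <rest-of-list>))

   so an entry whose KEY is (addressof (reg 100) ...) and whose VALUE is
   (reg 123) lets the same substitution be repeated later when the
   REG_NOTES are processed.  */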
2949
2950 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2951 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2952 the stack. If the function returns FALSE then the replacement could not
2953 be made. If MAY_POSTPONE is true and we would not put the addressof
2954 onto the stack, postpone processing of the insn. */
2955
2956 static bool
2957 purge_addressof_1 (rtx *loc, rtx insn, int force, int store, int may_postpone,
2958 htab_t ht)
2959 {
2960 rtx x;
2961 RTX_CODE code;
2962 int i, j;
2963 const char *fmt;
2964 bool result = true;
2965 bool libcall = false;
2966
2967 /* Re-start here to avoid recursion in common cases. */
2968 restart:
2969
2970 x = *loc;
2971 if (x == 0)
2972 return true;
2973
2974 /* Is this a libcall? */
2975 if (!insn)
2976 libcall = REG_NOTE_KIND (*loc) == REG_RETVAL;
2977
2978 code = GET_CODE (x);
2979
2980 /* If we don't return in any of the cases below, we will recurse inside
2981 the RTX, which will normally result in any ADDRESSOF being forced into
2982 memory. */
2983 if (code == SET)
2984 {
2985 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1,
2986 may_postpone, ht);
2987 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0,
2988 may_postpone, ht);
2989 return result;
2990 }
2991 else if (code == ADDRESSOF)
2992 {
2993 rtx sub, insns;
2994
2995 if (GET_CODE (XEXP (x, 0)) != MEM)
2996 put_addressof_into_stack (x, ht);
2997
2998 /* We must create a copy of the rtx because it was created by
2999 overwriting a REG rtx which is always shared. */
3000 sub = copy_rtx (XEXP (XEXP (x, 0), 0));
3001 if (validate_change (insn, loc, sub, 0)
3002 || validate_replace_rtx (x, sub, insn))
3003 return true;
3004
3005 start_sequence ();
3006
3007 /* If SUB is a hard or virtual register, copy it into a new
3008 pseudo-register. Otherwise, SUB may be an expression, so generate
3009 code to compute it. */
3010 if (GET_CODE (sub) == REG && REGNO (sub) <= LAST_VIRTUAL_REGISTER)
3011 sub = copy_to_reg (sub);
3012 else
3013 sub = force_operand (sub, NULL_RTX);
3014
3015 if (! validate_change (insn, loc, sub, 0)
3016 && ! validate_replace_rtx (x, sub, insn))
3017 abort ();
3018
3019 insns = get_insns ();
3020 end_sequence ();
3021 emit_insn_before (insns, insn);
3022 return true;
3023 }
3024
3025 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3026 {
3027 rtx sub = XEXP (XEXP (x, 0), 0);
3028
3029 if (GET_CODE (sub) == MEM)
3030 sub = adjust_address_nv (sub, GET_MODE (x), 0);
3031 else if (GET_CODE (sub) == REG
3032 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3033 ;
3034 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3035 {
3036 int size_x, size_sub;
3037
3038 if (may_postpone)
3039 {
3040 /* Postpone for now, so that we do not emit bit-field arithmetic
3041 unless there is some benefit from it. */
3042 if (!postponed_insns || XEXP (postponed_insns, 0) != insn)
3043 postponed_insns = alloc_INSN_LIST (insn, postponed_insns);
3044 return true;
3045 }
3046
3047 if (!insn)
3048 {
3049 /* When processing REG_NOTES look at the list of
3050 replacements done on the insn to find the register that X
3051 was replaced by. */
3052 rtx tem;
3053
3054 for (tem = purge_bitfield_addressof_replacements;
3055 tem != NULL_RTX;
3056 tem = XEXP (XEXP (tem, 1), 1))
3057 if (rtx_equal_p (x, XEXP (tem, 0)))
3058 {
3059 *loc = XEXP (XEXP (tem, 1), 0);
3060 return true;
3061 }
3062
3063 /* See comment for purge_addressof_replacements. */
3064 for (tem = purge_addressof_replacements;
3065 tem != NULL_RTX;
3066 tem = XEXP (XEXP (tem, 1), 1))
3067 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3068 {
3069 rtx z = XEXP (XEXP (tem, 1), 0);
3070
3071 if (GET_MODE (x) == GET_MODE (z)
3072 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3073 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3074 abort ();
3075
3076 /* It can happen that the note may speak of things
3077 in a wider (or just different) mode than the
3078 code did. This is especially true of
3079 REG_RETVAL. */
3080
3081 if (GET_CODE (z) == SUBREG && SUBREG_BYTE (z) == 0)
3082 z = SUBREG_REG (z);
3083
3084 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3085 && (GET_MODE_SIZE (GET_MODE (x))
3086 > GET_MODE_SIZE (GET_MODE (z))))
3087 {
3088 /* This can occur as a result of invalid
3089 pointer casts, e.g. float f; ...
3090 *(long long int *)&f.
3091 ??? We could emit a warning here, but
3092 without a line number that wouldn't be
3093 very helpful. */
3094 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3095 }
3096 else
3097 z = gen_lowpart (GET_MODE (x), z);
3098
3099 *loc = z;
3100 return true;
3101 }
3102
3103 /* When we are processing the REG_NOTES of the last instruction
3104 of a libcall, there will typically be no replacements
3105 for that insn; the replacements happened before, in piecemeal
3106 fashion. OTOH we are not interested in the details of
3107 this for the REG_EQUAL note, we want to know the big picture,
3108 which can be succinctly described with a simple SUBREG.
3109 Note that removing the REG_EQUAL note is not an option
3110 on the last insn of a libcall, so we must do a replacement. */
3111
3112 /* In compile/990107-1.c:7 compiled at -O1 -m1 for sh-elf,
3113 we got
3114 (mem:DI (addressof:SI (reg/v:DF 160) 159 0x401c8510)
3115 [0 S8 A32]), which can be expressed with a simple
3116 same-size subreg */
3117 if ((GET_MODE_SIZE (GET_MODE (x))
3118 <= GET_MODE_SIZE (GET_MODE (sub)))
3119 /* Again, invalid pointer casts (as in
3120 compile/990203-1.c) can require paradoxical
3121 subregs. */
3122 || (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3123 && (GET_MODE_SIZE (GET_MODE (x))
3124 > GET_MODE_SIZE (GET_MODE (sub)))
3125 && libcall))
3126 {
3127 *loc = gen_rtx_SUBREG (GET_MODE (x), sub, 0);
3128 return true;
3129 }
3130 /* ??? Are there other cases we should handle? */
3131
3132 /* Sometimes we may not be able to find the replacement. For
3133 example when the original insn was a MEM in a wider mode,
3134 and the note is part of a sign extension of a narrowed
3135 version of that MEM. Gcc testcase compile/990829-1.c can
3136 generate an example of this situation. Rather than complain
3137 we return false, which will prompt our caller to remove the
3138 offending note. */
3139 return false;
3140 }
3141
3142 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3143 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3144
3145 /* Do not frob unchanging MEMs. If a later reference forces the
3146 pseudo to the stack, we can wind up with multiple writes to
3147 an unchanging memory, which is invalid. */
3148 if (RTX_UNCHANGING_P (x) && size_x != size_sub)
3149 ;
3150
3151 /* Don't even consider working with paradoxical subregs,
3152 or the moral equivalent seen here. */
3153 else if (size_x <= size_sub
3154 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3155 {
3156 /* Do a bitfield insertion to mirror what would happen
3157 in memory. */
3158
3159 rtx val, seq;
3160
3161 if (store)
3162 {
3163 rtx p = PREV_INSN (insn);
3164
3165 start_sequence ();
3166 val = gen_reg_rtx (GET_MODE (x));
3167 if (! validate_change (insn, loc, val, 0))
3168 {
3169 /* Discard the current sequence and put the
3170 ADDRESSOF on stack. */
3171 end_sequence ();
3172 goto give_up;
3173 }
3174 seq = get_insns ();
3175 end_sequence ();
3176 emit_insn_before (seq, insn);
3177 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3178 insn, ht);
3179
3180 start_sequence ();
3181 store_bit_field (sub, size_x, 0, GET_MODE (x),
3182 val, GET_MODE_SIZE (GET_MODE (sub)));
3183
3184 /* Make sure to unshare any shared rtl that store_bit_field
3185 might have created. */
3186 unshare_all_rtl_again (get_insns ());
3187
3188 seq = get_insns ();
3189 end_sequence ();
3190 p = emit_insn_after (seq, insn);
3191 if (NEXT_INSN (insn))
3192 compute_insns_for_mem (NEXT_INSN (insn),
3193 p ? NEXT_INSN (p) : NULL_RTX,
3194 ht);
3195 }
3196 else
3197 {
3198 rtx p = PREV_INSN (insn);
3199
3200 start_sequence ();
3201 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3202 GET_MODE (x), GET_MODE (x),
3203 GET_MODE_SIZE (GET_MODE (sub)));
3204
3205 if (! validate_change (insn, loc, val, 0))
3206 {
3207 /* Discard the current sequence and put the
3208 ADDRESSOF on stack. */
3209 end_sequence ();
3210 goto give_up;
3211 }
3212
3213 seq = get_insns ();
3214 end_sequence ();
3215 emit_insn_before (seq, insn);
3216 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3217 insn, ht);
3218 }
3219
3220 /* Remember the replacement so that the same one can be done
3221 on the REG_NOTES. */
3222 purge_bitfield_addressof_replacements
3223 = gen_rtx_EXPR_LIST (VOIDmode, x,
3224 gen_rtx_EXPR_LIST
3225 (VOIDmode, val,
3226 purge_bitfield_addressof_replacements));
3227
3228 /* We replaced with a reg -- all done. */
3229 return true;
3230 }
3231 }
3232
3233 else if (validate_change (insn, loc, sub, 0))
3234 {
3235 /* Remember the replacement so that the same one can be done
3236 on the REG_NOTES. */
3237 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3238 {
3239 rtx tem;
3240
3241 for (tem = purge_addressof_replacements;
3242 tem != NULL_RTX;
3243 tem = XEXP (XEXP (tem, 1), 1))
3244 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3245 {
3246 XEXP (XEXP (tem, 1), 0) = sub;
3247 return true;
3248 }
3249 purge_addressof_replacements
3250 = gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0),
3251 gen_rtx_EXPR_LIST (VOIDmode, sub,
3252 purge_addressof_replacements));
3253 return true;
3254 }
3255 goto restart;
3256 }
3257 }
3258
3259 give_up:
3260 /* Scan all subexpressions. */
3261 fmt = GET_RTX_FORMAT (code);
3262 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3263 {
3264 if (*fmt == 'e')
3265 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0,
3266 may_postpone, ht);
3267 else if (*fmt == 'E')
3268 for (j = 0; j < XVECLEN (x, i); j++)
3269 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0,
3270 may_postpone, ht);
3271 }
3272
3273 return result;
3274 }
3275
3276 /* Return a hash value for K, a REG. */
3277
3278 static hashval_t
3279 insns_for_mem_hash (const void *k)
3280 {
3281 /* Use the address of the key for the hash value. */
3282 struct insns_for_mem_entry *m = (struct insns_for_mem_entry *) k;
3283 return htab_hash_pointer (m->key);
3284 }
3285
3286 /* Return nonzero if K1 and K2 (two REGs) are the same. */
3287
3288 static int
3289 insns_for_mem_comp (const void *k1, const void *k2)
3290 {
3291 struct insns_for_mem_entry *m1 = (struct insns_for_mem_entry *) k1;
3292 struct insns_for_mem_entry *m2 = (struct insns_for_mem_entry *) k2;
3293 return m1->key == m2->key;
3294 }
3295
3296 struct insns_for_mem_walk_info
3297 {
3298 /* The hash table that we are using to record which INSNs use which
3299 MEMs. */
3300 htab_t ht;
3301
3302 /* The INSN we are currently processing. */
3303 rtx insn;
3304
3305 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3306 to find the insns that use the REGs in the ADDRESSOFs. */
3307 int pass;
3308 };
3309
3310 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3311 that might be used in an ADDRESSOF expression, record this INSN in
3312 the hash table given by DATA (which is really a pointer to an
3313 insns_for_mem_walk_info structure). */
3314
3315 static int
3316 insns_for_mem_walk (rtx *r, void *data)
3317 {
3318 struct insns_for_mem_walk_info *ifmwi
3319 = (struct insns_for_mem_walk_info *) data;
3320 struct insns_for_mem_entry tmp;
3321 tmp.insns = NULL_RTX;
3322
3323 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3324 && GET_CODE (XEXP (*r, 0)) == REG)
3325 {
3326 void **e;
3327 tmp.key = XEXP (*r, 0);
3328 e = htab_find_slot (ifmwi->ht, &tmp, INSERT);
3329 if (*e == NULL)
3330 {
3331 *e = ggc_alloc (sizeof (tmp));
3332 memcpy (*e, &tmp, sizeof (tmp));
3333 }
3334 }
3335 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3336 {
3337 struct insns_for_mem_entry *ifme;
3338 tmp.key = *r;
3339 ifme = htab_find (ifmwi->ht, &tmp);
3340
3341 /* If we have not already recorded this INSN, do so now. Since
3342 we process the INSNs in order, we know that if we have
3343 recorded it, it must be at the front of the list. */
3344 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3345 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3346 ifme->insns);
3347 }
3348
3349 return 0;
3350 }
3351
3352 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3353 which REGs in HT. */
3354
3355 static void
3356 compute_insns_for_mem (rtx insns, rtx last_insn, htab_t ht)
3357 {
3358 rtx insn;
3359 struct insns_for_mem_walk_info ifmwi;
3360 ifmwi.ht = ht;
3361
3362 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3363 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3364 if (INSN_P (insn))
3365 {
3366 ifmwi.insn = insn;
3367 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3368 }
3369 }
3370
3371 /* Helper function for purge_addressof called through for_each_rtx.
3372 Returns true iff the rtl is an ADDRESSOF. */
3373
3374 static int
3375 is_addressof (rtx *rtl, void *data ATTRIBUTE_UNUSED)
3376 {
3377 return GET_CODE (*rtl) == ADDRESSOF;
3378 }
3379
3380 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3381 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3382 stack. */
3383
3384 void
3385 purge_addressof (rtx insns)
3386 {
3387 rtx insn, tmp;
3388 htab_t ht;
3389
3390 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3391 requires a fixup pass over the instruction stream to correct
3392 INSNs that depended on the REG being a REG, and not a MEM. But,
3393 these fixup passes are slow. Furthermore, most MEMs are not
3394 mentioned in very many instructions. So, we speed up the process
3395 by pre-calculating which REGs occur in which INSNs; that allows
3396 us to perform the fixup passes much more quickly. */
3397 ht = htab_create_ggc (1000, insns_for_mem_hash, insns_for_mem_comp, NULL);
3398 compute_insns_for_mem (insns, NULL_RTX, ht);
3399
3400 postponed_insns = NULL;
3401
3402 for (insn = insns; insn; insn = NEXT_INSN (insn))
3403 if (INSN_P (insn))
3404 {
3405 if (! purge_addressof_1 (&PATTERN (insn), insn,
3406 asm_noperands (PATTERN (insn)) > 0, 0, 1, ht))
3407 /* If we could not replace the ADDRESSOFs in the insn,
3408 something is wrong. */
3409 abort ();
3410
3411 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, 0, ht))
3412 {
3413 /* If we could not replace the ADDRESSOFs in the insn's notes,
3414 we can just remove the offending notes instead. */
3415 rtx note;
3416
3417 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3418 {
3419 /* If we find a REG_RETVAL note then the insn is a libcall.
3420 Such insns must have REG_EQUAL notes as well, in order
3421 for later passes of the compiler to work. So it is not
3422 safe to delete the notes here, and instead we abort. */
3423 if (REG_NOTE_KIND (note) == REG_RETVAL)
3424 abort ();
3425 if (for_each_rtx (&note, is_addressof, NULL))
3426 remove_note (insn, note);
3427 }
3428 }
3429 }
3430
3431 /* Process the postponed insns. */
3432 while (postponed_insns)
3433 {
3434 insn = XEXP (postponed_insns, 0);
3435 tmp = postponed_insns;
3436 postponed_insns = XEXP (postponed_insns, 1);
3437 free_INSN_LIST_node (tmp);
3438
3439 if (! purge_addressof_1 (&PATTERN (insn), insn,
3440 asm_noperands (PATTERN (insn)) > 0, 0, 0, ht))
3441 abort ();
3442 }
3443
3444 /* Clean up. */
3445 purge_bitfield_addressof_replacements = 0;
3446 purge_addressof_replacements = 0;
3447
3448 /* REGs are shared. purge_addressof will destructively replace a REG
3449 with a MEM, which creates shared MEMs.
3450
3451 Unfortunately, the children of put_reg_into_stack assume that MEMs
3452 referring to the same stack slot are shared (fixup_var_refs and
3453 the associated hash table code).
3454
3455 So, we have to do another unsharing pass after we have flushed any
3456 REGs that had their address taken into the stack.
3457
3458 It may be worth tracking whether or not we converted any REGs into
3459 MEMs to avoid this overhead when it is not needed. */
3460 unshare_all_rtl_again (get_insns ());
3461 }
3462 \f
3463 /* Convert a SET of a hard subreg to a set of the appropriate hard
3464 register. A subroutine of purge_hard_subreg_sets. */
3465
3466 static void
3467 purge_single_hard_subreg_set (rtx pattern)
3468 {
3469 rtx reg = SET_DEST (pattern);
3470 enum machine_mode mode = GET_MODE (SET_DEST (pattern));
3471 int offset = 0;
3472
3473 if (GET_CODE (reg) == SUBREG && GET_CODE (SUBREG_REG (reg)) == REG
3474 && REGNO (SUBREG_REG (reg)) < FIRST_PSEUDO_REGISTER)
3475 {
3476 offset = subreg_regno_offset (REGNO (SUBREG_REG (reg)),
3477 GET_MODE (SUBREG_REG (reg)),
3478 SUBREG_BYTE (reg),
3479 GET_MODE (reg));
3480 reg = SUBREG_REG (reg);
3481 }
3482
3483
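  /* For example, if (reg:DI 29) occupies hard registers 29 and 30, a
     destination of (subreg:SI (reg:DI 29) 4) selects the second word, so
     OFFSET is 1 and the SET destination is rewritten below as (reg:SI 30).
     (Illustrative; the exact offset depends on the target's word and byte
     ordering.)  */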
3484 if (GET_CODE (reg) == REG && REGNO (reg) < FIRST_PSEUDO_REGISTER)
3485 {
3486 reg = gen_rtx_REG (mode, REGNO (reg) + offset);
3487 SET_DEST (pattern) = reg;
3488 }
3489 }
3490
3491 /* Eliminate all occurrences of SETs of hard subregs from INSNS. The
3492 only such SETs that we expect to see are those left in because
3493 integrate can't handle sets of parts of a return value register.
3494
3495 We don't use alter_subreg because we only want to eliminate subregs
3496 of hard registers. */
3497
3498 void
3499 purge_hard_subreg_sets (rtx insn)
3500 {
3501 for (; insn; insn = NEXT_INSN (insn))
3502 {
3503 if (INSN_P (insn))
3504 {
3505 rtx pattern = PATTERN (insn);
3506 switch (GET_CODE (pattern))
3507 {
3508 case SET:
3509 if (GET_CODE (SET_DEST (pattern)) == SUBREG)
3510 purge_single_hard_subreg_set (pattern);
3511 break;
3512 case PARALLEL:
3513 {
3514 int j;
3515 for (j = XVECLEN (pattern, 0) - 1; j >= 0; j--)
3516 {
3517 rtx inner_pattern = XVECEXP (pattern, 0, j);
3518 if (GET_CODE (inner_pattern) == SET
3519 && GET_CODE (SET_DEST (inner_pattern)) == SUBREG)
3520 purge_single_hard_subreg_set (inner_pattern);
3521 }
3522 }
3523 break;
3524 default:
3525 break;
3526 }
3527 }
3528 }
3529 }
3530 \f
3531 /* Pass through the INSNS of function FNDECL and convert virtual register
3532 references to hard register references. */
3533
3534 void
3535 instantiate_virtual_regs (tree fndecl, rtx insns)
3536 {
3537 rtx insn;
3538 unsigned int i;
3539
3540 /* Compute the offsets to use for this function. */
3541 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3542 var_offset = STARTING_FRAME_OFFSET;
3543 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3544 out_arg_offset = STACK_POINTER_OFFSET;
3545 cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);
3546
3547 /* Scan all variables and parameters of this function. For each that is
3548 in memory, instantiate all virtual registers if the result is a valid
3549 address. If not, we do it later. That will handle most uses of virtual
3550 regs on many machines. */
3551 instantiate_decls (fndecl, 1);
3552
3553 /* Initialize recognition, indicating that volatile is OK. */
3554 init_recog ();
3555
3556 /* Scan through all the insns, instantiating every virtual register still
3557 present. */
3558 for (insn = insns; insn; insn = NEXT_INSN (insn))
3559 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3560 || GET_CODE (insn) == CALL_INSN)
3561 {
3562 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3563 if (INSN_DELETED_P (insn))
3564 continue;
3565 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3566 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
3567 if (GET_CODE (insn) == CALL_INSN)
3568 instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
3569 NULL_RTX, 0);
3570
3571 /* Past this point all ASM statements should match. Verify that
3572 to avoid failures later in the compilation process. */
3573 if (asm_noperands (PATTERN (insn)) >= 0
3574 && ! check_asm_operands (PATTERN (insn)))
3575 instantiate_virtual_regs_lossage (insn);
3576 }
3577
3578 /* Instantiate the stack slots for the parm registers, for later use in
3579 addressof elimination. */
3580 for (i = 0; i < max_parm_reg; ++i)
3581 if (parm_reg_stack_loc[i])
3582 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3583
3584 /* Now instantiate the remaining register equivalences for debugging info.
3585 These will not be valid addresses. */
3586 instantiate_decls (fndecl, 0);
3587
3588 /* Indicate that, from now on, assign_stack_local should use
3589 frame_pointer_rtx. */
3590 virtuals_instantiated = 1;
3591 }
3592
3593 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3594 all virtual registers in their DECL_RTL's.
3595
3596 If VALID_ONLY, do this only if the resulting address is still valid.
3597 Otherwise, always do it. */
3598
3599 static void
3600 instantiate_decls (tree fndecl, int valid_only)
3601 {
3602 tree decl;
3603
3604 /* Process all parameters of the function. */
3605 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3606 {
3607 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3608 HOST_WIDE_INT size_rtl;
3609
3610 instantiate_decl (DECL_RTL (decl), size, valid_only);
3611
3612 /* If the parameter was promoted, then the incoming RTL mode may be
3613 larger than the declared type size. We must use the larger of
3614 the two sizes. */
3615 size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
3616 size = MAX (size_rtl, size);
3617 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3618 }
3619
3620 /* Now process all variables defined in the function or its subblocks. */
3621 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3622 }
3623
3624 /* Subroutine of instantiate_decls: Process all decls in the given
3625 BLOCK node and all its subblocks. */
3626
3627 static void
3628 instantiate_decls_1 (tree let, int valid_only)
3629 {
3630 tree t;
3631
3632 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3633 if (DECL_RTL_SET_P (t))
3634 instantiate_decl (DECL_RTL (t),
3635 int_size_in_bytes (TREE_TYPE (t)),
3636 valid_only);
3637
3638 /* Process all subblocks. */
3639 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3640 instantiate_decls_1 (t, valid_only);
3641 }
3642
3643 /* Subroutine of the preceding procedures: Given RTL representing a
3644 decl and the size of the object, do any instantiation required.
3645
3646 If VALID_ONLY is nonzero, it means that the RTL should only be
3647 changed if the new address is valid. */
3648
3649 static void
3650 instantiate_decl (rtx x, HOST_WIDE_INT size, int valid_only)
3651 {
3652 enum machine_mode mode;
3653 rtx addr;
3654
3655 /* If this is not a MEM, no need to do anything. Similarly if the
3656 address is a constant or a register that is not a virtual register. */
3657
3658 if (x == 0 || GET_CODE (x) != MEM)
3659 return;
3660
3661 addr = XEXP (x, 0);
3662 if (CONSTANT_P (addr)
3663 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3664 || (GET_CODE (addr) == REG
3665 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3666 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3667 return;
3668
3669 /* If we should only do this if the address is valid, copy the address.
3670 We need to do this so we can undo any changes that might make the
3671 address invalid. This copy is unfortunate, but probably can't be
3672 avoided. */
3673
3674 if (valid_only)
3675 addr = copy_rtx (addr);
3676
3677 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3678
3679 if (valid_only && size >= 0)
3680 {
3681 unsigned HOST_WIDE_INT decl_size = size;
3682
3683 /* Now verify that the resulting address is valid for every integer or
3684 floating-point mode up to and including SIZE bytes long. We do this
3685 since the object might be accessed in any mode and frame addresses
3686 are shared. */
3687
3688 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3689 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3690 mode = GET_MODE_WIDER_MODE (mode))
3691 if (! memory_address_p (mode, addr))
3692 return;
3693
3694 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3695 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3696 mode = GET_MODE_WIDER_MODE (mode))
3697 if (! memory_address_p (mode, addr))
3698 return;
3699 }
3700
3701 /* Put back the address now that we have updated it and we either know
3702 it is valid or we don't care whether it is valid. */
3703
3704 XEXP (x, 0) = addr;
3705 }
3706 \f
3707 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
3708 is a virtual register, return the equivalent hard register and set the
3709 offset indirectly through the pointer. Otherwise, return 0. */
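/* For example, virtual_stack_vars_rtx maps to frame_pointer_rtx with
   *POFFSET set to var_offset, so a use of
   (plus (reg virtual-stack-vars) (const_int 8)) is later folded by
   instantiate_virtual_regs_1 into (plus (reg frame-pointer)
   (const_int 8 + var_offset)).  (Illustrative RTL only.)  */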
3710
3711 static rtx
3712 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
3713 {
3714 rtx new;
3715 HOST_WIDE_INT offset;
3716
3717 if (x == virtual_incoming_args_rtx)
3718 new = arg_pointer_rtx, offset = in_arg_offset;
3719 else if (x == virtual_stack_vars_rtx)
3720 new = frame_pointer_rtx, offset = var_offset;
3721 else if (x == virtual_stack_dynamic_rtx)
3722 new = stack_pointer_rtx, offset = dynamic_offset;
3723 else if (x == virtual_outgoing_args_rtx)
3724 new = stack_pointer_rtx, offset = out_arg_offset;
3725 else if (x == virtual_cfa_rtx)
3726 new = arg_pointer_rtx, offset = cfa_offset;
3727 else
3728 return 0;
3729
3730 *poffset = offset;
3731 return new;
3732 }
3733 \f
3734
3735 /* Called when instantiate_virtual_regs has failed to update the instruction.
3736 Usually this means that a non-matching instruction has been emitted;
3737 however, for asm statements the problem may lie in the constraints. */
3738 static void
3739 instantiate_virtual_regs_lossage (rtx insn)
3740 {
3741 if (asm_noperands (PATTERN (insn)) >= 0)
3742 {
3743 error_for_asm (insn, "impossible constraint in `asm'");
3744 delete_insn (insn);
3745 }
3746 else
3747 abort ();
3748 }
3749 /* Given a pointer to a piece of rtx and an optional pointer to the
3750 containing object, instantiate any virtual registers present in it.
3751
3752 If EXTRA_INSNS, we always do the replacement and generate
3753 any extra insns before OBJECT. If it is zero, we do nothing if replacement
3754 is not valid.
3755
3756 Return 1 if we either had nothing to do or if we were able to do the
3757 needed replacement. Return 0 otherwise; we only return zero if
3758 EXTRA_INSNS is zero.
3759
3760 We first try some simple transformations to avoid the creation of extra
3761 pseudos. */
3762
3763 static int
3764 instantiate_virtual_regs_1 (rtx *loc, rtx object, int extra_insns)
3765 {
3766 rtx x;
3767 RTX_CODE code;
3768 rtx new = 0;
3769 HOST_WIDE_INT offset = 0;
3770 rtx temp;
3771 rtx seq;
3772 int i, j;
3773 const char *fmt;
3774
3775 /* Re-start here to avoid recursion in common cases. */
3776 restart:
3777
3778 x = *loc;
3779 if (x == 0)
3780 return 1;
3781
3782 /* We may have detected and deleted invalid asm statements. */
3783 if (object && INSN_P (object) && INSN_DELETED_P (object))
3784 return 1;
3785
3786 code = GET_CODE (x);
3787
3788 /* Check for some special cases. */
3789 switch (code)
3790 {
3791 case CONST_INT:
3792 case CONST_DOUBLE:
3793 case CONST_VECTOR:
3794 case CONST:
3795 case SYMBOL_REF:
3796 case CODE_LABEL:
3797 case PC:
3798 case CC0:
3799 case ASM_INPUT:
3800 case ADDR_VEC:
3801 case ADDR_DIFF_VEC:
3802 case RETURN:
3803 return 1;
3804
3805 case SET:
3806 /* We are allowed to set the virtual registers. This means that
3807 the actual register should receive the source minus the
3808 appropriate offset. This is used, for example, in the handling
3809 of non-local gotos. */
3810 if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
3811 {
3812 rtx src = SET_SRC (x);
3813
3814 /* We are setting the register, not using it, so the relevant
3815 offset is the negative of the offset to use were we using
3816 the register. */
3817 offset = - offset;
3818 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3819
3820 /* The only valid sources here are PLUS or REG. Just do
3821 the simplest possible thing to handle them. */
3822 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3823 {
3824 instantiate_virtual_regs_lossage (object);
3825 return 1;
3826 }
3827
3828 start_sequence ();
3829 if (GET_CODE (src) != REG)
3830 temp = force_operand (src, NULL_RTX);
3831 else
3832 temp = src;
3833 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3834 seq = get_insns ();
3835 end_sequence ();
3836
3837 emit_insn_before (seq, object);
3838 SET_DEST (x) = new;
3839
3840 if (! validate_change (object, &SET_SRC (x), temp, 0)
3841 || ! extra_insns)
3842 instantiate_virtual_regs_lossage (object);
3843
3844 return 1;
3845 }
3846
3847 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3848 loc = &SET_SRC (x);
3849 goto restart;
3850
3851 case PLUS:
3852 /* Handle special case of virtual register plus constant. */
3853 if (CONSTANT_P (XEXP (x, 1)))
3854 {
3855 rtx old, new_offset;
3856
3857 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3858 if (GET_CODE (XEXP (x, 0)) == PLUS)
3859 {
3860 if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
3861 {
3862 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3863 extra_insns);
3864 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3865 }
3866 else
3867 {
3868 loc = &XEXP (x, 0);
3869 goto restart;
3870 }
3871 }
3872
3873 #ifdef POINTERS_EXTEND_UNSIGNED
3874 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
3875 we can commute the PLUS and SUBREG because pointers into the
3876 frame are well-behaved. */
3877 else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
3878 && GET_CODE (XEXP (x, 1)) == CONST_INT
3879 && 0 != (new
3880 = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
3881 &offset))
3882 && validate_change (object, loc,
3883 plus_constant (gen_lowpart (ptr_mode,
3884 new),
3885 offset
3886 + INTVAL (XEXP (x, 1))),
3887 0))
3888 return 1;
3889 #endif
3890 else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
3891 {
3892 /* We know the second operand is a constant. Unless the
3893 first operand is a REG (which has already been checked),
3894 it needs to be checked. */
3895 if (GET_CODE (XEXP (x, 0)) != REG)
3896 {
3897 loc = &XEXP (x, 0);
3898 goto restart;
3899 }
3900 return 1;
3901 }
3902
3903 new_offset = plus_constant (XEXP (x, 1), offset);
3904
3905 /* If the new constant is zero, try to replace the sum with just
3906 the register. */
3907 if (new_offset == const0_rtx
3908 && validate_change (object, loc, new, 0))
3909 return 1;
3910
3911 /* Next try to replace the register and new offset.
3912 There are two changes to validate here and we can't assume that
3913 when the old offset equals the new one just changing the register
3914 will yield a valid insn. In the interests of a little efficiency,
3915 however, we only call validate_change once (we don't queue up the
3916 changes and then call apply_change_group). */
3917
3918 old = XEXP (x, 0);
3919 if (offset == 0
3920 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3921 : (XEXP (x, 0) = new,
3922 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3923 {
3924 if (! extra_insns)
3925 {
3926 XEXP (x, 0) = old;
3927 return 0;
3928 }
3929
3930 /* Otherwise copy the new constant into a register and replace
3931 constant with that register. */
3932 temp = gen_reg_rtx (Pmode);
3933 XEXP (x, 0) = new;
3934 if (validate_change (object, &XEXP (x, 1), temp, 0))
3935 emit_insn_before (gen_move_insn (temp, new_offset), object);
3936 else
3937 {
3938 /* If that didn't work, replace this expression with a
3939 register containing the sum. */
3940
3941 XEXP (x, 0) = old;
3942 new = gen_rtx_PLUS (Pmode, new, new_offset);
3943
3944 start_sequence ();
3945 temp = force_operand (new, NULL_RTX);
3946 seq = get_insns ();
3947 end_sequence ();
3948
3949 emit_insn_before (seq, object);
3950 if (! validate_change (object, loc, temp, 0)
3951 && ! validate_replace_rtx (x, temp, object))
3952 {
3953 instantiate_virtual_regs_lossage (object);
3954 return 1;
3955 }
3956 }
3957 }
3958
3959 return 1;
3960 }
3961
3962 /* Fall through to generic two-operand expression case. */
3963 case EXPR_LIST:
3964 case CALL:
3965 case COMPARE:
3966 case MINUS:
3967 case MULT:
3968 case DIV: case UDIV:
3969 case MOD: case UMOD:
3970 case AND: case IOR: case XOR:
3971 case ROTATERT: case ROTATE:
3972 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3973 case NE: case EQ:
3974 case GE: case GT: case GEU: case GTU:
3975 case LE: case LT: case LEU: case LTU:
3976 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3977 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3978 loc = &XEXP (x, 0);
3979 goto restart;
3980
3981 case MEM:
3982 /* Most cases of MEM that convert to valid addresses have already been
3983 handled by our scan of decls. The only special handling we
3984 need here is to make a copy of the rtx to ensure it isn't being
3985 shared if we have to change it to a pseudo.
3986
3987 If the rtx is a simple reference to an address via a virtual register,
3988 it can potentially be shared. In such cases, first try to make it
3989 a valid address, which can also be shared. Otherwise, copy it and
3990 proceed normally.
3991
3992 First check for common cases that need no processing. These are
3993 usually due to instantiation already being done on a previous instance
3994 of a shared rtx. */
3995
3996 temp = XEXP (x, 0);
3997 if (CONSTANT_ADDRESS_P (temp)
3998 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3999 || temp == arg_pointer_rtx
4000 #endif
4001 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4002 || temp == hard_frame_pointer_rtx
4003 #endif
4004 || temp == frame_pointer_rtx)
4005 return 1;
4006
4007 if (GET_CODE (temp) == PLUS
4008 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
4009 && (XEXP (temp, 0) == frame_pointer_rtx
4010 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4011 || XEXP (temp, 0) == hard_frame_pointer_rtx
4012 #endif
4013 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4014 || XEXP (temp, 0) == arg_pointer_rtx
4015 #endif
4016 ))
4017 return 1;
4018
4019 if (temp == virtual_stack_vars_rtx
4020 || temp == virtual_incoming_args_rtx
4021 || (GET_CODE (temp) == PLUS
4022 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
4023 && (XEXP (temp, 0) == virtual_stack_vars_rtx
4024 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
4025 {
4026 /* This MEM may be shared. If the substitution can be done without
4027 the need to generate new pseudos, we want to do it in place
4028 so all copies of the shared rtx benefit. The call below will
4029 only make substitutions if the resulting address is still
4030 valid.
4031
4032 Note that we cannot pass X as the object in the recursive call
4033 since the insn being processed may not allow all valid
4034 addresses. However, if we were not passed an object, we can
4035 only modify X without copying it if X will have a valid
4036 address.
4037
4038 ??? Also note that this can still lose if OBJECT is an insn that
4039 has fewer restrictions on an address than some other insn.
4040 In that case, we will modify the shared address. This case
4041 doesn't seem very likely, though. One case where this could
4042 happen is in the case of a USE or CLOBBER reference, but we
4043 take care of that below. */
4044
4045 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
4046 object ? object : x, 0))
4047 return 1;
4048
4049 /* Otherwise make a copy and process that copy. We copy the entire
4050 RTL expression since it might be a PLUS which could also be
4051 shared. */
4052 *loc = x = copy_rtx (x);
4053 }
4054
4055 /* Fall through to generic unary operation case. */
4056 case PREFETCH:
4057 case SUBREG:
4058 case STRICT_LOW_PART:
4059 case NEG: case NOT:
4060 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
4061 case SIGN_EXTEND: case ZERO_EXTEND:
4062 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
4063 case FLOAT: case FIX:
4064 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
4065 case ABS:
4066 case SQRT:
4067 case FFS:
4068 case CLZ: case CTZ:
4069 case POPCOUNT: case PARITY:
4070 /* These cases either have just one operand or we know that we need not
4071 check the rest of the operands. */
4072 loc = &XEXP (x, 0);
4073 goto restart;
4074
4075 case USE:
4076 case CLOBBER:
4077 /* If the operand is a MEM, see if the change is a valid MEM. If not,
4078 go ahead and make the invalid one, but do it to a copy. For a REG,
4079 just make the recursive call, since there's no chance of a problem. */
4080
4081 if ((GET_CODE (XEXP (x, 0)) == MEM
4082 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
4083 0))
4084 || (GET_CODE (XEXP (x, 0)) == REG
4085 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4086 return 1;
4087
4088 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
4089 loc = &XEXP (x, 0);
4090 goto restart;
4091
4092 case REG:
4093 /* Try to replace with a PLUS. If that doesn't work, compute the sum
4094 in front of this insn and substitute the temporary. */
4095 if ((new = instantiate_new_reg (x, &offset)) != 0)
4096 {
4097 temp = plus_constant (new, offset);
4098 if (!validate_change (object, loc, temp, 0))
4099 {
4100 if (! extra_insns)
4101 return 0;
4102
4103 start_sequence ();
4104 temp = force_operand (temp, NULL_RTX);
4105 seq = get_insns ();
4106 end_sequence ();
4107
4108 emit_insn_before (seq, object);
4109 if (! validate_change (object, loc, temp, 0)
4110 && ! validate_replace_rtx (x, temp, object))
4111 instantiate_virtual_regs_lossage (object);
4112 }
4113 }
4114
4115 return 1;
4116
4117 case ADDRESSOF:
4118 if (GET_CODE (XEXP (x, 0)) == REG)
4119 return 1;
4120
4121 else if (GET_CODE (XEXP (x, 0)) == MEM)
4122 {
4123 /* If we have a (addressof (mem ..)), do any instantiation inside
4124 since we know we'll be making the inside valid when we finally
4125 remove the ADDRESSOF. */
4126 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4127 return 1;
4128 }
4129 break;
4130
4131 default:
4132 break;
4133 }
4134
4135 /* Scan all subexpressions. */
4136 fmt = GET_RTX_FORMAT (code);
4137 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4138 if (*fmt == 'e')
4139 {
4140 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4141 return 0;
4142 }
4143 else if (*fmt == 'E')
4144 for (j = 0; j < XVECLEN (x, i); j++)
4145 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
4146 extra_insns))
4147 return 0;
4148
4149 return 1;
4150 }
4151 \f
4152 /* Optimization: assuming this function does not receive nonlocal gotos,
4153 delete the handlers for such, as well as the insns to establish
4154 and disestablish them. */
4155
4156 static void
4157 delete_handlers (void)
4158 {
4159 rtx insn;
4160 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4161 {
4162 /* Delete the handler by turning off the flag that would
4163 prevent jump_optimize from deleting it.
4164 Also permit deletion of the nonlocal labels themselves
4165 if nothing local refers to them. */
4166 if (GET_CODE (insn) == CODE_LABEL)
4167 {
4168 tree t, last_t;
4169
4170 LABEL_PRESERVE_P (insn) = 0;
4171
4172 /* Remove it from the nonlocal_label list, to avoid confusing
4173 flow. */
4174 for (t = nonlocal_labels, last_t = 0; t;
4175 last_t = t, t = TREE_CHAIN (t))
4176 if (DECL_RTL (TREE_VALUE (t)) == insn)
4177 break;
4178 if (t)
4179 {
4180 if (! last_t)
4181 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4182 else
4183 TREE_CHAIN (last_t) = TREE_CHAIN (t);
4184 }
4185 }
4186 if (GET_CODE (insn) == INSN)
4187 {
4188 int can_delete = 0;
4189 rtx t;
4190 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4191 if (reg_mentioned_p (t, PATTERN (insn)))
4192 {
4193 can_delete = 1;
4194 break;
4195 }
4196 if (can_delete
4197 || (nonlocal_goto_stack_level != 0
4198 && reg_mentioned_p (nonlocal_goto_stack_level,
4199 PATTERN (insn))))
4200 delete_related_insns (insn);
4201 }
4202 }
4203 }
4204 \f
4205 /* Return the first insn following those generated by `assign_parms'. */
4206
4207 rtx
4208 get_first_nonparm_insn (void)
4209 {
4210 if (last_parm_insn)
4211 return NEXT_INSN (last_parm_insn);
4212 return get_insns ();
4213 }
4214
4215 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4216 This means a type for which function calls must pass an address to the
4217 function or get an address back from the function.
4218 EXP may be a type node or an expression (whose type is tested). */
4219
4220 int
4221 aggregate_value_p (tree exp, tree fntype)
4222 {
4223 int i, regno, nregs;
4224 rtx reg;
4225
4226 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
4227
4228 if (fntype)
4229 switch (TREE_CODE (fntype))
4230 {
4231 case CALL_EXPR:
4232 fntype = get_callee_fndecl (fntype);
4233 fntype = fntype ? TREE_TYPE (fntype) : 0;
4234 break;
4235 case FUNCTION_DECL:
4236 fntype = TREE_TYPE (fntype);
4237 break;
4238 case FUNCTION_TYPE:
4239 case METHOD_TYPE:
4240 break;
4241 case IDENTIFIER_NODE:
4242 fntype = 0;
4243 break;
4244 default:
4245 /* We don't expect other tree codes here. */
4246 abort ();
4247 }
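  /* At this point FNTYPE is a FUNCTION_TYPE or METHOD_TYPE, or 0 if the
     callee's type could not be determined.  */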
4248
4249 if (TREE_CODE (type) == VOID_TYPE)
4250 return 0;
4251 if (targetm.calls.return_in_memory (type, fntype))
4252 return 1;
4253 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4254 and thus can't be returned in registers. */
4255 if (TREE_ADDRESSABLE (type))
4256 return 1;
4257 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4258 return 1;
4259 /* Make sure we have suitable call-clobbered regs to return
4260 the value in; if not, we must return it in memory. */
4261 reg = hard_function_value (type, 0, 0);
4262
4263 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4264 it is OK. */
4265 if (GET_CODE (reg) != REG)
4266 return 0;
4267
4268 regno = REGNO (reg);
4269 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
4270 for (i = 0; i < nregs; i++)
4271 if (! call_used_regs[regno + i])
4272 return 1;
4273 return 0;
4274 }
4275 \f
4276 /* Assign RTL expressions to the function's parameters.
4277 This may involve copying them into registers and using
4278 those registers as the RTL for them. */
4279
4280 void
4281 assign_parms (tree fndecl)
4282 {
4283 tree parm;
4284 CUMULATIVE_ARGS args_so_far;
4285 /* Total space needed so far for args on the stack,
4286 given as a constant and a tree-expression. */
4287 struct args_size stack_args_size;
4288 HOST_WIDE_INT extra_pretend_bytes = 0;
4289 tree fntype = TREE_TYPE (fndecl);
4290 tree fnargs = DECL_ARGUMENTS (fndecl), orig_fnargs;
4291 /* This is used for the arg pointer when referring to stack args. */
4292 rtx internal_arg_pointer;
4293 /* This is a dummy PARM_DECL that we used for the function result if
4294 the function returns a structure. */
4295 tree function_result_decl = 0;
4296 int varargs_setup = 0;
4297 int reg_parm_stack_space ATTRIBUTE_UNUSED = 0;
4298 rtx conversion_insns = 0;
4299
4300 /* Nonzero if function takes extra anonymous args.
4301 This means the last named arg must be on the stack
4302 right before the anonymous ones. */
4303 int stdarg
4304 = (TYPE_ARG_TYPES (fntype) != 0
4305 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4306 != void_type_node));
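  /* E.g. `int f (int a, ...)' has an argument-type list that does not end
     in void_type_node, so STDARG is 1; a prototyped non-variadic function's
     list ends in void_type_node, and an unprototyped one has no
     TYPE_ARG_TYPES at all, so both yield 0.  */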
4307
4308 current_function_stdarg = stdarg;
4309
4310 /* If the reg that the virtual arg pointer will be translated into is
4311 not a fixed reg or is the stack pointer, make a copy of the virtual
4312 arg pointer, and address parms via the copy. The frame pointer is
4313 considered fixed even though it is not marked as such.
4314
4315 The second time through, simply use ap to avoid generating rtx. */
4316
4317 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4318 || ! (fixed_regs[ARG_POINTER_REGNUM]
4319 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4320 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4321 else
4322 internal_arg_pointer = virtual_incoming_args_rtx;
4323 current_function_internal_arg_pointer = internal_arg_pointer;
4324
4325 stack_args_size.constant = 0;
4326 stack_args_size.var = 0;
4327
4328 /* If struct value address is treated as the first argument, make it so. */
4329 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
4330 && ! current_function_returns_pcc_struct
4331 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
4332 {
4333 tree type = build_pointer_type (TREE_TYPE (fntype));
4334
4335 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4336
4337 DECL_ARG_TYPE (function_result_decl) = type;
4338 TREE_CHAIN (function_result_decl) = fnargs;
4339 fnargs = function_result_decl;
4340 }
4341
4342 orig_fnargs = fnargs;
4343
4344 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4345 parm_reg_stack_loc = ggc_alloc_cleared (max_parm_reg * sizeof (rtx));
4346
4347 if (SPLIT_COMPLEX_ARGS)
4348 fnargs = split_complex_args (fnargs);
4349
4350 #ifdef REG_PARM_STACK_SPACE
4351 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4352 #endif
4353
4354 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4355 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4356 #else
4357 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, fndecl, -1);
4358 #endif
4359
4360 /* We haven't yet found an argument that we must push and pretend the
4361 caller did. */
4362 current_function_pretend_args_size = 0;
4363
4364 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4365 {
4366 rtx entry_parm;
4367 rtx stack_parm;
4368 enum machine_mode promoted_mode, passed_mode;
4369 enum machine_mode nominal_mode, promoted_nominal_mode;
4370 int unsignedp;
4371 struct locate_and_pad_arg_data locate;
4372 int passed_pointer = 0;
4373 int did_conversion = 0;
4374 tree passed_type = DECL_ARG_TYPE (parm);
4375 tree nominal_type = TREE_TYPE (parm);
4376 int last_named = 0, named_arg;
4377 int in_regs;
4378 int partial = 0;
4379 int pretend_bytes = 0;
4380 int loaded_in_reg = 0;
4381
4382 /* Set LAST_NAMED if this is last named arg before last
4383 anonymous args. */
4384 if (stdarg)
4385 {
4386 tree tem;
4387
4388 for (tem = TREE_CHAIN (parm); tem; tem = TREE_CHAIN (tem))
4389 if (DECL_NAME (tem))
4390 break;
4391
4392 if (tem == 0)
4393 last_named = 1;
4394 }
4395 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4396 most machines, if this is a varargs/stdarg function, then we treat
4397 the last named arg as if it were anonymous too. */
4398 named_arg = (targetm.calls.strict_argument_naming (&args_so_far)
4399 ? 1 : !last_named);
4400
4401 if (TREE_TYPE (parm) == error_mark_node
4402 /* This can happen after weird syntax errors
4403 or if an enum type is defined among the parms. */
4404 || TREE_CODE (parm) != PARM_DECL
4405 || passed_type == NULL)
4406 {
4407 SET_DECL_RTL (parm, gen_rtx_MEM (BLKmode, const0_rtx));
4408 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4409 TREE_USED (parm) = 1;
4410 continue;
4411 }
4412
4413 /* Find mode of arg as it is passed, and mode of arg
4414 as it should be during execution of this function. */
4415 passed_mode = TYPE_MODE (passed_type);
4416 nominal_mode = TYPE_MODE (nominal_type);
4417
4418 /* If the parm's mode is VOID, its value doesn't matter,
4419 and avoid the usual things like emit_move_insn that could crash. */
4420 if (nominal_mode == VOIDmode)
4421 {
4422 SET_DECL_RTL (parm, const0_rtx);
4423 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4424 continue;
4425 }
4426
4427 /* If the parm is to be passed as a transparent union, use the
4428 type of the first field for the tests below. We have already
4429 verified that the modes are the same. */
4430 if (DECL_TRANSPARENT_UNION (parm)
4431 || (TREE_CODE (passed_type) == UNION_TYPE
4432 && TYPE_TRANSPARENT_UNION (passed_type)))
4433 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4434
4435 /* See if this arg was passed by invisible reference. It is if
4436 it is an object whose size depends on the contents of the
4437 object itself or if the machine requires these objects be passed
4438 that way. */
4439
4440 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (passed_type))
4441 || TREE_ADDRESSABLE (passed_type)
4442 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4443 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4444 passed_type, named_arg)
4445 #endif
4446 )
4447 {
4448 passed_type = nominal_type = build_pointer_type (passed_type);
4449 passed_pointer = 1;
4450 passed_mode = nominal_mode = Pmode;
4451 }
4452 /* See if the frontend wants to pass this by invisible reference. */
4453 else if (passed_type != nominal_type
4454 && POINTER_TYPE_P (passed_type)
4455 && TREE_TYPE (passed_type) == nominal_type)
4456 {
4457 nominal_type = passed_type;
4458 passed_pointer = 1;
4459 passed_mode = nominal_mode = Pmode;
4460 }
4461
4462 promoted_mode = passed_mode;
4463
4464 if (targetm.calls.promote_function_args (TREE_TYPE (fndecl)))
4465 {
4466 /* Compute the mode in which the arg is actually extended to. */
4467 unsignedp = TREE_UNSIGNED (passed_type);
4468 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4469 }
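      /* E.g. on a target that promotes function arguments, a `short'
         parameter typically arrives widened to a full word, so
         PROMOTED_MODE becomes the wider mode here while NOMINAL_MODE
         stays the declared (narrower) mode.  */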
4470
4471 /* Let machine desc say which reg (if any) the parm arrives in.
4472 0 means it arrives on the stack. */
4473 #ifdef FUNCTION_INCOMING_ARG
4474 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4475 passed_type, named_arg);
4476 #else
4477 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4478 passed_type, named_arg);
4479 #endif
4480
4481 if (entry_parm == 0)
4482 promoted_mode = passed_mode;
4483
4484 /* If this is the last named parameter, do any required setup for
4485 varargs or stdargs. We need to know about the case of this being an
4486 addressable type, in which case we skip the registers it
4487 would have arrived in.
4488
4489 For stdargs, LAST_NAMED will be set for two parameters, the one that
4490 is actually the last named, and the dummy parameter. We only
4491 want to do this action once.
4492
4493 Also, indicate when RTL generation is to be suppressed. */
4494 if (last_named && !varargs_setup)
4495 {
4496 int varargs_pretend_bytes = 0;
4497 targetm.calls.setup_incoming_varargs (&args_so_far, promoted_mode,
4498 passed_type,
4499 &varargs_pretend_bytes, 0);
4500 varargs_setup = 1;
4501
4502 /* If the back-end has requested extra stack space, record how
4503 much is needed. Do not change pretend_args_size otherwise
4504 since it may be nonzero from an earlier partial argument. */
4505 if (varargs_pretend_bytes > 0)
4506 current_function_pretend_args_size = varargs_pretend_bytes;
4507 }
4508
4509 /* Determine parm's home in the stack,
4510 in case it arrives in the stack or we should pretend it did.
4511
4512 Compute the stack position and rtx where the argument arrives
4513 and its size.
4514
4515 There is one complexity here: If this was a parameter that would
4516 have been passed in registers, but wasn't only because it is
4517 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4518 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4519 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4520 0 as it was the previous time. */
4521 in_regs = entry_parm != 0;
4522 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4523 in_regs = 1;
4524 #endif
4525 if (!in_regs && !named_arg)
4526 {
4527 int pretend_named =
4528 targetm.calls.pretend_outgoing_varargs_named (&args_so_far);
4529 if (pretend_named)
4530 {
4531 #ifdef FUNCTION_INCOMING_ARG
4532 in_regs = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4533 passed_type,
4534 pretend_named) != 0;
4535 #else
4536 in_regs = FUNCTION_ARG (args_so_far, promoted_mode,
4537 passed_type,
4538 pretend_named) != 0;
4539 #endif
4540 }
4541 }
4542
4543 /* If this parameter was passed both in registers and in the stack,
4544 use the copy on the stack. */
4545 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4546 entry_parm = 0;
4547
4548 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4549 if (entry_parm)
4550 {
4551 partial = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4552 passed_type, named_arg);
4553 if (partial
4554 /* The caller might already have allocated stack space
4555 for the register parameters. */
4556 && reg_parm_stack_space == 0)
4557 {
4558 /* Part of this argument is passed in registers and part
4559 is passed on the stack. Ask the prologue code to extend
4560 the stack part so that we can recreate the full value.
4561
4562 PRETEND_BYTES is the size of the registers we need to store.
4563 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
4564 stack space that the prologue should allocate.
4565
4566 Internally, gcc assumes that the argument pointer is
4567 aligned to STACK_BOUNDARY bits. This is used both for
4568 alignment optimizations (see init_emit) and to locate
4569 arguments that are aligned to more than PARM_BOUNDARY
4570 bits. We must preserve this invariant by rounding
4571 CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to a stack
4572 boundary. */
4573
4574 /* We assume at most one partial arg, and it must be the first
4575 argument on the stack. */
4576 if (extra_pretend_bytes || current_function_pretend_args_size)
4577 abort ();
4578
4579 pretend_bytes = partial * UNITS_PER_WORD;
4580 current_function_pretend_args_size
4581 = CEIL_ROUND (pretend_bytes, STACK_BYTES);
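              /* For instance, with PARTIAL == 1 and 4-byte words,
                 PRETEND_BYTES is 4; if STACK_BYTES is 16 it is rounded up
                 to 16 so the argument pointer stays aligned to
                 STACK_BOUNDARY.  (Numbers are illustrative.)  */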
4582
4583 /* We want to align relative to the actual stack pointer, so
4584 don't include this in the stack size until later. */
4585 extra_pretend_bytes = current_function_pretend_args_size;
4586 }
4587 }
4588 #endif
4589
4590 memset (&locate, 0, sizeof (locate));
4591 locate_and_pad_parm (promoted_mode, passed_type, in_regs,
4592 entry_parm ? partial : 0, fndecl,
4593 &stack_args_size, &locate);
4594 /* Adjust offsets to include pretend args, unless this is the
4595 split arg. */
4596 if (pretend_bytes == 0)
4597 {
4598 locate.slot_offset.constant += extra_pretend_bytes;
4599 locate.offset.constant += extra_pretend_bytes;
4600 }
4601
4602 {
4603 rtx offset_rtx;
4604
4605 /* If we're passing this arg using a reg, make its stack home
4606 the aligned stack slot. */
4607 if (entry_parm)
4608 offset_rtx = ARGS_SIZE_RTX (locate.slot_offset);
4609 else
4610 offset_rtx = ARGS_SIZE_RTX (locate.offset);
4611
4612 if (offset_rtx == const0_rtx)
4613 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4614 else
4615 stack_parm = gen_rtx_MEM (promoted_mode,
4616 gen_rtx_PLUS (Pmode,
4617 internal_arg_pointer,
4618 offset_rtx));
4619
4620 set_mem_attributes (stack_parm, parm, 1);
4621 if (entry_parm && MEM_ATTRS (stack_parm)->align < PARM_BOUNDARY)
4622 set_mem_align (stack_parm, PARM_BOUNDARY);
4623
4624 /* Set also REG_ATTRS if parameter was passed in a register. */
4625 if (entry_parm)
4626 set_reg_attrs_for_parm (entry_parm, stack_parm);
4627 }
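      /* STACK_PARM is now a MEM such as
         (mem:SI (plus (reg virtual-incoming-args) (const_int 4))), i.e. the
         parm's stack home addressed relative to INTERNAL_ARG_POINTER (the
         offset shown is illustrative).  */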
4628
4629 /* If this parm was passed part in regs and part in memory,
4630 pretend it arrived entirely in memory
4631 by pushing the register-part onto the stack.
4632
4633 In the special case of a DImode or DFmode that is split,
4634 we could put it together in a pseudoreg directly,
4635 but for now that's not worth bothering with. */
4636
4637 if (partial)
4638 {
4639 /* Handle calls that pass values in multiple non-contiguous
4640 locations. The Irix 6 ABI has examples of this. */
4641 if (GET_CODE (entry_parm) == PARALLEL)
4642 emit_group_store (validize_mem (stack_parm), entry_parm,
4643 TREE_TYPE (parm),
4644 int_size_in_bytes (TREE_TYPE (parm)));
4645
4646 else
4647 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
4648 partial);
4649
4650 entry_parm = stack_parm;
4651 }
4652
4653 /* If we didn't decide this parm came in a register,
4654 by default it came on the stack. */
4655 if (entry_parm == 0)
4656 entry_parm = stack_parm;
4657
4658 /* Record permanently how this parm was passed. */
4659 set_decl_incoming_rtl (parm, entry_parm);
4660
4661 /* If there is actually space on the stack for this parm,
4662 count it in stack_args_size; otherwise set stack_parm to 0
4663 to indicate there is no preallocated stack slot for the parm. */
4664
4665 if (entry_parm == stack_parm
4666 || (GET_CODE (entry_parm) == PARALLEL
4667 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4668 #if defined (REG_PARM_STACK_SPACE)
4669 /* On some machines, even if a parm value arrives in a register
4670 there is still an (uninitialized) stack slot allocated
4671 for it. */
4672 || REG_PARM_STACK_SPACE (fndecl) > 0
4673 #endif
4674 )
4675 {
4676 stack_args_size.constant += locate.size.constant;
4677 if (locate.size.var)
4678 ADD_PARM_SIZE (stack_args_size, locate.size.var);
4679 }
4680 else
4681 /* No stack slot was pushed for this parm. */
4682 stack_parm = 0;
4683
4684 /* Update info on where next arg arrives in registers. */
4685
4686 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4687 passed_type, named_arg);
4688
4689 /* If we can't trust the parm stack slot to be aligned enough
4690 for its ultimate type, don't use that slot after entry.
4691 We'll make another stack slot, if we need one. */
4692 {
4693 unsigned int thisparm_boundary
4694 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4695
4696 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4697 stack_parm = 0;
4698 }
4699
4700 /* If parm was passed in memory, and we need to convert it on entry,
4701 don't store it back in that same slot. */
4702 if (entry_parm == stack_parm
4703 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4704 stack_parm = 0;
4705
4706 /* When an argument is passed in multiple locations, we can't
4707 make use of this information, but we can save some copying if
4708 the whole argument is passed in a single register. */
4709 if (GET_CODE (entry_parm) == PARALLEL
4710 && nominal_mode != BLKmode && passed_mode != BLKmode)
4711 {
4712 int i, len = XVECLEN (entry_parm, 0);
4713
4714 for (i = 0; i < len; i++)
4715 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
4716 && GET_CODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) == REG
4717 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
4718 == passed_mode)
4719 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
4720 {
4721 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
4722 set_decl_incoming_rtl (parm, entry_parm);
4723 break;
4724 }
4725 }
4726
4727 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4728 in the mode in which it arrives.
4729 STACK_PARM is an RTX for a stack slot where the parameter can live
4730 during the function (in case we want to put it there).
4731 STACK_PARM is 0 if no stack slot was pushed for it.
4732
4733 Now output code if necessary to convert ENTRY_PARM to
4734 the type in which this function declares it,
4735 and store that result in an appropriate place,
4736 which may be a pseudo reg, may be STACK_PARM,
4737 or may be a local stack slot if STACK_PARM is 0.
4738
4739 Set DECL_RTL to that place. */
4740
4741 if (GET_CODE (entry_parm) == PARALLEL && nominal_mode != BLKmode
4742 && XVECLEN (entry_parm, 0) > 1)
4743 {
4744 /* Reconstitute objects the size of a register or larger using
4745 register operations instead of the stack. */
4746 rtx parmreg = gen_reg_rtx (nominal_mode);
4747
4748 if (REG_P (parmreg))
4749 {
4750 unsigned int regno = REGNO (parmreg);
4751
4752 emit_group_store (parmreg, entry_parm, TREE_TYPE (parm),
4753 int_size_in_bytes (TREE_TYPE (parm)));
4754 SET_DECL_RTL (parm, parmreg);
4755 loaded_in_reg = 1;
4756
4757 if (regno >= max_parm_reg)
4758 {
4759 rtx *new;
4760 int old_max_parm_reg = max_parm_reg;
4761
4762 /* It's slow to expand this one register at a time,
4763 but it's also rare and we need max_parm_reg to be
4764 precisely correct. */
4765 max_parm_reg = regno + 1;
4766 new = ggc_realloc (parm_reg_stack_loc,
4767 max_parm_reg * sizeof (rtx));
4768 memset (new + old_max_parm_reg, 0,
4769 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4770 parm_reg_stack_loc = new;
4771 parm_reg_stack_loc[regno] = stack_parm;
4772 }
4773 }
4774 }
4775
4776 if (nominal_mode == BLKmode
4777 #ifdef BLOCK_REG_PADDING
4778 || (locate.where_pad == (BYTES_BIG_ENDIAN ? upward : downward)
4779 && GET_MODE_SIZE (promoted_mode) < UNITS_PER_WORD)
4780 #endif
4781 || GET_CODE (entry_parm) == PARALLEL)
4782 {
4783 /* If a BLKmode arrives in registers, copy it to a stack slot.
4784 Handle calls that pass values in multiple non-contiguous
4785 locations. The Irix 6 ABI has examples of this. */
4786 if (GET_CODE (entry_parm) == REG
4787 || (GET_CODE (entry_parm) == PARALLEL
4788 && (!loaded_in_reg || !optimize)))
4789 {
4790 int size = int_size_in_bytes (TREE_TYPE (parm));
4791 int size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
4792 rtx mem;
4793
4794 /* Note that we will be storing an integral number of words.
4795 So we have to be careful to ensure that we allocate an
4796 integral number of words. We do this below in the
4797 assign_stack_local if space was not allocated in the argument
4798 list. If it was, this will not work if PARM_BOUNDARY is not
4799 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4800 if it becomes a problem. The exception is when BLKmode arrives
4801 with arguments not conforming to word_mode. */
4802
4803 if (stack_parm == 0)
4804 {
4805 stack_parm = assign_stack_local (BLKmode, size_stored, 0);
4806 PUT_MODE (stack_parm, GET_MODE (entry_parm));
4807 set_mem_attributes (stack_parm, parm, 1);
4808 }
4809 else if (GET_CODE (entry_parm) == PARALLEL
4810 && GET_MODE (entry_parm) == BLKmode)
4811 ;
4812 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4813 abort ();
4814
4815 mem = validize_mem (stack_parm);
4816
4817 /* Handle calls that pass values in multiple non-contiguous
4818 locations. The Irix 6 ABI has examples of this. */
4819 if (GET_CODE (entry_parm) == PARALLEL)
4820 emit_group_store (mem, entry_parm, TREE_TYPE (parm), size);
4821
4822 else if (size == 0)
4823 ;
4824
4825 /* If SIZE is that of a mode no bigger than a word, just use
4826 that mode's store operation. */
4827 else if (size <= UNITS_PER_WORD)
4828 {
4829 enum machine_mode mode
4830 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
4831
4832 if (mode != BLKmode
4833 #ifdef BLOCK_REG_PADDING
4834 && (size == UNITS_PER_WORD
4835 || (BLOCK_REG_PADDING (mode, TREE_TYPE (parm), 1)
4836 != (BYTES_BIG_ENDIAN ? upward : downward)))
4837 #endif
4838 )
4839 {
4840 rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
4841 emit_move_insn (change_address (mem, mode, 0), reg);
4842 }
4843
4844 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
4845 machine must be aligned to the left before storing
4846 to memory. Note that the previous test doesn't
4847 handle all cases (e.g. SIZE == 3). */
4848 else if (size != UNITS_PER_WORD
4849 #ifdef BLOCK_REG_PADDING
4850 && (BLOCK_REG_PADDING (mode, TREE_TYPE (parm), 1)
4851 == downward)
4852 #else
4853 && BYTES_BIG_ENDIAN
4854 #endif
4855 )
4856 {
4857 rtx tem, x;
4858 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
4859 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
4860
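                  /* E.g. for SIZE == 3 with 4-byte words, BY is 8:
                     shifting left moves the value into the high-order
                     bytes so the store below places it at the start of
                     the slot.  */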
4861 x = expand_binop (word_mode, ashl_optab, reg,
4862 GEN_INT (by), 0, 1, OPTAB_WIDEN);
4863 tem = change_address (mem, word_mode, 0);
4864 emit_move_insn (tem, x);
4865 }
4866 else
4867 move_block_from_reg (REGNO (entry_parm), mem,
4868 size_stored / UNITS_PER_WORD);
4869 }
4870 else
4871 move_block_from_reg (REGNO (entry_parm), mem,
4872 size_stored / UNITS_PER_WORD);
4873 }
4874 /* If parm is already bound to register pair, don't change
4875 this binding. */
4876 if (! DECL_RTL_SET_P (parm))
4877 SET_DECL_RTL (parm, stack_parm);
4878 }
4879 else if (! ((! optimize
4880 && ! DECL_REGISTER (parm))
4881 || TREE_SIDE_EFFECTS (parm)
4882 /* If -ffloat-store specified, don't put explicit
4883 float variables into registers. */
4884 || (flag_float_store
4885 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4886 /* Always assign pseudo to structure return or item passed
4887 by invisible reference. */
4888 || passed_pointer || parm == function_result_decl)
4889 {
4890 /* Store the parm in a pseudoregister during the function, but we
4891 may need to do it in a wider mode. */
4892
4893 rtx parmreg;
4894 unsigned int regno, regnoi = 0, regnor = 0;
4895
4896 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4897
4898 promoted_nominal_mode
4899 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4900
4901 parmreg = gen_reg_rtx (promoted_nominal_mode);
4902 mark_user_reg (parmreg);
4903
4904 /* If this was an item that we received a pointer to, set DECL_RTL
4905 appropriately. */
4906 if (passed_pointer)
4907 {
4908 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)),
4909 parmreg);
4910 set_mem_attributes (x, parm, 1);
4911 SET_DECL_RTL (parm, x);
4912 }
4913 else
4914 {
4915 SET_DECL_RTL (parm, parmreg);
4916 maybe_set_unchanging (DECL_RTL (parm), parm);
4917 }
4918
4919 /* Copy the value into the register. */
4920 if (nominal_mode != passed_mode
4921 || promoted_nominal_mode != promoted_mode)
4922 {
4923 int save_tree_used;
4924 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4925 mode, by the caller. We now have to convert it to
4926 NOMINAL_MODE, if different. However, PARMREG may be in
4927 a different mode than NOMINAL_MODE if it is being stored
4928 promoted.
4929
4930 If ENTRY_PARM is a hard register, it might be in a register
4931 not valid for operating in its mode (e.g., an odd-numbered
4932 register for a DFmode). In that case, moves are the only
4933 valid operation, so we can't do a conversion from there. This
4934 occurs when the calling sequence allows such misaligned
4935 usages.
4936
4937 In addition, the conversion may involve a call, which could
4938 clobber parameters which haven't been copied to pseudo
4939 registers yet. Therefore, we must first copy the parm to
4940 a pseudo reg here, and save the conversion until after all
4941 parameters have been moved. */
4942
4943 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4944
4945 emit_move_insn (tempreg, validize_mem (entry_parm));
4946
4947 push_to_sequence (conversion_insns);
4948 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4949
4950 if (GET_CODE (tempreg) == SUBREG
4951 && GET_MODE (tempreg) == nominal_mode
4952 && GET_CODE (SUBREG_REG (tempreg)) == REG
4953 && nominal_mode == passed_mode
4954 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (entry_parm)
4955 && GET_MODE_SIZE (GET_MODE (tempreg))
4956 < GET_MODE_SIZE (GET_MODE (entry_parm)))
4957 {
4958 /* The argument is already sign/zero extended, so note it
4959 into the subreg. */
4960 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
4961 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
4962 }
4963
4964 /* TREE_USED gets set erroneously during expand_assignment. */
4965 save_tree_used = TREE_USED (parm);
4966 expand_assignment (parm,
4967 make_tree (nominal_type, tempreg), 0);
4968 TREE_USED (parm) = save_tree_used;
4969 conversion_insns = get_insns ();
4970 did_conversion = 1;
4971 end_sequence ();
4972 }
4973 else
4974 emit_move_insn (parmreg, validize_mem (entry_parm));
4975
4976 /* If we were passed a pointer but the actual value
4977 can safely live in a register, put it in one. */
4978 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4979 /* If by-reference argument was promoted, demote it. */
4980 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
4981 || ! ((! optimize
4982 && ! DECL_REGISTER (parm))
4983 || TREE_SIDE_EFFECTS (parm)
4984 /* If -ffloat-store specified, don't put explicit
4985 float variables into registers. */
4986 || (flag_float_store
4987 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))))
4988 {
4989 /* We can't use nominal_mode, because it will have been set to
4990 Pmode above. We must use the actual mode of the parm. */
4991 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4992 mark_user_reg (parmreg);
4993 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
4994 {
4995 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
4996 int unsigned_p = TREE_UNSIGNED (TREE_TYPE (parm));
4997 push_to_sequence (conversion_insns);
4998 emit_move_insn (tempreg, DECL_RTL (parm));
4999 SET_DECL_RTL (parm,
5000 convert_to_mode (GET_MODE (parmreg),
5001 tempreg,
5002 unsigned_p));
5003 emit_move_insn (parmreg, DECL_RTL (parm));
5004 conversion_insns = get_insns ();
5005 did_conversion = 1;
5006 end_sequence ();
5007 }
5008 else
5009 emit_move_insn (parmreg, DECL_RTL (parm));
5010 SET_DECL_RTL (parm, parmreg);
5011 /* STACK_PARM is the pointer, not the parm, and PARMREG is
5012 now the parm. */
5013 stack_parm = 0;
5014 }
5015 #ifdef FUNCTION_ARG_CALLEE_COPIES
5016 /* If we are passed an arg by reference and it is our responsibility
5017 to make a copy, do it now.
5018 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
5019 original argument, so we must recreate them in the call to
5020 FUNCTION_ARG_CALLEE_COPIES. */
5021 /* ??? Later add code to handle the case that if the argument isn't
5022 modified, don't do the copy. */
5023
5024 else if (passed_pointer
5025 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
5026 TYPE_MODE (TREE_TYPE (passed_type)),
5027 TREE_TYPE (passed_type),
5028 named_arg)
5029 && ! TREE_ADDRESSABLE (TREE_TYPE (passed_type)))
5030 {
5031 rtx copy;
5032 tree type = TREE_TYPE (passed_type);
5033
5034 /* This sequence may involve a library call perhaps clobbering
5035 registers that haven't been copied to pseudos yet. */
5036
5037 push_to_sequence (conversion_insns);
5038
5039 if (!COMPLETE_TYPE_P (type)
5040 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5041 /* This is a variable sized object. */
5042 copy = gen_rtx_MEM (BLKmode,
5043 allocate_dynamic_stack_space
5044 (expr_size (parm), NULL_RTX,
5045 TYPE_ALIGN (type)));
5046 else
5047 copy = assign_stack_temp (TYPE_MODE (type),
5048 int_size_in_bytes (type), 1);
5049 set_mem_attributes (copy, parm, 1);
5050
5051 store_expr (parm, copy, 0);
5052 emit_move_insn (parmreg, XEXP (copy, 0));
5053 conversion_insns = get_insns ();
5054 did_conversion = 1;
5055 end_sequence ();
5056 }
5057 #endif /* FUNCTION_ARG_CALLEE_COPIES */
5058
5059 /* In any case, record the parm's desired stack location
5060 in case we later discover it must live in the stack.
5061
5062 If it is a COMPLEX value, store the stack location for both
5063 halves. */
5064
5065 if (GET_CODE (parmreg) == CONCAT)
5066 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
5067 else
5068 regno = REGNO (parmreg);
5069
5070 if (regno >= max_parm_reg)
5071 {
5072 rtx *new;
5073 int old_max_parm_reg = max_parm_reg;
5074
5075 /* It's slow to expand this one register at a time,
5076 but it's also rare and we need max_parm_reg to be
5077 precisely correct. */
5078 max_parm_reg = regno + 1;
5079 new = ggc_realloc (parm_reg_stack_loc,
5080 max_parm_reg * sizeof (rtx));
5081 memset (new + old_max_parm_reg, 0,
5082 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
5083 parm_reg_stack_loc = new;
5084 }
5085
5086 if (GET_CODE (parmreg) == CONCAT)
5087 {
5088 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
5089
5090 regnor = REGNO (gen_realpart (submode, parmreg));
5091 regnoi = REGNO (gen_imagpart (submode, parmreg));
5092
5093 if (stack_parm != 0)
5094 {
5095 parm_reg_stack_loc[regnor]
5096 = gen_realpart (submode, stack_parm);
5097 parm_reg_stack_loc[regnoi]
5098 = gen_imagpart (submode, stack_parm);
5099 }
5100 else
5101 {
5102 parm_reg_stack_loc[regnor] = 0;
5103 parm_reg_stack_loc[regnoi] = 0;
5104 }
5105 }
5106 else
5107 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
5108
5109 /* Mark the register as eliminable if we did no conversion
5110 and it was copied from memory at a fixed offset,
5111 and the arg pointer was not copied to a pseudo-reg.
5112 If the arg pointer is a pseudo reg or the offset formed
5113 an invalid address, such memory-equivalences
5114 as we make here would screw up life analysis for it. */
5115 if (nominal_mode == passed_mode
5116 && ! did_conversion
5117 && stack_parm != 0
5118 && GET_CODE (stack_parm) == MEM
5119 && locate.offset.var == 0
5120 && reg_mentioned_p (virtual_incoming_args_rtx,
5121 XEXP (stack_parm, 0)))
5122 {
5123 rtx linsn = get_last_insn ();
5124 rtx sinsn, set;
5125
5126 /* Mark complex types separately. */
5127 if (GET_CODE (parmreg) == CONCAT)
5128 /* Scan backwards for the set of the real and
5129 imaginary parts. */
5130 for (sinsn = linsn; sinsn != 0;
5131 sinsn = prev_nonnote_insn (sinsn))
5132 {
5133 set = single_set (sinsn);
5134 if (set != 0
5135 && SET_DEST (set) == regno_reg_rtx [regnoi])
5136 REG_NOTES (sinsn)
5137 = gen_rtx_EXPR_LIST (REG_EQUIV,
5138 parm_reg_stack_loc[regnoi],
5139 REG_NOTES (sinsn));
5140 else if (set != 0
5141 && SET_DEST (set) == regno_reg_rtx [regnor])
5142 REG_NOTES (sinsn)
5143 = gen_rtx_EXPR_LIST (REG_EQUIV,
5144 parm_reg_stack_loc[regnor],
5145 REG_NOTES (sinsn));
5146 }
5147 else if ((set = single_set (linsn)) != 0
5148 && SET_DEST (set) == parmreg)
5149 REG_NOTES (linsn)
5150 = gen_rtx_EXPR_LIST (REG_EQUIV,
5151 stack_parm, REG_NOTES (linsn));
5152 }
5153
5154 /* For a pointer data type, suggest a pointer register. */
5155 if (POINTER_TYPE_P (TREE_TYPE (parm)))
5156 mark_reg_pointer (parmreg,
5157 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5158
5159 /* If something wants our address, try to use ADDRESSOF. */
5160 if (TREE_ADDRESSABLE (parm))
5161 {
5162 /* If we end up putting something into the stack,
5163 fixup_var_refs_insns will need to make a pass over
5164 all the instructions. It looks through the pending
5165 sequences -- but it can't see the ones in the
5166 CONVERSION_INSNS, if they're not on the sequence
5167 stack. So, we go back to that sequence, just so that
5168 the fixups will happen. */
5169 push_to_sequence (conversion_insns);
5170 put_var_into_stack (parm, /*rescan=*/true);
5171 conversion_insns = get_insns ();
5172 end_sequence ();
5173 }
5174 }
5175 else
5176 {
5177 /* Value must be stored in the stack slot STACK_PARM
5178 during function execution. */
5179
5180 if (promoted_mode != nominal_mode)
5181 {
5182 /* Conversion is required. */
5183 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
5184
5185 emit_move_insn (tempreg, validize_mem (entry_parm));
5186
5187 push_to_sequence (conversion_insns);
5188 entry_parm = convert_to_mode (nominal_mode, tempreg,
5189 TREE_UNSIGNED (TREE_TYPE (parm)));
5190 if (stack_parm)
5191 /* ??? This may need a big-endian conversion on sparc64. */
5192 stack_parm = adjust_address (stack_parm, nominal_mode, 0);
5193
5194 conversion_insns = get_insns ();
5195 did_conversion = 1;
5196 end_sequence ();
5197 }
5198
5199 if (entry_parm != stack_parm)
5200 {
5201 if (stack_parm == 0)
5202 {
5203 stack_parm
5204 = assign_stack_local (GET_MODE (entry_parm),
5205 GET_MODE_SIZE (GET_MODE (entry_parm)),
5206 0);
5207 set_mem_attributes (stack_parm, parm, 1);
5208 }
5209
5210 if (promoted_mode != nominal_mode)
5211 {
5212 push_to_sequence (conversion_insns);
5213 emit_move_insn (validize_mem (stack_parm),
5214 validize_mem (entry_parm));
5215 conversion_insns = get_insns ();
5216 end_sequence ();
5217 }
5218 else
5219 emit_move_insn (validize_mem (stack_parm),
5220 validize_mem (entry_parm));
5221 }
5222
5223 SET_DECL_RTL (parm, stack_parm);
5224 }
5225 }
5226
5227 if (SPLIT_COMPLEX_ARGS && fnargs != orig_fnargs)
5228 {
5229 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
5230 {
5231 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE)
5232 {
5233 rtx tmp, real, imag;
5234 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
5235
5236 real = DECL_RTL (fnargs);
5237 imag = DECL_RTL (TREE_CHAIN (fnargs));
5238 if (inner != GET_MODE (real))
5239 {
5240 real = gen_lowpart_SUBREG (inner, real);
5241 imag = gen_lowpart_SUBREG (inner, imag);
5242 }
5243 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
5244 SET_DECL_RTL (parm, tmp);
5245
5246 real = DECL_INCOMING_RTL (fnargs);
5247 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
5248 if (inner != GET_MODE (real))
5249 {
5250 real = gen_lowpart_SUBREG (inner, real);
5251 imag = gen_lowpart_SUBREG (inner, imag);
5252 }
5253 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
5254 set_decl_incoming_rtl (parm, tmp);
5255 fnargs = TREE_CHAIN (fnargs);
5256 }
5257 else
5258 {
5259 SET_DECL_RTL (parm, DECL_RTL (fnargs));
5260 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));
5261 }
5262 fnargs = TREE_CHAIN (fnargs);
5263 }
5264 }
5265
5266 /* Output all parameter conversion instructions (possibly including calls)
5267 now that all parameters have been copied out of hard registers. */
5268 emit_insn (conversion_insns);
5269
5270 /* If we are receiving a struct value address as the first argument, set up
5271 the RTL for the function result. As this might require code to convert
5272 the transmitted address to Pmode, we do this here to ensure that possible
5273 preliminary conversions of the address have been emitted already. */
5274 if (function_result_decl)
5275 {
5276 tree result = DECL_RESULT (fndecl);
5277 rtx addr = DECL_RTL (function_result_decl);
5278 rtx x;
5279
5280 addr = convert_memory_address (Pmode, addr);
5281 x = gen_rtx_MEM (DECL_MODE (result), addr);
5282 set_mem_attributes (x, result, 1);
5283 SET_DECL_RTL (result, x);
5284 }
5285
5286 last_parm_insn = get_last_insn ();
5287
5288 /* We have aligned all the args, so add space for the pretend args. */
5289 stack_args_size.constant += extra_pretend_bytes;
5290 current_function_args_size = stack_args_size.constant;
5291
5292 /* Adjust function incoming argument size for alignment and
5293 minimum length. */
5294
5295 #ifdef REG_PARM_STACK_SPACE
5296 current_function_args_size = MAX (current_function_args_size,
5297 REG_PARM_STACK_SPACE (fndecl));
5298 #endif
5299
5300 current_function_args_size
5301 = ((current_function_args_size + STACK_BYTES - 1)
5302 / STACK_BYTES) * STACK_BYTES;
5303
5304 #ifdef ARGS_GROW_DOWNWARD
5305 current_function_arg_offset_rtx
5306 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5307 : expand_expr (size_diffop (stack_args_size.var,
5308 size_int (-stack_args_size.constant)),
5309 NULL_RTX, VOIDmode, 0));
5310 #else
5311 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5312 #endif
5313
5314 /* See how many bytes, if any, of its args a function should try to pop
5315 on return. */
5316
5317 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5318 current_function_args_size);
5319
5320 /* For a stdarg.h function, save info about
5321 regs and stack space used by the named args. */
5322
5323 current_function_args_info = args_so_far;
5324
5325 /* Set the rtx used for the function return value. Put this in its
5326 own variable so any optimizers that need this information don't have
5327 to include tree.h. Do this here so it gets done when an inlined
5328 function gets output. */
5329
5330 current_function_return_rtx
5331 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
5332 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
5333
5334 /* If scalar return value was computed in a pseudo-reg, or was a named
5335 return value that got dumped to the stack, copy that to the hard
5336 return register. */
5337 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
5338 {
5339 tree decl_result = DECL_RESULT (fndecl);
5340 rtx decl_rtl = DECL_RTL (decl_result);
5341
5342 if (REG_P (decl_rtl)
5343 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5344 : DECL_REGISTER (decl_result))
5345 {
5346 rtx real_decl_rtl;
5347
5348 #ifdef FUNCTION_OUTGOING_VALUE
5349 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
5350 fndecl);
5351 #else
5352 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
5353 fndecl);
5354 #endif
5355 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
5356 /* The delay slot scheduler assumes that current_function_return_rtx
5357 holds the hard register containing the return value, not a
5358 temporary pseudo. */
5359 current_function_return_rtx = real_decl_rtl;
5360 }
5361 }
5362 }
5363
5364 /* If ARGS contains entries with complex types, split the entry into two
5365 entries of the component type. Return a new list if substitutions are
5366 needed, else the old list. */
5367
5368 static tree
5369 split_complex_args (tree args)
5370 {
5371 tree p;
5372
5373 /* Before allocating memory, check for the common case of no complex args. */
5374 for (p = args; p; p = TREE_CHAIN (p))
5375 if (TREE_CODE (TREE_TYPE (p)) == COMPLEX_TYPE)
5376 goto found;
5377 return args;
5378
5379 found:
5380 args = copy_list (args);
5381
5382 for (p = args; p; p = TREE_CHAIN (p))
5383 {
5384 tree type = TREE_TYPE (p);
5385 if (TREE_CODE (type) == COMPLEX_TYPE)
5386 {
5387 tree decl;
5388 tree subtype = TREE_TYPE (type);
5389
5390 /* Rewrite the PARM_DECL's type with its component. */
5391 TREE_TYPE (p) = subtype;
5392 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
5393 DECL_MODE (p) = VOIDmode;
5394 DECL_SIZE (p) = NULL;
5395 DECL_SIZE_UNIT (p) = NULL;
5396 layout_decl (p, 0);
5397
5398 /* Build a second synthetic decl. */
5399 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
5400 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
5401 layout_decl (decl, 0);
5402
5403 /* Splice it in; skip the new decl. */
5404 TREE_CHAIN (decl) = TREE_CHAIN (p);
5405 TREE_CHAIN (p) = decl;
5406 p = decl;
5407 }
5408 }
5409
5410 return args;
5411 }
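
/* Editorial illustration, not part of the original source: with
   SPLIT_COMPLEX_ARGS in effect, a declaration such as

       double cabs2 (__complex__ double z);

   is handled, for argument-passing purposes, much as if it had been
   written with two scalar parameters

       double cabs2 (double z_real, double z_imag);

   split_complex_args rewrites the PARM_DECL for Z into two PARM_DECLs
   of the component type, and assign_parms later recombines their
   DECL_RTLs into a CONCAT (see the SPLIT_COMPLEX_ARGS handling above).
   The parameter names are hypothetical; the synthetic second decl is
   actually unnamed.  */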
5412 \f
5413 /* Indicate whether REGNO is an incoming argument to the current function
5414 that was promoted to a wider mode. If so, return the RTX for the
5415 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
5416 that REGNO is promoted from and whether the promotion was signed or
5417 unsigned. */
5418
5419 rtx
5420 promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
5421 {
5422 tree arg;
5423
5424 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5425 arg = TREE_CHAIN (arg))
5426 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5427 && REGNO (DECL_INCOMING_RTL (arg)) == regno
5428 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5429 {
5430 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5431 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
5432
5433 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5434 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5435 && mode != DECL_MODE (arg))
5436 {
5437 *pmode = DECL_MODE (arg);
5438 *punsignedp = unsignedp;
5439 return DECL_INCOMING_RTL (arg);
5440 }
5441 }
5442
5443 return 0;
5444 }
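
/* Editorial illustration, not part of the original source: on a
   target whose PROMOTE_MODE widens sub-word integer arguments to
   word mode, an `int' parameter laid out in SImode but arriving in a
   DImode hard register would be reported as follows:
   promoted_input_arg returns the DImode DECL_INCOMING_RTL, sets
   *PMODE to SImode (the mode promoted from) and *PUNSIGNEDP to the
   signedness used for the widening.  The SImode/DImode pairing is an
   assumed example, not a statement about any particular target.  */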
5445
5446 \f
5447 /* Compute the size and offset from the start of the stacked arguments for a
5448 parm passed in mode PASSED_MODE and with type TYPE.
5449
5450 INITIAL_OFFSET_PTR points to the current offset into the stacked
5451 arguments.
5452
5453 The starting offset and size for this parm are returned in
5454 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
5455 nonzero, the offset is that of the stack slot, which is returned in
5456 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
5457 padding required from the initial offset ptr to the stack slot.
5458
5459 IN_REGS is nonzero if the argument will be passed in registers. It will
5460 never be set if REG_PARM_STACK_SPACE is not defined.
5461
5462 FNDECL is the function in which the argument was defined.
5463
5464 There are two types of rounding that are done. The first, controlled by
5465 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5466 list to be aligned to the specified boundary (in bits). This rounding
5467 affects the initial and starting offsets, but not the argument size.
5468
5469 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5470 optionally rounds the size of the parm to PARM_BOUNDARY. The
5471 initial offset is not affected by this rounding, while the size always
5472 is and the starting offset may be. */
5473
5474 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
5475 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
5476 callers pass in the total size of args so far as
5477 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
5478
5479 void
5480 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
5481 int partial, tree fndecl ATTRIBUTE_UNUSED,
5482 struct args_size *initial_offset_ptr,
5483 struct locate_and_pad_arg_data *locate)
5484 {
5485 tree sizetree;
5486 enum direction where_pad;
5487 int boundary;
5488 int reg_parm_stack_space = 0;
5489 int part_size_in_regs;
5490
5491 #ifdef REG_PARM_STACK_SPACE
5492 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5493
5494 /* If we have found a stack parm before we reach the end of the
5495 area reserved for registers, skip that area. */
5496 if (! in_regs)
5497 {
5498 if (reg_parm_stack_space > 0)
5499 {
5500 if (initial_offset_ptr->var)
5501 {
5502 initial_offset_ptr->var
5503 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5504 ssize_int (reg_parm_stack_space));
5505 initial_offset_ptr->constant = 0;
5506 }
5507 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5508 initial_offset_ptr->constant = reg_parm_stack_space;
5509 }
5510 }
5511 #endif /* REG_PARM_STACK_SPACE */
5512
5513 part_size_in_regs = 0;
5514 if (reg_parm_stack_space == 0)
5515 part_size_in_regs = ((partial * UNITS_PER_WORD)
5516 / (PARM_BOUNDARY / BITS_PER_UNIT)
5517 * (PARM_BOUNDARY / BITS_PER_UNIT));
5518
5519 sizetree
5520 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5521 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5522 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5523 locate->where_pad = where_pad;
5524
5525 #ifdef ARGS_GROW_DOWNWARD
5526 locate->slot_offset.constant = -initial_offset_ptr->constant;
5527 if (initial_offset_ptr->var)
5528 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
5529 initial_offset_ptr->var);
5530
5531 {
5532 tree s2 = sizetree;
5533 if (where_pad != none
5534 && (!host_integerp (sizetree, 1)
5535 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5536 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
5537 SUB_PARM_SIZE (locate->slot_offset, s2);
5538 }
5539
5540 locate->slot_offset.constant += part_size_in_regs;
5541
5542 if (!in_regs
5543 #ifdef REG_PARM_STACK_SPACE
5544 || REG_PARM_STACK_SPACE (fndecl) > 0
5545 #endif
5546 )
5547 pad_to_arg_alignment (&locate->slot_offset, boundary,
5548 &locate->alignment_pad);
5549
5550 locate->size.constant = (-initial_offset_ptr->constant
5551 - locate->slot_offset.constant);
5552 if (initial_offset_ptr->var)
5553 locate->size.var = size_binop (MINUS_EXPR,
5554 size_binop (MINUS_EXPR,
5555 ssize_int (0),
5556 initial_offset_ptr->var),
5557 locate->slot_offset.var);
5558
5559 /* Pad_below needs the pre-rounded size to know how much to pad
5560 below. */
5561 locate->offset = locate->slot_offset;
5562 if (where_pad == downward)
5563 pad_below (&locate->offset, passed_mode, sizetree);
5564
5565 #else /* !ARGS_GROW_DOWNWARD */
5566 if (!in_regs
5567 #ifdef REG_PARM_STACK_SPACE
5568 || REG_PARM_STACK_SPACE (fndecl) > 0
5569 #endif
5570 )
5571 pad_to_arg_alignment (initial_offset_ptr, boundary,
5572 &locate->alignment_pad);
5573 locate->slot_offset = *initial_offset_ptr;
5574
5575 #ifdef PUSH_ROUNDING
5576 if (passed_mode != BLKmode)
5577 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5578 #endif
5579
5580 /* Pad_below needs the pre-rounded size to know how much to pad below
5581 so this must be done before rounding up. */
5582 locate->offset = locate->slot_offset;
5583 if (where_pad == downward)
5584 pad_below (&locate->offset, passed_mode, sizetree);
5585
5586 if (where_pad != none
5587 && (!host_integerp (sizetree, 1)
5588 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5589 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5590
5591 ADD_PARM_SIZE (locate->size, sizetree);
5592
5593 locate->size.constant -= part_size_in_regs;
5594 #endif /* ARGS_GROW_DOWNWARD */
5595 }
5596
5597 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5598 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
5599
5600 static void
5601 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
5602 struct args_size *alignment_pad)
5603 {
5604 tree save_var = NULL_TREE;
5605 HOST_WIDE_INT save_constant = 0;
5606 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5607 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
5608
5609 #ifdef SPARC_STACK_BOUNDARY_HACK
5610 /* The sparc port has a bug. It sometimes claims a STACK_BOUNDARY
5611 higher than the real alignment of %sp. However, when it does this,
5612 the alignment of %sp+STACK_POINTER_OFFSET will be STACK_BOUNDARY.
5613 This is a temporary hack while the sparc port is fixed. */
5614 if (SPARC_STACK_BOUNDARY_HACK)
5615 sp_offset = 0;
5616 #endif
5617
5618 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5619 {
5620 save_var = offset_ptr->var;
5621 save_constant = offset_ptr->constant;
5622 }
5623
5624 alignment_pad->var = NULL_TREE;
5625 alignment_pad->constant = 0;
5626
5627 if (boundary > BITS_PER_UNIT)
5628 {
5629 if (offset_ptr->var)
5630 {
5631 tree sp_offset_tree = ssize_int (sp_offset);
5632 tree offset = size_binop (PLUS_EXPR,
5633 ARGS_SIZE_TREE (*offset_ptr),
5634 sp_offset_tree);
5635 #ifdef ARGS_GROW_DOWNWARD
5636 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
5637 #else
5638 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
5639 #endif
5640
5641 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
5642 /* ARGS_SIZE_TREE includes constant term. */
5643 offset_ptr->constant = 0;
5644 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5645 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
5646 save_var);
5647 }
5648 else
5649 {
5650 offset_ptr->constant = -sp_offset +
5651 #ifdef ARGS_GROW_DOWNWARD
5652 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
5653 #else
5654 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
5655 #endif
5656 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5657 alignment_pad->constant = offset_ptr->constant - save_constant;
5658 }
5659 }
5660 }
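
/* Editorial sketch, not part of the original source: a worked example
   of the constant-offset case above.  Assuming sp_offset == 0, an
   incoming offset of 20 bytes and BOUNDARY == 64 bits (8 bytes):

       upward-growing args:    CEIL_ROUND (20, 8)  == (20 + 7) & ~7 == 24
       downward-growing args:  FLOOR_ROUND (20, 8) ==  20      & ~7 == 16

   so the padded offset becomes 24 (or 16), and when BOUNDARY exceeds
   both PARM_BOUNDARY and STACK_BOUNDARY, alignment_pad->constant
   records the 4 bytes of padding inserted in the upward case.  */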
5661
5662 static void
5663 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
5664 {
5665 if (passed_mode != BLKmode)
5666 {
5667 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5668 offset_ptr->constant
5669 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5670 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5671 - GET_MODE_SIZE (passed_mode));
5672 }
5673 else
5674 {
5675 if (TREE_CODE (sizetree) != INTEGER_CST
5676 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5677 {
5678 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
5679 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5680 /* Add it in. */
5681 ADD_PARM_SIZE (*offset_ptr, s2);
5682 SUB_PARM_SIZE (*offset_ptr, sizetree);
5683 }
5684 }
5685 }
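
/* Editorial sketch, not part of the original source: assuming a
   target with PARM_BOUNDARY == 32 and 8-bit units, a downward-padded
   HImode argument (16 bits, 2 bytes) has

       ((16 + 32 - 1) / 32 * 32 / 8) - 2  ==  4 - 2  ==  2

   added to its offset, i.e. the value is placed 2 bytes into its
   4-byte slot.  BLKmode arguments are handled analogously by rounding
   SIZETREE up to a PARM_BOUNDARY multiple and adding the
   difference.  */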
5686 \f
5687 /* Walk the tree of blocks describing the binding levels within a function
5688 and warn about uninitialized variables.
5689 This is done after calling flow_analysis and before global_alloc
5690 assigns the pseudo-regs to hard regs. */
5691
5692 void
5693 uninitialized_vars_warning (tree block)
5694 {
5695 tree decl, sub;
5696 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5697 {
5698 if (warn_uninitialized
5699 && TREE_CODE (decl) == VAR_DECL
5700 /* These warnings are unreliable for aggregates
5701 because assigning the fields one by one can fail to convince
5702 flow.c that the entire aggregate was initialized.
5703 Unions are troublesome because members may be shorter. */
5704 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5705 && DECL_RTL_SET_P (decl)
5706 && GET_CODE (DECL_RTL (decl)) == REG
5707 /* Global optimizations can make it difficult to determine if a
5708 particular variable has been initialized. However, a VAR_DECL
5709 with a nonzero DECL_INITIAL had an initializer, so do not
5710 claim it is potentially uninitialized.
5711
5712 When the DECL_INITIAL is NULL call the language hook to tell us
5713 if we want to warn. */
5714 && (DECL_INITIAL (decl) == NULL_TREE || lang_hooks.decl_uninit (decl))
5715 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5716 warning ("%J'%D' might be used uninitialized in this function",
5717 decl, decl);
5718 if (extra_warnings
5719 && TREE_CODE (decl) == VAR_DECL
5720 && DECL_RTL_SET_P (decl)
5721 && GET_CODE (DECL_RTL (decl)) == REG
5722 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5723 warning ("%Jvariable '%D' might be clobbered by `longjmp' or `vfork'",
5724 decl, decl);
5725 }
5726 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5727 uninitialized_vars_warning (sub);
5728 }
5729
5730 /* Do the appropriate part of uninitialized_vars_warning
5731 but for arguments instead of local variables. */
5732
5733 void
5734 setjmp_args_warning (void)
5735 {
5736 tree decl;
5737 for (decl = DECL_ARGUMENTS (current_function_decl);
5738 decl; decl = TREE_CHAIN (decl))
5739 if (DECL_RTL (decl) != 0
5740 && GET_CODE (DECL_RTL (decl)) == REG
5741 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5742 warning ("%Jargument '%D' might be clobbered by `longjmp' or `vfork'",
5743 decl, decl);
5744 }
5745
5746 /* If this function calls setjmp, put all vars into the stack
5747 unless they were declared `register'. */
5748
5749 void
5750 setjmp_protect (tree block)
5751 {
5752 tree decl, sub;
5753 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5754 if ((TREE_CODE (decl) == VAR_DECL
5755 || TREE_CODE (decl) == PARM_DECL)
5756 && DECL_RTL (decl) != 0
5757 && (GET_CODE (DECL_RTL (decl)) == REG
5758 || (GET_CODE (DECL_RTL (decl)) == MEM
5759 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5760 /* If this variable came from an inline function, it must be
5761 that its life doesn't overlap the setjmp. If there was a
5762 setjmp in the function, it would already be in memory. We
5763 must exclude such variables because their DECL_RTL might be
5764 set to strange things such as virtual_stack_vars_rtx. */
5765 && ! DECL_FROM_INLINE (decl)
5766 && (
5767 #ifdef NON_SAVING_SETJMP
5768 /* If longjmp doesn't restore the registers,
5769 don't put anything in them. */
5770 NON_SAVING_SETJMP
5771 ||
5772 #endif
5773 ! DECL_REGISTER (decl)))
5774 put_var_into_stack (decl, /*rescan=*/true);
5775 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5776 setjmp_protect (sub);
5777 }
5778 \f
5779 /* Like the previous function, but for args instead of local variables. */
5780
5781 void
5782 setjmp_protect_args (void)
5783 {
5784 tree decl;
5785 for (decl = DECL_ARGUMENTS (current_function_decl);
5786 decl; decl = TREE_CHAIN (decl))
5787 if ((TREE_CODE (decl) == VAR_DECL
5788 || TREE_CODE (decl) == PARM_DECL)
5789 && DECL_RTL (decl) != 0
5790 && (GET_CODE (DECL_RTL (decl)) == REG
5791 || (GET_CODE (DECL_RTL (decl)) == MEM
5792 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5793 && (
5794 /* If longjmp doesn't restore the registers,
5795 don't put anything in them. */
5796 #ifdef NON_SAVING_SETJMP
5797 NON_SAVING_SETJMP
5798 ||
5799 #endif
5800 ! DECL_REGISTER (decl)))
5801 put_var_into_stack (decl, /*rescan=*/true);
5802 }
5803 \f
5804 /* Return the context-pointer register corresponding to DECL,
5805 or 0 if it does not need one. */
5806
5807 rtx
5808 lookup_static_chain (tree decl)
5809 {
5810 tree context = decl_function_context (decl);
5811 tree link;
5812
5813 if (context == 0
5814 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5815 return 0;
5816
5817 /* We treat inline_function_decl as an alias for the current function
5818 because that is the inline function whose vars, types, etc.
5819 are being merged into the current function.
5820 See expand_inline_function. */
5821 if (context == current_function_decl || context == inline_function_decl)
5822 return virtual_stack_vars_rtx;
5823
5824 for (link = context_display; link; link = TREE_CHAIN (link))
5825 if (TREE_PURPOSE (link) == context)
5826 return RTL_EXPR_RTL (TREE_VALUE (link));
5827
5828 abort ();
5829 }
5830 \f
5831 /* Convert a stack slot address ADDR for variable VAR
5832 (from a containing function)
5833 into an address valid in this function (using a static chain). */
5834
5835 rtx
5836 fix_lexical_addr (rtx addr, tree var)
5837 {
5838 rtx basereg;
5839 HOST_WIDE_INT displacement;
5840 tree context = decl_function_context (var);
5841 struct function *fp;
5842 rtx base = 0;
5843
5844 /* If this is the present function, we need not do anything. */
5845 if (context == current_function_decl || context == inline_function_decl)
5846 return addr;
5847
5848 fp = find_function_data (context);
5849
5850 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5851 addr = XEXP (XEXP (addr, 0), 0);
5852
5853 /* Decode given address as base reg plus displacement. */
5854 if (GET_CODE (addr) == REG)
5855 basereg = addr, displacement = 0;
5856 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5857 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5858 else
5859 abort ();
5860
5861 /* We accept vars reached via the containing function's
5862 incoming arg pointer and via its stack variables pointer. */
5863 if (basereg == fp->internal_arg_pointer)
5864 {
5865 /* If reached via arg pointer, get the arg pointer value
5866 out of that function's stack frame.
5867
5868 There are two cases: If a separate ap is needed, allocate a
5869 slot in the outer function for it and dereference it that way.
5870 This is correct even if the real ap is actually a pseudo.
5871 Otherwise, just adjust the offset from the frame pointer to
5872 compensate. */
5873
5874 #ifdef NEED_SEPARATE_AP
5875 rtx addr;
5876
5877 addr = get_arg_pointer_save_area (fp);
5878 addr = fix_lexical_addr (XEXP (addr, 0), var);
5879 addr = memory_address (Pmode, addr);
5880
5881 base = gen_rtx_MEM (Pmode, addr);
5882 set_mem_alias_set (base, get_frame_alias_set ());
5883 base = copy_to_reg (base);
5884 #else
5885 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5886 base = lookup_static_chain (var);
5887 #endif
5888 }
5889
5890 else if (basereg == virtual_stack_vars_rtx)
5891 {
5892 /* This is the same code as lookup_static_chain, duplicated here to
5893 avoid an extra call to decl_function_context. */
5894 tree link;
5895
5896 for (link = context_display; link; link = TREE_CHAIN (link))
5897 if (TREE_PURPOSE (link) == context)
5898 {
5899 base = RTL_EXPR_RTL (TREE_VALUE (link));
5900 break;
5901 }
5902 }
5903
5904 if (base == 0)
5905 abort ();
5906
5907 /* Use same offset, relative to appropriate static chain or argument
5908 pointer. */
5909 return plus_constant (base, displacement);
5910 }
5911 \f
5912 /* Return the address of the trampoline for entering nested fn FUNCTION.
5913 If necessary, allocate a trampoline (in the stack frame)
5914 and emit rtl to initialize its contents (at entry to this function). */
5915
5916 rtx
5917 trampoline_address (tree function)
5918 {
5919 tree link;
5920 tree rtlexp;
5921 rtx tramp;
5922 struct function *fp;
5923 tree fn_context;
5924
5925 /* Find an existing trampoline and return it. */
5926 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5927 if (TREE_PURPOSE (link) == function)
5928 return
5929 adjust_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5930
5931 for (fp = outer_function_chain; fp; fp = fp->outer)
5932 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5933 if (TREE_PURPOSE (link) == function)
5934 {
5935 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5936 function);
5937 return adjust_trampoline_addr (tramp);
5938 }
5939
5940 /* None exists; we must make one. */
5941
5942 /* Find the `struct function' for the function containing FUNCTION. */
5943 fp = 0;
5944 fn_context = decl_function_context (function);
5945 if (fn_context != current_function_decl
5946 && fn_context != inline_function_decl)
5947 fp = find_function_data (fn_context);
5948
5949 /* Allocate run-time space for this trampoline. */
5950 /* If rounding is needed, allocate extra space
5951 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5952 #define TRAMPOLINE_REAL_SIZE \
5953 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5954 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5955 fp ? fp : cfun);
5956 /* Record the trampoline for reuse and note it for later initialization
5957 by expand_function_end. */
5958 if (fp != 0)
5959 {
5960 rtlexp = make_node (RTL_EXPR);
5961 RTL_EXPR_RTL (rtlexp) = tramp;
5962 fp->x_trampoline_list = tree_cons (function, rtlexp,
5963 fp->x_trampoline_list);
5964 }
5965 else
5966 {
5967 /* Make the RTL_EXPR node temporary, not momentary, so that the
5968 trampoline_list doesn't become garbage. */
5969 rtlexp = make_node (RTL_EXPR);
5970
5971 RTL_EXPR_RTL (rtlexp) = tramp;
5972 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5973 }
5974
5975 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5976 return adjust_trampoline_addr (tramp);
5977 }
5978
5979 /* Given a trampoline address,
5980 round it up to a multiple of TRAMPOLINE_ALIGNMENT. */
5981
5982 static rtx
5983 round_trampoline_addr (rtx tramp)
5984 {
5985 /* Round address up to desired boundary. */
5986 rtx temp = gen_reg_rtx (Pmode);
5987 rtx addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5988 rtx mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5989
5990 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5991 temp, 0, OPTAB_LIB_WIDEN);
5992 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5993 temp, 0, OPTAB_LIB_WIDEN);
5994
5995 return tramp;
5996 }
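
/* Editorial sketch, not part of the original source: the two RTL
   operations above perform the usual add-and-mask rounding.  On plain
   host integers the same computation would read roughly:

       static unsigned HOST_WIDE_INT
       ceil_align (unsigned HOST_WIDE_INT addr, unsigned HOST_WIDE_INT align)
       {
         return (addr + align - 1) & -align;
       }

   e.g. with an 8-byte TRAMPOLINE_ALIGNMENT, address 0x1003 rounds up
   to 0x1008.  ceil_align is a hypothetical helper shown only for
   illustration.  */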
5997
5998 /* Given a trampoline address, round it, then apply any
5999 platform-specific adjustments so that the result can be used for a
6000 function call. */
6001
6002 static rtx
6003 adjust_trampoline_addr (rtx tramp)
6004 {
6005 tramp = round_trampoline_addr (tramp);
6006 #ifdef TRAMPOLINE_ADJUST_ADDRESS
6007 TRAMPOLINE_ADJUST_ADDRESS (tramp);
6008 #endif
6009 return tramp;
6010 }
6011 \f
6012 /* Gather all this function's BLOCK nodes, including those chained
6013 onto the first block, into a vector in depth-first order, and use
6014 it to match block notes with blocks: each NOTE_INSN_BLOCK_BEG and
6015 NOTE_INSN_BLOCK_END note has its NOTE_BLOCK set to the BLOCK it
6016 pertains to.  The blocks come from DECL_INITIAL of the current
6017 function and the notes from the current insn chain. */
6018
6019 void
6020 identify_blocks (void)
6021 {
6022 int n_blocks;
6023 tree *block_vector, *last_block_vector;
6024 tree *block_stack;
6025 tree block = DECL_INITIAL (current_function_decl);
6026
6027 if (block == 0)
6028 return;
6029
6030 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
6031 depth-first order. */
6032 block_vector = get_block_vector (block, &n_blocks);
6033 block_stack = xmalloc (n_blocks * sizeof (tree));
6034
6035 last_block_vector = identify_blocks_1 (get_insns (),
6036 block_vector + 1,
6037 block_vector + n_blocks,
6038 block_stack);
6039
6040 /* If we didn't use all of the subblocks, we've misplaced block notes. */
6041 /* ??? This appears to happen all the time. Latent bugs elsewhere? */
6042 if (0 && last_block_vector != block_vector + n_blocks)
6043 abort ();
6044
6045 free (block_vector);
6046 free (block_stack);
6047 }
6048
6049 /* Subroutine of identify_blocks. Do the block substitution on the
6050 insn chain beginning with INSNS. Recurse for CALL_PLACEHOLDER chains.
6051
6052 BLOCK_STACK is pushed and popped for each BLOCK_BEG/BLOCK_END pair.
6053 BLOCK_VECTOR is incremented for each block seen. */
6054
6055 static tree *
6056 identify_blocks_1 (rtx insns, tree *block_vector, tree *end_block_vector,
6057 tree *orig_block_stack)
6058 {
6059 rtx insn;
6060 tree *block_stack = orig_block_stack;
6061
6062 for (insn = insns; insn; insn = NEXT_INSN (insn))
6063 {
6064 if (GET_CODE (insn) == NOTE)
6065 {
6066 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
6067 {
6068 tree b;
6069
6070 /* If there are more block notes than BLOCKs, something
6071 is badly wrong. */
6072 if (block_vector == end_block_vector)
6073 abort ();
6074
6075 b = *block_vector++;
6076 NOTE_BLOCK (insn) = b;
6077 *block_stack++ = b;
6078 }
6079 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
6080 {
6081 /* If there are more NOTE_INSN_BLOCK_ENDs than
6082 NOTE_INSN_BLOCK_BEGs, something is badly wrong. */
6083 if (block_stack == orig_block_stack)
6084 abort ();
6085
6086 NOTE_BLOCK (insn) = *--block_stack;
6087 }
6088 }
6089 else if (GET_CODE (insn) == CALL_INSN
6090 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
6091 {
6092 rtx cp = PATTERN (insn);
6093
6094 block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
6095 end_block_vector, block_stack);
6096 if (XEXP (cp, 1))
6097 block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
6098 end_block_vector, block_stack);
6099 if (XEXP (cp, 2))
6100 block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
6101 end_block_vector, block_stack);
6102 }
6103 }
6104
6105 /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
6106 something is badly wrong. */
6107 if (block_stack != orig_block_stack)
6108 abort ();
6109
6110 return block_vector;
6111 }
6112
6113 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
6114 and create duplicate blocks. */
6115 /* ??? Need an option to either create block fragments or to create
6116 abstract origin duplicates of a source block. It really depends
6117 on what optimization has been performed. */
6118
6119 void
6120 reorder_blocks (void)
6121 {
6122 tree block = DECL_INITIAL (current_function_decl);
6123 varray_type block_stack;
6124
6125 if (block == NULL_TREE)
6126 return;
6127
6128 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
6129
6130 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
6131 reorder_blocks_0 (block);
6132
6133 /* Prune the old trees away, so that they don't get in the way. */
6134 BLOCK_SUBBLOCKS (block) = NULL_TREE;
6135 BLOCK_CHAIN (block) = NULL_TREE;
6136
6137 /* Recreate the block tree from the note nesting. */
6138 reorder_blocks_1 (get_insns (), block, &block_stack);
6139 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
6140
6141 /* Remove deleted blocks from the block fragment chains. */
6142 reorder_fix_fragments (block);
6143 }
6144
6145 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
6146
6147 static void
6148 reorder_blocks_0 (tree block)
6149 {
6150 while (block)
6151 {
6152 TREE_ASM_WRITTEN (block) = 0;
6153 reorder_blocks_0 (BLOCK_SUBBLOCKS (block));
6154 block = BLOCK_CHAIN (block);
6155 }
6156 }
6157
6158 static void
6159 reorder_blocks_1 (rtx insns, tree current_block, varray_type *p_block_stack)
6160 {
6161 rtx insn;
6162
6163 for (insn = insns; insn; insn = NEXT_INSN (insn))
6164 {
6165 if (GET_CODE (insn) == NOTE)
6166 {
6167 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
6168 {
6169 tree block = NOTE_BLOCK (insn);
6170
6171 /* If we have seen this block before, that means it now
6172 spans multiple address regions. Create a new fragment. */
6173 if (TREE_ASM_WRITTEN (block))
6174 {
6175 tree new_block = copy_node (block);
6176 tree origin;
6177
6178 origin = (BLOCK_FRAGMENT_ORIGIN (block)
6179 ? BLOCK_FRAGMENT_ORIGIN (block)
6180 : block);
6181 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
6182 BLOCK_FRAGMENT_CHAIN (new_block)
6183 = BLOCK_FRAGMENT_CHAIN (origin);
6184 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
6185
6186 NOTE_BLOCK (insn) = new_block;
6187 block = new_block;
6188 }
6189
6190 BLOCK_SUBBLOCKS (block) = 0;
6191 TREE_ASM_WRITTEN (block) = 1;
6192 /* When there's only one block for the entire function,
6193 current_block == block and we mustn't do this, as it
6194 would cause infinite recursion. */
6195 if (block != current_block)
6196 {
6197 BLOCK_SUPERCONTEXT (block) = current_block;
6198 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
6199 BLOCK_SUBBLOCKS (current_block) = block;
6200 current_block = block;
6201 }
6202 VARRAY_PUSH_TREE (*p_block_stack, block);
6203 }
6204 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
6205 {
6206 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
6207 VARRAY_POP (*p_block_stack);
6208 BLOCK_SUBBLOCKS (current_block)
6209 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
6210 current_block = BLOCK_SUPERCONTEXT (current_block);
6211 }
6212 }
6213 else if (GET_CODE (insn) == CALL_INSN
6214 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
6215 {
6216 rtx cp = PATTERN (insn);
6217 reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
6218 if (XEXP (cp, 1))
6219 reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
6220 if (XEXP (cp, 2))
6221 reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
6222 }
6223 }
6224 }
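
/* Editorial illustration, not part of the original source: given block
   notes appearing in the insn stream as

       BLOCK_BEG A ... BLOCK_BEG B ... BLOCK_END ... BLOCK_END

   reorder_blocks_1 rebuilds the tree with B as a subblock of A and A
   as a subblock of the outermost block.  If a later BLOCK_BEG note
   refers to A again (say, because code motion split A across two
   address ranges), A is already marked TREE_ASM_WRITTEN, so a copy of
   A is created and chained onto A's BLOCK_FRAGMENT_CHAIN instead.  */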
6225
6226 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
6227 appears in the block tree, select one of the fragments to become
6228 the new origin block. */
6229
6230 static void
6231 reorder_fix_fragments (tree block)
6232 {
6233 while (block)
6234 {
6235 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
6236 tree new_origin = NULL_TREE;
6237
6238 if (dup_origin)
6239 {
6240 if (! TREE_ASM_WRITTEN (dup_origin))
6241 {
6242 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
6243
6244 /* Find the first of the remaining fragments. There must
6245 be at least one -- the current block. */
6246 while (! TREE_ASM_WRITTEN (new_origin))
6247 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
6248 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
6249 }
6250 }
6251 else if (! dup_origin)
6252 new_origin = block;
6253
6254 /* Re-root the rest of the fragments to the new origin. In the
6255 case that DUP_ORIGIN was null, that means BLOCK was the origin
6256 of a chain of fragments and we want to remove those fragments
6257 that didn't make it to the output. */
6258 if (new_origin)
6259 {
6260 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
6261 tree chain = *pp;
6262
6263 while (chain)
6264 {
6265 if (TREE_ASM_WRITTEN (chain))
6266 {
6267 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
6268 *pp = chain;
6269 pp = &BLOCK_FRAGMENT_CHAIN (chain);
6270 }
6271 chain = BLOCK_FRAGMENT_CHAIN (chain);
6272 }
6273 *pp = NULL_TREE;
6274 }
6275
6276 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
6277 block = BLOCK_CHAIN (block);
6278 }
6279 }
6280
6281 /* Reverse the order of elements in the chain T of blocks,
6282 and return the new head of the chain (old last element). */
6283
6284 static tree
6285 blocks_nreverse (tree t)
6286 {
6287 tree prev = 0, decl, next;
6288 for (decl = t; decl; decl = next)
6289 {
6290 next = BLOCK_CHAIN (decl);
6291 BLOCK_CHAIN (decl) = prev;
6292 prev = decl;
6293 }
6294 return prev;
6295 }
6296
6297 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
6298 non-NULL, list them all into VECTOR, in a depth-first preorder
6299 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
6300 blocks. */
6301
6302 static int
6303 all_blocks (tree block, tree *vector)
6304 {
6305 int n_blocks = 0;
6306
6307 while (block)
6308 {
6309 TREE_ASM_WRITTEN (block) = 0;
6310
6311 /* Record this block. */
6312 if (vector)
6313 vector[n_blocks] = block;
6314
6315 ++n_blocks;
6316
6317 /* Record the subblocks, and their subblocks... */
6318 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
6319 vector ? vector + n_blocks : 0);
6320 block = BLOCK_CHAIN (block);
6321 }
6322
6323 return n_blocks;
6324 }
6325
6326 /* Return a vector containing all the blocks rooted at BLOCK. The
6327 number of elements in the vector is stored in N_BLOCKS_P. The
6328 vector is dynamically allocated; it is the caller's responsibility
6329 to call `free' on the pointer returned. */
6330
6331 static tree *
6332 get_block_vector (tree block, int *n_blocks_p)
6333 {
6334 tree *block_vector;
6335
6336 *n_blocks_p = all_blocks (block, NULL);
6337 block_vector = xmalloc (*n_blocks_p * sizeof (tree));
6338 all_blocks (block, block_vector);
6339
6340 return block_vector;
6341 }
6342
6343 static GTY(()) int next_block_index = 2;
6344
6345 /* Set BLOCK_NUMBER for all the blocks in FN. */
6346
6347 void
6348 number_blocks (tree fn)
6349 {
6350 int i;
6351 int n_blocks;
6352 tree *block_vector;
6353
6354 /* For SDB and XCOFF debugging output, we start numbering the blocks
6355 from 1 within each function, rather than keeping a running
6356 count. */
6357 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
6358 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
6359 next_block_index = 1;
6360 #endif
6361
6362 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
6363
6364 /* The top-level BLOCK isn't numbered at all. */
6365 for (i = 1; i < n_blocks; ++i)
6366 /* We number the blocks from two. */
6367 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
6368
6369 free (block_vector);
6370
6371 return;
6372 }
6373
6374 /* If VAR is present in a subblock of BLOCK, return the subblock. */
6375
6376 tree
6377 debug_find_var_in_block_tree (tree var, tree block)
6378 {
6379 tree t;
6380
6381 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
6382 if (t == var)
6383 return block;
6384
6385 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
6386 {
6387 tree ret = debug_find_var_in_block_tree (var, t);
6388 if (ret)
6389 return ret;
6390 }
6391
6392 return NULL_TREE;
6393 }
6394 \f
6395 /* Allocate a function structure for FNDECL and set its contents
6396 to the defaults. */
6397
6398 void
6399 allocate_struct_function (tree fndecl)
6400 {
6401 tree result;
6402
6403 cfun = ggc_alloc_cleared (sizeof (struct function));
6404
6405 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
6406
6407 cfun->stack_alignment_needed = STACK_BOUNDARY;
6408 cfun->preferred_stack_boundary = STACK_BOUNDARY;
6409
6410 current_function_funcdef_no = funcdef_no++;
6411
6412 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
6413
6414 init_stmt_for_function ();
6415 init_eh_for_function ();
6416
6417 (*lang_hooks.function.init) (cfun);
6418 if (init_machine_status)
6419 cfun->machine = (*init_machine_status) ();
6420
6421 if (fndecl == NULL)
6422 return;
6423
6424 DECL_STRUCT_FUNCTION (fndecl) = cfun;
6425 cfun->decl = fndecl;
6426
6427 result = DECL_RESULT (fndecl);
6428 if (aggregate_value_p (result, fndecl))
6429 {
6430 #ifdef PCC_STATIC_STRUCT_RETURN
6431 current_function_returns_pcc_struct = 1;
6432 #endif
6433 current_function_returns_struct = 1;
6434 }
6435
6436 current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));
6437
6438 current_function_needs_context
6439 = (decl_function_context (current_function_decl) != 0
6440 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
6441 }
6442
6443 /* Reset cfun and other non-struct-function variables to defaults as
6444 appropriate for emitting rtl at the start of a function. */
6445
6446 static void
6447 prepare_function_start (tree fndecl)
6448 {
6449 if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
6450 cfun = DECL_STRUCT_FUNCTION (fndecl);
6451 else
6452 allocate_struct_function (fndecl);
6453 init_emit ();
6454 init_varasm_status (cfun);
6455 init_expr ();
6456
6457 cse_not_expected = ! optimize;
6458
6459 /* Caller save not needed yet. */
6460 caller_save_needed = 0;
6461
6462 /* We haven't done register allocation yet. */
6463 reg_renumber = 0;
6464
6465 /* Indicate that we need to distinguish between the return value of the
6466 present function and the return value of a function being called. */
6467 rtx_equal_function_value_matters = 1;
6468
6469 /* Indicate that we have not instantiated virtual registers yet. */
6470 virtuals_instantiated = 0;
6471
6472 /* Indicate that we want CONCATs now. */
6473 generating_concat_p = 1;
6474
6475 /* Indicate we have no need of a frame pointer yet. */
6476 frame_pointer_needed = 0;
6477 }
6478
6479 /* Initialize the rtl expansion mechanism so that we can do simple things
6480 like generate sequences. This is used to provide a context during global
6481 initialization of some passes. */
6482 void
6483 init_dummy_function_start (void)
6484 {
6485 prepare_function_start (NULL);
6486 }
6487
6488 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
6489 and initialize static variables for generating RTL for the statements
6490 of the function. */
6491
6492 void
6493 init_function_start (tree subr)
6494 {
6495 prepare_function_start (subr);
6496
6497 /* Within a function body, compute a type's size as soon as it is laid out. */
6498 immediate_size_expand++;
6499
6500 /* Prevent ever trying to delete the first instruction of a
6501 function. Also tell final how to output a linenum before the
6502 function prologue. Note linenums could be missing, e.g. when
6503 compiling a Java .class file. */
6504 if (DECL_SOURCE_LINE (subr))
6505 emit_line_note (DECL_SOURCE_LOCATION (subr));
6506
6507 /* Make sure first insn is a note even if we don't want linenums.
6508 This makes sure the first insn will never be deleted.
6509 Also, final expects a note to appear there. */
6510 emit_note (NOTE_INSN_DELETED);
6511
6512 /* Warn if the return value is an aggregate type,
6513 regardless of which calling convention we are using for it. */
6514 if (warn_aggregate_return
6515 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6516 warning ("function returns an aggregate");
6517 }
6518
6519 /* Make sure all values used by the optimization passes have sane
6520 defaults. */
6521 void
6522 init_function_for_compilation (void)
6523 {
6524 reg_renumber = 0;
6525
6526 /* No prologue/epilogue insns yet. */
6527 VARRAY_GROW (prologue, 0);
6528 VARRAY_GROW (epilogue, 0);
6529 VARRAY_GROW (sibcall_epilogue, 0);
6530 }
6531
6532 /* Expand a call to __main at the beginning of a possible main function. */
6533
6534 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6535 #undef HAS_INIT_SECTION
6536 #define HAS_INIT_SECTION
6537 #endif
6538
6539 void
6540 expand_main_function (void)
6541 {
6542 #ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
6543 if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
6544 {
6545 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
6546 rtx tmp, seq;
6547
6548 start_sequence ();
6549 /* Forcibly align the stack. */
6550 #ifdef STACK_GROWS_DOWNWARD
6551 tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT(-align),
6552 stack_pointer_rtx, 1, OPTAB_WIDEN);
6553 #else
6554 tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
6555 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
6556 tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
6557 stack_pointer_rtx, 1, OPTAB_WIDEN);
6558 #endif
6559 if (tmp != stack_pointer_rtx)
6560 emit_move_insn (stack_pointer_rtx, tmp);
6561
6562 /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
6563 tmp = force_reg (Pmode, const0_rtx);
6564 allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
6565 seq = get_insns ();
6566 end_sequence ();
6567
6568 for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
6569 if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
6570 break;
6571 if (tmp)
6572 emit_insn_before (seq, tmp);
6573 else
6574 emit_insn (seq);
6575 }
6576 #endif
6577
6578 #ifndef HAS_INIT_SECTION
6579 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
6580 #endif
6581 }
6582 \f
6583 /* The PENDING_SIZES represent the sizes of variable-sized types.
6584 Create RTL for the various sizes now (using temporary variables),
6585 so that we can refer to the sizes from the RTL we are generating
6586 for the current function. The PENDING_SIZES are a TREE_LIST. The
6587 TREE_VALUE of each node is a SAVE_EXPR. */
6588
6589 void
6590 expand_pending_sizes (tree pending_sizes)
6591 {
6592 tree tem;
6593
6594 /* Evaluate now the sizes of any types declared among the arguments. */
6595 for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
6596 {
6597 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
6598 /* Flush the queue in case this parameter declaration has
6599 side-effects. */
6600 emit_queue ();
6601 }
6602 }
6603
6604 /* Start the RTL for a new function, and set variables used for
6605 emitting RTL.
6606 SUBR is the FUNCTION_DECL node.
6607 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6608 the function's parameters, which must be run at any return statement. */
6609
6610 void
6611 expand_function_start (tree subr, int parms_have_cleanups)
6612 {
6613 tree tem;
6614 rtx last_ptr = NULL_RTX;
6615
6616 /* Make sure volatile mem refs aren't considered
6617 valid operands of arithmetic insns. */
6618 init_recog_no_volatile ();
6619
6620 current_function_instrument_entry_exit
6621 = (flag_instrument_function_entry_exit
6622 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6623
6624 current_function_profile
6625 = (profile_flag
6626 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6627
6628 current_function_limit_stack
6629 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
6630
6631 /* If the function gets a static chain arg, store it in the stack frame.
6632 Do this first, so it gets the first stack slot offset. */
6633 if (current_function_needs_context)
6634 {
6635 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
6636
6637 /* Delay copying static chain if it is not a register to avoid
6638 conflicts with regs used for parameters. */
6639 if (! SMALL_REGISTER_CLASSES
6640 || GET_CODE (static_chain_incoming_rtx) == REG)
6641 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6642 }
6643
6644 /* If the parameters of this function need cleaning up, get a label
6645 for the beginning of the code which executes those cleanups. This must
6646 be done before doing anything with return_label. */
6647 if (parms_have_cleanups)
6648 cleanup_label = gen_label_rtx ();
6649 else
6650 cleanup_label = 0;
6651
6652 /* Make the label for return statements to jump to. Do not special
6653 case machines with special return instructions -- they will be
6654 handled later during jump, ifcvt, or epilogue creation. */
6655 return_label = gen_label_rtx ();
6656
6657 /* Initialize rtx used to return the value. */
6658 /* Do this before assign_parms so that we copy the struct value address
6659 before any library calls that assign parms might generate. */
6660
6661 /* Decide whether to return the value in memory or in a register. */
6662 if (aggregate_value_p (DECL_RESULT (subr), subr))
6663 {
6664 /* Returning something that won't go in a register. */
6665 rtx value_address = 0;
6666
6667 #ifdef PCC_STATIC_STRUCT_RETURN
6668 if (current_function_returns_pcc_struct)
6669 {
6670 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6671 value_address = assemble_static_space (size);
6672 }
6673 else
6674 #endif
6675 {
6676 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 1);
6677 /* Expect to be passed the address of a place to store the value.
6678 If it is passed as an argument, assign_parms will take care of
6679 it. */
6680 if (sv)
6681 {
6682 value_address = gen_reg_rtx (Pmode);
6683 emit_move_insn (value_address, sv);
6684 }
6685 }
6686 if (value_address)
6687 {
6688 rtx x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
6689 set_mem_attributes (x, DECL_RESULT (subr), 1);
6690 SET_DECL_RTL (DECL_RESULT (subr), x);
6691 }
6692 }
6693 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6694 /* If return mode is void, this decl rtl should not be used. */
6695 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
6696 else
6697 {
6698 /* Compute the return values into a pseudo reg, which we will copy
6699 into the true return register after the cleanups are done. */
6700
6701 /* In order to figure out what mode to use for the pseudo, we
6702 figure out what the mode of the eventual return register will
6703 actually be, and use that. */
6704 rtx hard_reg
6705 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
6706 subr, 1);
6707
6708 /* Structures that are returned in registers are not aggregate_value_p,
6709 so we may see a PARALLEL or a REG. */
6710 if (REG_P (hard_reg))
6711 SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
6712 else if (GET_CODE (hard_reg) == PARALLEL)
6713 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
6714 else
6715 abort ();
6716
6717 /* Set DECL_REGISTER flag so that expand_function_end will copy the
6718 result to the real return register(s). */
6719 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6720 }
6721
6722 /* Initialize rtx for parameters and local variables.
6723 In some cases this requires emitting insns. */
6724
6725 assign_parms (subr);
6726
6727 /* Copy the static chain now if it wasn't a register. The delay is to
6728 avoid conflicts with the parameter passing registers. */
6729
6730 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6731 if (GET_CODE (static_chain_incoming_rtx) != REG)
6732 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6733
6734 /* The following was moved from init_function_start.
6735 The move is supposed to make sdb output more accurate. */
6736 /* Indicate the beginning of the function body,
6737 as opposed to parm setup. */
6738 emit_note (NOTE_INSN_FUNCTION_BEG);
6739
6740 if (GET_CODE (get_last_insn ()) != NOTE)
6741 emit_note (NOTE_INSN_DELETED);
6742 parm_birth_insn = get_last_insn ();
6743
6744 context_display = 0;
6745 if (current_function_needs_context)
6746 {
6747 /* Fetch static chain values for containing functions. */
6748 tem = decl_function_context (current_function_decl);
6749 /* Copy the static chain pointer into a pseudo. If we have
6750 small register classes, copy the value from memory if
6751 static_chain_incoming_rtx is a REG. */
6752 if (tem)
6753 {
6754 /* If the static chain originally came in a register, put it back
6755 there, then move it out in the next insn. The reason for
6756 this peculiar code is to satisfy function integration. */
6757 if (SMALL_REGISTER_CLASSES
6758 && GET_CODE (static_chain_incoming_rtx) == REG)
6759 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6760 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6761 }
6762
6763 while (tem)
6764 {
6765 tree rtlexp = make_node (RTL_EXPR);
6766
6767 RTL_EXPR_RTL (rtlexp) = last_ptr;
6768 context_display = tree_cons (tem, rtlexp, context_display);
6769 tem = decl_function_context (tem);
6770 if (tem == 0)
6771 break;
6772 /* Chain through stack frames, assuming pointer to next lexical frame
6773 is found at the place we always store it. */
6774 #ifdef FRAME_GROWS_DOWNWARD
6775 last_ptr = plus_constant (last_ptr,
6776 -(HOST_WIDE_INT) GET_MODE_SIZE (Pmode));
6777 #endif
6778 last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
6779 set_mem_alias_set (last_ptr, get_frame_alias_set ());
6780 last_ptr = copy_to_reg (last_ptr);
6781
6782 /* If we are not optimizing, ensure that we know that this
6783 piece of context is live over the entire function. */
6784 if (! optimize)
6785 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6786 save_expr_regs);
6787 }
6788 }
6789
6790 if (current_function_instrument_entry_exit)
6791 {
6792 rtx fun = DECL_RTL (current_function_decl);
6793 if (GET_CODE (fun) == MEM)
6794 fun = XEXP (fun, 0);
6795 else
6796 abort ();
6797 emit_library_call (profile_function_entry_libfunc, LCT_NORMAL, VOIDmode,
6798 2, fun, Pmode,
6799 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6800 0,
6801 hard_frame_pointer_rtx),
6802 Pmode);
6803 }
6804
6805 if (current_function_profile)
6806 {
6807 #ifdef PROFILE_HOOK
6808 PROFILE_HOOK (current_function_funcdef_no);
6809 #endif
6810 }
6811
6812 /* After the display initializations is where the tail-recursion label
6813 should go, if we end up needing one. Ensure we have a NOTE here
6814 since some things (like trampolines) get placed before this. */
6815 tail_recursion_reentry = emit_note (NOTE_INSN_DELETED);
6816
6817 /* Evaluate now the sizes of any types declared among the arguments. */
6818 expand_pending_sizes (nreverse (get_pending_sizes ()));
6819
6820 /* Make sure there is a line number after the function entry setup code. */
6821 force_next_line_note ();
6822 }
6823 \f
6824 /* Undo the effects of init_dummy_function_start. */
6825 void
6826 expand_dummy_function_end (void)
6827 {
6828 /* End any sequences that failed to be closed due to syntax errors. */
6829 while (in_sequence_p ())
6830 end_sequence ();
6831
6832 /* Outside a function body, we can't compute a type's actual size
6833 until the next function's body starts. */
6834
6835 free_after_parsing (cfun);
6836 free_after_compilation (cfun);
6837 cfun = 0;
6838 }
6839
6840 /* Call DOIT for each hard register used as a return value from
6841 the current function. */
6842
6843 void
6844 diddle_return_value (void (*doit) (rtx, void *), void *arg)
6845 {
6846 rtx outgoing = current_function_return_rtx;
6847
6848 if (! outgoing)
6849 return;
6850
6851 if (GET_CODE (outgoing) == REG)
6852 (*doit) (outgoing, arg);
6853 else if (GET_CODE (outgoing) == PARALLEL)
6854 {
6855 int i;
6856
6857 for (i = 0; i < XVECLEN (outgoing, 0); i++)
6858 {
6859 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6860
6861 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6862 (*doit) (x, arg);
6863 }
6864 }
6865 }
6866
6867 static void
6868 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
6869 {
6870 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6871 }
6872
6873 void
6874 clobber_return_register (void)
6875 {
6876 diddle_return_value (do_clobber_return_reg, NULL);
6877
6878 /* In case we use a pseudo to return the value, clobber it too. */
6879 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6880 {
6881 tree decl_result = DECL_RESULT (current_function_decl);
6882 rtx decl_rtl = DECL_RTL (decl_result);
6883 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
6884 {
6885 do_clobber_return_reg (decl_rtl, NULL);
6886 }
6887 }
6888 }
6889
6890 static void
6891 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
6892 {
6893 emit_insn (gen_rtx_USE (VOIDmode, reg));
6894 }
6895
6896 void
6897 use_return_register (void)
6898 {
6899 diddle_return_value (do_use_return_reg, NULL);
6900 }
6901
6902 static GTY(()) rtx initial_trampoline;
6903
6904 /* Generate RTL for the end of the current function. */
6905
6906 void
6907 expand_function_end (void)
6908 {
6909 tree link;
6910 rtx clobber_after;
6911
6912 finish_expr_for_function ();
6913
6914 /* If arg_pointer_save_area was referenced only from a nested
6915 function, we will not have initialized it yet. Do that now. */
6916 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
6917 get_arg_pointer_save_area (cfun);
6918
6919 #ifdef NON_SAVING_SETJMP
6920 /* Don't put any variables in registers if we call setjmp
6921 on a machine that fails to restore the registers. */
6922 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6923 {
6924 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6925 setjmp_protect (DECL_INITIAL (current_function_decl));
6926
6927 setjmp_protect_args ();
6928 }
6929 #endif
6930
6931 /* Initialize any trampolines required by this function. */
6932 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6933 {
6934 tree function = TREE_PURPOSE (link);
6935 rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
6936 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6937 #ifdef TRAMPOLINE_TEMPLATE
6938 rtx blktramp;
6939 #endif
6940 rtx seq;
6941
6942 #ifdef TRAMPOLINE_TEMPLATE
6943 /* First make sure this compilation has a template for
6944 initializing trampolines. */
6945 if (initial_trampoline == 0)
6946 {
6947 initial_trampoline
6948 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6949 set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
6950 }
6951 #endif
6952
6953 /* Generate insns to initialize the trampoline. */
6954 start_sequence ();
6955 tramp = round_trampoline_addr (XEXP (tramp, 0));
6956 #ifdef TRAMPOLINE_TEMPLATE
6957 blktramp = replace_equiv_address (initial_trampoline, tramp);
6958 emit_block_move (blktramp, initial_trampoline,
6959 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
6960 #endif
6961 trampolines_created = 1;
6962 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6963 seq = get_insns ();
6964 end_sequence ();
6965
6966 /* Put those insns at entry to the containing function (this one). */
6967 emit_insn_before (seq, tail_recursion_reentry);
6968 }
6969
6970 /* If we are doing stack checking and this function makes calls,
6971 do a stack probe at the start of the function to ensure we have enough
6972 space for another stack frame. */
6973 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6974 {
6975 rtx insn, seq;
6976
6977 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6978 if (GET_CODE (insn) == CALL_INSN)
6979 {
6980 start_sequence ();
6981 probe_stack_range (STACK_CHECK_PROTECT,
6982 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6983 seq = get_insns ();
6984 end_sequence ();
6985 emit_insn_before (seq, tail_recursion_reentry);
6986 break;
6987 }
6988 }
6989
6990 /* Possibly warn about unused parameters. */
6991 if (warn_unused_parameter)
6992 {
6993 tree decl;
6994
6995 for (decl = DECL_ARGUMENTS (current_function_decl);
6996 decl; decl = TREE_CHAIN (decl))
6997 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6998 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6999 warning ("%Junused parameter '%D'", decl, decl);
7000 }
7001
7002 /* Delete handlers for nonlocal gotos if nothing uses them. */
7003 if (nonlocal_goto_handler_slots != 0
7004 && ! current_function_has_nonlocal_label)
7005 delete_handlers ();
7006
7007 /* End any sequences that failed to be closed due to syntax errors. */
7008 while (in_sequence_p ())
7009 end_sequence ();
7010
7011 /* Outside function body, can't compute type's actual size
7012 until next function's body starts. */
7013 immediate_size_expand--;
7014
7015 clear_pending_stack_adjust ();
7016 do_pending_stack_adjust ();
7017
7018 /* Mark the end of the function body.
7019 If control reaches this insn, the function can drop through
7020 without returning a value. */
7021 emit_note (NOTE_INSN_FUNCTION_END);
7022
7023 /* Must mark the last line number note in the function, so that the test
7024 coverage code can avoid counting the last line twice. This just tells
7025 the code to ignore the immediately following line note, since there
7026 already exists a copy of this note somewhere above. This line number
7027 note is still needed for debugging though, so we can't delete it. */
7028 if (flag_test_coverage)
7029 emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
7030
7031 /* Output a linenumber for the end of the function.
7032 SDB depends on this. */
7033 force_next_line_note ();
7034 emit_line_note (input_location);
7035
7036 /* Before the return label (if any), clobber the return
7037 registers so that they are not propagated live to the rest of
7038 the function. This can only happen with functions that drop
7039 through; if there had been a return statement, there would
7040 have either been a return rtx, or a jump to the return label.
7041
7042 We delay actual code generation until after current_function_return_rtx
7043 is computed. */
7044 clobber_after = get_last_insn ();
7045
7046 /* Output the label for the actual return from the function,
7047 if one is expected. This happens either because a function epilogue
7048 is used instead of a return instruction, or because a return was done
7049 with a goto in order to run local cleanups, or because of pcc-style
7050 structure returning. */
7051 if (return_label)
7052 emit_label (return_label);
7053
7054 if (current_function_instrument_entry_exit)
7055 {
7056 rtx fun = DECL_RTL (current_function_decl);
7057 if (GET_CODE (fun) == MEM)
7058 fun = XEXP (fun, 0);
7059 else
7060 abort ();
7061 emit_library_call (profile_function_exit_libfunc, LCT_NORMAL, VOIDmode,
7062 2, fun, Pmode,
7063 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
7064 0,
7065 hard_frame_pointer_rtx),
7066 Pmode);
7067 }
7068
7069 /* Let except.c know where it should emit the call to unregister
7070 the function context for sjlj exceptions. */
7071 if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
7072 sjlj_emit_function_exit_after (get_last_insn ());
7073
7074 /* If we had calls to alloca, and this machine needs
7075 an accurate stack pointer to exit the function,
7076 insert some code to save and restore the stack pointer. */
7077 if (! EXIT_IGNORE_STACK
7078 && current_function_calls_alloca)
7079 {
7080 rtx tem = 0;
7081
7082 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
7083 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
7084 }
7085
7086 /* If scalar return value was computed in a pseudo-reg, or was a named
7087 return value that got dumped to the stack, copy that to the hard
7088 return register. */
7089 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
7090 {
7091 tree decl_result = DECL_RESULT (current_function_decl);
7092 rtx decl_rtl = DECL_RTL (decl_result);
7093
7094 if (REG_P (decl_rtl)
7095 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
7096 : DECL_REGISTER (decl_result))
7097 {
7098 rtx real_decl_rtl = current_function_return_rtx;
7099
7100 /* This should be set in assign_parms. */
7101 if (! REG_FUNCTION_VALUE_P (real_decl_rtl))
7102 abort ();
7103
7104 /* If this is a BLKmode structure being returned in registers,
7105 then use the mode computed in expand_return. Note that if
7106 decl_rtl is memory, then its mode may have been changed,
7107 but that current_function_return_rtx has not. */
7108 if (GET_MODE (real_decl_rtl) == BLKmode)
7109 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
7110
7111 /* If a named return value's decl_rtl was dumped to memory, then
7112 we may need to re-do the PROMOTE_MODE signed/unsigned
7113 extension. */
7114 if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
7115 {
7116 int unsignedp = TREE_UNSIGNED (TREE_TYPE (decl_result));
7117
7118 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
7119 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
7120 &unsignedp, 1);
7121
7122 convert_move (real_decl_rtl, decl_rtl, unsignedp);
7123 }
7124 else if (GET_CODE (real_decl_rtl) == PARALLEL)
7125 {
7126 /* If expand_function_start has created a PARALLEL for decl_rtl,
7127 move the result to the real return registers. Otherwise, do
7128 a group load from decl_rtl for a named return. */
7129 if (GET_CODE (decl_rtl) == PARALLEL)
7130 emit_group_move (real_decl_rtl, decl_rtl);
7131 else
7132 emit_group_load (real_decl_rtl, decl_rtl,
7133 TREE_TYPE (decl_result),
7134 int_size_in_bytes (TREE_TYPE (decl_result)));
7135 }
7136 else
7137 emit_move_insn (real_decl_rtl, decl_rtl);
7138 }
7139 }
7140
7141 /* If returning a structure, arrange to return the address of the value
7142 in a place where debuggers expect to find it.
7143
7144 If returning a structure PCC style,
7145 the caller also depends on this value.
7146 And current_function_returns_pcc_struct is not necessarily set. */
7147 if (current_function_returns_struct
7148 || current_function_returns_pcc_struct)
7149 {
7150 rtx value_address
7151 = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7152 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
7153 #ifdef FUNCTION_OUTGOING_VALUE
7154 rtx outgoing
7155 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
7156 current_function_decl);
7157 #else
7158 rtx outgoing
7159 = FUNCTION_VALUE (build_pointer_type (type), current_function_decl);
7160 #endif
7161
7162 /* Mark this as a function return value so integrate will delete the
7163 assignment and USE below when inlining this function. */
7164 REG_FUNCTION_VALUE_P (outgoing) = 1;
7165
7166 /* The address may be ptr_mode and OUTGOING may be Pmode. */
7167 value_address = convert_memory_address (GET_MODE (outgoing),
7168 value_address);
7169
7170 emit_move_insn (outgoing, value_address);
7171
7172 /* Show return register used to hold result (in this case the address
7173 of the result). */
7174 current_function_return_rtx = outgoing;
7175 }
7176
7177 /* If this is an implementation of throw, do what's necessary to
7178 communicate between __builtin_eh_return and the epilogue. */
7179 expand_eh_return ();
7180
7181 /* Emit the actual code to clobber return register. */
7182 {
7183 rtx seq, after;
7184
7185 start_sequence ();
7186 clobber_return_register ();
7187 seq = get_insns ();
7188 end_sequence ();
7189
7190 after = emit_insn_after (seq, clobber_after);
7191
7192 if (clobber_after != after)
7193 cfun->x_clobber_return_insn = after;
7194 }
7195
7196 /* Output the label for the naked return from the function, if one is
7197 expected. This is currently used only by __builtin_return. */
7198 if (naked_return_label)
7199 emit_label (naked_return_label);
7200
7201 /* ??? This should no longer be necessary since the stupid register
7202 allocator is no longer with us, but there are some parts of the compiler
7203 (e.g. reload_combine and the sh mach_dep_reorg pass) that still try to
7204 compute their own lifetime info instead of using the general framework. */
7205 use_return_register ();
7206
7207 /* Fix up any gotos that jumped out to the outermost
7208 binding level of the function.
7209 Must follow emitting RETURN_LABEL. */
7210
7211 /* If you have any cleanups to do at this point,
7212 and they need to create temporary variables,
7213 then you will lose. */
7214 expand_fixups (get_insns ());
7215 }
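
/* Editorial example, not part of the original source: if DECL_RESULT of the
   current function was assigned a pseudo such as (reg:SI 58) while the ABI
   return register is (reg:SI 0) with REG_FUNCTION_VALUE_P set, the
   return-value copy in expand_function_end above reduces to a single
       (set (reg:SI 0) (reg:SI 58))
   The register numbers and mode are assumed values for illustration only.  */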
7216
7217 rtx
7218 get_arg_pointer_save_area (struct function *f)
7219 {
7220 rtx ret = f->x_arg_pointer_save_area;
7221
7222 if (! ret)
7223 {
7224 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
7225 f->x_arg_pointer_save_area = ret;
7226 }
7227
7228 if (f == cfun && ! f->arg_pointer_save_area_init)
7229 {
7230 rtx seq;
7231
7232 /* Save the arg pointer at the beginning of the function. The
7233 generated stack slot may not be a valid memory address, so we
7234 have to check it and fix it if necessary. */
7235 start_sequence ();
7236 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
7237 seq = get_insns ();
7238 end_sequence ();
7239
7240 push_topmost_sequence ();
7241 emit_insn_after (seq, get_insns ());
7242 pop_topmost_sequence ();
7243 }
7244
7245 return ret;
7246 }
7247 \f
7248 /* Extend a vector that records the INSN_UIDs of INSNS
7249 (a list of one or more insns). */
7250
7251 static void
7252 record_insns (rtx insns, varray_type *vecp)
7253 {
7254 int i, len;
7255 rtx tmp;
7256
7257 tmp = insns;
7258 len = 0;
7259 while (tmp != NULL_RTX)
7260 {
7261 len++;
7262 tmp = NEXT_INSN (tmp);
7263 }
7264
7265 i = VARRAY_SIZE (*vecp);
7266 VARRAY_GROW (*vecp, i + len);
7267 tmp = insns;
7268 while (tmp != NULL_RTX)
7269 {
7270 VARRAY_INT (*vecp, i) = INSN_UID (tmp);
7271 i++;
7272 tmp = NEXT_INSN (tmp);
7273 }
7274 }
7275
7276 /* Set the locator of the insn chain starting at INSN to LOC. */
7277 static void
7278 set_insn_locators (rtx insn, int loc)
7279 {
7280 while (insn != NULL_RTX)
7281 {
7282 if (INSN_P (insn))
7283 INSN_LOCATOR (insn) = loc;
7284 insn = NEXT_INSN (insn);
7285 }
7286 }
7287
7288 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
7289 be running after reorg, SEQUENCE rtl is possible. */
7290
7291 static int
7292 contains (rtx insn, varray_type vec)
7293 {
7294 int i, j;
7295
7296 if (GET_CODE (insn) == INSN
7297 && GET_CODE (PATTERN (insn)) == SEQUENCE)
7298 {
7299 int count = 0;
7300 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7301 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7302 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
7303 count++;
7304 return count;
7305 }
7306 else
7307 {
7308 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7309 if (INSN_UID (insn) == VARRAY_INT (vec, j))
7310 return 1;
7311 }
7312 return 0;
7313 }
7314
7315 int
7316 prologue_epilogue_contains (rtx insn)
7317 {
7318 if (contains (insn, prologue))
7319 return 1;
7320 if (contains (insn, epilogue))
7321 return 1;
7322 return 0;
7323 }
7324
7325 int
7326 sibcall_epilogue_contains (rtx insn)
7327 {
7328 if (sibcall_epilogue)
7329 return contains (insn, sibcall_epilogue);
7330 return 0;
7331 }
7332
7333 #ifdef HAVE_return
7334 /* Insert gen_return at the end of block BB. This also means updating
7335 block_for_insn appropriately. */
7336
7337 static void
7338 emit_return_into_block (basic_block bb, rtx line_note)
7339 {
7340 emit_jump_insn_after (gen_return (), BB_END (bb));
7341 if (line_note)
7342 emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
7343 }
7344 #endif /* HAVE_return */
7345
7346 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
7347
7348 /* These functions convert the epilogue into a variant that does not modify the
7349 stack pointer. This is used in cases where a function returns an object
7350 whose size is not known until it is computed. The called function leaves the
7351 object on the stack, leaves the stack depressed, and returns a pointer to
7352 the object.
7353
7354 What we need to do is track all modifications and references to the stack
7355 pointer, deleting the modifications and changing the references to point to
7356 the location the stack pointer would have pointed to had the modifications
7357 taken place.
7358
7359 These functions need to be portable so we need to make as few assumptions
7360 about the epilogue as we can. However, the epilogue basically contains
7361 three things: instructions to reset the stack pointer, instructions to
7362 reload registers, possibly including the frame pointer, and an
7363 instruction to return to the caller.
7364
7365 If we can't be sure of what a relevant epilogue insn is doing, we abort.
7366 We also make no attempt to validate the insns we make since if they are
7367 invalid, we probably can't do anything valid. The intent is that these
7368 routines get "smarter" as more and more machines start to use them and
7369 they try operating on different epilogues.
7370
7371 We use the following structure to track what the part of the epilogue that
7372 we've already processed has done. We keep two copies of the SP equivalence,
7373 one for use during the insn we are processing and one for use in the next
7374 insn. The difference is because one part of a PARALLEL may adjust SP
7375 and the other may use it. */
7376
7377 struct epi_info
7378 {
7379 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
7380 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
7381 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
7382 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
7383 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
7384 should be set to once we no longer need
7385 its value. */
7386 rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
7387 for registers. */
7388 };
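
/* Editorial example, not part of the original source: starting from
   sp_equiv_reg == stack_pointer_rtx and sp_offset == 0, an epilogue insn
       (set (reg sp) (plus (reg sp) (const_int 16)))
   is not emitted; the code below merely records sp_offset = 16.  A later
   reference such as (mem (plus (reg sp) (const_int 4))) is then rewritten in
   terms of the tracked equivalence, effectively becoming
   (mem (plus (reg sp) (const_int 20))), so the stack pointer itself is never
   modified.  */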
7389
7390 static void handle_epilogue_set (rtx, struct epi_info *);
7391 static void update_epilogue_consts (rtx, rtx, void *);
7392 static void emit_equiv_load (struct epi_info *);
7393
7394 /* Modify INSN, a list of one or more insns that is part of the epilogue, to
7395 make no modifications to the stack pointer. Return the new list of insns. */
7396
7397 static rtx
7398 keep_stack_depressed (rtx insns)
7399 {
7400 int j;
7401 struct epi_info info;
7402 rtx insn, next;
7403
7404 /* If the epilogue is just a single instruction, it must be OK as is. */
7405 if (NEXT_INSN (insns) == NULL_RTX)
7406 return insns;
7407
7408 /* Otherwise, start a sequence, initialize the information we have, and
7409 process all the insns we were given. */
7410 start_sequence ();
7411
7412 info.sp_equiv_reg = stack_pointer_rtx;
7413 info.sp_offset = 0;
7414 info.equiv_reg_src = 0;
7415
7416 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
7417 info.const_equiv[j] = 0;
7418
7419 insn = insns;
7420 next = NULL_RTX;
7421 while (insn != NULL_RTX)
7422 {
7423 next = NEXT_INSN (insn);
7424
7425 if (!INSN_P (insn))
7426 {
7427 add_insn (insn);
7428 insn = next;
7429 continue;
7430 }
7431
7432 /* If this insn references the register that SP is equivalent to and
7433 we have a pending load to that register, we must force out the load
7434 first and then indicate we no longer know what SP's equivalent is. */
7435 if (info.equiv_reg_src != 0
7436 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
7437 {
7438 emit_equiv_load (&info);
7439 info.sp_equiv_reg = 0;
7440 }
7441
7442 info.new_sp_equiv_reg = info.sp_equiv_reg;
7443 info.new_sp_offset = info.sp_offset;
7444
7445 /* If this is a (RETURN) and the return address is on the stack,
7446 update the address and change to an indirect jump. */
7447 if (GET_CODE (PATTERN (insn)) == RETURN
7448 || (GET_CODE (PATTERN (insn)) == PARALLEL
7449 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
7450 {
7451 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
7452 rtx base = 0;
7453 HOST_WIDE_INT offset = 0;
7454 rtx jump_insn, jump_set;
7455
7456 /* If the return address is in a register, we can emit the insn
7457 unchanged. Otherwise, it must be a MEM and we see what the
7458 base register and offset are. In any case, we have to emit any
7459 pending load to the equivalent reg of SP, if any. */
7460 if (GET_CODE (retaddr) == REG)
7461 {
7462 emit_equiv_load (&info);
7463 add_insn (insn);
7464 insn = next;
7465 continue;
7466 }
7467 else if (GET_CODE (retaddr) == MEM
7468 && GET_CODE (XEXP (retaddr, 0)) == REG)
7469 base = gen_rtx_REG (Pmode, REGNO (XEXP (retaddr, 0))), offset = 0;
7470 else if (GET_CODE (retaddr) == MEM
7471 && GET_CODE (XEXP (retaddr, 0)) == PLUS
7472 && GET_CODE (XEXP (XEXP (retaddr, 0), 0)) == REG
7473 && GET_CODE (XEXP (XEXP (retaddr, 0), 1)) == CONST_INT)
7474 {
7475 base = gen_rtx_REG (Pmode, REGNO (XEXP (XEXP (retaddr, 0), 0)));
7476 offset = INTVAL (XEXP (XEXP (retaddr, 0), 1));
7477 }
7478 else
7479 abort ();
7480
7481 /* If the base of the location containing the return pointer
7482 is SP, we must update it with the replacement address. Otherwise,
7483 just build the necessary MEM. */
7484 retaddr = plus_constant (base, offset);
7485 if (base == stack_pointer_rtx)
7486 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
7487 plus_constant (info.sp_equiv_reg,
7488 info.sp_offset));
7489
7490 retaddr = gen_rtx_MEM (Pmode, retaddr);
7491
7492 /* If there is a pending load to the equivalent register for SP
7493 and we reference that register, we must load our address into
7494 a scratch register and then do that load. */
7495 if (info.equiv_reg_src
7496 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
7497 {
7498 unsigned int regno;
7499 rtx reg;
7500
7501 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7502 if (HARD_REGNO_MODE_OK (regno, Pmode)
7503 && !fixed_regs[regno]
7504 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
7505 && !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
7506 regno)
7507 && !refers_to_regno_p (regno,
7508 regno + hard_regno_nregs[regno]
7509 [Pmode],
7510 info.equiv_reg_src, NULL)
7511 && info.const_equiv[regno] == 0)
7512 break;
7513
7514 if (regno == FIRST_PSEUDO_REGISTER)
7515 abort ();
7516
7517 reg = gen_rtx_REG (Pmode, regno);
7518 emit_move_insn (reg, retaddr);
7519 retaddr = reg;
7520 }
7521
7522 emit_equiv_load (&info);
7523 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
7524
7525 /* Show the SET in the above insn is a RETURN. */
7526 jump_set = single_set (jump_insn);
7527 if (jump_set == 0)
7528 abort ();
7529 else
7530 SET_IS_RETURN_P (jump_set) = 1;
7531 }
7532
7533 /* If SP is not mentioned in the pattern and its equivalent register, if
7534 any, is not modified, just emit it. Otherwise, if neither is set,
7535 replace the reference to SP and emit the insn. If none of those are
7536 true, handle each SET individually. */
7537 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
7538 && (info.sp_equiv_reg == stack_pointer_rtx
7539 || !reg_set_p (info.sp_equiv_reg, insn)))
7540 add_insn (insn);
7541 else if (! reg_set_p (stack_pointer_rtx, insn)
7542 && (info.sp_equiv_reg == stack_pointer_rtx
7543 || !reg_set_p (info.sp_equiv_reg, insn)))
7544 {
7545 if (! validate_replace_rtx (stack_pointer_rtx,
7546 plus_constant (info.sp_equiv_reg,
7547 info.sp_offset),
7548 insn))
7549 abort ();
7550
7551 add_insn (insn);
7552 }
7553 else if (GET_CODE (PATTERN (insn)) == SET)
7554 handle_epilogue_set (PATTERN (insn), &info);
7555 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7556 {
7557 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
7558 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
7559 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
7560 }
7561 else
7562 add_insn (insn);
7563
7564 info.sp_equiv_reg = info.new_sp_equiv_reg;
7565 info.sp_offset = info.new_sp_offset;
7566
7567 /* Now update any constants this insn sets. */
7568 note_stores (PATTERN (insn), update_epilogue_consts, &info);
7569 insn = next;
7570 }
7571
7572 insns = get_insns ();
7573 end_sequence ();
7574 return insns;
7575 }
7576
7577 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
7578 structure that contains information about what we've seen so far. We
7579 process this SET by either updating that data or by emitting one or
7580 more insns. */
7581
7582 static void
7583 handle_epilogue_set (rtx set, struct epi_info *p)
7584 {
7585 /* First handle the case where we are setting SP. Record what it is being
7586 set from. If unknown, abort. */
7587 if (reg_set_p (stack_pointer_rtx, set))
7588 {
7589 if (SET_DEST (set) != stack_pointer_rtx)
7590 abort ();
7591
7592 if (GET_CODE (SET_SRC (set)) == PLUS)
7593 {
7594 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
7595 if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
7596 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
7597 else if (GET_CODE (XEXP (SET_SRC (set), 1)) == REG
7598 && REGNO (XEXP (SET_SRC (set), 1)) < FIRST_PSEUDO_REGISTER
7599 && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))] != 0)
7600 p->new_sp_offset
7601 = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
7602 else
7603 abort ();
7604 }
7605 else
7606 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
7607
7608 /* If we are adjusting SP, we adjust from the old data. */
7609 if (p->new_sp_equiv_reg == stack_pointer_rtx)
7610 {
7611 p->new_sp_equiv_reg = p->sp_equiv_reg;
7612 p->new_sp_offset += p->sp_offset;
7613 }
7614
7615 if (p->new_sp_equiv_reg == 0 || GET_CODE (p->new_sp_equiv_reg) != REG)
7616 abort ();
7617
7618 return;
7619 }
7620
7621 /* Next handle the case where we are setting SP's equivalent register.
7622 If we already have a value to set it to, abort. We could update, but
7623 there seems little point in handling that case. Note that we have
7624 to allow for the case where we are setting the register set in
7625 the previous part of a PARALLEL inside a single insn. But use the
7626 old offset for any updates within this insn. We must allow for the case
7627 where the register is being set in a different (usually wider) mode than
7628 Pmode. */
7629 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
7630 {
7631 if (p->equiv_reg_src != 0
7632 || GET_CODE (p->new_sp_equiv_reg) != REG
7633 || GET_CODE (SET_DEST (set)) != REG
7634 || GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) > BITS_PER_WORD
7635 || REGNO (p->new_sp_equiv_reg) != REGNO (SET_DEST (set)))
7636 abort ();
7637 else
7638 p->equiv_reg_src
7639 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
7640 plus_constant (p->sp_equiv_reg,
7641 p->sp_offset));
7642 }
7643
7644 /* Otherwise, replace any references to SP in the insn to its new value
7645 and emit the insn. */
7646 else
7647 {
7648 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
7649 plus_constant (p->sp_equiv_reg,
7650 p->sp_offset));
7651 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
7652 plus_constant (p->sp_equiv_reg,
7653 p->sp_offset));
7654 emit_insn (set);
7655 }
7656 }
7657
7658 /* Update the tracking information for registers set to constants. */
7659
7660 static void
7661 update_epilogue_consts (rtx dest, rtx x, void *data)
7662 {
7663 struct epi_info *p = (struct epi_info *) data;
7664 rtx new;
7665
7666 if (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
7667 return;
7668
7669 /* If we are either clobbering a register or doing a partial set,
7670 show we don't know the value. */
7671 else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
7672 p->const_equiv[REGNO (dest)] = 0;
7673
7674 /* If we are setting it to a constant, record that constant. */
7675 else if (GET_CODE (SET_SRC (x)) == CONST_INT)
7676 p->const_equiv[REGNO (dest)] = SET_SRC (x);
7677
7678 /* If this is a binary operation between a register we have been tracking
7679 and a constant, see if we can compute a new constant value. */
7680 else if (ARITHMETIC_P (SET_SRC (x))
7681 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
7682 && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
7683 && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
7684 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
7685 && 0 != (new = simplify_binary_operation
7686 (GET_CODE (SET_SRC (x)), GET_MODE (dest),
7687 p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
7688 XEXP (SET_SRC (x), 1)))
7689 && GET_CODE (new) == CONST_INT)
7690 p->const_equiv[REGNO (dest)] = new;
7691
7692 /* Otherwise, we can't do anything with this value. */
7693 else
7694 p->const_equiv[REGNO (dest)] = 0;
7695 }
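
/* Editorial worked example, not part of the original source: after
   (set (reg 3) (const_int 64)) the code above records const_equiv[3] = 64;
   a subsequent (set (reg 3) (plus (reg 3) (const_int -16))) is folded via
   simplify_binary_operation, leaving const_equiv[3] = 48.  A CLOBBER of
   register 3, or a partial set of it, resets const_equiv[3] to 0.  */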
7696
7697 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
7698
7699 static void
7700 emit_equiv_load (struct epi_info *p)
7701 {
7702 if (p->equiv_reg_src != 0)
7703 {
7704 rtx dest = p->sp_equiv_reg;
7705
7706 if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
7707 dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
7708 REGNO (p->sp_equiv_reg));
7709
7710 emit_move_insn (dest, p->equiv_reg_src);
7711 p->equiv_reg_src = 0;
7712 }
7713 }
7714 #endif
7715
7716 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
7717 this into place with notes indicating where the prologue ends and where
7718 the epilogue begins. Update the basic block information when possible. */
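
/* Editorial sketch, not part of the original source: for a simple function
   the pass below produces an insn stream shaped roughly like

       (prologue insns)        inserted on the edge leaving ENTRY_BLOCK_PTR
       NOTE_INSN_PROLOGUE_END
         ... function body ...
       NOTE_INSN_EPILOGUE_BEG
       (epilogue insns)        inserted on the fallthru edge into EXIT_BLOCK_PTR

   record_insns stores the UIDs of both groups so that later passes can ask
   prologue_epilogue_contains about any given insn.  */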
7719
7720 void
7721 thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
7722 {
7723 int inserted = 0;
7724 edge e;
7725 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
7726 rtx seq;
7727 #endif
7728 #ifdef HAVE_prologue
7729 rtx prologue_end = NULL_RTX;
7730 #endif
7731 #if defined (HAVE_epilogue) || defined(HAVE_return)
7732 rtx epilogue_end = NULL_RTX;
7733 #endif
7734
7735 #ifdef HAVE_prologue
7736 if (HAVE_prologue)
7737 {
7738 start_sequence ();
7739 seq = gen_prologue ();
7740 emit_insn (seq);
7741
7742 /* Retain a map of the prologue insns. */
7743 record_insns (seq, &prologue);
7744 prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);
7745
7746 seq = get_insns ();
7747 end_sequence ();
7748 set_insn_locators (seq, prologue_locator);
7749
7750 /* Can't deal with multiple successors of the entry block
7751 at the moment. Function should always have at least one
7752 entry point. */
7753 if (!ENTRY_BLOCK_PTR->succ || ENTRY_BLOCK_PTR->succ->succ_next)
7754 abort ();
7755
7756 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
7757 inserted = 1;
7758 }
7759 #endif
7760
7761 /* If the exit block has no non-fake predecessors, we don't need
7762 an epilogue. */
7763 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7764 if ((e->flags & EDGE_FAKE) == 0)
7765 break;
7766 if (e == NULL)
7767 goto epilogue_done;
7768
7769 #ifdef HAVE_return
7770 if (optimize && HAVE_return)
7771 {
7772 /* If we're allowed to generate a simple return instruction,
7773 then by definition we don't need a full epilogue. Examine
7774 the block that falls through to EXIT. If it does not
7775 contain any code, examine its predecessors and try to
7776 emit (conditional) return instructions. */
7777
7778 basic_block last;
7779 edge e_next;
7780 rtx label;
7781
7782 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7783 if (e->flags & EDGE_FALLTHRU)
7784 break;
7785 if (e == NULL)
7786 goto epilogue_done;
7787 last = e->src;
7788
7789 /* Verify that there are no active instructions in the last block. */
7790 label = BB_END (last);
7791 while (label && GET_CODE (label) != CODE_LABEL)
7792 {
7793 if (active_insn_p (label))
7794 break;
7795 label = PREV_INSN (label);
7796 }
7797
7798 if (BB_HEAD (last) == label && GET_CODE (label) == CODE_LABEL)
7799 {
7800 rtx epilogue_line_note = NULL_RTX;
7801
7802 /* Locate the line number associated with the closing brace,
7803 if we can find one. */
7804 for (seq = get_last_insn ();
7805 seq && ! active_insn_p (seq);
7806 seq = PREV_INSN (seq))
7807 if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
7808 {
7809 epilogue_line_note = seq;
7810 break;
7811 }
7812
7813 for (e = last->pred; e; e = e_next)
7814 {
7815 basic_block bb = e->src;
7816 rtx jump;
7817
7818 e_next = e->pred_next;
7819 if (bb == ENTRY_BLOCK_PTR)
7820 continue;
7821
7822 jump = BB_END (bb);
7823 if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
7824 continue;
7825
7826 /* If we have an unconditional jump, we can replace that
7827 with a simple return instruction. */
7828 if (simplejump_p (jump))
7829 {
7830 emit_return_into_block (bb, epilogue_line_note);
7831 delete_insn (jump);
7832 }
7833
7834 /* If we have a conditional jump, we can try to replace
7835 that with a conditional return instruction. */
7836 else if (condjump_p (jump))
7837 {
7838 if (! redirect_jump (jump, 0, 0))
7839 continue;
7840
7841 /* If this block has only one successor, it both jumps
7842 and falls through to the fallthru block, so we can't
7843 delete the edge. */
7844 if (bb->succ->succ_next == NULL)
7845 continue;
7846 }
7847 else
7848 continue;
7849
7850 /* Fix up the CFG for the successful change we just made. */
7851 redirect_edge_succ (e, EXIT_BLOCK_PTR);
7852 }
7853
7854 /* Emit a return insn for the exit fallthru block. Whether
7855 this is still reachable will be determined later. */
7856
7857 emit_barrier_after (BB_END (last));
7858 emit_return_into_block (last, epilogue_line_note);
7859 epilogue_end = BB_END (last);
7860 last->succ->flags &= ~EDGE_FALLTHRU;
7861 goto epilogue_done;
7862 }
7863 }
7864 #endif
7865 #ifdef HAVE_epilogue
7866 if (HAVE_epilogue)
7867 {
7868 /* Find the edge that falls through to EXIT. Other edges may exist
7869 due to RETURN instructions, but those don't need epilogues.
7870 There really shouldn't be a mixture -- either all should have
7871 been converted or none, however... */
7872
7873 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7874 if (e->flags & EDGE_FALLTHRU)
7875 break;
7876 if (e == NULL)
7877 goto epilogue_done;
7878
7879 start_sequence ();
7880 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
7881
7882 seq = gen_epilogue ();
7883
7884 #ifdef INCOMING_RETURN_ADDR_RTX
7885 /* If this function returns with the stack depressed and we can support
7886 it, massage the epilogue to actually do that. */
7887 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
7888 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
7889 seq = keep_stack_depressed (seq);
7890 #endif
7891
7892 emit_jump_insn (seq);
7893
7894 /* Retain a map of the epilogue insns. */
7895 record_insns (seq, &epilogue);
7896 set_insn_locators (seq, epilogue_locator);
7897
7898 seq = get_insns ();
7899 end_sequence ();
7900
7901 insert_insn_on_edge (seq, e);
7902 inserted = 1;
7903 }
7904 #endif
7905 epilogue_done:
7906
7907 if (inserted)
7908 commit_edge_insertions ();
7909
7910 #ifdef HAVE_sibcall_epilogue
7911 /* Emit sibling epilogues before any sibling call sites. */
7912 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7913 {
7914 basic_block bb = e->src;
7915 rtx insn = BB_END (bb);
7916 rtx i;
7917 rtx newinsn;
7918
7919 if (GET_CODE (insn) != CALL_INSN
7920 || ! SIBLING_CALL_P (insn))
7921 continue;
7922
7923 start_sequence ();
7924 emit_insn (gen_sibcall_epilogue ());
7925 seq = get_insns ();
7926 end_sequence ();
7927
7928 /* Retain a map of the epilogue insns. Used in life analysis to
7929 avoid getting rid of sibcall epilogue insns. Do this before we
7930 actually emit the sequence. */
7931 record_insns (seq, &sibcall_epilogue);
7932 set_insn_locators (seq, epilogue_locator);
7933
7934 i = PREV_INSN (insn);
7935 newinsn = emit_insn_before (seq, insn);
7936 }
7937 #endif
7938
7939 #ifdef HAVE_prologue
7940 /* This is probably all useless now that we use locators. */
7941 if (prologue_end)
7942 {
7943 rtx insn, prev;
7944
7945 /* GDB handles `break f' by setting a breakpoint on the first
7946 line note after the prologue. Which means (1) that if
7947 there are line number notes before where we inserted the
7948 prologue we should move them, and (2) we should generate a
7949 note before the end of the first basic block, if there isn't
7950 one already there.
7951
7952 ??? This behavior is completely broken when dealing with
7953 multiple entry functions. We simply always place the note
7954 in the first basic block and let alternate entry points
7955 be missed.
7956 */
7957
7958 for (insn = prologue_end; insn; insn = prev)
7959 {
7960 prev = PREV_INSN (insn);
7961 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7962 {
7963 /* Note that we cannot reorder the first insn in the
7964 chain, since rest_of_compilation relies on that
7965 remaining constant. */
7966 if (prev == NULL)
7967 break;
7968 reorder_insns (insn, insn, prologue_end);
7969 }
7970 }
7971
7972 /* Find the last line number note in the first block. */
7973 for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
7974 insn != prologue_end && insn;
7975 insn = PREV_INSN (insn))
7976 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7977 break;
7978
7979 /* If we didn't find one, make a copy of the first line number
7980 we run across. */
7981 if (! insn)
7982 {
7983 for (insn = next_active_insn (prologue_end);
7984 insn;
7985 insn = PREV_INSN (insn))
7986 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7987 {
7988 emit_note_copy_after (insn, prologue_end);
7989 break;
7990 }
7991 }
7992 }
7993 #endif
7994 #ifdef HAVE_epilogue
7995 if (epilogue_end)
7996 {
7997 rtx insn, next;
7998
7999 /* Similarly, move any line notes that appear after the epilogue.
8000 There is no need, however, to be quite so particular about the existence
8001 of such a note. */
8002 for (insn = epilogue_end; insn; insn = next)
8003 {
8004 next = NEXT_INSN (insn);
8005 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
8006 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
8007 }
8008 }
8009 #endif
8010 }
8011
8012 /* Reposition the prologue-end and epilogue-begin notes after instruction
8013 scheduling and delayed branch scheduling. */
8014
8015 void
8016 reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
8017 {
8018 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
8019 rtx insn, last, note;
8020 int len;
8021
8022 if ((len = VARRAY_SIZE (prologue)) > 0)
8023 {
8024 last = 0, note = 0;
8025
8026 /* Scan from the beginning until we reach the last prologue insn.
8027 We apparently can't depend on basic_block_{head,end} after
8028 reorg has run. */
8029 for (insn = f; insn; insn = NEXT_INSN (insn))
8030 {
8031 if (GET_CODE (insn) == NOTE)
8032 {
8033 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
8034 note = insn;
8035 }
8036 else if (contains (insn, prologue))
8037 {
8038 last = insn;
8039 if (--len == 0)
8040 break;
8041 }
8042 }
8043
8044 if (last)
8045 {
8046 /* Find the prologue-end note if we haven't already, and
8047 move it to just after the last prologue insn. */
8048 if (note == 0)
8049 {
8050 for (note = last; (note = NEXT_INSN (note));)
8051 if (GET_CODE (note) == NOTE
8052 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
8053 break;
8054 }
8055
8056 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
8057 if (GET_CODE (last) == CODE_LABEL)
8058 last = NEXT_INSN (last);
8059 reorder_insns (note, note, last);
8060 }
8061 }
8062
8063 if ((len = VARRAY_SIZE (epilogue)) > 0)
8064 {
8065 last = 0, note = 0;
8066
8067 /* Scan from the end until we reach the first epilogue insn.
8068 We apparently can't depend on basic_block_{head,end} after
8069 reorg has run. */
8070 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
8071 {
8072 if (GET_CODE (insn) == NOTE)
8073 {
8074 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
8075 note = insn;
8076 }
8077 else if (contains (insn, epilogue))
8078 {
8079 last = insn;
8080 if (--len == 0)
8081 break;
8082 }
8083 }
8084
8085 if (last)
8086 {
8087 /* Find the epilogue-begin note if we haven't already, and
8088 move it to just before the first epilogue insn. */
8089 if (note == 0)
8090 {
8091 for (note = insn; (note = PREV_INSN (note));)
8092 if (GET_CODE (note) == NOTE
8093 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
8094 break;
8095 }
8096
8097 if (PREV_INSN (last) != note)
8098 reorder_insns (note, note, PREV_INSN (last));
8099 }
8100 }
8101 #endif /* HAVE_prologue or HAVE_epilogue */
8102 }
8103
8104 /* Called once, at initialization, to initialize function.c. */
8105
8106 void
8107 init_function_once (void)
8108 {
8109 VARRAY_INT_INIT (prologue, 0, "prologue");
8110 VARRAY_INT_INIT (epilogue, 0, "epilogue");
8111 VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
8112 }
8113
8114 /* Returns the name of the current function. */
8115 const char *
8116 current_function_name (void)
8117 {
8118 return (*lang_hooks.decl_printable_name) (cfun->decl, 2);
8119 }
8120
8121 #include "gt-function.h"