1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register.
35
36 Call `put_var_into_stack' when you learn, belatedly, that a variable
37 previously given a pseudo-register must in fact go in the stack.
38 This function changes the DECL_RTL to be a stack slot instead of a reg
39 then scans all the RTL instructions so far generated to correct them. */
40
41 #include "config.h"
42 #include "system.h"
43 #include "rtl.h"
44 #include "tree.h"
45 #include "flags.h"
46 #include "except.h"
47 #include "function.h"
48 #include "expr.h"
49 #include "libfuncs.h"
50 #include "regs.h"
51 #include "hard-reg-set.h"
52 #include "insn-config.h"
53 #include "recog.h"
54 #include "output.h"
55 #include "basic-block.h"
56 #include "obstack.h"
57 #include "toplev.h"
58 #include "hashtab.h"
59 #include "ggc.h"
60 #include "tm_p.h"
61 #include "integrate.h"
62 #include "langhooks.h"
63
64 #ifndef TRAMPOLINE_ALIGNMENT
65 #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
66 #endif
67
68 #ifndef LOCAL_ALIGNMENT
69 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
70 #endif
71
72 /* Some systems use __main in a way incompatible with its use in gcc; in these
73    cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
74 give the same symbol without quotes for an alternative entry point. You
75 must define both, or neither. */
76 #ifndef NAME__MAIN
77 #define NAME__MAIN "__main"
78 #define SYMBOL__MAIN __main
79 #endif
80
81 /* Round a value down to the largest multiple of the required alignment
82    that does not exceed it.  Avoid using division in case the value is
83    negative.  Assume the alignment is a power of two. */
84 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
85
86 /* Similar, but round up to the next multiple of the required
87    alignment. */
88 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
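/* Editorial worked example (illustration only, not part of the original
   source): with ALIGN == 8, FLOOR_ROUND (13, 8) == 8 and
   CEIL_ROUND (13, 8) == 16, while a negative value such as -13 gives
   FLOOR_ROUND (-13, 8) == -16 and CEIL_ROUND (-13, 8) == -8; both macros
   stay well defined for negative frame offsets because they mask rather
   than divide. */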
89
90 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
91 during rtl generation. If they are different register numbers, this is
92 always true. It may also be true if
93 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
94 generation. See fix_lexical_addr for details. */
95
96 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
97 #define NEED_SEPARATE_AP
98 #endif
99
100 /* Nonzero if function being compiled doesn't contain any calls
101 (ignoring the prologue and epilogue). This is set prior to
102 local register allocation and is valid for the remaining
103 compiler passes. */
104 int current_function_is_leaf;
105
106 /* Nonzero if function being compiled doesn't contain any instructions
107 that can throw an exception. This is set prior to final. */
108
109 int current_function_nothrow;
110
111 /* Nonzero if function being compiled doesn't modify the stack pointer
112 (ignoring the prologue and epilogue). This is only valid after
113 life_analysis has run. */
114 int current_function_sp_is_unchanging;
115
116 /* Nonzero if the function being compiled is a leaf function which only
117 uses leaf registers. This is valid after reload (specifically after
118 sched2) and is useful only if the port defines LEAF_REGISTERS. */
119 int current_function_uses_only_leaf_regs;
120
121 /* Nonzero once virtual register instantiation has been done.
122 assign_stack_local uses frame_pointer_rtx when this is nonzero.
123 calls.c:emit_library_call_value_1 uses it to set up
124 post-instantiation libcalls. */
125 int virtuals_instantiated;
126
127 /* Assign unique numbers to labels generated for profiling. */
128 static int profile_label_no;
129
130 /* These variables hold pointers to functions to create and destroy
131 target specific, per-function data structures. */
132 struct machine_function * (*init_machine_status) PARAMS ((void));
133
134 /* The FUNCTION_DECL for an inline function currently being expanded. */
135 tree inline_function_decl;
136
137 /* The currently compiled function. */
138 struct function *cfun = 0;
139
140 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
141 static GTY(()) varray_type prologue;
142 static GTY(()) varray_type epilogue;
143
144 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
145 in this function. */
146 static GTY(()) varray_type sibcall_epilogue;
147 \f
148 /* In order to evaluate some expressions, such as function calls returning
149 structures in memory, we need to temporarily allocate stack locations.
150 We record each allocated temporary in the following structure.
151
152 Associated with each temporary slot is a nesting level. When we pop up
153 one level, all temporaries associated with the previous level are freed.
154 Normally, all temporaries are freed after the execution of the statement
155 in which they were created. However, if we are inside a ({...}) grouping,
156 the result may be in a temporary and hence must be preserved. If the
157 result could be in a temporary, we preserve it if we can determine which
158 one it is in. If we cannot determine which temporary may contain the
159 result, all temporaries are preserved. A temporary is preserved by
160 pretending it was allocated at the previous nesting level.
161
162 Automatic variables are also assigned temporary slots, at the nesting
163    level where they are defined.  They are marked as "kept" so that
164 free_temp_slots will not free them. */
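
/* A minimal usage sketch of the nesting-level machinery described above
   (editorial illustration; the calls below are hypothetical):

     push_temp_slots ();                  - enter a new nesting level
     t = assign_stack_temp (SImode,
                            GET_MODE_SIZE (SImode), 0);
                                          - slot lives at this level
     ... emit RTL that uses T ...
     free_temp_slots ();                  - end of statement: slot reusable
     pop_temp_slots ();                   - leave the level

   preserve_temp_slots may be called before free_temp_slots to keep a
   result that may reside in a temporary, as explained above. */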
165
166 struct temp_slot GTY(())
167 {
168 /* Points to next temporary slot. */
169 struct temp_slot *next;
170   /* The rtx used to reference the slot. */
171 rtx slot;
172 /* The rtx used to represent the address if not the address of the
173 slot above. May be an EXPR_LIST if multiple addresses exist. */
174 rtx address;
175 /* The alignment (in bits) of the slot. */
176 unsigned int align;
177 /* The size, in units, of the slot. */
178 HOST_WIDE_INT size;
179 /* The type of the object in the slot, or zero if it doesn't correspond
180 to a type. We use this to determine whether a slot can be reused.
181 It can be reused if objects of the type of the new slot will always
182 conflict with objects of the type of the old slot. */
183 tree type;
184 /* The value of `sequence_rtl_expr' when this temporary is allocated. */
185 tree rtl_expr;
186 /* Non-zero if this temporary is currently in use. */
187 char in_use;
188 /* Non-zero if this temporary has its address taken. */
189 char addr_taken;
190 /* Nesting level at which this slot is being used. */
191 int level;
192 /* Non-zero if this should survive a call to free_temp_slots. */
193 int keep;
194 /* The offset of the slot from the frame_pointer, including extra space
195 for alignment. This info is for combine_temp_slots. */
196 HOST_WIDE_INT base_offset;
197 /* The size of the slot, including extra space for alignment. This
198 info is for combine_temp_slots. */
199 HOST_WIDE_INT full_size;
200 };
201 \f
202 /* This structure is used to record MEMs or pseudos used to replace VAR, any
203 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
204 maintain this list in case two operands of an insn were required to match;
205 in that case we must ensure we use the same replacement. */
206
207 struct fixup_replacement GTY(())
208 {
209 rtx old;
210 rtx new;
211 struct fixup_replacement *next;
212 };
213
214 struct insns_for_mem_entry
215 {
216 /* A MEM. */
217 rtx key;
218 /* These are the INSNs which reference the MEM. */
219 rtx insns;
220 };
221
222 /* Forward declarations. */
223
224 static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
225 int, struct function *));
226 static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
227 static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
228 enum machine_mode, enum machine_mode,
229 int, unsigned int, int,
230 htab_t));
231 static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
232 enum machine_mode,
233 htab_t));
234 static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int, rtx,
235 htab_t));
236 static struct fixup_replacement
237 *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
238 static void fixup_var_refs_insns PARAMS ((rtx, rtx, enum machine_mode,
239 int, int, rtx));
240 static void fixup_var_refs_insns_with_hash
241 PARAMS ((htab_t, rtx,
242 enum machine_mode, int, rtx));
243 static void fixup_var_refs_insn PARAMS ((rtx, rtx, enum machine_mode,
244 int, int, rtx));
245 static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
246 struct fixup_replacement **, rtx));
247 static rtx fixup_memory_subreg PARAMS ((rtx, rtx, enum machine_mode, int));
248 static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, enum machine_mode,
249 int));
250 static rtx fixup_stack_1 PARAMS ((rtx, rtx));
251 static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
252 static void instantiate_decls PARAMS ((tree, int));
253 static void instantiate_decls_1 PARAMS ((tree, int));
254 static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
255 static rtx instantiate_new_reg PARAMS ((rtx, HOST_WIDE_INT *));
256 static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
257 static void delete_handlers PARAMS ((void));
258 static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
259 struct args_size *));
260 #ifndef ARGS_GROW_DOWNWARD
261 static void pad_below PARAMS ((struct args_size *, enum machine_mode,
262 tree));
263 #endif
264 static rtx round_trampoline_addr PARAMS ((rtx));
265 static rtx adjust_trampoline_addr PARAMS ((rtx));
266 static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
267 static void reorder_blocks_0 PARAMS ((tree));
268 static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
269 static void reorder_fix_fragments PARAMS ((tree));
270 static tree blocks_nreverse PARAMS ((tree));
271 static int all_blocks PARAMS ((tree, tree *));
272 static tree *get_block_vector PARAMS ((tree, int *));
273 extern tree debug_find_var_in_block_tree PARAMS ((tree, tree));
274 /* We always define `record_insns' even if it's not used so that we
275 can always export `prologue_epilogue_contains'. */
276 static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
277 static int contains PARAMS ((rtx, varray_type));
278 #ifdef HAVE_return
279 static void emit_return_into_block PARAMS ((basic_block, rtx));
280 #endif
281 static void put_addressof_into_stack PARAMS ((rtx, htab_t));
282 static bool purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
283 htab_t));
284 static void purge_single_hard_subreg_set PARAMS ((rtx));
285 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
286 static rtx keep_stack_depressed PARAMS ((rtx));
287 #endif
288 static int is_addressof PARAMS ((rtx *, void *));
289 static hashval_t insns_for_mem_hash PARAMS ((const void *));
290 static int insns_for_mem_comp PARAMS ((const void *, const void *));
291 static int insns_for_mem_walk PARAMS ((rtx *, void *));
292 static void compute_insns_for_mem PARAMS ((rtx, rtx, htab_t));
293 static void prepare_function_start PARAMS ((void));
294 static void do_clobber_return_reg PARAMS ((rtx, void *));
295 static void do_use_return_reg PARAMS ((rtx, void *));
296 \f
297 /* Pointer to chain of `struct function' for containing functions. */
298 static GTY(()) struct function *outer_function_chain;
299
300 /* Given a function decl for a containing function,
301 return the `struct function' for it. */
302
303 struct function *
304 find_function_data (decl)
305 tree decl;
306 {
307 struct function *p;
308
309 for (p = outer_function_chain; p; p = p->outer)
310 if (p->decl == decl)
311 return p;
312
313 abort ();
314 }
315
316 /* Save the current context for compilation of a nested function.
317 This is called from language-specific code. The caller should use
318 the enter_nested langhook to save any language-specific state,
319 since this function knows only about language-independent
320 variables. */
321
322 void
323 push_function_context_to (context)
324 tree context;
325 {
326 struct function *p;
327
328 if (context)
329 {
330 if (context == current_function_decl)
331 cfun->contains_functions = 1;
332 else
333 {
334 struct function *containing = find_function_data (context);
335 containing->contains_functions = 1;
336 }
337 }
338
339 if (cfun == 0)
340 init_dummy_function_start ();
341 p = cfun;
342
343 p->outer = outer_function_chain;
344 outer_function_chain = p;
345 p->fixup_var_refs_queue = 0;
346
347 (*lang_hooks.function.enter_nested) (p);
348
349 cfun = 0;
350 }
351
352 void
353 push_function_context ()
354 {
355 push_function_context_to (current_function_decl);
356 }
357
358 /* Restore the last saved context, at the end of a nested function.
359 This function is called from language-specific code. */
360
361 void
362 pop_function_context_from (context)
363 tree context ATTRIBUTE_UNUSED;
364 {
365 struct function *p = outer_function_chain;
366 struct var_refs_queue *queue;
367
368 cfun = p;
369 outer_function_chain = p->outer;
370
371 current_function_decl = p->decl;
372 reg_renumber = 0;
373
374 restore_emit_status (p);
375
376 (*lang_hooks.function.leave_nested) (p);
377
378 /* Finish doing put_var_into_stack for any of our variables which became
379 addressable during the nested function. If only one entry has to be
380 fixed up, just do that one. Otherwise, first make a list of MEMs that
381 are not to be unshared. */
382 if (p->fixup_var_refs_queue == 0)
383 ;
384 else if (p->fixup_var_refs_queue->next == 0)
385 fixup_var_refs (p->fixup_var_refs_queue->modified,
386 p->fixup_var_refs_queue->promoted_mode,
387 p->fixup_var_refs_queue->unsignedp,
388 p->fixup_var_refs_queue->modified, 0);
389 else
390 {
391 rtx list = 0;
392
393 for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
394 list = gen_rtx_EXPR_LIST (VOIDmode, queue->modified, list);
395
396 for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
397 fixup_var_refs (queue->modified, queue->promoted_mode,
398 queue->unsignedp, list, 0);
399
400 }
401
402 p->fixup_var_refs_queue = 0;
403
404 /* Reset variables that have known state during rtx generation. */
405 rtx_equal_function_value_matters = 1;
406 virtuals_instantiated = 0;
407 generating_concat_p = 1;
408 }
409
410 void
411 pop_function_context ()
412 {
413 pop_function_context_from (current_function_decl);
414 }
415
416 /* Clear out all parts of the state in F that can safely be discarded
417 after the function has been parsed, but not compiled, to let
418 garbage collection reclaim the memory. */
419
420 void
421 free_after_parsing (f)
422 struct function *f;
423 {
424 /* f->expr->forced_labels is used by code generation. */
425 /* f->emit->regno_reg_rtx is used by code generation. */
426 /* f->varasm is used by code generation. */
427 /* f->eh->eh_return_stub_label is used by code generation. */
428
429 (*lang_hooks.function.final) (f);
430 f->stmt = NULL;
431 }
432
433 /* Clear out all parts of the state in F that can safely be discarded
434 after the function has been compiled, to let garbage collection
435 reclaim the memory. */
436
437 void
438 free_after_compilation (f)
439 struct function *f;
440 {
441 f->eh = NULL;
442 f->expr = NULL;
443 f->emit = NULL;
444 f->varasm = NULL;
445 f->machine = NULL;
446
447 f->x_temp_slots = NULL;
448 f->arg_offset_rtx = NULL;
449 f->return_rtx = NULL;
450 f->internal_arg_pointer = NULL;
451 f->x_nonlocal_labels = NULL;
452 f->x_nonlocal_goto_handler_slots = NULL;
453 f->x_nonlocal_goto_handler_labels = NULL;
454 f->x_nonlocal_goto_stack_level = NULL;
455 f->x_cleanup_label = NULL;
456 f->x_return_label = NULL;
457 f->x_save_expr_regs = NULL;
458 f->x_stack_slot_list = NULL;
459 f->x_rtl_expr_chain = NULL;
460 f->x_tail_recursion_label = NULL;
461 f->x_tail_recursion_reentry = NULL;
462 f->x_arg_pointer_save_area = NULL;
463 f->x_clobber_return_insn = NULL;
464 f->x_context_display = NULL;
465 f->x_trampoline_list = NULL;
466 f->x_parm_birth_insn = NULL;
467 f->x_last_parm_insn = NULL;
468 f->x_parm_reg_stack_loc = NULL;
469 f->fixup_var_refs_queue = NULL;
470 f->original_arg_vector = NULL;
471 f->original_decl_initial = NULL;
472 f->inl_last_parm_insn = NULL;
473 f->epilogue_delay_list = NULL;
474 }
475 \f
476 /* Allocate fixed slots in the stack frame of the current function. */
477
478 /* Return size needed for stack frame based on slots so far allocated in
479 function F.
480 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
481 the caller may have to do that. */
482
483 HOST_WIDE_INT
484 get_func_frame_size (f)
485 struct function *f;
486 {
487 #ifdef FRAME_GROWS_DOWNWARD
488 return -f->x_frame_offset;
489 #else
490 return f->x_frame_offset;
491 #endif
492 }
493
494 /* Return size needed for stack frame based on slots so far allocated.
495 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
496 the caller may have to do that. */
497 HOST_WIDE_INT
498 get_frame_size ()
499 {
500 return get_func_frame_size (cfun);
501 }
502
503 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
504 with machine mode MODE.
505
506 ALIGN controls the amount of alignment for the address of the slot:
507 0 means according to MODE,
508 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
509 positive specifies alignment boundary in bits.
510
511 We do not round to stack_boundary here.
512
513 FUNCTION specifies the function to allocate in. */
514
515 static rtx
516 assign_stack_local_1 (mode, size, align, function)
517 enum machine_mode mode;
518 HOST_WIDE_INT size;
519 int align;
520 struct function *function;
521 {
522 rtx x, addr;
523 int bigend_correction = 0;
524 int alignment;
525 int frame_off, frame_alignment, frame_phase;
526
527 if (align == 0)
528 {
529 tree type;
530
531 if (mode == BLKmode)
532 alignment = BIGGEST_ALIGNMENT;
533 else
534 alignment = GET_MODE_ALIGNMENT (mode);
535
536 /* Allow the target to (possibly) increase the alignment of this
537 stack slot. */
538 type = (*lang_hooks.types.type_for_mode) (mode, 0);
539 if (type)
540 alignment = LOCAL_ALIGNMENT (type, alignment);
541
542 alignment /= BITS_PER_UNIT;
543 }
544 else if (align == -1)
545 {
546 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
547 size = CEIL_ROUND (size, alignment);
548 }
549 else
550 alignment = align / BITS_PER_UNIT;
551
552 #ifdef FRAME_GROWS_DOWNWARD
553 function->x_frame_offset -= size;
554 #endif
555
556   /* Ignore alignment requests we cannot honor given the preferred stack boundary. */
557 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
558 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
559
560 if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
561 function->stack_alignment_needed = alignment * BITS_PER_UNIT;
562
563 /* Calculate how many bytes the start of local variables is off from
564 stack alignment. */
565 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
566 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
567 frame_phase = frame_off ? frame_alignment - frame_off : 0;
568
569 /* Round frame offset to that alignment.
570 We must be careful here, since FRAME_OFFSET might be negative and
571 division with a negative dividend isn't as well defined as we might
572 like. So we instead assume that ALIGNMENT is a power of two and
573 use logical operations which are unambiguous. */
574 #ifdef FRAME_GROWS_DOWNWARD
575 function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset - frame_phase, alignment) + frame_phase;
576 #else
577 function->x_frame_offset = CEIL_ROUND (function->x_frame_offset - frame_phase, alignment) + frame_phase;
578 #endif
579
580 /* On a big-endian machine, if we are allocating more space than we will use,
581 use the least significant bytes of those that are allocated. */
582 if (BYTES_BIG_ENDIAN && mode != BLKmode)
583 bigend_correction = size - GET_MODE_SIZE (mode);
584
585 /* If we have already instantiated virtual registers, return the actual
586 address relative to the frame pointer. */
587 if (function == cfun && virtuals_instantiated)
588 addr = plus_constant (frame_pointer_rtx,
589 (frame_offset + bigend_correction
590 + STARTING_FRAME_OFFSET));
591 else
592 addr = plus_constant (virtual_stack_vars_rtx,
593 function->x_frame_offset + bigend_correction);
594
595 #ifndef FRAME_GROWS_DOWNWARD
596 function->x_frame_offset += size;
597 #endif
598
599 x = gen_rtx_MEM (mode, addr);
600
601 function->x_stack_slot_list
602 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
603
604 return x;
605 }
606
607 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
608 current function. */
609
610 rtx
611 assign_stack_local (mode, size, align)
612 enum machine_mode mode;
613 HOST_WIDE_INT size;
614 int align;
615 {
616 return assign_stack_local_1 (mode, size, align, cfun);
617 }
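
/* Editorial illustration (hypothetical caller, not part of this file):

     rtx slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);

   reserves a frame slot of DImode size, aligned according to the mode
   (ALIGN == 0), and returns a MEM whose address is expressed in terms of
   virtual_stack_vars_rtx until virtual registers are instantiated. */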
618 \f
619 /* Allocate a temporary stack slot and record it for possible later
620 reuse.
621
622 MODE is the machine mode to be given to the returned rtx.
623
624 SIZE is the size in units of the space required. We do no rounding here
625 since assign_stack_local will do any required rounding.
626
627 KEEP is 1 if this slot is to be retained after a call to
628 free_temp_slots. Automatic variables for a block are allocated
629 with this flag. KEEP is 2 if we allocate a longer term temporary,
630 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
631 if we are to allocate something at an inner level to be treated as
632 a variable in the block (e.g., a SAVE_EXPR).
633
634 TYPE is the type that will be used for the stack slot. */
635
636 rtx
637 assign_stack_temp_for_type (mode, size, keep, type)
638 enum machine_mode mode;
639 HOST_WIDE_INT size;
640 int keep;
641 tree type;
642 {
643 unsigned int align;
644 struct temp_slot *p, *best_p = 0;
645
646 /* If SIZE is -1 it means that somebody tried to allocate a temporary
647 of a variable size. */
648 if (size == -1)
649 abort ();
650
651 if (mode == BLKmode)
652 align = BIGGEST_ALIGNMENT;
653 else
654 align = GET_MODE_ALIGNMENT (mode);
655
656 if (! type)
657 type = (*lang_hooks.types.type_for_mode) (mode, 0);
658
659 if (type)
660 align = LOCAL_ALIGNMENT (type, align);
661
662 /* Try to find an available, already-allocated temporary of the proper
663 mode which meets the size and alignment requirements. Choose the
664 smallest one with the closest alignment. */
665 for (p = temp_slots; p; p = p->next)
666 if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
667 && ! p->in_use
668 && objects_must_conflict_p (p->type, type)
669 && (best_p == 0 || best_p->size > p->size
670 || (best_p->size == p->size && best_p->align > p->align)))
671 {
672 if (p->align == align && p->size == size)
673 {
674 best_p = 0;
675 break;
676 }
677 best_p = p;
678 }
679
680 /* Make our best, if any, the one to use. */
681 if (best_p)
682 {
683 /* If there are enough aligned bytes left over, make them into a new
684 temp_slot so that the extra bytes don't get wasted. Do this only
685 for BLKmode slots, so that we can be sure of the alignment. */
686 if (GET_MODE (best_p->slot) == BLKmode)
687 {
688 int alignment = best_p->align / BITS_PER_UNIT;
689 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
690
691 if (best_p->size - rounded_size >= alignment)
692 {
693 p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));
694 p->in_use = p->addr_taken = 0;
695 p->size = best_p->size - rounded_size;
696 p->base_offset = best_p->base_offset + rounded_size;
697 p->full_size = best_p->full_size - rounded_size;
698 p->slot = gen_rtx_MEM (BLKmode,
699 plus_constant (XEXP (best_p->slot, 0),
700 rounded_size));
701 p->align = best_p->align;
702 p->address = 0;
703 p->rtl_expr = 0;
704 p->type = best_p->type;
705 p->next = temp_slots;
706 temp_slots = p;
707
708 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
709 stack_slot_list);
710
711 best_p->size = rounded_size;
712 best_p->full_size = rounded_size;
713 }
714 }
715
716 p = best_p;
717 }
718
719 /* If we still didn't find one, make a new temporary. */
720 if (p == 0)
721 {
722 HOST_WIDE_INT frame_offset_old = frame_offset;
723
724 p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));
725
726 /* We are passing an explicit alignment request to assign_stack_local.
727 One side effect of that is assign_stack_local will not round SIZE
728 to ensure the frame offset remains suitably aligned.
729
730 So for requests which depended on the rounding of SIZE, we go ahead
731 and round it now. We also make sure ALIGNMENT is at least
732 BIGGEST_ALIGNMENT. */
733 if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
734 abort ();
735 p->slot = assign_stack_local (mode,
736 (mode == BLKmode
737 ? CEIL_ROUND (size, align / BITS_PER_UNIT)
738 : size),
739 align);
740
741 p->align = align;
742
743 /* The following slot size computation is necessary because we don't
744 know the actual size of the temporary slot until assign_stack_local
745 has performed all the frame alignment and size rounding for the
746 requested temporary. Note that extra space added for alignment
747 can be either above or below this stack slot depending on which
748 way the frame grows. We include the extra space if and only if it
749 is above this slot. */
750 #ifdef FRAME_GROWS_DOWNWARD
751 p->size = frame_offset_old - frame_offset;
752 #else
753 p->size = size;
754 #endif
755
756 /* Now define the fields used by combine_temp_slots. */
757 #ifdef FRAME_GROWS_DOWNWARD
758 p->base_offset = frame_offset;
759 p->full_size = frame_offset_old - frame_offset;
760 #else
761 p->base_offset = frame_offset_old;
762 p->full_size = frame_offset - frame_offset_old;
763 #endif
764 p->address = 0;
765 p->next = temp_slots;
766 temp_slots = p;
767 }
768
769 p->in_use = 1;
770 p->addr_taken = 0;
771 p->rtl_expr = seq_rtl_expr;
772 p->type = type;
773
774 if (keep == 2)
775 {
776 p->level = target_temp_slot_level;
777 p->keep = 0;
778 }
779 else if (keep == 3)
780 {
781 p->level = var_temp_slot_level;
782 p->keep = 0;
783 }
784 else
785 {
786 p->level = temp_slot_level;
787 p->keep = keep;
788 }
789
790 /* We may be reusing an old slot, so clear any MEM flags that may have been
791 set from before. */
792 RTX_UNCHANGING_P (p->slot) = 0;
793 MEM_IN_STRUCT_P (p->slot) = 0;
794 MEM_SCALAR_P (p->slot) = 0;
795 MEM_VOLATILE_P (p->slot) = 0;
796 set_mem_alias_set (p->slot, 0);
797
798 /* If we know the alias set for the memory that will be used, use
799 it. If there's no TYPE, then we don't know anything about the
800 alias set for the memory. */
801 set_mem_alias_set (p->slot, type ? get_alias_set (type) : 0);
802 set_mem_align (p->slot, align);
803
804 /* If a type is specified, set the relevant flags. */
805 if (type != 0)
806 {
807 RTX_UNCHANGING_P (p->slot) = TYPE_READONLY (type);
808 MEM_VOLATILE_P (p->slot) = TYPE_VOLATILE (type);
809 MEM_SET_IN_STRUCT_P (p->slot, AGGREGATE_TYPE_P (type));
810 }
811
812 return p->slot;
813 }
814
815 /* Allocate a temporary stack slot and record it for possible later
816 reuse. First three arguments are same as in preceding function. */
817
818 rtx
819 assign_stack_temp (mode, size, keep)
820 enum machine_mode mode;
821 HOST_WIDE_INT size;
822 int keep;
823 {
824 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
825 }
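
/* Editorial illustration of the KEEP argument (hypothetical calls):

     assign_stack_temp (SImode, size, 0);  - freed by the next free_temp_slots
     assign_stack_temp (SImode, size, 1);  - survives free_temp_slots (block variable)
     assign_stack_temp (SImode, size, 2);  - lifetime controlled by CLEANUP_POINT_EXPRs
     assign_stack_temp (SImode, size, 3);  - treated as a variable of the block

   See the comment above assign_stack_temp_for_type for the full rules. */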
826 \f
827 /* Assign a temporary.
828 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
829    and the decl should be used in error messages.  In either case, we
830    allocate space of the given type.
831 KEEP is as for assign_stack_temp.
832 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
833 it is 0 if a register is OK.
834 DONT_PROMOTE is 1 if we should not promote values in register
835 to wider modes. */
836
837 rtx
838 assign_temp (type_or_decl, keep, memory_required, dont_promote)
839 tree type_or_decl;
840 int keep;
841 int memory_required;
842 int dont_promote ATTRIBUTE_UNUSED;
843 {
844 tree type, decl;
845 enum machine_mode mode;
846 #ifndef PROMOTE_FOR_CALL_ONLY
847 int unsignedp;
848 #endif
849
850 if (DECL_P (type_or_decl))
851 decl = type_or_decl, type = TREE_TYPE (decl);
852 else
853 decl = NULL, type = type_or_decl;
854
855 mode = TYPE_MODE (type);
856 #ifndef PROMOTE_FOR_CALL_ONLY
857 unsignedp = TREE_UNSIGNED (type);
858 #endif
859
860 if (mode == BLKmode || memory_required)
861 {
862 HOST_WIDE_INT size = int_size_in_bytes (type);
863 rtx tmp;
864
865       /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
866 problems with allocating the stack space. */
867 if (size == 0)
868 size = 1;
869
870 /* Unfortunately, we don't yet know how to allocate variable-sized
871 temporaries. However, sometimes we have a fixed upper limit on
872 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
873 instead. This is the case for Chill variable-sized strings. */
874 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
875 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
876 && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
877 size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);
878
879 /* The size of the temporary may be too large to fit into an integer. */
880 /* ??? Not sure this should happen except for user silliness, so limit
881 this to things that aren't compiler-generated temporaries. The
882 rest of the time we'll abort in assign_stack_temp_for_type. */
883 if (decl && size == -1
884 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
885 {
886 error_with_decl (decl, "size of variable `%s' is too large");
887 size = 1;
888 }
889
890 tmp = assign_stack_temp_for_type (mode, size, keep, type);
891 return tmp;
892 }
893
894 #ifndef PROMOTE_FOR_CALL_ONLY
895 if (! dont_promote)
896 mode = promote_mode (type, mode, &unsignedp, 0);
897 #endif
898
899 return gen_reg_rtx (mode);
900 }
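
/* Editorial illustration (hypothetical use): for an aggregate type,

     rtx t = assign_temp (type, 0, 1, 0);

   forces a stack slot (MEMORY_REQUIRED == 1), whereas for a scalar type
   with MEMORY_REQUIRED == 0 the same call simply returns a new pseudo in
   the (possibly promoted) mode of TYPE. */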
901 \f
902 /* Combine temporary stack slots which are adjacent on the stack.
903
904 This allows for better use of already allocated stack space. This is only
905 done for BLKmode slots because we can be sure that we won't have alignment
906 problems in this case. */
907
908 void
909 combine_temp_slots ()
910 {
911 struct temp_slot *p, *q;
912 struct temp_slot *prev_p, *prev_q;
913 int num_slots;
914
915 /* We can't combine slots, because the information about which slot
916 is in which alias set will be lost. */
917 if (flag_strict_aliasing)
918 return;
919
920   /* If there are a lot of temp slots, don't do anything unless we are
921      optimizing heavily. */
922 if (! flag_expensive_optimizations)
923 for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
924 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
925 return;
926
927 for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
928 {
929 int delete_p = 0;
930
931 if (! p->in_use && GET_MODE (p->slot) == BLKmode)
932 for (q = p->next, prev_q = p; q; q = prev_q->next)
933 {
934 int delete_q = 0;
935 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
936 {
937 if (p->base_offset + p->full_size == q->base_offset)
938 {
939 /* Q comes after P; combine Q into P. */
940 p->size += q->size;
941 p->full_size += q->full_size;
942 delete_q = 1;
943 }
944 else if (q->base_offset + q->full_size == p->base_offset)
945 {
946 /* P comes after Q; combine P into Q. */
947 q->size += p->size;
948 q->full_size += p->full_size;
949 delete_p = 1;
950 break;
951 }
952 }
953 /* Either delete Q or advance past it. */
954 if (delete_q)
955 prev_q->next = q->next;
956 else
957 prev_q = q;
958 }
959 /* Either delete P or advance past it. */
960 if (delete_p)
961 {
962 if (prev_p)
963 prev_p->next = p->next;
964 else
965 temp_slots = p->next;
966 }
967 else
968 prev_p = p;
969 }
970 }
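
/* Editorial illustration: two free BLKmode slots are adjacent when
   p->base_offset + p->full_size == q->base_offset; e.g. a slot covering
   frame bytes [0, 16) followed by one covering [16, 24) is merged into a
   single reusable slot covering [0, 24). */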
971 \f
972 /* Find the temp slot corresponding to the object at address X. */
973
974 static struct temp_slot *
975 find_temp_slot_from_address (x)
976 rtx x;
977 {
978 struct temp_slot *p;
979 rtx next;
980
981 for (p = temp_slots; p; p = p->next)
982 {
983 if (! p->in_use)
984 continue;
985
986 else if (XEXP (p->slot, 0) == x
987 || p->address == x
988 || (GET_CODE (x) == PLUS
989 && XEXP (x, 0) == virtual_stack_vars_rtx
990 && GET_CODE (XEXP (x, 1)) == CONST_INT
991 && INTVAL (XEXP (x, 1)) >= p->base_offset
992 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
993 return p;
994
995 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
996 for (next = p->address; next; next = XEXP (next, 1))
997 if (XEXP (next, 0) == x)
998 return p;
999 }
1000
1001 /* If we have a sum involving a register, see if it points to a temp
1002 slot. */
1003 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
1004 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
1005 return p;
1006 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
1007 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
1008 return p;
1009
1010 return 0;
1011 }
1012
1013 /* Indicate that NEW is an alternate way of referring to the temp slot
1014 that previously was known by OLD. */
1015
1016 void
1017 update_temp_slot_address (old, new)
1018 rtx old, new;
1019 {
1020 struct temp_slot *p;
1021
1022 if (rtx_equal_p (old, new))
1023 return;
1024
1025 p = find_temp_slot_from_address (old);
1026
1027   /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW
1028      is a register, see if one operand of the PLUS is a temporary
1029      location; if so, NEW points into it.  Otherwise, see if both OLD
1030      and NEW are PLUS expressions with a register in common between
1031      them; if so, try a recursive call on the remaining operands. */
1032 if (p == 0)
1033 {
1034 if (GET_CODE (old) != PLUS)
1035 return;
1036
1037 if (GET_CODE (new) == REG)
1038 {
1039 update_temp_slot_address (XEXP (old, 0), new);
1040 update_temp_slot_address (XEXP (old, 1), new);
1041 return;
1042 }
1043 else if (GET_CODE (new) != PLUS)
1044 return;
1045
1046 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
1047 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
1048 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
1049 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
1050 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
1051 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
1052 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
1053 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
1054
1055 return;
1056 }
1057
1058 /* Otherwise add an alias for the temp's address. */
1059 else if (p->address == 0)
1060 p->address = new;
1061 else
1062 {
1063 if (GET_CODE (p->address) != EXPR_LIST)
1064 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1065
1066 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1067 }
1068 }
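
/* Editorial illustration (hypothetical RTL): if OLD is
   (plus (reg A) (reg B)) and NEW is (reg C), and no slot is recorded for
   OLD as a whole, we recurse with (A, C) and (B, C); whichever of A or B
   is a known slot address then gains C as an additional alias. */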
1069
1070 /* If X could be a reference to a temporary slot, mark the fact that its
1071 address was taken. */
1072
1073 void
1074 mark_temp_addr_taken (x)
1075 rtx x;
1076 {
1077 struct temp_slot *p;
1078
1079 if (x == 0)
1080 return;
1081
1082 /* If X is not in memory or is at a constant address, it cannot be in
1083 a temporary slot. */
1084 if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1085 return;
1086
1087 p = find_temp_slot_from_address (XEXP (x, 0));
1088 if (p != 0)
1089 p->addr_taken = 1;
1090 }
1091
1092 /* If X could be a reference to a temporary slot, mark that slot as
1093    belonging to the level one higher than the current level.  If X
1094 matched one of our slots, just mark that one. Otherwise, we can't
1095 easily predict which it is, so upgrade all of them. Kept slots
1096 need not be touched.
1097
1098 This is called when an ({...}) construct occurs and a statement
1099 returns a value in memory. */
1100
1101 void
1102 preserve_temp_slots (x)
1103 rtx x;
1104 {
1105 struct temp_slot *p = 0;
1106
1107   /* If there is no result, we still might have some objects whose addresses
1108 were taken, so we need to make sure they stay around. */
1109 if (x == 0)
1110 {
1111 for (p = temp_slots; p; p = p->next)
1112 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1113 p->level--;
1114
1115 return;
1116 }
1117
1118 /* If X is a register that is being used as a pointer, see if we have
1119 a temporary slot we know it points to. To be consistent with
1120 the code below, we really should preserve all non-kept slots
1121 if we can't find a match, but that seems to be much too costly. */
1122 if (GET_CODE (x) == REG && REG_POINTER (x))
1123 p = find_temp_slot_from_address (x);
1124
1125 /* If X is not in memory or is at a constant address, it cannot be in
1126 a temporary slot, but it can contain something whose address was
1127 taken. */
1128 if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
1129 {
1130 for (p = temp_slots; p; p = p->next)
1131 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1132 p->level--;
1133
1134 return;
1135 }
1136
1137 /* First see if we can find a match. */
1138 if (p == 0)
1139 p = find_temp_slot_from_address (XEXP (x, 0));
1140
1141 if (p != 0)
1142 {
1143 /* Move everything at our level whose address was taken to our new
1144 level in case we used its address. */
1145 struct temp_slot *q;
1146
1147 if (p->level == temp_slot_level)
1148 {
1149 for (q = temp_slots; q; q = q->next)
1150 if (q != p && q->addr_taken && q->level == p->level)
1151 q->level--;
1152
1153 p->level--;
1154 p->addr_taken = 0;
1155 }
1156 return;
1157 }
1158
1159 /* Otherwise, preserve all non-kept slots at this level. */
1160 for (p = temp_slots; p; p = p->next)
1161 if (p->in_use && p->level == temp_slot_level && ! p->keep)
1162 p->level--;
1163 }
1164
1165 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1166 with that RTL_EXPR, promote it into a temporary slot at the present
1167 level so it will not be freed when we free slots made in the
1168 RTL_EXPR. */
1169
1170 void
1171 preserve_rtl_expr_result (x)
1172 rtx x;
1173 {
1174 struct temp_slot *p;
1175
1176 /* If X is not in memory or is at a constant address, it cannot be in
1177 a temporary slot. */
1178 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1179 return;
1180
1181 /* If we can find a match, move it to our level unless it is already at
1182 an upper level. */
1183 p = find_temp_slot_from_address (XEXP (x, 0));
1184 if (p != 0)
1185 {
1186 p->level = MIN (p->level, temp_slot_level);
1187 p->rtl_expr = 0;
1188 }
1189
1190 return;
1191 }
1192
1193 /* Free all temporaries used so far. This is normally called at the end
1194 of generating code for a statement. Don't free any temporaries
1195 currently in use for an RTL_EXPR that hasn't yet been emitted.
1196 We could eventually do better than this since it can be reused while
1197 generating the same RTL_EXPR, but this is complex and probably not
1198 worthwhile. */
1199
1200 void
1201 free_temp_slots ()
1202 {
1203 struct temp_slot *p;
1204
1205 for (p = temp_slots; p; p = p->next)
1206 if (p->in_use && p->level == temp_slot_level && ! p->keep
1207 && p->rtl_expr == 0)
1208 p->in_use = 0;
1209
1210 combine_temp_slots ();
1211 }
1212
1213 /* Free all temporary slots used in T, an RTL_EXPR node. */
1214
1215 void
1216 free_temps_for_rtl_expr (t)
1217 tree t;
1218 {
1219 struct temp_slot *p;
1220
1221 for (p = temp_slots; p; p = p->next)
1222 if (p->rtl_expr == t)
1223 {
1224 /* If this slot is below the current TEMP_SLOT_LEVEL, then it
1225 needs to be preserved. This can happen if a temporary in
1226 the RTL_EXPR was addressed; preserve_temp_slots will move
1227 the temporary into a higher level. */
1228 if (temp_slot_level <= p->level)
1229 p->in_use = 0;
1230 else
1231 p->rtl_expr = NULL_TREE;
1232 }
1233
1234 combine_temp_slots ();
1235 }
1236
1237 /* Mark all temporaries ever allocated in this function as not suitable
1238 for reuse until the current level is exited. */
1239
1240 void
1241 mark_all_temps_used ()
1242 {
1243 struct temp_slot *p;
1244
1245 for (p = temp_slots; p; p = p->next)
1246 {
1247 p->in_use = p->keep = 1;
1248 p->level = MIN (p->level, temp_slot_level);
1249 }
1250 }
1251
1252 /* Push deeper into the nesting level for stack temporaries. */
1253
1254 void
1255 push_temp_slots ()
1256 {
1257 temp_slot_level++;
1258 }
1259
1260 /* Likewise, but save the new level as the place to allocate variables
1261 for blocks. */
1262
1263 #if 0
1264 void
1265 push_temp_slots_for_block ()
1266 {
1267 push_temp_slots ();
1268
1269 var_temp_slot_level = temp_slot_level;
1270 }
1271
1272 /* Likewise, but save the new level as the place to allocate temporaries
1273 for TARGET_EXPRs. */
1274
1275 void
1276 push_temp_slots_for_target ()
1277 {
1278 push_temp_slots ();
1279
1280 target_temp_slot_level = temp_slot_level;
1281 }
1282
1283 /* Set and get the value of target_temp_slot_level. The only
1284 permitted use of these functions is to save and restore this value. */
1285
1286 int
1287 get_target_temp_slot_level ()
1288 {
1289 return target_temp_slot_level;
1290 }
1291
1292 void
1293 set_target_temp_slot_level (level)
1294 int level;
1295 {
1296 target_temp_slot_level = level;
1297 }
1298 #endif
1299
1300 /* Pop a temporary nesting level. All slots in use in the current level
1301 are freed. */
1302
1303 void
1304 pop_temp_slots ()
1305 {
1306 struct temp_slot *p;
1307
1308 for (p = temp_slots; p; p = p->next)
1309 if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
1310 p->in_use = 0;
1311
1312 combine_temp_slots ();
1313
1314 temp_slot_level--;
1315 }
1316
1317 /* Initialize temporary slots. */
1318
1319 void
1320 init_temp_slots ()
1321 {
1322 /* We have not allocated any temporaries yet. */
1323 temp_slots = 0;
1324 temp_slot_level = 0;
1325 var_temp_slot_level = 0;
1326 target_temp_slot_level = 0;
1327 }
1328 \f
1329 /* Retroactively move an auto variable from a register to a stack slot.
1330 This is done when an address-reference to the variable is seen. */
1331
1332 void
1333 put_var_into_stack (decl)
1334 tree decl;
1335 {
1336 rtx reg;
1337 enum machine_mode promoted_mode, decl_mode;
1338 struct function *function = 0;
1339 tree context;
1340 int can_use_addressof;
1341 int volatilep = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
1342 int usedp = (TREE_USED (decl)
1343 || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));
1344
1345 context = decl_function_context (decl);
1346
1347 /* Get the current rtl used for this object and its original mode. */
1348 reg = (TREE_CODE (decl) == SAVE_EXPR
1349 ? SAVE_EXPR_RTL (decl)
1350 : DECL_RTL_IF_SET (decl));
1351
1352 /* No need to do anything if decl has no rtx yet
1353 since in that case caller is setting TREE_ADDRESSABLE
1354 and a stack slot will be assigned when the rtl is made. */
1355 if (reg == 0)
1356 return;
1357
1358 /* Get the declared mode for this object. */
1359 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
1360 : DECL_MODE (decl));
1361 /* Get the mode it's actually stored in. */
1362 promoted_mode = GET_MODE (reg);
1363
1364 /* If this variable comes from an outer function, find that
1365 function's saved context. Don't use find_function_data here,
1366 because it might not be in any active function.
1367 FIXME: Is that really supposed to happen?
1368 It does in ObjC at least. */
1369 if (context != current_function_decl && context != inline_function_decl)
1370 for (function = outer_function_chain; function; function = function->outer)
1371 if (function->decl == context)
1372 break;
1373
1374 /* If this is a variable-size object with a pseudo to address it,
1375 put that pseudo into the stack, if the var is nonlocal. */
1376 if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
1377 && GET_CODE (reg) == MEM
1378 && GET_CODE (XEXP (reg, 0)) == REG
1379 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
1380 {
1381 reg = XEXP (reg, 0);
1382 decl_mode = promoted_mode = GET_MODE (reg);
1383 }
1384
1385 can_use_addressof
1386 = (function == 0
1387 && optimize > 0
1388 /* FIXME make it work for promoted modes too */
1389 && decl_mode == promoted_mode
1390 #ifdef NON_SAVING_SETJMP
1391 && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
1392 #endif
1393 );
1394
1395 /* If we can't use ADDRESSOF, make sure we see through one we already
1396 generated. */
1397 if (! can_use_addressof && GET_CODE (reg) == MEM
1398 && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
1399 reg = XEXP (XEXP (reg, 0), 0);
1400
1401 /* Now we should have a value that resides in one or more pseudo regs. */
1402
1403 if (GET_CODE (reg) == REG)
1404 {
1405 /* If this variable lives in the current function and we don't need
1406 to put things in the stack for the sake of setjmp, try to keep it
1407 in a register until we know we actually need the address. */
1408 if (can_use_addressof)
1409 gen_mem_addressof (reg, decl);
1410 else
1411 put_reg_into_stack (function, reg, TREE_TYPE (decl), promoted_mode,
1412 decl_mode, volatilep, 0, usedp, 0);
1413 }
1414 else if (GET_CODE (reg) == CONCAT)
1415 {
1416 /* A CONCAT contains two pseudos; put them both in the stack.
1417 We do it so they end up consecutive.
1418          We fix up references to the parts only after we fix up references
1419 to the whole CONCAT, lest we do double fixups for the latter
1420 references. */
1421 enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
1422 tree part_type = (*lang_hooks.types.type_for_mode) (part_mode, 0);
1423 rtx lopart = XEXP (reg, 0);
1424 rtx hipart = XEXP (reg, 1);
1425 #ifdef FRAME_GROWS_DOWNWARD
1426 /* Since part 0 should have a lower address, do it second. */
1427 put_reg_into_stack (function, hipart, part_type, part_mode,
1428 part_mode, volatilep, 0, 0, 0);
1429 put_reg_into_stack (function, lopart, part_type, part_mode,
1430 part_mode, volatilep, 0, 0, 0);
1431 #else
1432 put_reg_into_stack (function, lopart, part_type, part_mode,
1433 part_mode, volatilep, 0, 0, 0);
1434 put_reg_into_stack (function, hipart, part_type, part_mode,
1435 part_mode, volatilep, 0, 0, 0);
1436 #endif
1437
1438 /* Change the CONCAT into a combined MEM for both parts. */
1439 PUT_CODE (reg, MEM);
1440 MEM_ATTRS (reg) = 0;
1441
1442       /* set_mem_attributes uses DECL_RTL to avoid re-generating
1443          already-computed alias sets.  Here we want to re-generate them. */
1444 if (DECL_P (decl))
1445 SET_DECL_RTL (decl, NULL);
1446 set_mem_attributes (reg, decl, 1);
1447 if (DECL_P (decl))
1448 SET_DECL_RTL (decl, reg);
1449
1450 /* The two parts are in memory order already.
1451          Use the lower part's address as ours. */
1452 XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1453 /* Prevent sharing of rtl that might lose. */
1454 if (GET_CODE (XEXP (reg, 0)) == PLUS)
1455 XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1456 if (usedp)
1457 {
1458 schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
1459 promoted_mode, 0);
1460 schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0);
1461 schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0);
1462 }
1463 }
1464 else
1465 return;
1466 }
1467
1468 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1469 into the stack frame of FUNCTION (0 means the current function).
1470 DECL_MODE is the machine mode of the user-level data type.
1471 PROMOTED_MODE is the machine mode of the register.
1472 VOLATILE_P is nonzero if this is for a "volatile" decl.
1473 USED_P is nonzero if this reg might have already been used in an insn. */
1474
1475 static void
1476 put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
1477 original_regno, used_p, ht)
1478 struct function *function;
1479 rtx reg;
1480 tree type;
1481 enum machine_mode promoted_mode, decl_mode;
1482 int volatile_p;
1483 unsigned int original_regno;
1484 int used_p;
1485 htab_t ht;
1486 {
1487 struct function *func = function ? function : cfun;
1488 rtx new = 0;
1489 unsigned int regno = original_regno;
1490
1491 if (regno == 0)
1492 regno = REGNO (reg);
1493
1494 if (regno < func->x_max_parm_reg)
1495 new = func->x_parm_reg_stack_loc[regno];
1496
1497 if (new == 0)
1498 new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);
1499
1500 PUT_CODE (reg, MEM);
1501 PUT_MODE (reg, decl_mode);
1502 XEXP (reg, 0) = XEXP (new, 0);
1503 MEM_ATTRS (reg) = 0;
1504 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1505 MEM_VOLATILE_P (reg) = volatile_p;
1506
1507 /* If this is a memory ref that contains aggregate components,
1508 mark it as such for cse and loop optimize. If we are reusing a
1509 previously generated stack slot, then we need to copy the bit in
1510 case it was set for other reasons. For instance, it is set for
1511 __builtin_va_alist. */
1512 if (type)
1513 {
1514 MEM_SET_IN_STRUCT_P (reg,
1515 AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
1516 set_mem_alias_set (reg, get_alias_set (type));
1517 }
1518
1519 if (used_p)
1520 schedule_fixup_var_refs (function, reg, type, promoted_mode, ht);
1521 }
1522
1523 /* Make sure that all refs to the variable, previously made
1524 when it was a register, are fixed up to be valid again.
1525 See function above for meaning of arguments. */
1526
1527 static void
1528 schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
1529 struct function *function;
1530 rtx reg;
1531 tree type;
1532 enum machine_mode promoted_mode;
1533 htab_t ht;
1534 {
1535 int unsigned_p = type ? TREE_UNSIGNED (type) : 0;
1536
1537 if (function != 0)
1538 {
1539 struct var_refs_queue *temp;
1540
1541 temp
1542 = (struct var_refs_queue *) ggc_alloc (sizeof (struct var_refs_queue));
1543 temp->modified = reg;
1544 temp->promoted_mode = promoted_mode;
1545 temp->unsignedp = unsigned_p;
1546 temp->next = function->fixup_var_refs_queue;
1547 function->fixup_var_refs_queue = temp;
1548 }
1549 else
1550 /* Variable is local; fix it up now. */
1551 fixup_var_refs (reg, promoted_mode, unsigned_p, reg, ht);
1552 }
1553 \f
1554 static void
1555 fixup_var_refs (var, promoted_mode, unsignedp, may_share, ht)
1556 rtx var;
1557 enum machine_mode promoted_mode;
1558 int unsignedp;
1559 htab_t ht;
1560 rtx may_share;
1561 {
1562 tree pending;
1563 rtx first_insn = get_insns ();
1564 struct sequence_stack *stack = seq_stack;
1565 tree rtl_exps = rtl_expr_chain;
1566
1567 /* If there's a hash table, it must record all uses of VAR. */
1568 if (ht)
1569 {
1570 if (stack != 0)
1571 abort ();
1572 fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp,
1573 may_share);
1574 return;
1575 }
1576
1577 fixup_var_refs_insns (first_insn, var, promoted_mode, unsignedp,
1578 stack == 0, may_share);
1579
1580 /* Scan all pending sequences too. */
1581 for (; stack; stack = stack->next)
1582 {
1583 push_to_full_sequence (stack->first, stack->last);
1584 fixup_var_refs_insns (stack->first, var, promoted_mode, unsignedp,
1585 stack->next != 0, may_share);
1586 /* Update remembered end of sequence
1587 in case we added an insn at the end. */
1588 stack->last = get_last_insn ();
1589 end_sequence ();
1590 }
1591
1592 /* Scan all waiting RTL_EXPRs too. */
1593 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1594 {
1595 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1596 if (seq != const0_rtx && seq != 0)
1597 {
1598 push_to_sequence (seq);
1599 fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
1600 may_share);
1601 end_sequence ();
1602 }
1603 }
1604 }
1605 \f
1606 /* REPLACEMENTS is a pointer to a list of struct fixup_replacement and X is
1607 some part of an insn. Return a struct fixup_replacement whose OLD
1608 value is equal to X. Allocate a new structure if no such entry exists. */
1609
1610 static struct fixup_replacement *
1611 find_fixup_replacement (replacements, x)
1612 struct fixup_replacement **replacements;
1613 rtx x;
1614 {
1615 struct fixup_replacement *p;
1616
1617 /* See if we have already replaced this. */
1618 for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
1619 ;
1620
1621 if (p == 0)
1622 {
1623 p = (struct fixup_replacement *) xmalloc (sizeof (struct fixup_replacement));
1624 p->old = x;
1625 p->new = 0;
1626 p->next = *replacements;
1627 *replacements = p;
1628 }
1629
1630 return p;
1631 }
1632
1633 /* Scan the insn-chain starting with INSN for refs to VAR and fix them
1634 up. TOPLEVEL is nonzero if this chain is the main chain of insns
1635 for the current function. MAY_SHARE is either a MEM that is not
1636 to be unshared or a list of them. */
1637
1638 static void
1639 fixup_var_refs_insns (insn, var, promoted_mode, unsignedp, toplevel, may_share)
1640 rtx insn;
1641 rtx var;
1642 enum machine_mode promoted_mode;
1643 int unsignedp;
1644 int toplevel;
1645 rtx may_share;
1646 {
1647 while (insn)
1648 {
1649 /* fixup_var_refs_insn might modify insn, so save its next
1650 pointer now. */
1651 rtx next = NEXT_INSN (insn);
1652
1653 /* CALL_PLACEHOLDERs are special; we have to switch into each of
1654 the three sequences they (potentially) contain, and process
1655 them recursively. The CALL_INSN itself is not interesting. */
1656
1657 if (GET_CODE (insn) == CALL_INSN
1658 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1659 {
1660 int i;
1661
1662 /* Look at the Normal call, sibling call and tail recursion
1663 sequences attached to the CALL_PLACEHOLDER. */
1664 for (i = 0; i < 3; i++)
1665 {
1666 rtx seq = XEXP (PATTERN (insn), i);
1667 if (seq)
1668 {
1669 push_to_sequence (seq);
1670 fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
1671 may_share);
1672 XEXP (PATTERN (insn), i) = get_insns ();
1673 end_sequence ();
1674 }
1675 }
1676 }
1677
1678 else if (INSN_P (insn))
1679 fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel,
1680 may_share);
1681
1682 insn = next;
1683 }
1684 }
1685
1686 /* Look up the insns which reference VAR in HT and fix them up. Other
1687 arguments are the same as fixup_var_refs_insns.
1688
1689 N.B. No need for special processing of CALL_PLACEHOLDERs here,
1690 because the hash table will point straight to the interesting insn
1691 (inside the CALL_PLACEHOLDER). */
1692
1693 static void
1694 fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp, may_share)
1695 htab_t ht;
1696 rtx var;
1697 enum machine_mode promoted_mode;
1698 int unsignedp;
1699 rtx may_share;
1700 {
1701 struct insns_for_mem_entry tmp;
1702 struct insns_for_mem_entry *ime;
1703 rtx insn_list;
1704
1705 tmp.key = var;
1706 ime = (struct insns_for_mem_entry *) htab_find (ht, &tmp);
1707 for (insn_list = ime->insns; insn_list != 0; insn_list = XEXP (insn_list, 1))
1708 if (INSN_P (XEXP (insn_list, 0)))
1709 fixup_var_refs_insn (XEXP (insn_list, 0), var, promoted_mode,
1710 unsignedp, 1, may_share);
1711 }
1712
1713
1714 /* Per-insn processing by fixup_var_refs_insns(_with_hash). INSN is
1715 the insn under examination, VAR is the variable to fix up
1716 references to, PROMOTED_MODE and UNSIGNEDP describe VAR, and
1717 TOPLEVEL is nonzero if this is the main insn chain for this
1718 function. */
1719
1720 static void
1721 fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel, no_share)
1722 rtx insn;
1723 rtx var;
1724 enum machine_mode promoted_mode;
1725 int unsignedp;
1726 int toplevel;
1727 rtx no_share;
1728 {
1729 rtx call_dest = 0;
1730 rtx set, prev, prev_set;
1731 rtx note;
1732
1733 /* Remember the notes in case we delete the insn. */
1734 note = REG_NOTES (insn);
1735
1736 /* If this is a CLOBBER of VAR, delete it.
1737
1738 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1739 and REG_RETVAL notes too. */
1740 if (GET_CODE (PATTERN (insn)) == CLOBBER
1741 && (XEXP (PATTERN (insn), 0) == var
1742 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1743 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1744 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1745 {
1746 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1747 /* The REG_LIBCALL note will go away since we are going to
1748 turn INSN into a NOTE, so just delete the
1749 corresponding REG_RETVAL note. */
1750 remove_note (XEXP (note, 0),
1751 find_reg_note (XEXP (note, 0), REG_RETVAL,
1752 NULL_RTX));
1753
1754 delete_insn (insn);
1755 }
1756
1757 /* The insn to load VAR from a home in the arglist
1758 is now a no-op. When we see it, just delete it.
1759 Similarly if this is storing VAR from a register from which
1760 it was loaded in the previous insn. This will occur
1761 when an ADDRESSOF was made for an arglist slot. */
1762 else if (toplevel
1763 && (set = single_set (insn)) != 0
1764 && SET_DEST (set) == var
1765 /* If this represents the result of an insn group,
1766 don't delete the insn. */
1767 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1768 && (rtx_equal_p (SET_SRC (set), var)
1769 || (GET_CODE (SET_SRC (set)) == REG
1770 && (prev = prev_nonnote_insn (insn)) != 0
1771 && (prev_set = single_set (prev)) != 0
1772 && SET_DEST (prev_set) == SET_SRC (set)
1773 && rtx_equal_p (SET_SRC (prev_set), var))))
1774 {
1775 delete_insn (insn);
1776 }
1777 else
1778 {
1779 struct fixup_replacement *replacements = 0;
1780 rtx next_insn = NEXT_INSN (insn);
1781
1782 if (SMALL_REGISTER_CLASSES)
1783 {
1784 /* If the insn that copies the results of a CALL_INSN
1785 into a pseudo now references VAR, we have to use an
1786 intermediate pseudo since we want the life of the
1787 return value register to be only a single insn.
1788
1789 If we don't use an intermediate pseudo, such things as
1790 address computations to make the address of VAR valid
1791 if it is not can be placed between the CALL_INSN and INSN.
1792
1793 To make sure this doesn't happen, we record the destination
1794 of the CALL_INSN and see if the next insn uses both that
1795 and VAR. */
1796
1797 if (call_dest != 0 && GET_CODE (insn) == INSN
1798 && reg_mentioned_p (var, PATTERN (insn))
1799 && reg_mentioned_p (call_dest, PATTERN (insn)))
1800 {
1801 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1802
1803 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1804
1805 PATTERN (insn) = replace_rtx (PATTERN (insn),
1806 call_dest, temp);
1807 }
1808
1809 if (GET_CODE (insn) == CALL_INSN
1810 && GET_CODE (PATTERN (insn)) == SET)
1811 call_dest = SET_DEST (PATTERN (insn));
1812 else if (GET_CODE (insn) == CALL_INSN
1813 && GET_CODE (PATTERN (insn)) == PARALLEL
1814 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1815 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1816 else
1817 call_dest = 0;
1818 }
1819
1820 /* See if we have to do anything to INSN now that VAR is in
1821 memory. If it needs to be loaded into a pseudo, use a single
1822 pseudo for the entire insn in case there is a MATCH_DUP
1823 between two operands. We pass a pointer to the head of
1824 a list of struct fixup_replacements. If fixup_var_refs_1
1825 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1826 it will record them in this list.
1827
1828 If it allocated a pseudo for any replacement, we copy into
1829 it here. */
1830
1831 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1832 &replacements, no_share);
1833
1834 /* If this is last_parm_insn, and any instructions were output
1835 after it to fix it up, then we must set last_parm_insn to
1836 the last such instruction emitted. */
1837 if (insn == last_parm_insn)
1838 last_parm_insn = PREV_INSN (next_insn);
1839
1840 while (replacements)
1841 {
1842 struct fixup_replacement *next;
1843
1844 if (GET_CODE (replacements->new) == REG)
1845 {
1846 rtx insert_before;
1847 rtx seq;
1848
1849 /* OLD might be a (subreg (mem)). */
1850 if (GET_CODE (replacements->old) == SUBREG)
1851 replacements->old
1852 = fixup_memory_subreg (replacements->old, insn,
1853 promoted_mode, 0);
1854 else
1855 replacements->old
1856 = fixup_stack_1 (replacements->old, insn);
1857
1858 insert_before = insn;
1859
1860 /* If we are changing the mode, do a conversion.
1861 This might be wasteful, but combine.c will
1862 eliminate much of the waste. */
1863
1864 if (GET_MODE (replacements->new)
1865 != GET_MODE (replacements->old))
1866 {
1867 start_sequence ();
1868 convert_move (replacements->new,
1869 replacements->old, unsignedp);
1870 seq = get_insns ();
1871 end_sequence ();
1872 }
1873 else
1874 seq = gen_move_insn (replacements->new,
1875 replacements->old);
1876
1877 emit_insn_before (seq, insert_before);
1878 }
1879
1880 next = replacements->next;
1881 free (replacements);
1882 replacements = next;
1883 }
1884 }
1885
1886 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1887 But don't touch other insns referred to by reg-notes;
1888 we will get them elsewhere. */
1889 while (note)
1890 {
1891 if (GET_CODE (note) != INSN_LIST)
1892 XEXP (note, 0)
1893 = walk_fixup_memory_subreg (XEXP (note, 0), insn,
1894 promoted_mode, 1);
1895 note = XEXP (note, 1);
1896 }
1897 }
1898 \f
1899 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1900 See if the rtx expression at *LOC in INSN needs to be changed.
1901
1902 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1903 contain a list of original rtx's and replacements. If we find that we need
1904 to modify this insn by replacing a memory reference with a pseudo or by
1905 making a new MEM to implement a SUBREG, we consult that list to see if
1906 we have already chosen a replacement. If none has already been allocated,
1907 we allocate it and update the list. fixup_var_refs_insn will copy VAR
1908 or the SUBREG, as appropriate, to the pseudo. */
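/* An illustrative sketch (register numbers invented, not from the sources):
   suppose VAR is a stack-slot MEM and INSN is
     (set (reg:SI 70) (plus:SI VAR (const_int 1)))
   on a target whose add insn cannot accept a memory operand.  The MEM case
   below would record a replacement pairing VAR with a fresh pseudo, say
   (reg:SI 71), and substitute that pseudo into the PLUS;
   fixup_var_refs_insn then emits (set (reg:SI 71) VAR) before INSN.  */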
1909
1910 static void
1911 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements, no_share)
1912 rtx var;
1913 enum machine_mode promoted_mode;
1914 rtx *loc;
1915 rtx insn;
1916 struct fixup_replacement **replacements;
1917 rtx no_share;
1918 {
1919 int i;
1920 rtx x = *loc;
1921 RTX_CODE code = GET_CODE (x);
1922 const char *fmt;
1923 rtx tem, tem1;
1924 struct fixup_replacement *replacement;
1925
1926 switch (code)
1927 {
1928 case ADDRESSOF:
1929 if (XEXP (x, 0) == var)
1930 {
1931 /* Prevent sharing of rtl that might lose. */
1932 rtx sub = copy_rtx (XEXP (var, 0));
1933
1934 if (! validate_change (insn, loc, sub, 0))
1935 {
1936 rtx y = gen_reg_rtx (GET_MODE (sub));
1937 rtx seq, new_insn;
1938
1939 /* We should be able to replace with a register or all is lost.
1940 Note that we can't use validate_change to verify this, since
1941 we're not taking care of replacing all dups simultaneously. */
1942 if (! validate_replace_rtx (*loc, y, insn))
1943 abort ();
1944
1945 /* Careful! First try to recognize a direct move of the
1946 value, mimicking how things are done in gen_reload wrt
1947 PLUS. Consider what happens when insn is a conditional
1948 move instruction and addsi3 clobbers flags. */
1949
1950 start_sequence ();
1951 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1952 seq = get_insns ();
1953 end_sequence ();
1954
1955 if (recog_memoized (new_insn) < 0)
1956 {
1957 /* That failed. Fall back on force_operand and hope. */
1958
1959 start_sequence ();
1960 sub = force_operand (sub, y);
1961 if (sub != y)
1962 emit_insn (gen_move_insn (y, sub));
1963 seq = get_insns ();
1964 end_sequence ();
1965 }
1966
1967 #ifdef HAVE_cc0
1968 /* Don't separate setter from user. */
1969 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1970 insn = PREV_INSN (insn);
1971 #endif
1972
1973 emit_insn_before (seq, insn);
1974 }
1975 }
1976 return;
1977
1978 case MEM:
1979 if (var == x)
1980 {
1981 /* If we already have a replacement, use it. Otherwise,
1982 try to fix up this address in case it is invalid. */
1983
1984 replacement = find_fixup_replacement (replacements, var);
1985 if (replacement->new)
1986 {
1987 *loc = replacement->new;
1988 return;
1989 }
1990
1991 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1992
1993 /* Unless we are forcing memory to register or we changed the mode,
1994 we can leave things the way they are if the insn is valid. */
1995
1996 INSN_CODE (insn) = -1;
1997 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1998 && recog_memoized (insn) >= 0)
1999 return;
2000
2001 *loc = replacement->new = gen_reg_rtx (promoted_mode);
2002 return;
2003 }
2004
2005 /* If X contains VAR, we need to unshare it here so that we update
2006 each occurrence separately. But all identical MEMs in one insn
2007 must be replaced with the same rtx because of the possibility of
2008 MATCH_DUPs. */
2009
2010 if (reg_mentioned_p (var, x))
2011 {
2012 replacement = find_fixup_replacement (replacements, x);
2013 if (replacement->new == 0)
2014 replacement->new = copy_most_rtx (x, no_share);
2015
2016 *loc = x = replacement->new;
2017 code = GET_CODE (x);
2018 }
2019 break;
2020
2021 case REG:
2022 case CC0:
2023 case PC:
2024 case CONST_INT:
2025 case CONST:
2026 case SYMBOL_REF:
2027 case LABEL_REF:
2028 case CONST_DOUBLE:
2029 case CONST_VECTOR:
2030 return;
2031
2032 case SIGN_EXTRACT:
2033 case ZERO_EXTRACT:
2034 /* Note that in some cases those types of expressions are altered
2035 by optimize_bit_field, and do not survive to get here. */
2036 if (XEXP (x, 0) == var
2037 || (GET_CODE (XEXP (x, 0)) == SUBREG
2038 && SUBREG_REG (XEXP (x, 0)) == var))
2039 {
2040 /* Get TEM as a valid MEM in the mode presently in the insn.
2041
2042 We don't worry about the possibility of MATCH_DUP here; it
2043 is highly unlikely and would be tricky to handle. */
2044
2045 tem = XEXP (x, 0);
2046 if (GET_CODE (tem) == SUBREG)
2047 {
2048 if (GET_MODE_BITSIZE (GET_MODE (tem))
2049 > GET_MODE_BITSIZE (GET_MODE (var)))
2050 {
2051 replacement = find_fixup_replacement (replacements, var);
2052 if (replacement->new == 0)
2053 replacement->new = gen_reg_rtx (GET_MODE (var));
2054 SUBREG_REG (tem) = replacement->new;
2055
2056 /* The following code works only if we have a MEM, so we
2057 need to handle the subreg here. We directly substitute
2058 it assuming that a subreg must be OK here. We already
2059 scheduled a replacement to copy the mem into the
2060 subreg. */
2061 XEXP (x, 0) = tem;
2062 return;
2063 }
2064 else
2065 tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
2066 }
2067 else
2068 tem = fixup_stack_1 (tem, insn);
2069
2070 /* Unless we want to load from memory, get TEM into the proper mode
2071 for an extract from memory. This can only be done if the
2072 extract is at a constant position and length. */
2073
2074 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2075 && GET_CODE (XEXP (x, 2)) == CONST_INT
2076 && ! mode_dependent_address_p (XEXP (tem, 0))
2077 && ! MEM_VOLATILE_P (tem))
2078 {
2079 enum machine_mode wanted_mode = VOIDmode;
2080 enum machine_mode is_mode = GET_MODE (tem);
2081 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2082
2083 if (GET_CODE (x) == ZERO_EXTRACT)
2084 {
2085 enum machine_mode new_mode
2086 = mode_for_extraction (EP_extzv, 1);
2087 if (new_mode != MAX_MACHINE_MODE)
2088 wanted_mode = new_mode;
2089 }
2090 else if (GET_CODE (x) == SIGN_EXTRACT)
2091 {
2092 enum machine_mode new_mode
2093 = mode_for_extraction (EP_extv, 1);
2094 if (new_mode != MAX_MACHINE_MODE)
2095 wanted_mode = new_mode;
2096 }
2097
2098 /* If we have a narrower mode, we can do something. */
2099 if (wanted_mode != VOIDmode
2100 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2101 {
2102 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2103 rtx old_pos = XEXP (x, 2);
2104 rtx newmem;
2105
2106 /* If the bytes and bits are counted differently, we
2107 must adjust the offset. */
2108 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2109 offset = (GET_MODE_SIZE (is_mode)
2110 - GET_MODE_SIZE (wanted_mode) - offset);
2111
2112 pos %= GET_MODE_BITSIZE (wanted_mode);
2113
2114 newmem = adjust_address_nv (tem, wanted_mode, offset);
2115
2116 /* Make the change and see if the insn remains valid. */
2117 INSN_CODE (insn) = -1;
2118 XEXP (x, 0) = newmem;
2119 XEXP (x, 2) = GEN_INT (pos);
2120
2121 if (recog_memoized (insn) >= 0)
2122 return;
2123
2124 /* Otherwise, restore old position. XEXP (x, 0) will be
2125 restored later. */
2126 XEXP (x, 2) = old_pos;
2127 }
2128 }
2129
2130 /* If we get here, the bitfield extract insn can't accept a memory
2131 reference. Copy the input into a register. */
2132
2133 tem1 = gen_reg_rtx (GET_MODE (tem));
2134 emit_insn_before (gen_move_insn (tem1, tem), insn);
2135 XEXP (x, 0) = tem1;
2136 return;
2137 }
2138 break;
2139
2140 case SUBREG:
2141 if (SUBREG_REG (x) == var)
2142 {
2143 /* If this is a special SUBREG made because VAR was promoted
2144 from a wider mode, replace it with VAR and call ourself
2145 recursively, this time saying that the object previously
2146 had its current mode (by virtue of the SUBREG). */
2147
2148 if (SUBREG_PROMOTED_VAR_P (x))
2149 {
2150 *loc = var;
2151 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements,
2152 no_share);
2153 return;
2154 }
2155
2156 /* If this SUBREG makes VAR wider, it has become a paradoxical
2157 SUBREG with VAR in memory, but these aren't allowed at this
2158 stage of the compilation. So load VAR into a pseudo and take
2159 a SUBREG of that pseudo. */
2160 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2161 {
2162 replacement = find_fixup_replacement (replacements, var);
2163 if (replacement->new == 0)
2164 replacement->new = gen_reg_rtx (promoted_mode);
2165 SUBREG_REG (x) = replacement->new;
2166 return;
2167 }
2168
2169 /* See if we have already found a replacement for this SUBREG.
2170 If so, use it. Otherwise, make a MEM and see if the insn
2171 is recognized. If not, or if we should force MEM into a register,
2172 make a pseudo for this SUBREG. */
2173 replacement = find_fixup_replacement (replacements, x);
2174 if (replacement->new)
2175 {
2176 *loc = replacement->new;
2177 return;
2178 }
2179
2180 replacement->new = *loc = fixup_memory_subreg (x, insn,
2181 promoted_mode, 0);
2182
2183 INSN_CODE (insn) = -1;
2184 if (! flag_force_mem && recog_memoized (insn) >= 0)
2185 return;
2186
2187 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2188 return;
2189 }
2190 break;
2191
2192 case SET:
2193 /* First do special simplification of bit-field references. */
2194 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2195 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2196 optimize_bit_field (x, insn, 0);
2197 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2198 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2199 optimize_bit_field (x, insn, 0);
2200
2201 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2202 into a register and then store it back out. */
2203 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2204 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2205 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2206 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2207 > GET_MODE_SIZE (GET_MODE (var))))
2208 {
2209 replacement = find_fixup_replacement (replacements, var);
2210 if (replacement->new == 0)
2211 replacement->new = gen_reg_rtx (GET_MODE (var));
2212
2213 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2214 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2215 }
2216
2217 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2218 insn into a pseudo and store the low part of the pseudo into VAR. */
2219 if (GET_CODE (SET_DEST (x)) == SUBREG
2220 && SUBREG_REG (SET_DEST (x)) == var
2221 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2222 > GET_MODE_SIZE (GET_MODE (var))))
2223 {
2224 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2225 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2226 tem)),
2227 insn);
2228 break;
2229 }
2230
2231 {
2232 rtx dest = SET_DEST (x);
2233 rtx src = SET_SRC (x);
2234 rtx outerdest = dest;
2235
2236 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2237 || GET_CODE (dest) == SIGN_EXTRACT
2238 || GET_CODE (dest) == ZERO_EXTRACT)
2239 dest = XEXP (dest, 0);
2240
2241 if (GET_CODE (src) == SUBREG)
2242 src = SUBREG_REG (src);
2243
2244 /* If VAR does not appear at the top level of the SET
2245 just scan the lower levels of the tree. */
2246
2247 if (src != var && dest != var)
2248 break;
2249
2250 /* We will need to rerecognize this insn. */
2251 INSN_CODE (insn) = -1;
2252
2253 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var
2254 && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
2255 {
2256 /* Since this case will return, ensure we fixup all the
2257 operands here. */
2258 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2259 insn, replacements, no_share);
2260 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2261 insn, replacements, no_share);
2262 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2263 insn, replacements, no_share);
2264
2265 tem = XEXP (outerdest, 0);
2266
2267 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2268 that may appear inside a ZERO_EXTRACT.
2269 This was legitimate when the MEM was a REG. */
2270 if (GET_CODE (tem) == SUBREG
2271 && SUBREG_REG (tem) == var)
2272 tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
2273 else
2274 tem = fixup_stack_1 (tem, insn);
2275
2276 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2277 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2278 && ! mode_dependent_address_p (XEXP (tem, 0))
2279 && ! MEM_VOLATILE_P (tem))
2280 {
2281 enum machine_mode wanted_mode;
2282 enum machine_mode is_mode = GET_MODE (tem);
2283 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2284
2285 wanted_mode = mode_for_extraction (EP_insv, 0);
2286
2287 /* If we have a narrower mode, we can do something. */
2288 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2289 {
2290 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2291 rtx old_pos = XEXP (outerdest, 2);
2292 rtx newmem;
2293
2294 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2295 offset = (GET_MODE_SIZE (is_mode)
2296 - GET_MODE_SIZE (wanted_mode) - offset);
2297
2298 pos %= GET_MODE_BITSIZE (wanted_mode);
2299
2300 newmem = adjust_address_nv (tem, wanted_mode, offset);
2301
2302 /* Make the change and see if the insn remains valid. */
2303 INSN_CODE (insn) = -1;
2304 XEXP (outerdest, 0) = newmem;
2305 XEXP (outerdest, 2) = GEN_INT (pos);
2306
2307 if (recog_memoized (insn) >= 0)
2308 return;
2309
2310 /* Otherwise, restore old position. XEXP (x, 0) will be
2311 restored later. */
2312 XEXP (outerdest, 2) = old_pos;
2313 }
2314 }
2315
2316 /* If we get here, the bit-field store doesn't allow memory
2317 or isn't located at a constant position. Load the value into
2318 a register, do the store, and put it back into memory. */
2319
2320 tem1 = gen_reg_rtx (GET_MODE (tem));
2321 emit_insn_before (gen_move_insn (tem1, tem), insn);
2322 emit_insn_after (gen_move_insn (tem, tem1), insn);
2323 XEXP (outerdest, 0) = tem1;
2324 return;
2325 }
2326
2327 /* STRICT_LOW_PART is a no-op on memory references
2328 and it can cause combinations to be unrecognizable,
2329 so eliminate it. */
2330
2331 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2332 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2333
2334 /* A valid insn to copy VAR into or out of a register
2335 must be left alone, to avoid an infinite loop here.
2336 If the reference to VAR is by a subreg, fix that up,
2337 since SUBREG is not valid for a memref.
2338 Also fix up the address of the stack slot.
2339
2340 Note that we must not try to recognize the insn until
2341 after we know that we have valid addresses and no
2342 (subreg (mem ...) ...) constructs, since these interfere
2343 with determining the validity of the insn. */
2344
2345 if ((SET_SRC (x) == var
2346 || (GET_CODE (SET_SRC (x)) == SUBREG
2347 && SUBREG_REG (SET_SRC (x)) == var))
2348 && (GET_CODE (SET_DEST (x)) == REG
2349 || (GET_CODE (SET_DEST (x)) == SUBREG
2350 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2351 && GET_MODE (var) == promoted_mode
2352 && x == single_set (insn))
2353 {
2354 rtx pat, last;
2355
2356 if (GET_CODE (SET_SRC (x)) == SUBREG
2357 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
2358 > GET_MODE_SIZE (GET_MODE (var))))
2359 {
2360 /* This (subreg VAR) is now a paradoxical subreg. We need
2361 to replace VAR instead of the subreg. */
2362 replacement = find_fixup_replacement (replacements, var);
2363 if (replacement->new == NULL_RTX)
2364 replacement->new = gen_reg_rtx (GET_MODE (var));
2365 SUBREG_REG (SET_SRC (x)) = replacement->new;
2366 }
2367 else
2368 {
2369 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2370 if (replacement->new)
2371 SET_SRC (x) = replacement->new;
2372 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2373 SET_SRC (x) = replacement->new
2374 = fixup_memory_subreg (SET_SRC (x), insn, promoted_mode,
2375 0);
2376 else
2377 SET_SRC (x) = replacement->new
2378 = fixup_stack_1 (SET_SRC (x), insn);
2379 }
2380
2381 if (recog_memoized (insn) >= 0)
2382 return;
2383
2384 /* INSN is not valid, but we know that we want to
2385 copy SET_SRC (x) to SET_DEST (x) in some way. So
2386 we generate the move and see whether it requires more
2387 than one insn. If it does, we emit those insns and
2388 delete INSN. Otherwise, we can just replace the pattern
2389 of INSN; we have already verified above that INSN has
2390 no other function than to do X. */
2391
2392 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2393 if (NEXT_INSN (pat) != NULL_RTX)
2394 {
2395 last = emit_insn_before (pat, insn);
2396
2397 /* INSN might have REG_RETVAL or other important notes, so
2398 we need to store the pattern of the last insn in the
2399 sequence into INSN similarly to the normal case. LAST
2400 should not have REG_NOTES, but we allow them if INSN has
2401 no REG_NOTES. */
2402 if (REG_NOTES (last) && REG_NOTES (insn))
2403 abort ();
2404 if (REG_NOTES (last))
2405 REG_NOTES (insn) = REG_NOTES (last);
2406 PATTERN (insn) = PATTERN (last);
2407
2408 delete_insn (last);
2409 }
2410 else
2411 PATTERN (insn) = PATTERN (pat);
2412
2413 return;
2414 }
2415
2416 if ((SET_DEST (x) == var
2417 || (GET_CODE (SET_DEST (x)) == SUBREG
2418 && SUBREG_REG (SET_DEST (x)) == var))
2419 && (GET_CODE (SET_SRC (x)) == REG
2420 || (GET_CODE (SET_SRC (x)) == SUBREG
2421 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2422 && GET_MODE (var) == promoted_mode
2423 && x == single_set (insn))
2424 {
2425 rtx pat, last;
2426
2427 if (GET_CODE (SET_DEST (x)) == SUBREG)
2428 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn,
2429 promoted_mode, 0);
2430 else
2431 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2432
2433 if (recog_memoized (insn) >= 0)
2434 return;
2435
2436 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2437 if (NEXT_INSN (pat) != NULL_RTX)
2438 {
2439 last = emit_insn_before (pat, insn);
2440
2441 /* INSN might have REG_RETVAL or other important notes, so
2442 we need to store the pattern of the last insn in the
2443 sequence into INSN similarly to the normal case. LAST
2444 should not have REG_NOTES, but we allow them if INSN has
2445 no REG_NOTES. */
2446 if (REG_NOTES (last) && REG_NOTES (insn))
2447 abort ();
2448 if (REG_NOTES (last))
2449 REG_NOTES (insn) = REG_NOTES (last);
2450 PATTERN (insn) = PATTERN (last);
2451
2452 delete_insn (last);
2453 }
2454 else
2455 PATTERN (insn) = PATTERN (pat);
2456
2457 return;
2458 }
2459
2460 /* Otherwise, storing into VAR must be handled specially
2461 by storing into a temporary and copying that into VAR
2462 with a new insn after this one. Note that this case
2463 will be used when storing into a promoted scalar since
2464 the insn will now have different modes on the input
2465 and output and hence will be invalid (except for the case
2466 of setting it to a constant, which does not need any
2467 change if it is valid). We generate extra code in that case,
2468 but combine.c will eliminate it. */
2469
2470 if (dest == var)
2471 {
2472 rtx temp;
2473 rtx fixeddest = SET_DEST (x);
2474 enum machine_mode temp_mode;
2475
2476 /* STRICT_LOW_PART around a MEM can be discarded. */
2477 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2478 fixeddest = XEXP (fixeddest, 0);
2479 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2480 if (GET_CODE (fixeddest) == SUBREG)
2481 {
2482 fixeddest = fixup_memory_subreg (fixeddest, insn,
2483 promoted_mode, 0);
2484 temp_mode = GET_MODE (fixeddest);
2485 }
2486 else
2487 {
2488 fixeddest = fixup_stack_1 (fixeddest, insn);
2489 temp_mode = promoted_mode;
2490 }
2491
2492 temp = gen_reg_rtx (temp_mode);
2493
2494 emit_insn_after (gen_move_insn (fixeddest,
2495 gen_lowpart (GET_MODE (fixeddest),
2496 temp)),
2497 insn);
2498
2499 SET_DEST (x) = temp;
2500 }
2501 }
2502
2503 default:
2504 break;
2505 }
2506
2507 /* Nothing special about this RTX; fix its operands. */
2508
2509 fmt = GET_RTX_FORMAT (code);
2510 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2511 {
2512 if (fmt[i] == 'e')
2513 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements,
2514 no_share);
2515 else if (fmt[i] == 'E')
2516 {
2517 int j;
2518 for (j = 0; j < XVECLEN (x, i); j++)
2519 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2520 insn, replacements, no_share);
2521 }
2522 }
2523 }
2524 \f
2525 /* Previously, X had the form (SUBREG:m1 (REG:PROMOTED_MODE ...)).
2526 The REG was placed on the stack, so X now has the form (SUBREG:m1
2527 (MEM:m2 ...)).
2528
2529 Return an rtx (MEM:m1 newaddr) which is equivalent. If any insns
2530 must be emitted to compute NEWADDR, put them before INSN.
2531
2532 UNCRITICAL nonzero means accept paradoxical subregs.
2533 This is used for subregs found inside REG_NOTES. */
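/* An illustrative example (address and modes invented): if X is
   (subreg:QI (mem:SI (plus:SI (reg fp) (const_int -8))) 0), the result is
   (mem:QI (plus:SI (reg fp) (const_int -8))) -- the same address with the
   narrower mode, the byte offset coming from SUBREG_BYTE with the
   big-endian PROMOTED_MODE adjustment applied below.  */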
2534
2535 static rtx
2536 fixup_memory_subreg (x, insn, promoted_mode, uncritical)
2537 rtx x;
2538 rtx insn;
2539 enum machine_mode promoted_mode;
2540 int uncritical;
2541 {
2542 int offset;
2543 rtx mem = SUBREG_REG (x);
2544 rtx addr = XEXP (mem, 0);
2545 enum machine_mode mode = GET_MODE (x);
2546 rtx result, seq;
2547
2548 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2549 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (mem)) && ! uncritical)
2550 abort ();
2551
2552 offset = SUBREG_BYTE (x);
2553 if (BYTES_BIG_ENDIAN)
2554 /* If the PROMOTED_MODE is wider than the mode of the MEM, adjust
2555 the offset so that it points to the right location within the
2556 MEM. */
2557 offset -= (GET_MODE_SIZE (promoted_mode) - GET_MODE_SIZE (GET_MODE (mem)));
2558
2559 if (!flag_force_addr
2560 && memory_address_p (mode, plus_constant (addr, offset)))
2561 /* Shortcut if no insns need be emitted. */
2562 return adjust_address (mem, mode, offset);
2563
2564 start_sequence ();
2565 result = adjust_address (mem, mode, offset);
2566 seq = get_insns ();
2567 end_sequence ();
2568
2569 emit_insn_before (seq, insn);
2570 return result;
2571 }
2572
2573 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2574 Replace subexpressions of X in place.
2575 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2576 Otherwise return X, with its contents possibly altered.
2577
2578 INSN, PROMOTED_MODE and UNCRITICAL are as for
2579 fixup_memory_subreg. */
2580
2581 static rtx
2582 walk_fixup_memory_subreg (x, insn, promoted_mode, uncritical)
2583 rtx x;
2584 rtx insn;
2585 enum machine_mode promoted_mode;
2586 int uncritical;
2587 {
2588 enum rtx_code code;
2589 const char *fmt;
2590 int i;
2591
2592 if (x == 0)
2593 return 0;
2594
2595 code = GET_CODE (x);
2596
2597 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2598 return fixup_memory_subreg (x, insn, promoted_mode, uncritical);
2599
2600 /* Nothing special about this RTX; fix its operands. */
2601
2602 fmt = GET_RTX_FORMAT (code);
2603 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2604 {
2605 if (fmt[i] == 'e')
2606 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn,
2607 promoted_mode, uncritical);
2608 else if (fmt[i] == 'E')
2609 {
2610 int j;
2611 for (j = 0; j < XVECLEN (x, i); j++)
2612 XVECEXP (x, i, j)
2613 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn,
2614 promoted_mode, uncritical);
2615 }
2616 }
2617 return x;
2618 }
2619 \f
2620 /* For each memory ref within X, if it refers to a stack slot
2621 with an out of range displacement, put the address in a temp register
2622 (emitting new insns before INSN to load these registers)
2623 and alter the memory ref to use that register.
2624 Replace each such MEM rtx with a copy, to avoid clobberage. */
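/* An illustrative sketch (offsets and register numbers invented): a
   reference such as
     (mem:SI (plus:SI (reg virtual-stack-vars) (const_int 40000)))
   whose displacement is out of range for the target's addressing modes
   would be rewritten as (mem:SI (reg:SI 90)), after emitting insns before
   INSN that copy the PLUS address into the fresh pseudo 90.  */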
2625
2626 static rtx
2627 fixup_stack_1 (x, insn)
2628 rtx x;
2629 rtx insn;
2630 {
2631 int i;
2632 RTX_CODE code = GET_CODE (x);
2633 const char *fmt;
2634
2635 if (code == MEM)
2636 {
2637 rtx ad = XEXP (x, 0);
2638 /* If we have address of a stack slot but it's not valid
2639 (displacement is too large), compute the sum in a register. */
2640 if (GET_CODE (ad) == PLUS
2641 && GET_CODE (XEXP (ad, 0)) == REG
2642 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2643 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2644 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2645 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2646 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2647 #endif
2648 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2649 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2650 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2651 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2652 {
2653 rtx temp, seq;
2654 if (memory_address_p (GET_MODE (x), ad))
2655 return x;
2656
2657 start_sequence ();
2658 temp = copy_to_reg (ad);
2659 seq = get_insns ();
2660 end_sequence ();
2661 emit_insn_before (seq, insn);
2662 return replace_equiv_address (x, temp);
2663 }
2664 return x;
2665 }
2666
2667 fmt = GET_RTX_FORMAT (code);
2668 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2669 {
2670 if (fmt[i] == 'e')
2671 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2672 else if (fmt[i] == 'E')
2673 {
2674 int j;
2675 for (j = 0; j < XVECLEN (x, i); j++)
2676 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2677 }
2678 }
2679 return x;
2680 }
2681 \f
2682 /* Optimization: a bit-field instruction whose field
2683 happens to be a byte or halfword in memory
2684 can be changed to a move instruction.
2685
2686 We call here when INSN is an insn to examine or store into a bit-field.
2687 BODY is the SET-rtx to be altered.
2688
2689 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2690 (Currently this is called only from function.c, and EQUIV_MEM
2691 is always 0.) */
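/* An illustrative example (assuming BITS_BIG_ENDIAN == BYTES_BIG_ENDIAN and
   a little-endian layout; all numbers invented): a bit-field store such as
     (set (zero_extract:SI (mem:SI ADDR) (const_int 8) (const_int 8))
          (reg:SI 70))
   can be narrowed to a plain byte store like
     (set (mem:QI (plus ADDR (const_int 1))) (subreg:QI (reg:SI 70) 0))
   when the position and size are constant and suitably aligned, as
   checked below.  */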
2692
2693 static void
2694 optimize_bit_field (body, insn, equiv_mem)
2695 rtx body;
2696 rtx insn;
2697 rtx *equiv_mem;
2698 {
2699 rtx bitfield;
2700 int destflag;
2701 rtx seq = 0;
2702 enum machine_mode mode;
2703
2704 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2705 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2706 bitfield = SET_DEST (body), destflag = 1;
2707 else
2708 bitfield = SET_SRC (body), destflag = 0;
2709
2710 /* First check that the field being stored has constant size and position
2711 and is in fact a byte or halfword suitably aligned. */
2712
2713 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2714 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2715 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2716 != BLKmode)
2717 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2718 {
2719 rtx memref = 0;
2720
2721 /* Now check that the containing word is memory, not a register,
2722 and that it is safe to change the machine mode. */
2723
2724 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2725 memref = XEXP (bitfield, 0);
2726 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2727 && equiv_mem != 0)
2728 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2729 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2730 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2731 memref = SUBREG_REG (XEXP (bitfield, 0));
2732 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2733 && equiv_mem != 0
2734 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2735 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2736
2737 if (memref
2738 && ! mode_dependent_address_p (XEXP (memref, 0))
2739 && ! MEM_VOLATILE_P (memref))
2740 {
2741 /* Now adjust the address, first for any subreg'ing
2742 that we are now getting rid of,
2743 and then for which byte of the word is wanted. */
2744
2745 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2746 rtx insns;
2747
2748 /* Adjust OFFSET to count bits from low-address byte. */
2749 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2750 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2751 - offset - INTVAL (XEXP (bitfield, 1)));
2752
2753 /* Adjust OFFSET to count bytes from low-address byte. */
2754 offset /= BITS_PER_UNIT;
2755 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2756 {
2757 offset += (SUBREG_BYTE (XEXP (bitfield, 0))
2758 / UNITS_PER_WORD) * UNITS_PER_WORD;
2759 if (BYTES_BIG_ENDIAN)
2760 offset -= (MIN (UNITS_PER_WORD,
2761 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2762 - MIN (UNITS_PER_WORD,
2763 GET_MODE_SIZE (GET_MODE (memref))));
2764 }
2765
2766 start_sequence ();
2767 memref = adjust_address (memref, mode, offset);
2768 insns = get_insns ();
2769 end_sequence ();
2770 emit_insn_before (insns, insn);
2771
2772 /* Store this memory reference where
2773 we found the bit field reference. */
2774
2775 if (destflag)
2776 {
2777 validate_change (insn, &SET_DEST (body), memref, 1);
2778 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2779 {
2780 rtx src = SET_SRC (body);
2781 while (GET_CODE (src) == SUBREG
2782 && SUBREG_BYTE (src) == 0)
2783 src = SUBREG_REG (src);
2784 if (GET_MODE (src) != GET_MODE (memref))
2785 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2786 validate_change (insn, &SET_SRC (body), src, 1);
2787 }
2788 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2789 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2790 /* This shouldn't happen because anything that didn't have
2791 one of these modes should have got converted explicitly
2792 and then referenced through a subreg.
2793 This is so because the original bit-field was
2794 handled by agg_mode and so its tree structure had
2795 the same mode that memref now has. */
2796 abort ();
2797 }
2798 else
2799 {
2800 rtx dest = SET_DEST (body);
2801
2802 while (GET_CODE (dest) == SUBREG
2803 && SUBREG_BYTE (dest) == 0
2804 && (GET_MODE_CLASS (GET_MODE (dest))
2805 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2806 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2807 <= UNITS_PER_WORD))
2808 dest = SUBREG_REG (dest);
2809
2810 validate_change (insn, &SET_DEST (body), dest, 1);
2811
2812 if (GET_MODE (dest) == GET_MODE (memref))
2813 validate_change (insn, &SET_SRC (body), memref, 1);
2814 else
2815 {
2816 /* Convert the mem ref to the destination mode. */
2817 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2818
2819 start_sequence ();
2820 convert_move (newreg, memref,
2821 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2822 seq = get_insns ();
2823 end_sequence ();
2824
2825 validate_change (insn, &SET_SRC (body), newreg, 1);
2826 }
2827 }
2828
2829 /* See if we can convert this extraction or insertion into
2830 a simple move insn. We might not be able to do so if this
2831 was, for example, part of a PARALLEL.
2832
2833 If we succeed, write out any needed conversions. If we fail,
2834 it is hard to guess why we failed, so don't do anything
2835 special; just let the optimization be suppressed. */
2836
2837 if (apply_change_group () && seq)
2838 emit_insn_before (seq, insn);
2839 }
2840 }
2841 }
2842 \f
2843 /* These routines are responsible for converting virtual register references
2844 to the actual hard register references once RTL generation is complete.
2845
2846 The following five variables are used for communication between the
2847 routines. They contain the offsets of the virtual registers from their
2848 respective hard registers. */
2849
2850 static int in_arg_offset;
2851 static int var_offset;
2852 static int dynamic_offset;
2853 static int out_arg_offset;
2854 static int cfa_offset;
2855
2856 /* In most machines, the stack pointer register is equivalent to the bottom
2857 of the stack. */
2858
2859 #ifndef STACK_POINTER_OFFSET
2860 #define STACK_POINTER_OFFSET 0
2861 #endif
2862
2863 /* If not defined, pick an appropriate default for the offset of dynamically
2864 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2865 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2866
2867 #ifndef STACK_DYNAMIC_OFFSET
2868
2869 /* The bottom of the stack points to the actual arguments. If
2870 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2871 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2872 stack space for register parameters is not pushed by the caller, but
2873 is rather part of the fixed stack areas and hence not included in
2874 `current_function_outgoing_args_size'. Nevertheless, we must allow
2875 for it when allocating stack dynamic objects. */
2876
2877 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2878 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2879 ((ACCUMULATE_OUTGOING_ARGS \
2880 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
2881 + (STACK_POINTER_OFFSET)) \
2882
2883 #else
2884 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2885 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
2886 + (STACK_POINTER_OFFSET))
2887 #endif
2888 #endif
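/* Reading of the defaults above (not a definition taken from any target):
   when ACCUMULATE_OUTGOING_ARGS is set and REG_PARM_STACK_SPACE is defined
   but OUTGOING_REG_PARM_STACK_SPACE is not, dynamically allocated objects
   end up beyond both the outgoing-argument block and the register-parameter
   save area, offset further by STACK_POINTER_OFFSET.  */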
2889
2890 /* On most machines, the CFA coincides with the first incoming parm. */
2891
2892 #ifndef ARG_POINTER_CFA_OFFSET
2893 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
2894 #endif
2895
2896 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had its
2897 address taken. DECL is the decl or SAVE_EXPR for the object stored in the
2898 register, for later use if we do need to force REG into the stack. REG is
2899 overwritten by the MEM like in put_reg_into_stack. */
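/* An illustrative sketch (register numbers invented): for pseudo 60 whose
   address is taken, the rtx of reg 60 is overwritten in place so that it
   becomes
     (mem (addressof:Pmode (reg 65) 60 DECL))
   where 65 is a fresh pseudo that would hold the address.  If the
   ADDRESSOF survives to purge_addressof, the register is forced into a
   real stack slot; otherwise the ADDRESSOF is simply eliminated.  */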
2900
2901 rtx
2902 gen_mem_addressof (reg, decl)
2903 rtx reg;
2904 tree decl;
2905 {
2906 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2907 REGNO (reg), decl);
2908
2909 /* Calculate this before we start messing with decl's RTL. */
2910 HOST_WIDE_INT set = decl ? get_alias_set (decl) : 0;
2911
2912 /* If the original REG was a user-variable, then so is the REG whose
2913 address is being taken. Likewise for unchanging. */
2914 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2915 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2916
2917 PUT_CODE (reg, MEM);
2918 MEM_ATTRS (reg) = 0;
2919 XEXP (reg, 0) = r;
2920
2921 if (decl)
2922 {
2923 tree type = TREE_TYPE (decl);
2924 enum machine_mode decl_mode
2925 = (DECL_P (decl) ? DECL_MODE (decl) : TYPE_MODE (TREE_TYPE (decl)));
2926 rtx decl_rtl = (TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl)
2927 : DECL_RTL_IF_SET (decl));
2928
2929 PUT_MODE (reg, decl_mode);
2930
2931 /* Clear DECL_RTL momentarily so functions below will work
2932 properly, then set it again. */
2933 if (DECL_P (decl) && decl_rtl == reg)
2934 SET_DECL_RTL (decl, 0);
2935
2936 set_mem_attributes (reg, decl, 1);
2937 set_mem_alias_set (reg, set);
2938
2939 if (DECL_P (decl) && decl_rtl == reg)
2940 SET_DECL_RTL (decl, reg);
2941
2942 if (TREE_USED (decl) || (DECL_P (decl) && DECL_INITIAL (decl) != 0))
2943 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), reg, 0);
2944 }
2945 else
2946 fixup_var_refs (reg, GET_MODE (reg), 0, reg, 0);
2947
2948 return reg;
2949 }
2950
2951 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2952
2953 void
2954 flush_addressof (decl)
2955 tree decl;
2956 {
2957 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2958 && DECL_RTL (decl) != 0
2959 && GET_CODE (DECL_RTL (decl)) == MEM
2960 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2961 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2962 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2963 }
2964
2965 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2966
2967 static void
2968 put_addressof_into_stack (r, ht)
2969 rtx r;
2970 htab_t ht;
2971 {
2972 tree decl, type;
2973 int volatile_p, used_p;
2974
2975 rtx reg = XEXP (r, 0);
2976
2977 if (GET_CODE (reg) != REG)
2978 abort ();
2979
2980 decl = ADDRESSOF_DECL (r);
2981 if (decl)
2982 {
2983 type = TREE_TYPE (decl);
2984 volatile_p = (TREE_CODE (decl) != SAVE_EXPR
2985 && TREE_THIS_VOLATILE (decl));
2986 used_p = (TREE_USED (decl)
2987 || (DECL_P (decl) && DECL_INITIAL (decl) != 0));
2988 }
2989 else
2990 {
2991 type = NULL_TREE;
2992 volatile_p = 0;
2993 used_p = 1;
2994 }
2995
2996 put_reg_into_stack (0, reg, type, GET_MODE (reg), GET_MODE (reg),
2997 volatile_p, ADDRESSOF_REGNO (r), used_p, ht);
2998 }
2999
3000 /* List of replacements made below in purge_addressof_1 when creating
3001 bitfield insertions. */
3002 static rtx purge_bitfield_addressof_replacements;
3003
3004 /* List of replacements made below in purge_addressof_1 for patterns
3005 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
3006 corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
3007 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
3008 enough in complex cases, e.g. when some field values can be
3009 extracted by using a MEM with a narrower mode. */
3010 static rtx purge_addressof_replacements;
3011
3012 /* Helper function for purge_addressof. See if the rtx expression at *LOC
3013 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
3014 the stack. If the function returns FALSE then the replacement could not
3015 be made. */
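/* An illustrative sketch (register numbers invented; modes assume a 32-bit
   target): a reference such as
     (mem:SI (addressof:SI (reg:SI 65) 60 DECL))
   whose mode matches the register's can usually be replaced by (reg:SI 65)
   directly; mode mismatches are handled below with bit-field
   extraction/insertion, and ADDRESSOFs that cannot be rewritten end up in
   the stack via put_addressof_into_stack.  */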
3016
3017 static bool
3018 purge_addressof_1 (loc, insn, force, store, ht)
3019 rtx *loc;
3020 rtx insn;
3021 int force, store;
3022 htab_t ht;
3023 {
3024 rtx x;
3025 RTX_CODE code;
3026 int i, j;
3027 const char *fmt;
3028 bool result = true;
3029
3030 /* Re-start here to avoid recursion in common cases. */
3031 restart:
3032
3033 x = *loc;
3034 if (x == 0)
3035 return true;
3036
3037 code = GET_CODE (x);
3038
3039 /* If we don't return in any of the cases below, we will recurse inside
3040 the RTX, which will normally result in any ADDRESSOF being forced into
3041 memory. */
3042 if (code == SET)
3043 {
3044 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3045 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3046 return result;
3047 }
3048 else if (code == ADDRESSOF)
3049 {
3050 rtx sub, insns;
3051
3052 if (GET_CODE (XEXP (x, 0)) != MEM)
3053 {
3054 put_addressof_into_stack (x, ht);
3055 return true;
3056 }
3057
3058 /* We must create a copy of the rtx because it was created by
3059 overwriting a REG rtx which is always shared. */
3060 sub = copy_rtx (XEXP (XEXP (x, 0), 0));
3061 if (validate_change (insn, loc, sub, 0)
3062 || validate_replace_rtx (x, sub, insn))
3063 return true;
3064
3065 start_sequence ();
3066 sub = force_operand (sub, NULL_RTX);
3067 if (! validate_change (insn, loc, sub, 0)
3068 && ! validate_replace_rtx (x, sub, insn))
3069 abort ();
3070
3071 insns = get_insns ();
3072 end_sequence ();
3073 emit_insn_before (insns, insn);
3074 return true;
3075 }
3076
3077 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3078 {
3079 rtx sub = XEXP (XEXP (x, 0), 0);
3080
3081 if (GET_CODE (sub) == MEM)
3082 sub = adjust_address_nv (sub, GET_MODE (x), 0);
3083 else if (GET_CODE (sub) == REG
3084 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3085 ;
3086 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3087 {
3088 int size_x, size_sub;
3089
3090 if (!insn)
3091 {
3092 /* When processing REG_NOTES look at the list of
3093 replacements done on the insn to find the register that X
3094 was replaced by. */
3095 rtx tem;
3096
3097 for (tem = purge_bitfield_addressof_replacements;
3098 tem != NULL_RTX;
3099 tem = XEXP (XEXP (tem, 1), 1))
3100 if (rtx_equal_p (x, XEXP (tem, 0)))
3101 {
3102 *loc = XEXP (XEXP (tem, 1), 0);
3103 return true;
3104 }
3105
3106 /* See comment for purge_addressof_replacements. */
3107 for (tem = purge_addressof_replacements;
3108 tem != NULL_RTX;
3109 tem = XEXP (XEXP (tem, 1), 1))
3110 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3111 {
3112 rtx z = XEXP (XEXP (tem, 1), 0);
3113
3114 if (GET_MODE (x) == GET_MODE (z)
3115 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3116 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3117 abort ();
3118
3119 /* It can happen that the note may speak of things
3120 in a wider (or just different) mode than the
3121 code did. This is especially true of
3122 REG_RETVAL. */
3123
3124 if (GET_CODE (z) == SUBREG && SUBREG_BYTE (z) == 0)
3125 z = SUBREG_REG (z);
3126
3127 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3128 && (GET_MODE_SIZE (GET_MODE (x))
3129 > GET_MODE_SIZE (GET_MODE (z))))
3130 {
3131 /* This can occur as a result of invalid
3132 pointer casts, e.g. float f; ...
3133 *(long long int *)&f.
3134 ??? We could emit a warning here, but
3135 without a line number that wouldn't be
3136 very helpful. */
3137 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3138 }
3139 else
3140 z = gen_lowpart (GET_MODE (x), z);
3141
3142 *loc = z;
3143 return true;
3144 }
3145
3146 /* Sometimes we may not be able to find the replacement. For
3147 example when the original insn was a MEM in a wider mode,
3148 and the note is part of a sign extension of a narrowed
3149 version of that MEM. Gcc testcase compile/990829-1.c can
3150 generate an example of this situation. Rather than complain
3151 we return false, which will prompt our caller to remove the
3152 offending note. */
3153 return false;
3154 }
3155
3156 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3157 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3158
3159 /* Don't even consider working with paradoxical subregs,
3160 or the moral equivalent seen here. */
3161 if (size_x <= size_sub
3162 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3163 {
3164 /* Do a bitfield insertion to mirror what would happen
3165 in memory. */
3166
3167 rtx val, seq;
3168
3169 if (store)
3170 {
3171 rtx p = PREV_INSN (insn);
3172
3173 start_sequence ();
3174 val = gen_reg_rtx (GET_MODE (x));
3175 if (! validate_change (insn, loc, val, 0))
3176 {
3177 /* Discard the current sequence and put the
3178 ADDRESSOF on stack. */
3179 end_sequence ();
3180 goto give_up;
3181 }
3182 seq = get_insns ();
3183 end_sequence ();
3184 emit_insn_before (seq, insn);
3185 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3186 insn, ht);
3187
3188 start_sequence ();
3189 store_bit_field (sub, size_x, 0, GET_MODE (x),
3190 val, GET_MODE_SIZE (GET_MODE (sub)));
3191
3192 /* Make sure to unshare any shared rtl that store_bit_field
3193 might have created. */
3194 unshare_all_rtl_again (get_insns ());
3195
3196 seq = get_insns ();
3197 end_sequence ();
3198 p = emit_insn_after (seq, insn);
3199 if (NEXT_INSN (insn))
3200 compute_insns_for_mem (NEXT_INSN (insn),
3201 p ? NEXT_INSN (p) : NULL_RTX,
3202 ht);
3203 }
3204 else
3205 {
3206 rtx p = PREV_INSN (insn);
3207
3208 start_sequence ();
3209 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3210 GET_MODE (x), GET_MODE (x),
3211 GET_MODE_SIZE (GET_MODE (sub)));
3212
3213 if (! validate_change (insn, loc, val, 0))
3214 {
3215 /* Discard the current sequence and put the
3216 ADDRESSOF on stack. */
3217 end_sequence ();
3218 goto give_up;
3219 }
3220
3221 seq = get_insns ();
3222 end_sequence ();
3223 emit_insn_before (seq, insn);
3224 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3225 insn, ht);
3226 }
3227
3228 /* Remember the replacement so that the same one can be done
3229 on the REG_NOTES. */
3230 purge_bitfield_addressof_replacements
3231 = gen_rtx_EXPR_LIST (VOIDmode, x,
3232 gen_rtx_EXPR_LIST
3233 (VOIDmode, val,
3234 purge_bitfield_addressof_replacements));
3235
3236 /* We replaced with a reg -- all done. */
3237 return true;
3238 }
3239 }
3240
3241 else if (validate_change (insn, loc, sub, 0))
3242 {
3243 /* Remember the replacement so that the same one can be done
3244 on the REG_NOTES. */
3245 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3246 {
3247 rtx tem;
3248
3249 for (tem = purge_addressof_replacements;
3250 tem != NULL_RTX;
3251 tem = XEXP (XEXP (tem, 1), 1))
3252 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3253 {
3254 XEXP (XEXP (tem, 1), 0) = sub;
3255 return true;
3256 }
3257 purge_addressof_replacements
3258 = gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0),
3259 gen_rtx_EXPR_LIST (VOIDmode, sub,
3260 purge_addressof_replacements));
3261 return true;
3262 }
3263 goto restart;
3264 }
3265 }
3266
3267 give_up:
3268 /* Scan all subexpressions. */
3269 fmt = GET_RTX_FORMAT (code);
3270 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3271 {
3272 if (*fmt == 'e')
3273 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3274 else if (*fmt == 'E')
3275 for (j = 0; j < XVECLEN (x, i); j++)
3276 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3277 }
3278
3279 return result;
3280 }
3281
3282 /* Return a hash value for K, a REG. */
3283
3284 static hashval_t
3285 insns_for_mem_hash (k)
3286 const void * k;
3287 {
3288 /* Use the address of the key for the hash value. */
3289 struct insns_for_mem_entry *m = (struct insns_for_mem_entry *) k;
3290 return (hashval_t) m->key;
3291 }
3292
3293 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3294
3295 static int
3296 insns_for_mem_comp (k1, k2)
3297 const void * k1;
3298 const void * k2;
3299 {
3300 struct insns_for_mem_entry *m1 = (struct insns_for_mem_entry *) k1;
3301 struct insns_for_mem_entry *m2 = (struct insns_for_mem_entry *) k2;
3302 return m1->key == m2->key;
3303 }
3304
3305 struct insns_for_mem_walk_info
3306 {
3307 /* The hash table that we are using to record which INSNs use which
3308 MEMs. */
3309 htab_t ht;
3310
3311 /* The INSN we are currently processing. */
3312 rtx insn;
3313
3314 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3315 to find the insns that use the REGs in the ADDRESSOFs. */
3316 int pass;
3317 };
3318
3319 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3320 that might be used in an ADDRESSOF expression, record this INSN in
3321 the hash table given by DATA (which is really a pointer to an
3322 insns_for_mem_walk_info structure). */
3323
3324 static int
3325 insns_for_mem_walk (r, data)
3326 rtx *r;
3327 void *data;
3328 {
3329 struct insns_for_mem_walk_info *ifmwi
3330 = (struct insns_for_mem_walk_info *) data;
3331 struct insns_for_mem_entry tmp;
3332 tmp.insns = NULL_RTX;
3333
3334 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3335 && GET_CODE (XEXP (*r, 0)) == REG)
3336 {
3337 PTR *e;
3338 tmp.key = XEXP (*r, 0);
3339 e = htab_find_slot (ifmwi->ht, &tmp, INSERT);
3340 if (*e == NULL)
3341 {
3342 *e = ggc_alloc (sizeof (tmp));
3343 memcpy (*e, &tmp, sizeof (tmp));
3344 }
3345 }
3346 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3347 {
3348 struct insns_for_mem_entry *ifme;
3349 tmp.key = *r;
3350 ifme = (struct insns_for_mem_entry *) htab_find (ifmwi->ht, &tmp);
3351
3352 /* If we have not already recorded this INSN, do so now. Since
3353 we process the INSNs in order, we know that if we have
3354 recorded it, it must be at the front of the list. */
3355 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3356 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3357 ifme->insns);
3358 }
3359
3360 return 0;
3361 }
3362
3363 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3364 which REGs in HT. */
3365
3366 static void
3367 compute_insns_for_mem (insns, last_insn, ht)
3368 rtx insns;
3369 rtx last_insn;
3370 htab_t ht;
3371 {
3372 rtx insn;
3373 struct insns_for_mem_walk_info ifmwi;
3374 ifmwi.ht = ht;
3375
3376 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3377 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3378 if (INSN_P (insn))
3379 {
3380 ifmwi.insn = insn;
3381 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3382 }
3383 }
3384
3385 /* Helper function for purge_addressof called through for_each_rtx.
3386 Returns true iff the rtl is an ADDRESSOF. */
3387
3388 static int
3389 is_addressof (rtl, data)
3390 rtx *rtl;
3391 void *data ATTRIBUTE_UNUSED;
3392 {
3393 return GET_CODE (*rtl) == ADDRESSOF;
3394 }
3395
3396 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3397 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3398 stack. */
3399
3400 void
3401 purge_addressof (insns)
3402 rtx insns;
3403 {
3404 rtx insn;
3405 htab_t ht;
3406
3407 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3408 requires a fixup pass over the instruction stream to correct
3409 INSNs that depended on the REG being a REG, and not a MEM. But,
3410 these fixup passes are slow. Furthermore, most MEMs are not
3411 mentioned in very many instructions. So, we speed up the process
3412 by pre-calculating which REGs occur in which INSNs; that allows
3413 us to perform the fixup passes much more quickly. */
3414 ht = htab_create_ggc (1000, insns_for_mem_hash, insns_for_mem_comp, NULL);
3415 compute_insns_for_mem (insns, NULL_RTX, ht);
3416
3417 for (insn = insns; insn; insn = NEXT_INSN (insn))
3418 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3419 || GET_CODE (insn) == CALL_INSN)
3420 {
3421 if (! purge_addressof_1 (&PATTERN (insn), insn,
3422 asm_noperands (PATTERN (insn)) > 0, 0, ht))
3423 /* If we could not replace the ADDRESSOFs in the insn,
3424 something is wrong. */
3425 abort ();
3426
3427 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, ht))
3428 {
3429 /* If we could not replace the ADDRESSOFs in the insn's notes,
3430 we can just remove the offending notes instead. */
3431 rtx note;
3432
3433 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3434 {
3435 /* If we find a REG_RETVAL note then the insn is a libcall.
3436 Such insns must have REG_EQUAL notes as well, in order
3437 for later passes of the compiler to work. So it is not
3438 safe to delete the notes here, and instead we abort. */
3439 if (REG_NOTE_KIND (note) == REG_RETVAL)
3440 abort ();
3441 if (for_each_rtx (&note, is_addressof, NULL))
3442 remove_note (insn, note);
3443 }
3444 }
3445 }
3446
3447 /* Clean up. */
3448 purge_bitfield_addressof_replacements = 0;
3449 purge_addressof_replacements = 0;
3450
3451 /* REGs are shared. purge_addressof will destructively replace a REG
3452 with a MEM, which creates shared MEMs.
3453
3454 Unfortunately, the children of put_reg_into_stack assume that MEMs
3455 referring to the same stack slot are shared (fixup_var_refs and
3456 the associated hash table code).
3457
3458 So, we have to do another unsharing pass after we have flushed any
3459 REGs that had their address taken into the stack.
3460
3461 It may be worth tracking whether or not we converted any REGs into
3462 MEMs to avoid this overhead when it is not needed. */
3463 unshare_all_rtl_again (get_insns ());
3464 }
3465 \f
3466 /* Convert a SET of a hard subreg to a set of the appropriate hard
3467 register. A subroutine of purge_hard_subreg_sets. */
3468
3469 static void
3470 purge_single_hard_subreg_set (pattern)
3471 rtx pattern;
3472 {
3473 rtx reg = SET_DEST (pattern);
3474 enum machine_mode mode = GET_MODE (SET_DEST (pattern));
3475 int offset = 0;
3476
3477 if (GET_CODE (reg) == SUBREG && GET_CODE (SUBREG_REG (reg)) == REG
3478 && REGNO (SUBREG_REG (reg)) < FIRST_PSEUDO_REGISTER)
3479 {
3480 offset = subreg_regno_offset (REGNO (SUBREG_REG (reg)),
3481 GET_MODE (SUBREG_REG (reg)),
3482 SUBREG_BYTE (reg),
3483 GET_MODE (reg));
3484 reg = SUBREG_REG (reg);
3485 }
3486
3487
3488 if (GET_CODE (reg) == REG && REGNO (reg) < FIRST_PSEUDO_REGISTER)
3489 {
3490 reg = gen_rtx_REG (mode, REGNO (reg) + offset);
3491 SET_DEST (pattern) = reg;
3492 }
3493 }
3494
3495 /* Eliminate all occurrences of SETs of hard subregs from INSNS. The
3496 only such SETs that we expect to see are those left in because
3497 integrate can't handle sets of parts of a return value register.
3498
3499 We don't use alter_subreg because we only want to eliminate subregs
3500 of hard registers. */
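/* An illustrative example (hard register numbering is target-specific and
   invented here): a return-value set like
     (set (subreg:SI (reg:DI 0) 4) (reg:SI 70))
   becomes a set of the hard register that actually overlaps those bytes,
   e.g. (set (reg:SI 1) (reg:SI 70)), computed via subreg_regno_offset.  */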
3501
3502 void
3503 purge_hard_subreg_sets (insn)
3504 rtx insn;
3505 {
3506 for (; insn; insn = NEXT_INSN (insn))
3507 {
3508 if (INSN_P (insn))
3509 {
3510 rtx pattern = PATTERN (insn);
3511 switch (GET_CODE (pattern))
3512 {
3513 case SET:
3514 if (GET_CODE (SET_DEST (pattern)) == SUBREG)
3515 purge_single_hard_subreg_set (pattern);
3516 break;
3517 case PARALLEL:
3518 {
3519 int j;
3520 for (j = XVECLEN (pattern, 0) - 1; j >= 0; j--)
3521 {
3522 rtx inner_pattern = XVECEXP (pattern, 0, j);
3523 if (GET_CODE (inner_pattern) == SET
3524 && GET_CODE (SET_DEST (inner_pattern)) == SUBREG)
3525 purge_single_hard_subreg_set (inner_pattern);
3526 }
3527 }
3528 break;
3529 default:
3530 break;
3531 }
3532 }
3533 }
3534 }
3535 \f
3536 /* Pass through the INSNS of function FNDECL and convert virtual register
3537 references to hard register references. */
3538
3539 void
3540 instantiate_virtual_regs (fndecl, insns)
3541 tree fndecl;
3542 rtx insns;
3543 {
3544 rtx insn;
3545 unsigned int i;
3546
3547 /* Compute the offsets to use for this function. */
3548 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3549 var_offset = STARTING_FRAME_OFFSET;
3550 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3551 out_arg_offset = STACK_POINTER_OFFSET;
3552 cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);
3553
3554 /* Scan all variables and parameters of this function. For each that is
3555 in memory, instantiate all virtual registers if the result is a valid
3556 address. If not, we do it later. That will handle most uses of virtual
3557 regs on many machines. */
3558 instantiate_decls (fndecl, 1);
3559
3560 /* Initialize recognition, indicating that volatile is OK. */
3561 init_recog ();
3562
3563 /* Scan through all the insns, instantiating every virtual register still
3564 present. */
3565 for (insn = insns; insn; insn = NEXT_INSN (insn))
3566 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3567 || GET_CODE (insn) == CALL_INSN)
3568 {
3569 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3570 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3571 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
3572 if (GET_CODE (insn) == CALL_INSN)
3573 instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
3574 NULL_RTX, 0);
3575 }
3576
3577 /* Instantiate the stack slots for the parm registers, for later use in
3578 addressof elimination. */
3579 for (i = 0; i < max_parm_reg; ++i)
3580 if (parm_reg_stack_loc[i])
3581 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3582
3583 /* Now instantiate the remaining register equivalences for debugging info.
3584 These will not be valid addresses. */
3585 instantiate_decls (fndecl, 0);
3586
3587 /* Indicate that, from now on, assign_stack_local should use
3588 frame_pointer_rtx. */
3589 virtuals_instantiated = 1;
3590 }
3591
3592 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3593 all virtual registers in their DECL_RTL's.
3594
3595 If VALID_ONLY, do this only if the resulting address is still valid.
3596 Otherwise, always do it. */
3597
3598 static void
3599 instantiate_decls (fndecl, valid_only)
3600 tree fndecl;
3601 int valid_only;
3602 {
3603 tree decl;
3604
3605 /* Process all parameters of the function. */
3606 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3607 {
3608 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3609 HOST_WIDE_INT size_rtl;
3610
3611 instantiate_decl (DECL_RTL (decl), size, valid_only);
3612
3613 /* If the parameter was promoted, then the size of the incoming RTL
3614 mode may be larger than the declared type size. We must use the
3615 larger of the two sizes. */
3616 size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
3617 size = MAX (size_rtl, size);
3618 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3619 }
3620
3621 /* Now process all variables defined in the function or its subblocks. */
3622 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3623 }
3624
3625 /* Subroutine of instantiate_decls: Process all decls in the given
3626 BLOCK node and all its subblocks. */
3627
3628 static void
3629 instantiate_decls_1 (let, valid_only)
3630 tree let;
3631 int valid_only;
3632 {
3633 tree t;
3634
3635 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3636 if (DECL_RTL_SET_P (t))
3637 instantiate_decl (DECL_RTL (t),
3638 int_size_in_bytes (TREE_TYPE (t)),
3639 valid_only);
3640
3641 /* Process all subblocks. */
3642 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3643 instantiate_decls_1 (t, valid_only);
3644 }
3645
3646 /* Subroutine of the preceding procedures: Given RTL representing a
3647 decl and the size of the object, do any instantiation required.
3648
3649 If VALID_ONLY is non-zero, it means that the RTL should only be
3650 changed if the new address is valid. */
3651
3652 static void
3653 instantiate_decl (x, size, valid_only)
3654 rtx x;
3655 HOST_WIDE_INT size;
3656 int valid_only;
3657 {
3658 enum machine_mode mode;
3659 rtx addr;
3660
3661 /* If this is not a MEM, no need to do anything. Similarly if the
3662 address is a constant or a register that is not a virtual register. */
3663
3664 if (x == 0 || GET_CODE (x) != MEM)
3665 return;
3666
3667 addr = XEXP (x, 0);
3668 if (CONSTANT_P (addr)
3669 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3670 || (GET_CODE (addr) == REG
3671 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3672 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3673 return;
3674
3675 /* If we should only do this if the address is valid, copy the address.
3676 We need to do this so we can undo any changes that might make the
3677 address invalid. This copy is unfortunate, but probably can't be
3678 avoided. */
3679
3680 if (valid_only)
3681 addr = copy_rtx (addr);
3682
3683 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3684
3685 if (valid_only && size >= 0)
3686 {
3687 unsigned HOST_WIDE_INT decl_size = size;
3688
3689 /* Now verify that the resulting address is valid for every integer or
3690 floating-point mode up to and including SIZE bytes long. We do this
3691 since the object might be accessed in any mode and frame addresses
3692 are shared. */
3693
3694 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3695 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3696 mode = GET_MODE_WIDER_MODE (mode))
3697 if (! memory_address_p (mode, addr))
3698 return;
3699
3700 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3701 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3702 mode = GET_MODE_WIDER_MODE (mode))
3703 if (! memory_address_p (mode, addr))
3704 return;
3705 }
3706
3707 /* Put back the address now that we have updated it and we either know
3708 it is valid or we don't care whether it is valid. */
3709
3710 XEXP (x, 0) = addr;
3711 }
3712 \f
3713 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
3714 is a virtual register, return the equivalent hard register and set the
3715 offset indirectly through the pointer. Otherwise, return 0. */
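/* Editor's summary of the mapping performed below (offsets are the
   per-function values computed in instantiate_virtual_regs):

       virtual_incoming_args_rtx  -> arg_pointer_rtx,   in_arg_offset
       virtual_stack_vars_rtx     -> frame_pointer_rtx, var_offset
       virtual_stack_dynamic_rtx  -> stack_pointer_rtx, dynamic_offset
       virtual_outgoing_args_rtx  -> stack_pointer_rtx, out_arg_offset
       virtual_cfa_rtx            -> arg_pointer_rtx,   cfa_offset  */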
3716
3717 static rtx
3718 instantiate_new_reg (x, poffset)
3719 rtx x;
3720 HOST_WIDE_INT *poffset;
3721 {
3722 rtx new;
3723 HOST_WIDE_INT offset;
3724
3725 if (x == virtual_incoming_args_rtx)
3726 new = arg_pointer_rtx, offset = in_arg_offset;
3727 else if (x == virtual_stack_vars_rtx)
3728 new = frame_pointer_rtx, offset = var_offset;
3729 else if (x == virtual_stack_dynamic_rtx)
3730 new = stack_pointer_rtx, offset = dynamic_offset;
3731 else if (x == virtual_outgoing_args_rtx)
3732 new = stack_pointer_rtx, offset = out_arg_offset;
3733 else if (x == virtual_cfa_rtx)
3734 new = arg_pointer_rtx, offset = cfa_offset;
3735 else
3736 return 0;
3737
3738 *poffset = offset;
3739 return new;
3740 }
3741 \f
3742 /* Given a pointer to a piece of rtx and an optional pointer to the
3743 containing object, instantiate any virtual registers present in it.
3744
3745 If EXTRA_INSNS, we always do the replacement and generate
3746 any extra insns before OBJECT. If it is zero, we do nothing if the
3747 replacement is not valid.
3748
3749 Return 1 if we either had nothing to do or if we were able to do the
3750 needed replacement. Return 0 otherwise; we only return zero if
3751 EXTRA_INSNS is zero.
3752
3753 We first try some simple transformations to avoid the creation of extra
3754 pseudos. */
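/* Editor's sketch (assumed offsets, not from the original source): with
   var_offset == 16, a use such as

       (mem:SI (plus:SI (reg:SI virtual-stack-vars) (const_int 8)))

   is rewritten in place, when the result is a valid address, to

       (mem:SI (plus:SI (reg:SI frame-pointer) (const_int 24)))

   and only when validate_change rejects the result (and EXTRA_INSNS is
   nonzero) is the sum computed into a new pseudo emitted before OBJECT.  */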
3755
3756 static int
3757 instantiate_virtual_regs_1 (loc, object, extra_insns)
3758 rtx *loc;
3759 rtx object;
3760 int extra_insns;
3761 {
3762 rtx x;
3763 RTX_CODE code;
3764 rtx new = 0;
3765 HOST_WIDE_INT offset = 0;
3766 rtx temp;
3767 rtx seq;
3768 int i, j;
3769 const char *fmt;
3770
3771 /* Re-start here to avoid recursion in common cases. */
3772 restart:
3773
3774 x = *loc;
3775 if (x == 0)
3776 return 1;
3777
3778 code = GET_CODE (x);
3779
3780 /* Check for some special cases. */
3781 switch (code)
3782 {
3783 case CONST_INT:
3784 case CONST_DOUBLE:
3785 case CONST_VECTOR:
3786 case CONST:
3787 case SYMBOL_REF:
3788 case CODE_LABEL:
3789 case PC:
3790 case CC0:
3791 case ASM_INPUT:
3792 case ADDR_VEC:
3793 case ADDR_DIFF_VEC:
3794 case RETURN:
3795 return 1;
3796
3797 case SET:
3798 /* We are allowed to set the virtual registers. This means that
3799 the actual register should receive the source minus the
3800 appropriate offset. This is used, for example, in the handling
3801 of non-local gotos. */
3802 if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
3803 {
3804 rtx src = SET_SRC (x);
3805
3806 /* We are setting the register, not using it, so the relevant
3807 offset is the negative of the offset to use were we using
3808 the register. */
3809 offset = - offset;
3810 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3811
3812 /* The only valid sources here are PLUS or REG. Just do
3813 the simplest possible thing to handle them. */
3814 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3815 abort ();
3816
3817 start_sequence ();
3818 if (GET_CODE (src) != REG)
3819 temp = force_operand (src, NULL_RTX);
3820 else
3821 temp = src;
3822 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3823 seq = get_insns ();
3824 end_sequence ();
3825
3826 emit_insn_before (seq, object);
3827 SET_DEST (x) = new;
3828
3829 if (! validate_change (object, &SET_SRC (x), temp, 0)
3830 || ! extra_insns)
3831 abort ();
3832
3833 return 1;
3834 }
3835
3836 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3837 loc = &SET_SRC (x);
3838 goto restart;
3839
3840 case PLUS:
3841 /* Handle special case of virtual register plus constant. */
3842 if (CONSTANT_P (XEXP (x, 1)))
3843 {
3844 rtx old, new_offset;
3845
3846 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3847 if (GET_CODE (XEXP (x, 0)) == PLUS)
3848 {
3849 if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
3850 {
3851 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3852 extra_insns);
3853 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3854 }
3855 else
3856 {
3857 loc = &XEXP (x, 0);
3858 goto restart;
3859 }
3860 }
3861
3862 #ifdef POINTERS_EXTEND_UNSIGNED
3863 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
3864 we can commute the PLUS and SUBREG because pointers into the
3865 frame are well-behaved. */
3866 else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
3867 && GET_CODE (XEXP (x, 1)) == CONST_INT
3868 && 0 != (new
3869 = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
3870 &offset))
3871 && validate_change (object, loc,
3872 plus_constant (gen_lowpart (ptr_mode,
3873 new),
3874 offset
3875 + INTVAL (XEXP (x, 1))),
3876 0))
3877 return 1;
3878 #endif
3879 else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
3880 {
3881 /* We know the second operand is a constant. Unless the
3882 first operand is a REG (which has already been checked),
3883 it needs to be checked. */
3884 if (GET_CODE (XEXP (x, 0)) != REG)
3885 {
3886 loc = &XEXP (x, 0);
3887 goto restart;
3888 }
3889 return 1;
3890 }
3891
3892 new_offset = plus_constant (XEXP (x, 1), offset);
3893
3894 /* If the new constant is zero, try to replace the sum with just
3895 the register. */
3896 if (new_offset == const0_rtx
3897 && validate_change (object, loc, new, 0))
3898 return 1;
3899
3900 /* Next try to replace the register and the new offset.
3901 There are two changes to validate here, and we can't assume that
3902 when the old offset equals the new one, just changing the register
3903 will yield a valid insn. In the interests of a little efficiency,
3904 however, we only call validate_change once (we don't queue up the
3905 changes and then call apply_change_group). */
3906
3907 old = XEXP (x, 0);
3908 if (offset == 0
3909 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3910 : (XEXP (x, 0) = new,
3911 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3912 {
3913 if (! extra_insns)
3914 {
3915 XEXP (x, 0) = old;
3916 return 0;
3917 }
3918
3919 /* Otherwise copy the new constant into a register and replace
3920 the constant with that register. */
3921 temp = gen_reg_rtx (Pmode);
3922 XEXP (x, 0) = new;
3923 if (validate_change (object, &XEXP (x, 1), temp, 0))
3924 emit_insn_before (gen_move_insn (temp, new_offset), object);
3925 else
3926 {
3927 /* If that didn't work, replace this expression with a
3928 register containing the sum. */
3929
3930 XEXP (x, 0) = old;
3931 new = gen_rtx_PLUS (Pmode, new, new_offset);
3932
3933 start_sequence ();
3934 temp = force_operand (new, NULL_RTX);
3935 seq = get_insns ();
3936 end_sequence ();
3937
3938 emit_insn_before (seq, object);
3939 if (! validate_change (object, loc, temp, 0)
3940 && ! validate_replace_rtx (x, temp, object))
3941 abort ();
3942 }
3943 }
3944
3945 return 1;
3946 }
3947
3948 /* Fall through to generic two-operand expression case. */
3949 case EXPR_LIST:
3950 case CALL:
3951 case COMPARE:
3952 case MINUS:
3953 case MULT:
3954 case DIV: case UDIV:
3955 case MOD: case UMOD:
3956 case AND: case IOR: case XOR:
3957 case ROTATERT: case ROTATE:
3958 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3959 case NE: case EQ:
3960 case GE: case GT: case GEU: case GTU:
3961 case LE: case LT: case LEU: case LTU:
3962 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3963 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3964 loc = &XEXP (x, 0);
3965 goto restart;
3966
3967 case MEM:
3968 /* Most cases of MEM that convert to valid addresses have already been
3969 handled by our scan of decls. The only special handling we
3970 need here is to make a copy of the rtx to ensure it isn't being
3971 shared if we have to change it to a pseudo.
3972
3973 If the rtx is a simple reference to an address via a virtual register,
3974 it can potentially be shared. In such cases, first try to make it
3975 a valid address, which can also be shared. Otherwise, copy it and
3976 proceed normally.
3977
3978 First check for common cases that need no processing. These are
3979 usually due to instantiation already being done on a previous instance
3980 of a shared rtx. */
3981
3982 temp = XEXP (x, 0);
3983 if (CONSTANT_ADDRESS_P (temp)
3984 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3985 || temp == arg_pointer_rtx
3986 #endif
3987 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3988 || temp == hard_frame_pointer_rtx
3989 #endif
3990 || temp == frame_pointer_rtx)
3991 return 1;
3992
3993 if (GET_CODE (temp) == PLUS
3994 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3995 && (XEXP (temp, 0) == frame_pointer_rtx
3996 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3997 || XEXP (temp, 0) == hard_frame_pointer_rtx
3998 #endif
3999 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4000 || XEXP (temp, 0) == arg_pointer_rtx
4001 #endif
4002 ))
4003 return 1;
4004
4005 if (temp == virtual_stack_vars_rtx
4006 || temp == virtual_incoming_args_rtx
4007 || (GET_CODE (temp) == PLUS
4008 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
4009 && (XEXP (temp, 0) == virtual_stack_vars_rtx
4010 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
4011 {
4012 /* This MEM may be shared. If the substitution can be done without
4013 the need to generate new pseudos, we want to do it in place
4014 so all copies of the shared rtx benefit. The call below will
4015 only make substitutions if the resulting address is still
4016 valid.
4017
4018 Note that we cannot pass X as the object in the recursive call
4019 since the insn being processed may not allow all valid
4020 addresses. However, if we were not passed an object, we can
4021 only modify X without copying it if X will have a valid
4022 address.
4023
4024 ??? Also note that this can still lose if OBJECT is an insn that
4025 has fewer restrictions on an address than some other insn.
4026 In that case, we will modify the shared address. This case
4027 doesn't seem very likely, though. One case where this could
4028 happen is in the case of a USE or CLOBBER reference, but we
4029 take care of that below. */
4030
4031 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
4032 object ? object : x, 0))
4033 return 1;
4034
4035 /* Otherwise make a copy and process that copy. We copy the entire
4036 RTL expression since it might be a PLUS which could also be
4037 shared. */
4038 *loc = x = copy_rtx (x);
4039 }
4040
4041 /* Fall through to generic unary operation case. */
4042 case PREFETCH:
4043 case SUBREG:
4044 case STRICT_LOW_PART:
4045 case NEG: case NOT:
4046 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
4047 case SIGN_EXTEND: case ZERO_EXTEND:
4048 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
4049 case FLOAT: case FIX:
4050 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
4051 case ABS:
4052 case SQRT:
4053 case FFS:
4054 /* These cases either have just one operand or we know that we need not
4055 check the rest of the operands. */
4056 loc = &XEXP (x, 0);
4057 goto restart;
4058
4059 case USE:
4060 case CLOBBER:
4061 /* If the operand is a MEM, see if the change yields a valid MEM. If not,
4062 go ahead and make the invalid one, but do it to a copy. For a REG,
4063 just make the recursive call, since there's no chance of a problem. */
4064
4065 if ((GET_CODE (XEXP (x, 0)) == MEM
4066 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
4067 0))
4068 || (GET_CODE (XEXP (x, 0)) == REG
4069 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4070 return 1;
4071
4072 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
4073 loc = &XEXP (x, 0);
4074 goto restart;
4075
4076 case REG:
4077 /* Try to replace with a PLUS. If that doesn't work, compute the sum
4078 in front of this insn and substitute the temporary. */
4079 if ((new = instantiate_new_reg (x, &offset)) != 0)
4080 {
4081 temp = plus_constant (new, offset);
4082 if (!validate_change (object, loc, temp, 0))
4083 {
4084 if (! extra_insns)
4085 return 0;
4086
4087 start_sequence ();
4088 temp = force_operand (temp, NULL_RTX);
4089 seq = get_insns ();
4090 end_sequence ();
4091
4092 emit_insn_before (seq, object);
4093 if (! validate_change (object, loc, temp, 0)
4094 && ! validate_replace_rtx (x, temp, object))
4095 abort ();
4096 }
4097 }
4098
4099 return 1;
4100
4101 case ADDRESSOF:
4102 if (GET_CODE (XEXP (x, 0)) == REG)
4103 return 1;
4104
4105 else if (GET_CODE (XEXP (x, 0)) == MEM)
4106 {
4107 /* If we have a (addressof (mem ..)), do any instantiation inside
4108 since we know we'll be making the inside valid when we finally
4109 remove the ADDRESSOF. */
4110 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4111 return 1;
4112 }
4113 break;
4114
4115 default:
4116 break;
4117 }
4118
4119 /* Scan all subexpressions. */
4120 fmt = GET_RTX_FORMAT (code);
4121 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4122 if (*fmt == 'e')
4123 {
4124 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4125 return 0;
4126 }
4127 else if (*fmt == 'E')
4128 for (j = 0; j < XVECLEN (x, i); j++)
4129 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
4130 extra_insns))
4131 return 0;
4132
4133 return 1;
4134 }
4135 \f
4136 /* Optimization: assuming this function does not receive nonlocal gotos,
4137 delete the handlers for such, as well as the insns to establish
4138 and disestablish them. */
4139
4140 static void
4141 delete_handlers ()
4142 {
4143 rtx insn;
4144 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4145 {
4146 /* Delete the handler by turning off the flag that would
4147 prevent jump_optimize from deleting it.
4148 Also permit deletion of the nonlocal labels themselves
4149 if nothing local refers to them. */
4150 if (GET_CODE (insn) == CODE_LABEL)
4151 {
4152 tree t, last_t;
4153
4154 LABEL_PRESERVE_P (insn) = 0;
4155
4156 /* Remove it from the nonlocal_label list, to avoid confusing
4157 flow. */
4158 for (t = nonlocal_labels, last_t = 0; t;
4159 last_t = t, t = TREE_CHAIN (t))
4160 if (DECL_RTL (TREE_VALUE (t)) == insn)
4161 break;
4162 if (t)
4163 {
4164 if (! last_t)
4165 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4166 else
4167 TREE_CHAIN (last_t) = TREE_CHAIN (t);
4168 }
4169 }
4170 if (GET_CODE (insn) == INSN)
4171 {
4172 int can_delete = 0;
4173 rtx t;
4174 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4175 if (reg_mentioned_p (t, PATTERN (insn)))
4176 {
4177 can_delete = 1;
4178 break;
4179 }
4180 if (can_delete
4181 || (nonlocal_goto_stack_level != 0
4182 && reg_mentioned_p (nonlocal_goto_stack_level,
4183 PATTERN (insn))))
4184 delete_related_insns (insn);
4185 }
4186 }
4187 }
4188 \f
4189 int
4190 max_parm_reg_num ()
4191 {
4192 return max_parm_reg;
4193 }
4194
4195 /* Return the first insn following those generated by `assign_parms'. */
4196
4197 rtx
4198 get_first_nonparm_insn ()
4199 {
4200 if (last_parm_insn)
4201 return NEXT_INSN (last_parm_insn);
4202 return get_insns ();
4203 }
4204
4205 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
4206 Crash if there is none. */
4207
4208 rtx
4209 get_first_block_beg ()
4210 {
4211 rtx searcher;
4212 rtx insn = get_first_nonparm_insn ();
4213
4214 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
4215 if (GET_CODE (searcher) == NOTE
4216 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
4217 return searcher;
4218
4219 abort (); /* Invalid call to this function. (See comments above.) */
4220 return NULL_RTX;
4221 }
4222
4223 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4224 This means a type for which function calls must pass an address to the
4225 function or get an address back from the function.
4226 EXP may be a type node or an expression (whose type is tested). */
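/* Editor's intuition (illustrative assumptions only): on a typical target a
   small scalar such as an `int' is returned in a call-clobbered register and
   this predicate returns 0, whereas a TREE_ADDRESSABLE type or one for which
   RETURN_IN_MEMORY holds (commonly a large struct) makes it return 1, so
   callers pass a hidden address for the result.  Which types fall in each
   class is target-dependent.  */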
4227
4228 int
4229 aggregate_value_p (exp)
4230 tree exp;
4231 {
4232 int i, regno, nregs;
4233 rtx reg;
4234
4235 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
4236
4237 if (TREE_CODE (type) == VOID_TYPE)
4238 return 0;
4239 if (RETURN_IN_MEMORY (type))
4240 return 1;
4241 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4242 and thus can't be returned in registers. */
4243 if (TREE_ADDRESSABLE (type))
4244 return 1;
4245 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4246 return 1;
4247 /* Make sure we have suitable call-clobbered regs to return
4248 the value in; if not, we must return it in memory. */
4249 reg = hard_function_value (type, 0, 0);
4250
4251 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4252 it is OK. */
4253 if (GET_CODE (reg) != REG)
4254 return 0;
4255
4256 regno = REGNO (reg);
4257 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4258 for (i = 0; i < nregs; i++)
4259 if (! call_used_regs[regno + i])
4260 return 1;
4261 return 0;
4262 }
4263 \f
4264 /* Assign RTL expressions to the function's parameters.
4265 This may involve copying them into registers and using
4266 those registers as the RTL for them. */
4267
4268 void
4269 assign_parms (fndecl)
4270 tree fndecl;
4271 {
4272 tree parm;
4273 rtx entry_parm = 0;
4274 rtx stack_parm = 0;
4275 CUMULATIVE_ARGS args_so_far;
4276 enum machine_mode promoted_mode, passed_mode;
4277 enum machine_mode nominal_mode, promoted_nominal_mode;
4278 int unsignedp;
4279 /* Total space needed so far for args on the stack,
4280 given as a constant and a tree-expression. */
4281 struct args_size stack_args_size;
4282 tree fntype = TREE_TYPE (fndecl);
4283 tree fnargs = DECL_ARGUMENTS (fndecl);
4284 /* This is used for the arg pointer when referring to stack args. */
4285 rtx internal_arg_pointer;
4286 /* This is a dummy PARM_DECL that we used for the function result if
4287 the function returns a structure. */
4288 tree function_result_decl = 0;
4289 #ifdef SETUP_INCOMING_VARARGS
4290 int varargs_setup = 0;
4291 #endif
4292 rtx conversion_insns = 0;
4293 struct args_size alignment_pad;
4294
4295 /* Nonzero if the last arg is named `__builtin_va_alist',
4296 which is used on some machines for old-fashioned non-ANSI varargs.h;
4297 this should be stuck onto the stack as if it had arrived there. */
4298 int hide_last_arg
4299 = (current_function_varargs
4300 && fnargs
4301 && (parm = tree_last (fnargs)) != 0
4302 && DECL_NAME (parm)
4303 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4304 "__builtin_va_alist")));
4305
4306 /* Nonzero if function takes extra anonymous args.
4307 This means the last named arg must be on the stack
4308 right before the anonymous ones. */
4309 int stdarg
4310 = (TYPE_ARG_TYPES (fntype) != 0
4311 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4312 != void_type_node));
4313
4314 current_function_stdarg = stdarg;
4315
4316 /* If the reg that the virtual arg pointer will be translated into is
4317 not a fixed reg or is the stack pointer, make a copy of the virtual
4318 arg pointer, and address parms via the copy. The frame pointer is
4319 considered fixed even though it is not marked as such.
4320
4321 The second time through, simply use ap to avoid generating rtx. */
4322
4323 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4324 || ! (fixed_regs[ARG_POINTER_REGNUM]
4325 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4326 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4327 else
4328 internal_arg_pointer = virtual_incoming_args_rtx;
4329 current_function_internal_arg_pointer = internal_arg_pointer;
4330
4331 stack_args_size.constant = 0;
4332 stack_args_size.var = 0;
4333
4334 /* If struct value address is treated as the first argument, make it so. */
4335 if (aggregate_value_p (DECL_RESULT (fndecl))
4336 && ! current_function_returns_pcc_struct
4337 && struct_value_incoming_rtx == 0)
4338 {
4339 tree type = build_pointer_type (TREE_TYPE (fntype));
4340
4341 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4342
4343 DECL_ARG_TYPE (function_result_decl) = type;
4344 TREE_CHAIN (function_result_decl) = fnargs;
4345 fnargs = function_result_decl;
4346 }
4347
4348 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4349 parm_reg_stack_loc = (rtx *) ggc_alloc_cleared (max_parm_reg * sizeof (rtx));
4350
4351 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4352 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4353 #else
4354 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4355 #endif
4356
4357 /* We haven't yet found an argument that we must push and pretend the
4358 caller did. */
4359 current_function_pretend_args_size = 0;
4360
4361 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4362 {
4363 struct args_size stack_offset;
4364 struct args_size arg_size;
4365 int passed_pointer = 0;
4366 int did_conversion = 0;
4367 tree passed_type = DECL_ARG_TYPE (parm);
4368 tree nominal_type = TREE_TYPE (parm);
4369 int pretend_named;
4370 int last_named = 0, named_arg;
4371
4372 /* Set LAST_NAMED if this is last named arg before last
4373 anonymous args. */
4374 if (stdarg || current_function_varargs)
4375 {
4376 tree tem;
4377
4378 for (tem = TREE_CHAIN (parm); tem; tem = TREE_CHAIN (tem))
4379 if (DECL_NAME (tem))
4380 break;
4381
4382 if (tem == 0)
4383 last_named = 1;
4384 }
4385 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4386 most machines, if this is a varargs/stdarg function, then we treat
4387 the last named arg as if it were anonymous too. */
4388 named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4389
4390 if (TREE_TYPE (parm) == error_mark_node
4391 /* This can happen after weird syntax errors
4392 or if an enum type is defined among the parms. */
4393 || TREE_CODE (parm) != PARM_DECL
4394 || passed_type == NULL)
4395 {
4396 SET_DECL_RTL (parm, gen_rtx_MEM (BLKmode, const0_rtx));
4397 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4398 TREE_USED (parm) = 1;
4399 continue;
4400 }
4401
4402 /* For a varargs.h function, save info about regs and stack space
4403 used by the individual args, not including the va_alist arg. */
4404 if (hide_last_arg && last_named)
4405 current_function_args_info = args_so_far;
4406
4407 /* Find mode of arg as it is passed, and mode of arg
4408 as it should be during execution of this function. */
4409 passed_mode = TYPE_MODE (passed_type);
4410 nominal_mode = TYPE_MODE (nominal_type);
4411
4412 /* If the parm's mode is VOID, its value doesn't matter, so avoid
4413 the usual things like emit_move_insn that could crash. */
4414 if (nominal_mode == VOIDmode)
4415 {
4416 SET_DECL_RTL (parm, const0_rtx);
4417 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4418 continue;
4419 }
4420
4421 /* If the parm is to be passed as a transparent union, use the
4422 type of the first field for the tests below. We have already
4423 verified that the modes are the same. */
4424 if (DECL_TRANSPARENT_UNION (parm)
4425 || (TREE_CODE (passed_type) == UNION_TYPE
4426 && TYPE_TRANSPARENT_UNION (passed_type)))
4427 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4428
4429 /* See if this arg was passed by invisible reference. It is if
4430 it is an object whose size depends on the contents of the
4431 object itself or if the machine requires these objects be passed
4432 that way. */
4433
4434 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4435 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4436 || TREE_ADDRESSABLE (passed_type)
4437 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4438 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4439 passed_type, named_arg)
4440 #endif
4441 )
4442 {
4443 passed_type = nominal_type = build_pointer_type (passed_type);
4444 passed_pointer = 1;
4445 passed_mode = nominal_mode = Pmode;
4446 }
4447
4448 promoted_mode = passed_mode;
4449
4450 #ifdef PROMOTE_FUNCTION_ARGS
4451 /* Compute the mode to which the arg is actually extended. */
4452 unsignedp = TREE_UNSIGNED (passed_type);
4453 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4454 #endif
4455
4456 /* Let machine desc say which reg (if any) the parm arrives in.
4457 0 means it arrives on the stack. */
4458 #ifdef FUNCTION_INCOMING_ARG
4459 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4460 passed_type, named_arg);
4461 #else
4462 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4463 passed_type, named_arg);
4464 #endif
4465
4466 if (entry_parm == 0)
4467 promoted_mode = passed_mode;
4468
4469 #ifdef SETUP_INCOMING_VARARGS
4470 /* If this is the last named parameter, do any required setup for
4471 varargs or stdargs. We need to know about the case of this being an
4472 addressable type, in which case we skip the registers it
4473 would have arrived in.
4474
4475 For stdargs, LAST_NAMED will be set for two parameters, the one that
4476 is actually the last named, and the dummy parameter. We only
4477 want to do this action once.
4478
4479 Also, indicate when RTL generation is to be suppressed. */
4480 if (last_named && !varargs_setup)
4481 {
4482 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4483 current_function_pretend_args_size, 0);
4484 varargs_setup = 1;
4485 }
4486 #endif
4487
4488 /* Determine parm's home in the stack,
4489 in case it arrives in the stack or we should pretend it did.
4490
4491 Compute the stack position and rtx where the argument arrives
4492 and its size.
4493
4494 There is one complexity here: If this was a parameter that would
4495 have been passed in registers, but wasn't only because it is
4496 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4497 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4498 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4499 0 as it was the previous time. */
4500
4501 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4502 locate_and_pad_parm (promoted_mode, passed_type,
4503 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4504 1,
4505 #else
4506 #ifdef FUNCTION_INCOMING_ARG
4507 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4508 passed_type,
4509 pretend_named) != 0,
4510 #else
4511 FUNCTION_ARG (args_so_far, promoted_mode,
4512 passed_type,
4513 pretend_named) != 0,
4514 #endif
4515 #endif
4516 fndecl, &stack_args_size, &stack_offset, &arg_size,
4517 &alignment_pad);
4518
4519 {
4520 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4521
4522 if (offset_rtx == const0_rtx)
4523 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4524 else
4525 stack_parm = gen_rtx_MEM (promoted_mode,
4526 gen_rtx_PLUS (Pmode,
4527 internal_arg_pointer,
4528 offset_rtx));
4529
4530 set_mem_attributes (stack_parm, parm, 1);
4531 }
4532
4533 /* If this parameter was passed both in registers and in the stack,
4534 use the copy on the stack. */
4535 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4536 entry_parm = 0;
4537
4538 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4539 /* If this parm was passed part in regs and part in memory,
4540 pretend it arrived entirely in memory
4541 by pushing the register-part onto the stack.
4542
4543 In the special case of a DImode or DFmode that is split,
4544 we could put it together in a pseudoreg directly,
4545 but for now that's not worth bothering with. */
4546
4547 if (entry_parm)
4548 {
4549 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4550 passed_type, named_arg);
4551
4552 if (nregs > 0)
4553 {
4554 current_function_pretend_args_size
4555 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4556 / (PARM_BOUNDARY / BITS_PER_UNIT)
4557 * (PARM_BOUNDARY / BITS_PER_UNIT));
4558
4559 /* Handle calls that pass values in multiple non-contiguous
4560 locations. The Irix 6 ABI has examples of this. */
4561 if (GET_CODE (entry_parm) == PARALLEL)
4562 emit_group_store (validize_mem (stack_parm), entry_parm,
4563 int_size_in_bytes (TREE_TYPE (parm)));
4564
4565 else
4566 move_block_from_reg (REGNO (entry_parm),
4567 validize_mem (stack_parm), nregs,
4568 int_size_in_bytes (TREE_TYPE (parm)));
4569
4570 entry_parm = stack_parm;
4571 }
4572 }
4573 #endif
4574
4575 /* If we didn't decide this parm came in a register,
4576 by default it came on the stack. */
4577 if (entry_parm == 0)
4578 entry_parm = stack_parm;
4579
4580 /* Record permanently how this parm was passed. */
4581 DECL_INCOMING_RTL (parm) = entry_parm;
4582
4583 /* If there is actually space on the stack for this parm,
4584 count it in stack_args_size; otherwise set stack_parm to 0
4585 to indicate there is no preallocated stack slot for the parm. */
4586
4587 if (entry_parm == stack_parm
4588 || (GET_CODE (entry_parm) == PARALLEL
4589 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4590 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4591 /* On some machines, even if a parm value arrives in a register
4592 there is still an (uninitialized) stack slot allocated for it.
4593
4594 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4595 whether this parameter already has a stack slot allocated,
4596 because an arg block exists only if current_function_args_size
4597 is larger than some threshold, and we haven't calculated that
4598 yet. So, for now, we just assume that stack slots never exist
4599 in this case. */
4600 || REG_PARM_STACK_SPACE (fndecl) > 0
4601 #endif
4602 )
4603 {
4604 stack_args_size.constant += arg_size.constant;
4605 if (arg_size.var)
4606 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4607 }
4608 else
4609 /* No stack slot was pushed for this parm. */
4610 stack_parm = 0;
4611
4612 /* Update info on where next arg arrives in registers. */
4613
4614 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4615 passed_type, named_arg);
4616
4617 /* If we can't trust the parm stack slot to be aligned enough
4618 for its ultimate type, don't use that slot after entry.
4619 We'll make another stack slot, if we need one. */
4620 {
4621 unsigned int thisparm_boundary
4622 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4623
4624 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4625 stack_parm = 0;
4626 }
4627
4628 /* If parm was passed in memory, and we need to convert it on entry,
4629 don't store it back in that same slot. */
4630 if (entry_parm != 0
4631 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4632 stack_parm = 0;
4633
4634 /* When an argument is passed in multiple locations, we can't
4635 make use of this information, but we can save some copying if
4636 the whole argument is passed in a single register. */
4637 if (GET_CODE (entry_parm) == PARALLEL
4638 && nominal_mode != BLKmode && passed_mode != BLKmode)
4639 {
4640 int i, len = XVECLEN (entry_parm, 0);
4641
4642 for (i = 0; i < len; i++)
4643 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
4644 && GET_CODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) == REG
4645 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
4646 == passed_mode)
4647 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
4648 {
4649 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
4650 DECL_INCOMING_RTL (parm) = entry_parm;
4651 break;
4652 }
4653 }
4654
4655 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4656 in the mode in which it arrives.
4657 STACK_PARM is an RTX for a stack slot where the parameter can live
4658 during the function (in case we want to put it there).
4659 STACK_PARM is 0 if no stack slot was pushed for it.
4660
4661 Now output code if necessary to convert ENTRY_PARM to
4662 the type in which this function declares it,
4663 and store that result in an appropriate place,
4664 which may be a pseudo reg, may be STACK_PARM,
4665 or may be a local stack slot if STACK_PARM is 0.
4666
4667 Set DECL_RTL to that place. */
4668
4669 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4670 {
4671 /* If a BLKmode arrives in registers, copy it to a stack slot.
4672 Handle calls that pass values in multiple non-contiguous
4673 locations. The Irix 6 ABI has examples of this. */
4674 if (GET_CODE (entry_parm) == REG
4675 || GET_CODE (entry_parm) == PARALLEL)
4676 {
4677 int size_stored
4678 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4679 UNITS_PER_WORD);
4680
4681 /* Note that we will be storing an integral number of words.
4682 So we have to be careful to ensure that we allocate an
4683 integral number of words. We do this below in the
4684 assign_stack_local if space was not allocated in the argument
4685 list. If it was, this will not work if PARM_BOUNDARY is not
4686 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4687 if it becomes a problem. */
4688
4689 if (stack_parm == 0)
4690 {
4691 stack_parm
4692 = assign_stack_local (GET_MODE (entry_parm),
4693 size_stored, 0);
4694 set_mem_attributes (stack_parm, parm, 1);
4695 }
4696
4697 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4698 abort ();
4699
4700 /* Handle calls that pass values in multiple non-contiguous
4701 locations. The Irix 6 ABI has examples of this. */
4702 if (GET_CODE (entry_parm) == PARALLEL)
4703 emit_group_store (validize_mem (stack_parm), entry_parm,
4704 int_size_in_bytes (TREE_TYPE (parm)));
4705 else
4706 move_block_from_reg (REGNO (entry_parm),
4707 validize_mem (stack_parm),
4708 size_stored / UNITS_PER_WORD,
4709 int_size_in_bytes (TREE_TYPE (parm)));
4710 }
4711 SET_DECL_RTL (parm, stack_parm);
4712 }
4713 else if (! ((! optimize
4714 && ! DECL_REGISTER (parm))
4715 || TREE_SIDE_EFFECTS (parm)
4716 /* If -ffloat-store specified, don't put explicit
4717 float variables into registers. */
4718 || (flag_float_store
4719 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4720 /* Always assign pseudo to structure return or item passed
4721 by invisible reference. */
4722 || passed_pointer || parm == function_result_decl)
4723 {
4724 /* Store the parm in a pseudoregister during the function, but we
4725 may need to do it in a wider mode. */
4726
4727 rtx parmreg;
4728 unsigned int regno, regnoi = 0, regnor = 0;
4729
4730 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4731
4732 promoted_nominal_mode
4733 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4734
4735 parmreg = gen_reg_rtx (promoted_nominal_mode);
4736 mark_user_reg (parmreg);
4737
4738 /* If this was an item that we received a pointer to, set DECL_RTL
4739 appropriately. */
4740 if (passed_pointer)
4741 {
4742 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)),
4743 parmreg);
4744 set_mem_attributes (x, parm, 1);
4745 SET_DECL_RTL (parm, x);
4746 }
4747 else
4748 {
4749 SET_DECL_RTL (parm, parmreg);
4750 maybe_set_unchanging (DECL_RTL (parm), parm);
4751 }
4752
4753 /* Copy the value into the register. */
4754 if (nominal_mode != passed_mode
4755 || promoted_nominal_mode != promoted_mode)
4756 {
4757 int save_tree_used;
4758 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4759 mode, by the caller. We now have to convert it to
4760 NOMINAL_MODE, if different. However, PARMREG may be in
4761 a different mode than NOMINAL_MODE if it is being stored
4762 promoted.
4763
4764 If ENTRY_PARM is a hard register, it might be in a register
4765 not valid for operating in its mode (e.g., an odd-numbered
4766 register for a DFmode). In that case, moves are the only
4767 thing valid, so we can't do a convert from there. This
4768 occurs when the calling sequence allows such misaligned
4769 usages.
4770
4771 In addition, the conversion may involve a call, which could
4772 clobber parameters which haven't been copied to pseudo
4773 registers yet. Therefore, we must first copy the parm to
4774 a pseudo reg here, and save the conversion until after all
4775 parameters have been moved. */
4776
4777 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4778
4779 emit_move_insn (tempreg, validize_mem (entry_parm));
4780
4781 push_to_sequence (conversion_insns);
4782 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4783
4784 if (GET_CODE (tempreg) == SUBREG
4785 && GET_MODE (tempreg) == nominal_mode
4786 && GET_CODE (SUBREG_REG (tempreg)) == REG
4787 && nominal_mode == passed_mode
4788 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (entry_parm)
4789 && GET_MODE_SIZE (GET_MODE (tempreg))
4790 < GET_MODE_SIZE (GET_MODE (entry_parm)))
4791 {
4792 /* The argument is already sign/zero extended, so note it
4793 into the subreg. */
4794 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
4795 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
4796 }
4797
4798 /* TREE_USED gets set erroneously during expand_assignment. */
4799 save_tree_used = TREE_USED (parm);
4800 expand_assignment (parm,
4801 make_tree (nominal_type, tempreg), 0, 0);
4802 TREE_USED (parm) = save_tree_used;
4803 conversion_insns = get_insns ();
4804 did_conversion = 1;
4805 end_sequence ();
4806 }
4807 else
4808 emit_move_insn (parmreg, validize_mem (entry_parm));
4809
4810 /* If we were passed a pointer but the actual value
4811 can safely live in a register, put it in one. */
4812 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4813 /* If by-reference argument was promoted, demote it. */
4814 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
4815 || ! ((! optimize
4816 && ! DECL_REGISTER (parm))
4817 || TREE_SIDE_EFFECTS (parm)
4818 /* If -ffloat-store specified, don't put explicit
4819 float variables into registers. */
4820 || (flag_float_store
4821 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))))
4822 {
4823 /* We can't use nominal_mode, because it will have been set to
4824 Pmode above. We must use the actual mode of the parm. */
4825 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4826 mark_user_reg (parmreg);
4827 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
4828 {
4829 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
4830 int unsigned_p = TREE_UNSIGNED (TREE_TYPE (parm));
4831 push_to_sequence (conversion_insns);
4832 emit_move_insn (tempreg, DECL_RTL (parm));
4833 SET_DECL_RTL (parm,
4834 convert_to_mode (GET_MODE (parmreg),
4835 tempreg,
4836 unsigned_p));
4837 emit_move_insn (parmreg, DECL_RTL (parm));
4838 conversion_insns = get_insns ();
4839 did_conversion = 1;
4840 end_sequence ();
4841 }
4842 else
4843 emit_move_insn (parmreg, DECL_RTL (parm));
4844 SET_DECL_RTL (parm, parmreg);
4845 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4846 now the parm. */
4847 stack_parm = 0;
4848 }
4849 #ifdef FUNCTION_ARG_CALLEE_COPIES
4850 /* If we are passed an arg by reference and it is our responsibility
4851 to make a copy, do it now.
4852 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4853 original argument, so we must recreate them in the call to
4854 FUNCTION_ARG_CALLEE_COPIES. */
4855 /* ??? Later add code to handle the case that if the argument isn't
4856 modified, don't do the copy. */
4857
4858 else if (passed_pointer
4859 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4860 TYPE_MODE (DECL_ARG_TYPE (parm)),
4861 DECL_ARG_TYPE (parm),
4862 named_arg)
4863 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4864 {
4865 rtx copy;
4866 tree type = DECL_ARG_TYPE (parm);
4867
4868 /* This sequence may involve a library call perhaps clobbering
4869 registers that haven't been copied to pseudos yet. */
4870
4871 push_to_sequence (conversion_insns);
4872
4873 if (!COMPLETE_TYPE_P (type)
4874 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4875 /* This is a variable sized object. */
4876 copy = gen_rtx_MEM (BLKmode,
4877 allocate_dynamic_stack_space
4878 (expr_size (parm), NULL_RTX,
4879 TYPE_ALIGN (type)));
4880 else
4881 copy = assign_stack_temp (TYPE_MODE (type),
4882 int_size_in_bytes (type), 1);
4883 set_mem_attributes (copy, parm, 1);
4884
4885 store_expr (parm, copy, 0);
4886 emit_move_insn (parmreg, XEXP (copy, 0));
4887 conversion_insns = get_insns ();
4888 did_conversion = 1;
4889 end_sequence ();
4890 }
4891 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4892
4893 /* In any case, record the parm's desired stack location
4894 in case we later discover it must live in the stack.
4895
4896 If it is a COMPLEX value, store the stack location for both
4897 halves. */
4898
4899 if (GET_CODE (parmreg) == CONCAT)
4900 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4901 else
4902 regno = REGNO (parmreg);
4903
4904 if (regno >= max_parm_reg)
4905 {
4906 rtx *new;
4907 int old_max_parm_reg = max_parm_reg;
4908
4909 /* It's slow to expand this one register at a time,
4910 but it's also rare and we need max_parm_reg to be
4911 precisely correct. */
4912 max_parm_reg = regno + 1;
4913 new = (rtx *) ggc_realloc (parm_reg_stack_loc,
4914 max_parm_reg * sizeof (rtx));
4915 memset ((char *) (new + old_max_parm_reg), 0,
4916 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4917 parm_reg_stack_loc = new;
4918 }
4919
4920 if (GET_CODE (parmreg) == CONCAT)
4921 {
4922 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4923
4924 regnor = REGNO (gen_realpart (submode, parmreg));
4925 regnoi = REGNO (gen_imagpart (submode, parmreg));
4926
4927 if (stack_parm != 0)
4928 {
4929 parm_reg_stack_loc[regnor]
4930 = gen_realpart (submode, stack_parm);
4931 parm_reg_stack_loc[regnoi]
4932 = gen_imagpart (submode, stack_parm);
4933 }
4934 else
4935 {
4936 parm_reg_stack_loc[regnor] = 0;
4937 parm_reg_stack_loc[regnoi] = 0;
4938 }
4939 }
4940 else
4941 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4942
4943 /* Mark the register as eliminable if we did no conversion
4944 and it was copied from memory at a fixed offset,
4945 and the arg pointer was not copied to a pseudo-reg.
4946 If the arg pointer is a pseudo reg or the offset formed
4947 an invalid address, such memory-equivalences
4948 as we make here would screw up life analysis for it. */
4949 if (nominal_mode == passed_mode
4950 && ! did_conversion
4951 && stack_parm != 0
4952 && GET_CODE (stack_parm) == MEM
4953 && stack_offset.var == 0
4954 && reg_mentioned_p (virtual_incoming_args_rtx,
4955 XEXP (stack_parm, 0)))
4956 {
4957 rtx linsn = get_last_insn ();
4958 rtx sinsn, set;
4959
4960 /* Mark complex types separately. */
4961 if (GET_CODE (parmreg) == CONCAT)
4962 /* Scan backwards for the set of the real and
4963 imaginary parts. */
4964 for (sinsn = linsn; sinsn != 0;
4965 sinsn = prev_nonnote_insn (sinsn))
4966 {
4967 set = single_set (sinsn);
4968 if (set != 0
4969 && SET_DEST (set) == regno_reg_rtx [regnoi])
4970 REG_NOTES (sinsn)
4971 = gen_rtx_EXPR_LIST (REG_EQUIV,
4972 parm_reg_stack_loc[regnoi],
4973 REG_NOTES (sinsn));
4974 else if (set != 0
4975 && SET_DEST (set) == regno_reg_rtx [regnor])
4976 REG_NOTES (sinsn)
4977 = gen_rtx_EXPR_LIST (REG_EQUIV,
4978 parm_reg_stack_loc[regnor],
4979 REG_NOTES (sinsn));
4980 }
4981 else if ((set = single_set (linsn)) != 0
4982 && SET_DEST (set) == parmreg)
4983 REG_NOTES (linsn)
4984 = gen_rtx_EXPR_LIST (REG_EQUIV,
4985 stack_parm, REG_NOTES (linsn));
4986 }
4987
4988 /* For pointer data type, suggest pointer register. */
4989 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4990 mark_reg_pointer (parmreg,
4991 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4992
4993 /* If something wants our address, try to use ADDRESSOF. */
4994 if (TREE_ADDRESSABLE (parm))
4995 {
4996 /* If we end up putting something into the stack,
4997 fixup_var_refs_insns will need to make a pass over
4998 all the instructions. It looks through the pending
4999 sequences -- but it can't see the ones in the
5000 CONVERSION_INSNS, if they're not on the sequence
5001 stack. So, we go back to that sequence, just so that
5002 the fixups will happen. */
5003 push_to_sequence (conversion_insns);
5004 put_var_into_stack (parm);
5005 conversion_insns = get_insns ();
5006 end_sequence ();
5007 }
5008 }
5009 else
5010 {
5011 /* Value must be stored in the stack slot STACK_PARM
5012 during function execution. */
5013
5014 if (promoted_mode != nominal_mode)
5015 {
5016 /* Conversion is required. */
5017 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
5018
5019 emit_move_insn (tempreg, validize_mem (entry_parm));
5020
5021 push_to_sequence (conversion_insns);
5022 entry_parm = convert_to_mode (nominal_mode, tempreg,
5023 TREE_UNSIGNED (TREE_TYPE (parm)));
5024 if (stack_parm)
5025 /* ??? This may need a big-endian conversion on sparc64. */
5026 stack_parm = adjust_address (stack_parm, nominal_mode, 0);
5027
5028 conversion_insns = get_insns ();
5029 did_conversion = 1;
5030 end_sequence ();
5031 }
5032
5033 if (entry_parm != stack_parm)
5034 {
5035 if (stack_parm == 0)
5036 {
5037 stack_parm
5038 = assign_stack_local (GET_MODE (entry_parm),
5039 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
5040 set_mem_attributes (stack_parm, parm, 1);
5041 }
5042
5043 if (promoted_mode != nominal_mode)
5044 {
5045 push_to_sequence (conversion_insns);
5046 emit_move_insn (validize_mem (stack_parm),
5047 validize_mem (entry_parm));
5048 conversion_insns = get_insns ();
5049 end_sequence ();
5050 }
5051 else
5052 emit_move_insn (validize_mem (stack_parm),
5053 validize_mem (entry_parm));
5054 }
5055
5056 SET_DECL_RTL (parm, stack_parm);
5057 }
5058
5059 /* If this "parameter" was the place where we are receiving the
5060 function's incoming structure pointer, set up the result. */
5061 if (parm == function_result_decl)
5062 {
5063 tree result = DECL_RESULT (fndecl);
5064 rtx addr = DECL_RTL (parm);
5065 rtx x;
5066
5067 #ifdef POINTERS_EXTEND_UNSIGNED
5068 if (GET_MODE (addr) != Pmode)
5069 addr = convert_memory_address (Pmode, addr);
5070 #endif
5071
5072 x = gen_rtx_MEM (DECL_MODE (result), addr);
5073 set_mem_attributes (x, result, 1);
5074 SET_DECL_RTL (result, x);
5075 }
5076
5077 if (GET_CODE (DECL_RTL (parm)) == REG)
5078 REGNO_DECL (REGNO (DECL_RTL (parm))) = parm;
5079 else if (GET_CODE (DECL_RTL (parm)) == CONCAT)
5080 {
5081 REGNO_DECL (REGNO (XEXP (DECL_RTL (parm), 0))) = parm;
5082 REGNO_DECL (REGNO (XEXP (DECL_RTL (parm), 1))) = parm;
5083 }
5084
5085 }
5086
5087 /* Output all parameter conversion instructions (possibly including calls)
5088 now that all parameters have been copied out of hard registers. */
5089 emit_insn (conversion_insns);
5090
5091 last_parm_insn = get_last_insn ();
5092
5093 current_function_args_size = stack_args_size.constant;
5094
5095 /* Adjust function incoming argument size for alignment and
5096 minimum length. */
5097
5098 #ifdef REG_PARM_STACK_SPACE
5099 #ifndef MAYBE_REG_PARM_STACK_SPACE
5100 current_function_args_size = MAX (current_function_args_size,
5101 REG_PARM_STACK_SPACE (fndecl));
5102 #endif
5103 #endif
5104
5105 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
5106
5107 current_function_args_size
5108 = ((current_function_args_size + STACK_BYTES - 1)
5109 / STACK_BYTES) * STACK_BYTES;
5110
5111 #ifdef ARGS_GROW_DOWNWARD
5112 current_function_arg_offset_rtx
5113 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5114 : expand_expr (size_diffop (stack_args_size.var,
5115 size_int (-stack_args_size.constant)),
5116 NULL_RTX, VOIDmode, 0));
5117 #else
5118 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5119 #endif
5120
5121 /* See how many bytes, if any, of its args a function should try to pop
5122 on return. */
5123
5124 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5125 current_function_args_size);
5126
5127 /* For a stdarg.h function, save info about
5128 regs and stack space used by the named args. */
5129
5130 if (!hide_last_arg)
5131 current_function_args_info = args_so_far;
5132
5133 /* Set the rtx used for the function return value. Put this in its
5134 own variable so any optimizers that need this information don't have
5135 to include tree.h. Do this here so it gets done when an inlined
5136 function gets output. */
5137
5138 current_function_return_rtx
5139 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
5140 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
5141
5142 /* If scalar return value was computed in a pseudo-reg, or was a named
5143 return value that got dumped to the stack, copy that to the hard
5144 return register. */
5145 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
5146 {
5147 tree decl_result = DECL_RESULT (fndecl);
5148 rtx decl_rtl = DECL_RTL (decl_result);
5149
5150 if (REG_P (decl_rtl)
5151 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5152 : DECL_REGISTER (decl_result))
5153 {
5154 rtx real_decl_rtl;
5155
5156 #ifdef FUNCTION_OUTGOING_VALUE
5157 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
5158 fndecl);
5159 #else
5160 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
5161 fndecl);
5162 #endif
5163 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
5164 /* The delay slot scheduler assumes that current_function_return_rtx
5165 holds the hard register containing the return value, not a
5166 temporary pseudo. */
5167 current_function_return_rtx = real_decl_rtl;
5168 }
5169 }
5170 }
5171 \f
5172 /* Indicate whether REGNO is an incoming argument to the current function
5173 that was promoted to a wider mode. If so, return the RTX for the
5174 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
5175 that REGNO is promoted from and whether the promotion was signed or
5176 unsigned. */
5177
5178 #ifdef PROMOTE_FUNCTION_ARGS
5179
5180 rtx
5181 promoted_input_arg (regno, pmode, punsignedp)
5182 unsigned int regno;
5183 enum machine_mode *pmode;
5184 int *punsignedp;
5185 {
5186 tree arg;
5187
5188 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5189 arg = TREE_CHAIN (arg))
5190 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5191 && REGNO (DECL_INCOMING_RTL (arg)) == regno
5192 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5193 {
5194 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5195 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
5196
5197 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5198 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5199 && mode != DECL_MODE (arg))
5200 {
5201 *pmode = DECL_MODE (arg);
5202 *punsignedp = unsignedp;
5203 return DECL_INCOMING_RTL (arg);
5204 }
5205 }
5206
5207 return 0;
5208 }
5209
5210 #endif
5211 \f
5212 /* Compute the size and offset from the start of the stacked arguments for a
5213 parm passed in mode PASSED_MODE and with type TYPE.
5214
5215 INITIAL_OFFSET_PTR points to the current offset into the stacked
5216 arguments.
5217
5218 The starting offset and size for this parm are returned in *OFFSET_PTR
5219 and *ARG_SIZE_PTR, respectively.
5220
5221 IN_REGS is non-zero if the argument will be passed in registers. It will
5222 never be set if REG_PARM_STACK_SPACE is not defined.
5223
5224 FNDECL is the function in which the argument was defined.
5225
5226 There are two types of rounding that are done. The first, controlled by
5227 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5228 list to be aligned to the specific boundary (in bits). This rounding
5229 affects the initial and starting offsets, but not the argument size.
5230
5231 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5232 optionally rounds the size of the parm to PARM_BOUNDARY. The
5233 initial offset is not affected by this rounding, while the size always
5234 is and the starting offset may be. */
5235
5236 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
5237 initial_offset_ptr is positive because locate_and_pad_parm's
5238 callers pass in the total size of args so far as
5239 initial_offset_ptr. arg_size_ptr is always positive. */
5240
5241 void
5242 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
5243 initial_offset_ptr, offset_ptr, arg_size_ptr,
5244 alignment_pad)
5245 enum machine_mode passed_mode;
5246 tree type;
5247 int in_regs ATTRIBUTE_UNUSED;
5248 tree fndecl ATTRIBUTE_UNUSED;
5249 struct args_size *initial_offset_ptr;
5250 struct args_size *offset_ptr;
5251 struct args_size *arg_size_ptr;
5252 struct args_size *alignment_pad;
5253
5254 {
5255 tree sizetree
5256 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5257 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5258 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5259
5260 #ifdef REG_PARM_STACK_SPACE
5261 /* If we have found a stack parm before we reach the end of the
5262 area reserved for registers, skip that area. */
5263 if (! in_regs)
5264 {
5265 int reg_parm_stack_space = 0;
5266
5267 #ifdef MAYBE_REG_PARM_STACK_SPACE
5268 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5269 #else
5270 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5271 #endif
5272 if (reg_parm_stack_space > 0)
5273 {
5274 if (initial_offset_ptr->var)
5275 {
5276 initial_offset_ptr->var
5277 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5278 ssize_int (reg_parm_stack_space));
5279 initial_offset_ptr->constant = 0;
5280 }
5281 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5282 initial_offset_ptr->constant = reg_parm_stack_space;
5283 }
5284 }
5285 #endif /* REG_PARM_STACK_SPACE */
5286
5287 arg_size_ptr->var = 0;
5288 arg_size_ptr->constant = 0;
5289 alignment_pad->var = 0;
5290 alignment_pad->constant = 0;
5291
5292 #ifdef ARGS_GROW_DOWNWARD
5293 if (initial_offset_ptr->var)
5294 {
5295 offset_ptr->constant = 0;
5296 offset_ptr->var = size_binop (MINUS_EXPR, ssize_int (0),
5297 initial_offset_ptr->var);
5298 }
5299 else
5300 {
5301 offset_ptr->constant = -initial_offset_ptr->constant;
5302 offset_ptr->var = 0;
5303 }
5304 if (where_pad != none
5305 && (!host_integerp (sizetree, 1)
5306 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5307 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5308 SUB_PARM_SIZE (*offset_ptr, sizetree);
5309 if (where_pad != downward)
5310 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
5311 if (initial_offset_ptr->var)
5312 arg_size_ptr->var = size_binop (MINUS_EXPR,
5313 size_binop (MINUS_EXPR,
5314 ssize_int (0),
5315 initial_offset_ptr->var),
5316 offset_ptr->var);
5317
5318 else
5319 arg_size_ptr->constant = (-initial_offset_ptr->constant
5320 - offset_ptr->constant);
5321
5322 #else /* !ARGS_GROW_DOWNWARD */
5323 if (!in_regs
5324 #ifdef REG_PARM_STACK_SPACE
5325 || REG_PARM_STACK_SPACE (fndecl) > 0
5326 #endif
5327 )
5328 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
5329 *offset_ptr = *initial_offset_ptr;
5330
5331 #ifdef PUSH_ROUNDING
5332 if (passed_mode != BLKmode)
5333 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5334 #endif
5335
5336 /* Pad_below needs the pre-rounded size to know how much to pad below
5337 so this must be done before rounding up. */
5338 if (where_pad == downward
5339 /* However, BLKmode args passed in regs have their padding done elsewhere.
5340 The stack slot must be able to hold the entire register. */
5341 && !(in_regs && passed_mode == BLKmode))
5342 pad_below (offset_ptr, passed_mode, sizetree);
5343
5344 if (where_pad != none
5345 && (!host_integerp (sizetree, 1)
5346 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5347 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5348
5349 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5350 #endif /* ARGS_GROW_DOWNWARD */
5351 }
5352
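/* Worked example (illustrative only; assumes arguments grow upward, no
   REG_PARM_STACK_SPACE, FUNCTION_ARG_BOUNDARY of 64 bits, PARM_BOUNDARY and
   STACK_BOUNDARY of 32 bits, and upward padding): for a 6-byte BLKmode
   aggregate with *INITIAL_OFFSET_PTR == 4, pad_to_arg_alignment first
   rounds the 4-byte offset up to the 8-byte boundary, so *OFFSET_PTR
   becomes 8 and *ALIGNMENT_PAD records the 4 bytes of padding; the second
   rounding then pads the 6-byte size up to a multiple of PARM_BOUNDARY,
   so *ARG_SIZE_PTR becomes 8.  */
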
5353 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5354 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
5355
5356 static void
5357 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5358 struct args_size *offset_ptr;
5359 int boundary;
5360 struct args_size *alignment_pad;
5361 {
5362 tree save_var = NULL_TREE;
5363 HOST_WIDE_INT save_constant = 0;
5364
5365 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5366
5367 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5368 {
5369 save_var = offset_ptr->var;
5370 save_constant = offset_ptr->constant;
5371 }
5372
5373 alignment_pad->var = NULL_TREE;
5374 alignment_pad->constant = 0;
5375
5376 if (boundary > BITS_PER_UNIT)
5377 {
5378 if (offset_ptr->var)
5379 {
5380 offset_ptr->var =
5381 #ifdef ARGS_GROW_DOWNWARD
5382 round_down
5383 #else
5384 round_up
5385 #endif
5386 (ARGS_SIZE_TREE (*offset_ptr),
5387 boundary / BITS_PER_UNIT);
5388 offset_ptr->constant = 0; /*?*/
5389 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5390 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
5391 save_var);
5392 }
5393 else
5394 {
5395 offset_ptr->constant =
5396 #ifdef ARGS_GROW_DOWNWARD
5397 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5398 #else
5399 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5400 #endif
5401 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5402 alignment_pad->constant = offset_ptr->constant - save_constant;
5403 }
5404 }
5405 }
5406
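/* Illustrative numbers (assuming a BOUNDARY of 64 bits, so BOUNDARY_IN_BYTES
   is 8): a constant offset of 13 becomes CEIL_ROUND (13, 8) == 16 when
   arguments grow upward, or FLOOR_ROUND (-13, 8) == -16 under
   ARGS_GROW_DOWNWARD.  When BOUNDARY exceeds both PARM_BOUNDARY and
   STACK_BOUNDARY, ALIGNMENT_PAD records the difference from the saved
   offset (3 bytes in the upward case).  */
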
5407 #ifndef ARGS_GROW_DOWNWARD
5408 static void
5409 pad_below (offset_ptr, passed_mode, sizetree)
5410 struct args_size *offset_ptr;
5411 enum machine_mode passed_mode;
5412 tree sizetree;
5413 {
5414 if (passed_mode != BLKmode)
5415 {
5416 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5417 offset_ptr->constant
5418 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5419 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5420 - GET_MODE_SIZE (passed_mode));
5421 }
5422 else
5423 {
5424 if (TREE_CODE (sizetree) != INTEGER_CST
5425 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5426 {
5427 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
5428 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5429 /* Add it in. */
5430 ADD_PARM_SIZE (*offset_ptr, s2);
5431 SUB_PARM_SIZE (*offset_ptr, sizetree);
5432 }
5433 }
5434 }
5435 #endif
5436 \f
5437 /* Walk the tree of blocks describing the binding levels within a function
5438 and warn about uninitialized variables.
5439 This is done after calling flow_analysis and before global_alloc
5440 clobbers the pseudo-regs to hard regs. */
5441
5442 void
5443 uninitialized_vars_warning (block)
5444 tree block;
5445 {
5446 tree decl, sub;
5447 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5448 {
5449 if (warn_uninitialized
5450 && TREE_CODE (decl) == VAR_DECL
5451 /* These warnings are unreliable for aggregates
5452 because assigning the fields one by one can fail to convince
5453 flow.c that the entire aggregate was initialized.
5454 Unions are troublesome because members may be shorter. */
5455 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5456 && DECL_RTL (decl) != 0
5457 && GET_CODE (DECL_RTL (decl)) == REG
5458 /* Global optimizations can make it difficult to determine if a
5459 particular variable has been initialized. However, a VAR_DECL
5460 with a nonzero DECL_INITIAL had an initializer, so do not
5461 claim it is potentially uninitialized.
5462
5463 We do not care about the actual value in DECL_INITIAL, so we do
5464 not worry that it may be a dangling pointer. */
5465 && DECL_INITIAL (decl) == NULL_TREE
5466 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5467 warning_with_decl (decl,
5468 "`%s' might be used uninitialized in this function");
5469 if (extra_warnings
5470 && TREE_CODE (decl) == VAR_DECL
5471 && DECL_RTL (decl) != 0
5472 && GET_CODE (DECL_RTL (decl)) == REG
5473 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5474 warning_with_decl (decl,
5475 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5476 }
5477 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5478 uninitialized_vars_warning (sub);
5479 }
5480
5481 /* Do the appropriate part of uninitialized_vars_warning
5482 but for arguments instead of local variables. */
5483
5484 void
5485 setjmp_args_warning ()
5486 {
5487 tree decl;
5488 for (decl = DECL_ARGUMENTS (current_function_decl);
5489 decl; decl = TREE_CHAIN (decl))
5490 if (DECL_RTL (decl) != 0
5491 && GET_CODE (DECL_RTL (decl)) == REG
5492 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5493 warning_with_decl (decl,
5494 "argument `%s' might be clobbered by `longjmp' or `vfork'");
5495 }
5496
5497 /* If this function calls setjmp, put all vars into the stack
5498 unless they were declared `register'. */
5499
5500 void
5501 setjmp_protect (block)
5502 tree block;
5503 {
5504 tree decl, sub;
5505 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5506 if ((TREE_CODE (decl) == VAR_DECL
5507 || TREE_CODE (decl) == PARM_DECL)
5508 && DECL_RTL (decl) != 0
5509 && (GET_CODE (DECL_RTL (decl)) == REG
5510 || (GET_CODE (DECL_RTL (decl)) == MEM
5511 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5512 /* If this variable came from an inline function, it must be
5513 that its life doesn't overlap the setjmp. If there was a
5514 setjmp in the function, it would already be in memory. We
5515 must exclude such variables because their DECL_RTL might be
5516 set to strange things such as virtual_stack_vars_rtx. */
5517 && ! DECL_FROM_INLINE (decl)
5518 && (
5519 #ifdef NON_SAVING_SETJMP
5520 /* If longjmp doesn't restore the registers,
5521 don't put anything in them. */
5522 NON_SAVING_SETJMP
5523 ||
5524 #endif
5525 ! DECL_REGISTER (decl)))
5526 put_var_into_stack (decl);
5527 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5528 setjmp_protect (sub);
5529 }
5530 \f
5531 /* Like the previous function, but for args instead of local variables. */
5532
5533 void
5534 setjmp_protect_args ()
5535 {
5536 tree decl;
5537 for (decl = DECL_ARGUMENTS (current_function_decl);
5538 decl; decl = TREE_CHAIN (decl))
5539 if ((TREE_CODE (decl) == VAR_DECL
5540 || TREE_CODE (decl) == PARM_DECL)
5541 && DECL_RTL (decl) != 0
5542 && (GET_CODE (DECL_RTL (decl)) == REG
5543 || (GET_CODE (DECL_RTL (decl)) == MEM
5544 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5545 && (
5546 /* If longjmp doesn't restore the registers,
5547 don't put anything in them. */
5548 #ifdef NON_SAVING_SETJMP
5549 NON_SAVING_SETJMP
5550 ||
5551 #endif
5552 ! DECL_REGISTER (decl)))
5553 put_var_into_stack (decl);
5554 }
5555 \f
5556 /* Return the context-pointer register corresponding to DECL,
5557 or 0 if it does not need one. */
5558
5559 rtx
5560 lookup_static_chain (decl)
5561 tree decl;
5562 {
5563 tree context = decl_function_context (decl);
5564 tree link;
5565
5566 if (context == 0
5567 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5568 return 0;
5569
5570 /* We treat inline_function_decl as an alias for the current function
5571 because that is the inline function whose vars, types, etc.
5572 are being merged into the current function.
5573 See expand_inline_function. */
5574 if (context == current_function_decl || context == inline_function_decl)
5575 return virtual_stack_vars_rtx;
5576
5577 for (link = context_display; link; link = TREE_CHAIN (link))
5578 if (TREE_PURPOSE (link) == context)
5579 return RTL_EXPR_RTL (TREE_VALUE (link));
5580
5581 abort ();
5582 }
5583 \f
5584 /* Convert a stack slot address ADDR for variable VAR
5585 (from a containing function)
5586 into an address valid in this function (using a static chain). */
5587
5588 rtx
5589 fix_lexical_addr (addr, var)
5590 rtx addr;
5591 tree var;
5592 {
5593 rtx basereg;
5594 HOST_WIDE_INT displacement;
5595 tree context = decl_function_context (var);
5596 struct function *fp;
5597 rtx base = 0;
5598
5599 /* If this is the present function, we need not do anything. */
5600 if (context == current_function_decl || context == inline_function_decl)
5601 return addr;
5602
5603 fp = find_function_data (context);
5604
5605 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5606 addr = XEXP (XEXP (addr, 0), 0);
5607
5608 /* Decode given address as base reg plus displacement. */
5609 if (GET_CODE (addr) == REG)
5610 basereg = addr, displacement = 0;
5611 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5612 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5613 else
5614 abort ();
5615
5616 /* We accept vars reached via the containing function's
5617 incoming arg pointer and via its stack variables pointer. */
5618 if (basereg == fp->internal_arg_pointer)
5619 {
5620 /* If reached via arg pointer, get the arg pointer value
5621 out of that function's stack frame.
5622
5623 There are two cases: If a separate ap is needed, allocate a
5624 slot in the outer function for it and dereference it that way.
5625 This is correct even if the real ap is actually a pseudo.
5626 Otherwise, just adjust the offset from the frame pointer to
5627 compensate. */
5628
5629 #ifdef NEED_SEPARATE_AP
5630 rtx addr;
5631
5632 addr = get_arg_pointer_save_area (fp);
5633 addr = fix_lexical_addr (XEXP (addr, 0), var);
5634 addr = memory_address (Pmode, addr);
5635
5636 base = gen_rtx_MEM (Pmode, addr);
5637 set_mem_alias_set (base, get_frame_alias_set ());
5638 base = copy_to_reg (base);
5639 #else
5640 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5641 base = lookup_static_chain (var);
5642 #endif
5643 }
5644
5645 else if (basereg == virtual_stack_vars_rtx)
5646 {
5647 /* This is the same code as lookup_static_chain, duplicated here to
5648 avoid an extra call to decl_function_context. */
5649 tree link;
5650
5651 for (link = context_display; link; link = TREE_CHAIN (link))
5652 if (TREE_PURPOSE (link) == context)
5653 {
5654 base = RTL_EXPR_RTL (TREE_VALUE (link));
5655 break;
5656 }
5657 }
5658
5659 if (base == 0)
5660 abort ();
5661
5662 /* Use same offset, relative to appropriate static chain or argument
5663 pointer. */
5664 return plus_constant (base, displacement);
5665 }
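
/* Illustrative example (hypothetical offset): suppose a nested function
   reads a variable of its parent that lives at offset -24 from the parent's
   virtual_stack_vars_rtx.  fix_lexical_addr decodes that address as base
   plus displacement, keeps the -24 displacement, and substitutes for the
   base the static chain value recorded for the parent in context_display,
   producing an address that is valid in the current (nested) function.  */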
5666 \f
5667 /* Return the address of the trampoline for entering nested fn FUNCTION.
5668 If necessary, allocate a trampoline (in the stack frame)
5669 and emit rtl to initialize its contents (at entry to this function). */
5670
5671 rtx
5672 trampoline_address (function)
5673 tree function;
5674 {
5675 tree link;
5676 tree rtlexp;
5677 rtx tramp;
5678 struct function *fp;
5679 tree fn_context;
5680
5681 /* Find an existing trampoline and return it. */
5682 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5683 if (TREE_PURPOSE (link) == function)
5684 return
5685 adjust_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5686
5687 for (fp = outer_function_chain; fp; fp = fp->outer)
5688 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5689 if (TREE_PURPOSE (link) == function)
5690 {
5691 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5692 function);
5693 return adjust_trampoline_addr (tramp);
5694 }
5695
5696 /* None exists; we must make one. */
5697
5698 /* Find the `struct function' for the function containing FUNCTION. */
5699 fp = 0;
5700 fn_context = decl_function_context (function);
5701 if (fn_context != current_function_decl
5702 && fn_context != inline_function_decl)
5703 fp = find_function_data (fn_context);
5704
5705 /* Allocate run-time space for this trampoline
5706 (usually in the defining function's stack frame). */
5707 #ifdef ALLOCATE_TRAMPOLINE
5708 tramp = ALLOCATE_TRAMPOLINE (fp);
5709 #else
5710 /* If rounding needed, allocate extra space
5711 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5712 #ifdef TRAMPOLINE_ALIGNMENT
5713 #define TRAMPOLINE_REAL_SIZE \
5714 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5715 #else
5716 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5717 #endif
5718 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5719 fp ? fp : cfun);
5720 #endif
5721
5722 /* Record the trampoline for reuse and note it for later initialization
5723 by expand_function_end. */
5724 if (fp != 0)
5725 {
5726 rtlexp = make_node (RTL_EXPR);
5727 RTL_EXPR_RTL (rtlexp) = tramp;
5728 fp->x_trampoline_list = tree_cons (function, rtlexp,
5729 fp->x_trampoline_list);
5730 }
5731 else
5732 {
5733 /* Make the RTL_EXPR node temporary, not momentary, so that the
5734 trampoline_list doesn't become garbage. */
5735 rtlexp = make_node (RTL_EXPR);
5736
5737 RTL_EXPR_RTL (rtlexp) = tramp;
5738 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5739 }
5740
5741 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5742 return adjust_trampoline_addr (tramp);
5743 }
5744
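/* Illustrative use (GNU C nested functions; `apply', `outer' and `inner'
   are example names only):

     int apply (int (*) (int));
     int outer (int x)
     {
       int inner (int y) { return x + y; }
       return apply (inner);
     }

   Taking the address of `inner' requires a trampoline, because `inner'
   must receive outer's static chain.  The use of `inner' as a function
   pointer is expanded through trampoline_address, which allocates space
   in the enclosing frame and records the trampoline so that
   expand_function_end emits its initialization code.  */
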
5745 /* Given a trampoline address,
5746 round it up to a multiple of TRAMPOLINE_ALIGNMENT. */
5747
5748 static rtx
5749 round_trampoline_addr (tramp)
5750 rtx tramp;
5751 {
5752 #ifdef TRAMPOLINE_ALIGNMENT
5753 /* Round address up to desired boundary. */
5754 rtx temp = gen_reg_rtx (Pmode);
5755 rtx addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5756 rtx mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5757
5758 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5759 temp, 0, OPTAB_LIB_WIDEN);
5760 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5761 temp, 0, OPTAB_LIB_WIDEN);
5762 #endif
5763 return tramp;
5764 }
5765
5766 /* Given a trampoline address, round it up and then apply any
5767 platform-specific adjustments so that the result can be used for a
5768 function call. */
5769
5770 static rtx
5771 adjust_trampoline_addr (tramp)
5772 rtx tramp;
5773 {
5774 tramp = round_trampoline_addr (tramp);
5775 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5776 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5777 #endif
5778 return tramp;
5779 }
5780 \f
5781 /* Collect all this function's BLOCK nodes, including those that are
5782 chained onto the first block, and record in each NOTE for the
5783 beginning or end of a block the BLOCK that the note describes.
5784 The block tree is taken from DECL_INITIAL of the current function
5785 and the block notes from its insn chain, so this function takes no
5786 arguments. */
5787
5788 void
5789 identify_blocks ()
5790 {
5791 int n_blocks;
5792 tree *block_vector, *last_block_vector;
5793 tree *block_stack;
5794 tree block = DECL_INITIAL (current_function_decl);
5795
5796 if (block == 0)
5797 return;
5798
5799 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5800 depth-first order. */
5801 block_vector = get_block_vector (block, &n_blocks);
5802 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
5803
5804 last_block_vector = identify_blocks_1 (get_insns (),
5805 block_vector + 1,
5806 block_vector + n_blocks,
5807 block_stack);
5808
5809 /* If we didn't use all of the subblocks, we've misplaced block notes. */
5810 /* ??? This appears to happen all the time. Latent bugs elsewhere? */
5811 if (0 && last_block_vector != block_vector + n_blocks)
5812 abort ();
5813
5814 free (block_vector);
5815 free (block_stack);
5816 }
5817
5818 /* Subroutine of identify_blocks. Do the block substitution on the
5819 insn chain beginning with INSNS. Recurse for CALL_PLACEHOLDER chains.
5820
5821 BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
5822 BLOCK_VECTOR is incremented for each block seen. */
5823
5824 static tree *
5825 identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
5826 rtx insns;
5827 tree *block_vector;
5828 tree *end_block_vector;
5829 tree *orig_block_stack;
5830 {
5831 rtx insn;
5832 tree *block_stack = orig_block_stack;
5833
5834 for (insn = insns; insn; insn = NEXT_INSN (insn))
5835 {
5836 if (GET_CODE (insn) == NOTE)
5837 {
5838 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5839 {
5840 tree b;
5841
5842 /* If there are more block notes than BLOCKs, something
5843 is badly wrong. */
5844 if (block_vector == end_block_vector)
5845 abort ();
5846
5847 b = *block_vector++;
5848 NOTE_BLOCK (insn) = b;
5849 *block_stack++ = b;
5850 }
5851 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5852 {
5853 /* If there are more NOTE_INSN_BLOCK_ENDs than
5854 NOTE_INSN_BLOCK_BEGs, something is badly wrong. */
5855 if (block_stack == orig_block_stack)
5856 abort ();
5857
5858 NOTE_BLOCK (insn) = *--block_stack;
5859 }
5860 }
5861 else if (GET_CODE (insn) == CALL_INSN
5862 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5863 {
5864 rtx cp = PATTERN (insn);
5865
5866 block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
5867 end_block_vector, block_stack);
5868 if (XEXP (cp, 1))
5869 block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
5870 end_block_vector, block_stack);
5871 if (XEXP (cp, 2))
5872 block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
5873 end_block_vector, block_stack);
5874 }
5875 }
5876
5877 /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
5878 something is badly wrong. */
5879 if (block_stack != orig_block_stack)
5880 abort ();
5881
5882 return block_vector;
5883 }
5884
5885 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
5886 and create duplicate blocks. */
5887 /* ??? Need an option to either create block fragments or to create
5888 abstract origin duplicates of a source block. It really depends
5889 on what optimization has been performed. */
5890
5891 void
5892 reorder_blocks ()
5893 {
5894 tree block = DECL_INITIAL (current_function_decl);
5895 varray_type block_stack;
5896
5897 if (block == NULL_TREE)
5898 return;
5899
5900 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
5901
5902 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
5903 reorder_blocks_0 (block);
5904
5905 /* Prune the old trees away, so that they don't get in the way. */
5906 BLOCK_SUBBLOCKS (block) = NULL_TREE;
5907 BLOCK_CHAIN (block) = NULL_TREE;
5908
5909 /* Recreate the block tree from the note nesting. */
5910 reorder_blocks_1 (get_insns (), block, &block_stack);
5911 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
5912
5913 /* Remove deleted blocks from the block fragment chains. */
5914 reorder_fix_fragments (block);
5915 }
5916
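/* Illustrative sketch (hypothetical scope B): suppose block reordering
   splits scope B so that its insns appear in two disjoint regions.  After
   reorder_blocks the tree contains B plus one fragment:

       B                   (BLOCK_FRAGMENT_ORIGIN == NULL)
       B'  fragment of B   (BLOCK_FRAGMENT_ORIGIN == B, linked through
                            BLOCK_FRAGMENT_CHAIN of B)

   reorder_fix_fragments then drops any fragment whose insns were deleted
   and, if B itself no longer appears in the output, promotes the first
   surviving fragment to be the new origin.  */
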
5917 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
5918
5919 static void
5920 reorder_blocks_0 (block)
5921 tree block;
5922 {
5923 while (block)
5924 {
5925 TREE_ASM_WRITTEN (block) = 0;
5926 reorder_blocks_0 (BLOCK_SUBBLOCKS (block));
5927 block = BLOCK_CHAIN (block);
5928 }
5929 }
5930
5931 static void
5932 reorder_blocks_1 (insns, current_block, p_block_stack)
5933 rtx insns;
5934 tree current_block;
5935 varray_type *p_block_stack;
5936 {
5937 rtx insn;
5938
5939 for (insn = insns; insn; insn = NEXT_INSN (insn))
5940 {
5941 if (GET_CODE (insn) == NOTE)
5942 {
5943 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5944 {
5945 tree block = NOTE_BLOCK (insn);
5946
5947 /* If we have seen this block before, that means it now
5948 spans multiple address regions. Create a new fragment. */
5949 if (TREE_ASM_WRITTEN (block))
5950 {
5951 tree new_block = copy_node (block);
5952 tree origin;
5953
5954 origin = (BLOCK_FRAGMENT_ORIGIN (block)
5955 ? BLOCK_FRAGMENT_ORIGIN (block)
5956 : block);
5957 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
5958 BLOCK_FRAGMENT_CHAIN (new_block)
5959 = BLOCK_FRAGMENT_CHAIN (origin);
5960 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
5961
5962 NOTE_BLOCK (insn) = new_block;
5963 block = new_block;
5964 }
5965
5966 BLOCK_SUBBLOCKS (block) = 0;
5967 TREE_ASM_WRITTEN (block) = 1;
5968 BLOCK_SUPERCONTEXT (block) = current_block;
5969 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5970 BLOCK_SUBBLOCKS (current_block) = block;
5971 current_block = block;
5972 VARRAY_PUSH_TREE (*p_block_stack, block);
5973 }
5974 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5975 {
5976 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
5977 VARRAY_POP (*p_block_stack);
5978 BLOCK_SUBBLOCKS (current_block)
5979 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5980 current_block = BLOCK_SUPERCONTEXT (current_block);
5981 }
5982 }
5983 else if (GET_CODE (insn) == CALL_INSN
5984 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5985 {
5986 rtx cp = PATTERN (insn);
5987 reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
5988 if (XEXP (cp, 1))
5989 reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
5990 if (XEXP (cp, 2))
5991 reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
5992 }
5993 }
5994 }
5995
5996 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
5997 appears in the block tree, select one of the fragments to become
5998 the new origin block. */
5999
6000 static void
6001 reorder_fix_fragments (block)
6002 tree block;
6003 {
6004 while (block)
6005 {
6006 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
6007 tree new_origin = NULL_TREE;
6008
6009 if (dup_origin)
6010 {
6011 if (! TREE_ASM_WRITTEN (dup_origin))
6012 {
6013 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
6014
6015 /* Find the first of the remaining fragments. There must
6016 be at least one -- the current block. */
6017 while (! TREE_ASM_WRITTEN (new_origin))
6018 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
6019 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
6020 }
6021 }
6022 else if (! dup_origin)
6023 new_origin = block;
6024
6025 /* Re-root the rest of the fragments to the new origin. In the
6026 case that DUP_ORIGIN was null, that means BLOCK was the origin
6027 of a chain of fragments and we want to remove those fragments
6028 that didn't make it to the output. */
6029 if (new_origin)
6030 {
6031 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
6032 tree chain = *pp;
6033
6034 while (chain)
6035 {
6036 if (TREE_ASM_WRITTEN (chain))
6037 {
6038 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
6039 *pp = chain;
6040 pp = &BLOCK_FRAGMENT_CHAIN (chain);
6041 }
6042 chain = BLOCK_FRAGMENT_CHAIN (chain);
6043 }
6044 *pp = NULL_TREE;
6045 }
6046
6047 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
6048 block = BLOCK_CHAIN (block);
6049 }
6050 }
6051
6052 /* Reverse the order of elements in the chain T of blocks,
6053 and return the new head of the chain (old last element). */
6054
6055 static tree
6056 blocks_nreverse (t)
6057 tree t;
6058 {
6059 tree prev = 0, decl, next;
6060 for (decl = t; decl; decl = next)
6061 {
6062 next = BLOCK_CHAIN (decl);
6063 BLOCK_CHAIN (decl) = prev;
6064 prev = decl;
6065 }
6066 return prev;
6067 }
6068
6069 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
6070 non-NULL, list them all into VECTOR, in a depth-first preorder
6071 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
6072 blocks. */
6073
6074 static int
6075 all_blocks (block, vector)
6076 tree block;
6077 tree *vector;
6078 {
6079 int n_blocks = 0;
6080
6081 while (block)
6082 {
6083 TREE_ASM_WRITTEN (block) = 0;
6084
6085 /* Record this block. */
6086 if (vector)
6087 vector[n_blocks] = block;
6088
6089 ++n_blocks;
6090
6091 /* Record the subblocks, and their subblocks... */
6092 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
6093 vector ? vector + n_blocks : 0);
6094 block = BLOCK_CHAIN (block);
6095 }
6096
6097 return n_blocks;
6098 }
6099
6100 /* Return a vector containing all the blocks rooted at BLOCK. The
6101 number of elements in the vector is stored in N_BLOCKS_P. The
6102 vector is dynamically allocated; it is the caller's responsibility
6103 to call `free' on the pointer returned. */
6104
6105 static tree *
6106 get_block_vector (block, n_blocks_p)
6107 tree block;
6108 int *n_blocks_p;
6109 {
6110 tree *block_vector;
6111
6112 *n_blocks_p = all_blocks (block, NULL);
6113 block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
6114 all_blocks (block, block_vector);
6115
6116 return block_vector;
6117 }
6118
6119 static int next_block_index = 2;
6120
6121 /* Set BLOCK_NUMBER for all the blocks in FN. */
6122
6123 void
6124 number_blocks (fn)
6125 tree fn;
6126 {
6127 int i;
6128 int n_blocks;
6129 tree *block_vector;
6130
6131 /* For SDB and XCOFF debugging output, we start numbering the blocks
6132 from 1 within each function, rather than keeping a running
6133 count. */
6134 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
6135 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
6136 next_block_index = 1;
6137 #endif
6138
6139 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
6140
6141 /* The top-level BLOCK isn't numbered at all. */
6142 for (i = 1; i < n_blocks; ++i)
6143 /* We number the blocks from two. */
6144 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
6145
6146 free (block_vector);
6147
6148 return;
6149 }
6150
6151 /* If VAR is present in a subblock of BLOCK, return the subblock. */
6152
6153 tree
6154 debug_find_var_in_block_tree (var, block)
6155 tree var;
6156 tree block;
6157 {
6158 tree t;
6159
6160 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
6161 if (t == var)
6162 return block;
6163
6164 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
6165 {
6166 tree ret = debug_find_var_in_block_tree (var, t);
6167 if (ret)
6168 return ret;
6169 }
6170
6171 return NULL_TREE;
6172 }
6173 \f
6174 /* Allocate a function structure and reset its contents to the defaults. */
6175
6176 static void
6177 prepare_function_start ()
6178 {
6179 cfun = (struct function *) ggc_alloc_cleared (sizeof (struct function));
6180
6181 init_stmt_for_function ();
6182 init_eh_for_function ();
6183
6184 cse_not_expected = ! optimize;
6185
6186 /* Caller save not needed yet. */
6187 caller_save_needed = 0;
6188
6189 /* No stack slots have been made yet. */
6190 stack_slot_list = 0;
6191
6192 current_function_has_nonlocal_label = 0;
6193 current_function_has_nonlocal_goto = 0;
6194
6195 /* There is no stack slot for handling nonlocal gotos. */
6196 nonlocal_goto_handler_slots = 0;
6197 nonlocal_goto_stack_level = 0;
6198
6199 /* No labels have been declared for nonlocal use. */
6200 nonlocal_labels = 0;
6201 nonlocal_goto_handler_labels = 0;
6202
6203 /* No function calls so far in this function. */
6204 function_call_count = 0;
6205
6206 /* No parm regs have been allocated.
6207 (This is important for output_inline_function.) */
6208 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
6209
6210 /* Initialize the RTL mechanism. */
6211 init_emit ();
6212
6213 /* Do per-function initialization of the alias analyzer. */
6214 init_alias_once_per_function ();
6215
6216 /* Initialize the queue of pending postincrement and postdecrements,
6217 and some other info in expr.c. */
6218 init_expr ();
6219
6220 /* We haven't done register allocation yet. */
6221 reg_renumber = 0;
6222
6223 init_varasm_status (cfun);
6224
6225 /* Clear out data used for inlining. */
6226 cfun->inlinable = 0;
6227 cfun->original_decl_initial = 0;
6228 cfun->original_arg_vector = 0;
6229
6230 cfun->stack_alignment_needed = STACK_BOUNDARY;
6231 cfun->preferred_stack_boundary = STACK_BOUNDARY;
6232
6233 /* Set if a call to setjmp is seen. */
6234 current_function_calls_setjmp = 0;
6235
6236 /* Set if a call to longjmp is seen. */
6237 current_function_calls_longjmp = 0;
6238
6239 current_function_calls_alloca = 0;
6240 current_function_contains_functions = 0;
6241 current_function_is_leaf = 0;
6242 current_function_nothrow = 0;
6243 current_function_sp_is_unchanging = 0;
6244 current_function_uses_only_leaf_regs = 0;
6245 current_function_has_computed_jump = 0;
6246 current_function_is_thunk = 0;
6247
6248 current_function_returns_pcc_struct = 0;
6249 current_function_returns_struct = 0;
6250 current_function_epilogue_delay_list = 0;
6251 current_function_uses_const_pool = 0;
6252 current_function_uses_pic_offset_table = 0;
6253 current_function_cannot_inline = 0;
6254
6255 /* We have not yet needed to make a label to jump to for tail-recursion. */
6256 tail_recursion_label = 0;
6257
6258 /* We haven't had a need to make a save area for ap yet. */
6259 arg_pointer_save_area = 0;
6260
6261 /* No stack slots allocated yet. */
6262 frame_offset = 0;
6263
6264 /* No SAVE_EXPRs in this function yet. */
6265 save_expr_regs = 0;
6266
6267 /* No RTL_EXPRs in this function yet. */
6268 rtl_expr_chain = 0;
6269
6270 /* Set up to allocate temporaries. */
6271 init_temp_slots ();
6272
6273 /* Indicate that we need to distinguish between the return value of the
6274 present function and the return value of a function being called. */
6275 rtx_equal_function_value_matters = 1;
6276
6277 /* Indicate that we have not instantiated virtual registers yet. */
6278 virtuals_instantiated = 0;
6279
6280 /* Indicate that we want CONCATs now. */
6281 generating_concat_p = 1;
6282
6283 /* Indicate we have no need of a frame pointer yet. */
6284 frame_pointer_needed = 0;
6285
6286 /* By default assume not varargs or stdarg. */
6287 current_function_varargs = 0;
6288 current_function_stdarg = 0;
6289
6290 /* We haven't made any trampolines for this function yet. */
6291 trampoline_list = 0;
6292
6293 init_pending_stack_adjust ();
6294 inhibit_defer_pop = 0;
6295
6296 current_function_outgoing_args_size = 0;
6297
6298 cfun->arc_profile = profile_arc_flag || flag_test_coverage;
6299
6302 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
6303
6304 (*lang_hooks.function.init) (cfun);
6305 if (init_machine_status)
6306 cfun->machine = (*init_machine_status) ();
6307 }
6308
6309 /* Initialize the rtl expansion mechanism so that we can do simple things
6310 like generate sequences. This is used to provide a context during global
6311 initialization of some passes. */
6312 void
6313 init_dummy_function_start ()
6314 {
6315 prepare_function_start ();
6316 }
6317
6318 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
6319 and initialize static variables for generating RTL for the statements
6320 of the function. */
6321
6322 void
6323 init_function_start (subr, filename, line)
6324 tree subr;
6325 const char *filename;
6326 int line;
6327 {
6328 prepare_function_start ();
6329
6330 current_function_name = (*lang_hooks.decl_printable_name) (subr, 2);
6331 cfun->decl = subr;
6332
6333 /* Nonzero if this is a nested function that uses a static chain. */
6334
6335 current_function_needs_context
6336 = (decl_function_context (current_function_decl) != 0
6337 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
6338
6339 /* Within function body, compute a type's size as soon as it is laid out. */
6340 immediate_size_expand++;
6341
6342 /* Prevent ever trying to delete the first instruction of a function.
6343 Also tell final how to output a linenum before the function prologue.
6344 Note linenums could be missing, e.g. when compiling a Java .class file. */
6345 if (line > 0)
6346 emit_line_note (filename, line);
6347
6348 /* Make sure first insn is a note even if we don't want linenums.
6349 This makes sure the first insn will never be deleted.
6350 Also, final expects a note to appear there. */
6351 emit_note (NULL, NOTE_INSN_DELETED);
6352
6353 /* Set flags used by final.c. */
6354 if (aggregate_value_p (DECL_RESULT (subr)))
6355 {
6356 #ifdef PCC_STATIC_STRUCT_RETURN
6357 current_function_returns_pcc_struct = 1;
6358 #endif
6359 current_function_returns_struct = 1;
6360 }
6361
6362 /* Warn if this value is an aggregate type,
6363 regardless of which calling convention we are using for it. */
6364 if (warn_aggregate_return
6365 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6366 warning ("function returns an aggregate");
6367
6368 current_function_returns_pointer
6369 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
6370 }
6371
6372 /* Make sure all values used by the optimization passes have sane
6373 defaults. */
6374 void
6375 init_function_for_compilation ()
6376 {
6377 reg_renumber = 0;
6378
6379 /* No prologue/epilogue insns yet. */
6380 VARRAY_GROW (prologue, 0);
6381 VARRAY_GROW (epilogue, 0);
6382 VARRAY_GROW (sibcall_epilogue, 0);
6383 }
6384
6385 /* Indicate that the current function uses extra args
6386 not explicitly mentioned in the argument list in any fashion. */
6387
6388 void
6389 mark_varargs ()
6390 {
6391 current_function_varargs = 1;
6392 }
6393
6394 /* Expand a call to __main at the beginning of a possible main function. */
6395
6396 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6397 #undef HAS_INIT_SECTION
6398 #define HAS_INIT_SECTION
6399 #endif
6400
6401 void
6402 expand_main_function ()
6403 {
6404 #ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
6405 if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
6406 {
6407 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
6408 rtx tmp, seq;
6409
6410 start_sequence ();
6411 /* Forcibly align the stack. */
6412 #ifdef STACK_GROWS_DOWNWARD
6413 tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT(-align),
6414 stack_pointer_rtx, 1, OPTAB_WIDEN);
6415 #else
6416 tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
6417 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
6418 tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
6419 stack_pointer_rtx, 1, OPTAB_WIDEN);
6420 #endif
6421 if (tmp != stack_pointer_rtx)
6422 emit_move_insn (stack_pointer_rtx, tmp);
6423
6424 /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
6425 tmp = force_reg (Pmode, const0_rtx);
6426 allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
6427 seq = get_insns ();
6428 end_sequence ();
6429
6430 for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
6431 if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
6432 break;
6433 if (tmp)
6434 emit_insn_before (seq, tmp);
6435 else
6436 emit_insn (seq);
6437 }
6438 #endif
6439
6440 #ifndef HAS_INIT_SECTION
6441 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), LCT_NORMAL,
6442 VOIDmode, 0);
6443 #endif
6444 }
6445 \f
6446 extern struct obstack permanent_obstack;
6447
6448 /* The PENDING_SIZES represent the sizes of variable-sized types.
6449 Create RTL for the various sizes now (using temporary variables),
6450 so that we can refer to the sizes from the RTL we are generating
6451 for the current function. The PENDING_SIZES are a TREE_LIST. The
6452 TREE_VALUE of each node is a SAVE_EXPR. */
6453
6454 void
6455 expand_pending_sizes (pending_sizes)
6456 tree pending_sizes;
6457 {
6458 tree tem;
6459
6460 /* Evaluate now the sizes of any types declared among the arguments. */
6461 for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
6462 {
6463 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
6464 /* Flush the queue in case this parameter declaration has
6465 side-effects. */
6466 emit_queue ();
6467 }
6468 }
6469
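/* Illustrative example (example declaration only): for a function declared
   with a variable-length array parameter, e.g.

     void f (int n, double a[n][n]);

   the size of the array type involves SAVE_EXPRs of expressions in `n'.
   Those SAVE_EXPRs are queued as pending sizes while the parameters are
   laid out, and the loop above emits the RTL that evaluates them once, so
   later references to the type's size reuse the saved values.  */
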
6470 /* Start the RTL for a new function, and set variables used for
6471 emitting RTL.
6472 SUBR is the FUNCTION_DECL node.
6473 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6474 the function's parameters, which must be run at any return statement. */
6475
6476 void
6477 expand_function_start (subr, parms_have_cleanups)
6478 tree subr;
6479 int parms_have_cleanups;
6480 {
6481 tree tem;
6482 rtx last_ptr = NULL_RTX;
6483
6484 /* Make sure volatile mem refs aren't considered
6485 valid operands of arithmetic insns. */
6486 init_recog_no_volatile ();
6487
6488 current_function_instrument_entry_exit
6489 = (flag_instrument_function_entry_exit
6490 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6491
6492 current_function_profile
6493 = (profile_flag
6494 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6495
6496 current_function_limit_stack
6497 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
6498
6499 /* If function gets a static chain arg, store it in the stack frame.
6500 Do this first, so it gets the first stack slot offset. */
6501 if (current_function_needs_context)
6502 {
6503 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
6504
6505 /* Delay copying static chain if it is not a register to avoid
6506 conflicts with regs used for parameters. */
6507 if (! SMALL_REGISTER_CLASSES
6508 || GET_CODE (static_chain_incoming_rtx) == REG)
6509 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6510 }
6511
6512 /* If the parameters of this function need cleaning up, get a label
6513 for the beginning of the code which executes those cleanups. This must
6514 be done before doing anything with return_label. */
6515 if (parms_have_cleanups)
6516 cleanup_label = gen_label_rtx ();
6517 else
6518 cleanup_label = 0;
6519
6520 /* Make the label for return statements to jump to. Do not special
6521 case machines with special return instructions -- they will be
6522 handled later during jump, ifcvt, or epilogue creation. */
6523 return_label = gen_label_rtx ();
6524
6525 /* Initialize rtx used to return the value. */
6526 /* Do this before assign_parms so that we copy the struct value address
6527 before any library calls that assign parms might generate. */
6528
6529 /* Decide whether to return the value in memory or in a register. */
6530 if (aggregate_value_p (DECL_RESULT (subr)))
6531 {
6532 /* Returning something that won't go in a register. */
6533 rtx value_address = 0;
6534
6535 #ifdef PCC_STATIC_STRUCT_RETURN
6536 if (current_function_returns_pcc_struct)
6537 {
6538 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6539 value_address = assemble_static_space (size);
6540 }
6541 else
6542 #endif
6543 {
6544 /* Expect to be passed the address of a place to store the value.
6545 If it is passed as an argument, assign_parms will take care of
6546 it. */
6547 if (struct_value_incoming_rtx)
6548 {
6549 value_address = gen_reg_rtx (Pmode);
6550 emit_move_insn (value_address, struct_value_incoming_rtx);
6551 }
6552 }
6553 if (value_address)
6554 {
6555 rtx x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
6556 set_mem_attributes (x, DECL_RESULT (subr), 1);
6557 SET_DECL_RTL (DECL_RESULT (subr), x);
6558 }
6559 }
6560 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6561 /* If return mode is void, this decl rtl should not be used. */
6562 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
6563 else
6564 {
6565 /* Compute the return values into a pseudo reg, which we will copy
6566 into the true return register after the cleanups are done. */
6567
6568 /* In order to figure out what mode to use for the pseudo, we
6569 figure out what the mode of the eventual return register will
6570 actually be, and use that. */
6571 rtx hard_reg
6572 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
6573 subr, 1);
6574
6575 /* Structures that are returned in registers are not aggregate_value_p,
6576 so we may see a PARALLEL. Don't play pseudo games with this. */
6577 if (! REG_P (hard_reg))
6578 SET_DECL_RTL (DECL_RESULT (subr), hard_reg);
6579 else
6580 {
6581 /* Create the pseudo. */
6582 SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
6583
6584 /* Needed because we may need to move this to memory
6585 in case it's a named return value whose address is taken. */
6586 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6587 }
6588 }
6589
6590 /* Initialize rtx for parameters and local variables.
6591 In some cases this requires emitting insns. */
6592
6593 assign_parms (subr);
6594
6595 /* Copy the static chain now if it wasn't a register. The delay is to
6596 avoid conflicts with the parameter passing registers. */
6597
6598 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6599 if (GET_CODE (static_chain_incoming_rtx) != REG)
6600 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6601
6602 /* The following was moved from init_function_start.
6603 The move is supposed to make sdb output more accurate. */
6604 /* Indicate the beginning of the function body,
6605 as opposed to parm setup. */
6606 emit_note (NULL, NOTE_INSN_FUNCTION_BEG);
6607
6608 if (GET_CODE (get_last_insn ()) != NOTE)
6609 emit_note (NULL, NOTE_INSN_DELETED);
6610 parm_birth_insn = get_last_insn ();
6611
6612 context_display = 0;
6613 if (current_function_needs_context)
6614 {
6615 /* Fetch static chain values for containing functions. */
6616 tem = decl_function_context (current_function_decl);
6617 /* Copy the static chain pointer into a pseudo. If we have
6618 small register classes, copy the value from memory if
6619 static_chain_incoming_rtx is a REG. */
6620 if (tem)
6621 {
6622 /* If the static chain originally came in a register, put it back
6623 there, then move it out in the next insn. The reason for
6624 this peculiar code is to satisfy function integration. */
6625 if (SMALL_REGISTER_CLASSES
6626 && GET_CODE (static_chain_incoming_rtx) == REG)
6627 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6628 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6629 }
6630
6631 while (tem)
6632 {
6633 tree rtlexp = make_node (RTL_EXPR);
6634
6635 RTL_EXPR_RTL (rtlexp) = last_ptr;
6636 context_display = tree_cons (tem, rtlexp, context_display);
6637 tem = decl_function_context (tem);
6638 if (tem == 0)
6639 break;
6640 /* Chain thru stack frames, assuming pointer to next lexical frame
6641 is found at the place we always store it. */
6642 #ifdef FRAME_GROWS_DOWNWARD
6643 last_ptr = plus_constant (last_ptr,
6644 -(HOST_WIDE_INT) GET_MODE_SIZE (Pmode));
6645 #endif
6646 last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
6647 set_mem_alias_set (last_ptr, get_frame_alias_set ());
6648 last_ptr = copy_to_reg (last_ptr);
6649
6650 /* If we are not optimizing, ensure that we know that this
6651 piece of context is live over the entire function. */
6652 if (! optimize)
6653 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6654 save_expr_regs);
6655 }
6656 }
6657
6658 if (current_function_instrument_entry_exit)
6659 {
6660 rtx fun = DECL_RTL (current_function_decl);
6661 if (GET_CODE (fun) == MEM)
6662 fun = XEXP (fun, 0);
6663 else
6664 abort ();
6665 emit_library_call (profile_function_entry_libfunc, LCT_NORMAL, VOIDmode,
6666 2, fun, Pmode,
6667 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6668 0,
6669 hard_frame_pointer_rtx),
6670 Pmode);
6671 }
6672
6673 if (current_function_profile)
6674 {
6675 current_function_profile_label_no = profile_label_no++;
6676 #ifdef PROFILE_HOOK
6677 PROFILE_HOOK (current_function_profile_label_no);
6678 #endif
6679 }
6680
6681 /* After the display initializations is where the tail-recursion label
6682 should go, if we end up needing one. Ensure we have a NOTE here
6683 since some things (like trampolines) get placed before this. */
6684 tail_recursion_reentry = emit_note (NULL, NOTE_INSN_DELETED);
6685
6686 /* Evaluate now the sizes of any types declared among the arguments. */
6687 expand_pending_sizes (nreverse (get_pending_sizes ()));
6688
6689 /* Make sure there is a line number after the function entry setup code. */
6690 force_next_line_note ();
6691 }
6692 \f
6693 /* Undo the effects of init_dummy_function_start. */
6694 void
6695 expand_dummy_function_end ()
6696 {
6697 /* End any sequences that failed to be closed due to syntax errors. */
6698 while (in_sequence_p ())
6699 end_sequence ();
6700
6701 /* Outside function body, can't compute type's actual size
6702 until next function's body starts. */
6703
6704 free_after_parsing (cfun);
6705 free_after_compilation (cfun);
6706 cfun = 0;
6707 }
6708
6709 /* Call DOIT for each hard register used as a return value from
6710 the current function. */
6711
6712 void
6713 diddle_return_value (doit, arg)
6714 void (*doit) PARAMS ((rtx, void *));
6715 void *arg;
6716 {
6717 rtx outgoing = current_function_return_rtx;
6718
6719 if (! outgoing)
6720 return;
6721
6722 if (GET_CODE (outgoing) == REG)
6723 (*doit) (outgoing, arg);
6724 else if (GET_CODE (outgoing) == PARALLEL)
6725 {
6726 int i;
6727
6728 for (i = 0; i < XVECLEN (outgoing, 0); i++)
6729 {
6730 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6731
6732 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6733 (*doit) (x, arg);
6734 }
6735 }
6736 }
6737
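/* Illustrative example (register numbers are hypothetical): a small
   structure returned in two hard registers can have a return rtx of the
   form

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   in which case diddle_return_value calls DOIT once for hard register 3
   and once for hard register 4; any entry whose register is a pseudo is
   skipped by the FIRST_PSEUDO_REGISTER check.  */
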
6738 static void
6739 do_clobber_return_reg (reg, arg)
6740 rtx reg;
6741 void *arg ATTRIBUTE_UNUSED;
6742 {
6743 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6744 }
6745
6746 void
6747 clobber_return_register ()
6748 {
6749 diddle_return_value (do_clobber_return_reg, NULL);
6750
6751 /* In case we do use pseudo to return value, clobber it too. */
6752 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6753 {
6754 tree decl_result = DECL_RESULT (current_function_decl);
6755 rtx decl_rtl = DECL_RTL (decl_result);
6756 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
6757 {
6758 do_clobber_return_reg (decl_rtl, NULL);
6759 }
6760 }
6761 }
6762
6763 static void
6764 do_use_return_reg (reg, arg)
6765 rtx reg;
6766 void *arg ATTRIBUTE_UNUSED;
6767 {
6768 emit_insn (gen_rtx_USE (VOIDmode, reg));
6769 }
6770
6771 void
6772 use_return_register ()
6773 {
6774 diddle_return_value (do_use_return_reg, NULL);
6775 }
6776
6777 static GTY(()) rtx initial_trampoline;
6778
6779 /* Generate RTL for the end of the current function.
6780 FILENAME and LINE are the current position in the source file.
6781
6782 It is up to language-specific callers to do cleanups for parameters--
6783 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6784
6785 void
6786 expand_function_end (filename, line, end_bindings)
6787 const char *filename;
6788 int line;
6789 int end_bindings;
6790 {
6791 tree link;
6792 rtx clobber_after;
6793
6794 finish_expr_for_function ();
6795
6796 /* If arg_pointer_save_area was referenced only from a nested
6797 function, we will not have initialized it yet. Do that now. */
6798 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
6799 get_arg_pointer_save_area (cfun);
6800
6801 #ifdef NON_SAVING_SETJMP
6802 /* Don't put any variables in registers if we call setjmp
6803 on a machine that fails to restore the registers. */
6804 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6805 {
6806 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6807 setjmp_protect (DECL_INITIAL (current_function_decl));
6808
6809 setjmp_protect_args ();
6810 }
6811 #endif
6812
6813 /* Initialize any trampolines required by this function. */
6814 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6815 {
6816 tree function = TREE_PURPOSE (link);
6817 rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
6818 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6819 #ifdef TRAMPOLINE_TEMPLATE
6820 rtx blktramp;
6821 #endif
6822 rtx seq;
6823
6824 #ifdef TRAMPOLINE_TEMPLATE
6825 /* First make sure this compilation has a template for
6826 initializing trampolines. */
6827 if (initial_trampoline == 0)
6828 {
6829 initial_trampoline
6830 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6831 set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
6832 }
6833 #endif
6834
6835 /* Generate insns to initialize the trampoline. */
6836 start_sequence ();
6837 tramp = round_trampoline_addr (XEXP (tramp, 0));
6838 #ifdef TRAMPOLINE_TEMPLATE
6839 blktramp = replace_equiv_address (initial_trampoline, tramp);
6840 emit_block_move (blktramp, initial_trampoline,
6841 GEN_INT (TRAMPOLINE_SIZE));
6842 #endif
6843 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6844 seq = get_insns ();
6845 end_sequence ();
6846
6847 /* Put those insns at entry to the containing function (this one). */
6848 emit_insn_before (seq, tail_recursion_reentry);
6849 }
6850
6851 /* If we are doing stack checking and this function makes calls,
6852 do a stack probe at the start of the function to ensure we have enough
6853 space for another stack frame. */
6854 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6855 {
6856 rtx insn, seq;
6857
6858 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6859 if (GET_CODE (insn) == CALL_INSN)
6860 {
6861 start_sequence ();
6862 probe_stack_range (STACK_CHECK_PROTECT,
6863 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6864 seq = get_insns ();
6865 end_sequence ();
6866 emit_insn_before (seq, tail_recursion_reentry);
6867 break;
6868 }
6869 }
6870
6871 /* Warn about unused parms if extra warnings were specified. */
6872 /* Either ``-W -Wunused'' or ``-Wunused-parameter'' enables this
6873 warning. WARN_UNUSED_PARAMETER is negative when set by
6874 -Wunused. */
6875 if (warn_unused_parameter > 0
6876 || (warn_unused_parameter < 0 && extra_warnings))
6877 {
6878 tree decl;
6879
6880 for (decl = DECL_ARGUMENTS (current_function_decl);
6881 decl; decl = TREE_CHAIN (decl))
6882 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6883 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6884 warning_with_decl (decl, "unused parameter `%s'");
6885 }
6886
6887 /* Delete handlers for nonlocal gotos if nothing uses them. */
6888 if (nonlocal_goto_handler_slots != 0
6889 && ! current_function_has_nonlocal_label)
6890 delete_handlers ();
6891
6892 /* End any sequences that failed to be closed due to syntax errors. */
6893 while (in_sequence_p ())
6894 end_sequence ();
6895
6896 /* Outside function body, can't compute type's actual size
6897 until next function's body starts. */
6898 immediate_size_expand--;
6899
6900 clear_pending_stack_adjust ();
6901 do_pending_stack_adjust ();
6902
6903 /* Mark the end of the function body.
6904 If control reaches this insn, the function can drop through
6905 without returning a value. */
6906 emit_note (NULL, NOTE_INSN_FUNCTION_END);
6907
6908 /* Must mark the last line number note in the function, so that the test
6909 coverage code can avoid counting the last line twice. This just tells
6910 the code to ignore the immediately following line note, since there
6911 already exists a copy of this note somewhere above. This line number
6912 note is still needed for debugging though, so we can't delete it. */
6913 if (flag_test_coverage)
6914 emit_note (NULL, NOTE_INSN_REPEATED_LINE_NUMBER);
6915
6916 /* Output a linenumber for the end of the function.
6917 SDB depends on this. */
6918 emit_line_note_force (filename, line);
6919
6920 /* Before the return label (if any), clobber the return
6921 registers so that they are not propagated live to the rest of
6922 the function. This can only happen with functions that drop
6923 through; if there had been a return statement, there would
6924 have either been a return rtx, or a jump to the return label.
6925
6926 We delay emitting the actual clobbers until after
6927 current_function_return_rtx has been computed. */
6928 clobber_after = get_last_insn ();
6929
6930 /* Output the label for the actual return from the function,
6931 if one is expected. This happens either because a function epilogue
6932 is used instead of a return instruction, or because a return was done
6933 with a goto in order to run local cleanups, or because of pcc-style
6934 structure returning. */
6935 if (return_label)
6936 emit_label (return_label);
6937
6938 /* C++ uses this. */
6939 if (end_bindings)
6940 expand_end_bindings (0, 0, 0);
6941
6942 if (current_function_instrument_entry_exit)
6943 {
6944 rtx fun = DECL_RTL (current_function_decl);
6945 if (GET_CODE (fun) == MEM)
6946 fun = XEXP (fun, 0);
6947 else
6948 abort ();
6949 emit_library_call (profile_function_exit_libfunc, LCT_NORMAL, VOIDmode,
6950 2, fun, Pmode,
6951 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6952 0,
6953 hard_frame_pointer_rtx),
6954 Pmode);
6955 }
6956
6957 /* Let except.c know where it should emit the call to unregister
6958 the function context for sjlj exceptions. */
6959 if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
6960 sjlj_emit_function_exit_after (get_last_insn ());
6961
6962 /* If we had calls to alloca, and this machine needs
6963 an accurate stack pointer to exit the function,
6964 insert some code to save and restore the stack pointer. */
6965 #ifdef EXIT_IGNORE_STACK
6966 if (! EXIT_IGNORE_STACK)
6967 #endif
6968 if (current_function_calls_alloca)
6969 {
6970 rtx tem = 0;
6971
6972 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6973 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6974 }
6975
6976 /* If scalar return value was computed in a pseudo-reg, or was a named
6977 return value that got dumped to the stack, copy that to the hard
6978 return register. */
6979 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6980 {
6981 tree decl_result = DECL_RESULT (current_function_decl);
6982 rtx decl_rtl = DECL_RTL (decl_result);
6983
6984 if (REG_P (decl_rtl)
6985 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
6986 : DECL_REGISTER (decl_result))
6987 {
6988 rtx real_decl_rtl = current_function_return_rtx;
6989
6990 /* This should be set in assign_parms. */
6991 if (! REG_FUNCTION_VALUE_P (real_decl_rtl))
6992 abort ();
6993
6994 /* If this is a BLKmode structure being returned in registers,
6995 then use the mode computed in expand_return. Note that if
6996 decl_rtl is memory, then its mode may have been changed,
6997 but that current_function_return_rtx has not. */
6998 if (GET_MODE (real_decl_rtl) == BLKmode)
6999 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
7000
7001 /* If a named return value dumped decl_rtl to memory, then
7002 we may need to re-do the PROMOTE_MODE signed/unsigned
7003 extension. */
7004 if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
7005 {
7006 int unsignedp = TREE_UNSIGNED (TREE_TYPE (decl_result));
7007
7008 #ifdef PROMOTE_FUNCTION_RETURN
7009 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
7010 &unsignedp, 1);
7011 #endif
7012
7013 convert_move (real_decl_rtl, decl_rtl, unsignedp);
7014 }
7015 else if (GET_CODE (real_decl_rtl) == PARALLEL)
7016 emit_group_load (real_decl_rtl, decl_rtl,
7017 int_size_in_bytes (TREE_TYPE (decl_result)));
7018 else
7019 emit_move_insn (real_decl_rtl, decl_rtl);
7020 }
7021 }
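/* Hedged illustration (assuming a target that defines PROMOTE_FUNCTION_RETURN
   and widens sub-word return values): if the named return value was forced
   into an HImode stack slot while current_function_return_rtx is an SImode
   hard register, the convert_move path above re-does the signed or unsigned
   extension that PROMOTE_MODE would otherwise have provided; the BLKmode and
   PARALLEL branches cover aggregates returned in registers instead. */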
7022
7023 /* If returning a structure, arrange to return the address of the value
7024 in a place where debuggers expect to find it.
7025
7026 If returning a structure PCC style,
7027 the caller also depends on this value.
7028 And current_function_returns_pcc_struct is not necessarily set. */
7029 if (current_function_returns_struct
7030 || current_function_returns_pcc_struct)
7031 {
7032 rtx value_address
7033 = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7034 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
7035 #ifdef FUNCTION_OUTGOING_VALUE
7036 rtx outgoing
7037 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
7038 current_function_decl);
7039 #else
7040 rtx outgoing
7041 = FUNCTION_VALUE (build_pointer_type (type), current_function_decl);
7042 #endif
7043
7044 /* Mark this as a function return value so integrate will delete the
7045 assignment and USE below when inlining this function. */
7046 REG_FUNCTION_VALUE_P (outgoing) = 1;
7047
7048 #ifdef POINTERS_EXTEND_UNSIGNED
7049 /* The address may be ptr_mode and OUTGOING may be Pmode. */
7050 if (GET_MODE (outgoing) != GET_MODE (value_address))
7051 value_address = convert_memory_address (GET_MODE (outgoing),
7052 value_address);
7053 #endif
7054
7055 emit_move_insn (outgoing, value_address);
7056
7057 /* Show return register used to hold result (in this case the address
7058 of the result). */
7059 current_function_return_rtx = outgoing;
7060 }
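/* Hedged illustration, using a made-up source function: for

     struct big { int a[8]; };
     struct big f (void) { struct big b = {{0}}; return b; }

   the returned object lives in memory addressed by DECL_RTL (DECL_RESULT),
   and the code above copies that address into the pointer-mode value
   register so debuggers and PCC-style callers can locate the result. */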
7061
7062 /* If this is an implementation of throw, do what's necessary to
7063 communicate between __builtin_eh_return and the epilogue. */
7064 expand_eh_return ();
7065
7066 /* Emit the actual code to clobber return register. */
7067 {
7068 rtx seq, after;
7069
7070 start_sequence ();
7071 clobber_return_register ();
7072 seq = get_insns ();
7073 end_sequence ();
7074
7075 after = emit_insn_after (seq, clobber_after);
7076
7077 if (clobber_after != after)
7078 cfun->x_clobber_return_insn = after;
7079 }
7080
7081 /* ??? This should no longer be necessary since stupid is no longer with
7082 us, but there are some parts of the compiler (e.g. reload_combine, and
7083 sh mach_dep_reorg) that still try to compute their own lifetime info
7084 instead of using the general framework. */
7085 use_return_register ();
7086
7087 /* Fix up any gotos that jumped out to the outermost
7088 binding level of the function.
7089 Must follow emitting RETURN_LABEL. */
7090
7091 /* If you have any cleanups to do at this point,
7092 and they need to create temporary variables,
7093 then you will lose. */
7094 expand_fixups (get_insns ());
7095 }
7096
7097 rtx
7098 get_arg_pointer_save_area (f)
7099 struct function *f;
7100 {
7101 rtx ret = f->x_arg_pointer_save_area;
7102
7103 if (! ret)
7104 {
7105 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
7106 f->x_arg_pointer_save_area = ret;
7107 }
7108
7109 if (f == cfun && ! f->arg_pointer_save_area_init)
7110 {
7111 rtx seq;
7112
7113 /* Save the arg pointer at the beginning of the function. The
7114 generated stack slot may not be a valid memory address, so we
7115 have to check it and fix it if necessary. */
7116 start_sequence ();
7117 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
7118 seq = get_insns ();
7119 end_sequence ();
7120
7121 push_topmost_sequence ();
7122 emit_insn_after (seq, get_insns ());
7123 pop_topmost_sequence ();
7124 }
7125
7126 return ret;
7127 }
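/* Hedged usage sketch (the caller shown is an assumption, not part of this
   file): code that needs the incoming argument pointer after the stack may
   have moved, such as a __builtin_setjmp or nonlocal goto receiver, can do

     emit_move_insn (virtual_incoming_args_rtx,
                     copy_to_reg (get_arg_pointer_save_area (cfun)));

   relying on the save emitted lazily at function entry above. */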
7128 \f
7129 /* Extend a vector that records the INSN_UIDs of INSNS
7130 (a list of one or more insns). */
7131
7132 static void
7133 record_insns (insns, vecp)
7134 rtx insns;
7135 varray_type *vecp;
7136 {
7137 int i, len;
7138 rtx tmp;
7139
7140 tmp = insns;
7141 len = 0;
7142 while (tmp != NULL_RTX)
7143 {
7144 len++;
7145 tmp = NEXT_INSN (tmp);
7146 }
7147
7148 i = VARRAY_SIZE (*vecp);
7149 VARRAY_GROW (*vecp, i + len);
7150 tmp = insns;
7151 while (tmp != NULL_RTX)
7152 {
7153 VARRAY_INT (*vecp, i) = INSN_UID (tmp);
7154 i++;
7155 tmp = NEXT_INSN (tmp);
7156 }
7157 }
7158
7159 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
7160 be running after reorg, SEQUENCE rtl is possible. */
7161
7162 static int
7163 contains (insn, vec)
7164 rtx insn;
7165 varray_type vec;
7166 {
7167 int i, j;
7168
7169 if (GET_CODE (insn) == INSN
7170 && GET_CODE (PATTERN (insn)) == SEQUENCE)
7171 {
7172 int count = 0;
7173 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7174 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7175 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
7176 count++;
7177 return count;
7178 }
7179 else
7180 {
7181 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7182 if (INSN_UID (insn) == VARRAY_INT (vec, j))
7183 return 1;
7184 }
7185 return 0;
7186 }
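/* Hedged example: after delayed-branch scheduling, an epilogue jump may be
   wrapped as (insn (sequence [jump_insn delay_slot_insn])).  For such an
   insn, contains () counts how many SEQUENCE elements were recorded in VEC,
   so prologue_epilogue_contains below still recognizes epilogue insns that
   ended up inside delay-slot SEQUENCEs. */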
7187
7188 int
7189 prologue_epilogue_contains (insn)
7190 rtx insn;
7191 {
7192 if (contains (insn, prologue))
7193 return 1;
7194 if (contains (insn, epilogue))
7195 return 1;
7196 return 0;
7197 }
7198
7199 int
7200 sibcall_epilogue_contains (insn)
7201 rtx insn;
7202 {
7203 if (sibcall_epilogue)
7204 return contains (insn, sibcall_epilogue);
7205 return 0;
7206 }
7207
7208 #ifdef HAVE_return
7209 /* Insert gen_return at the end of block BB. This also means updating
7210 block_for_insn appropriately. */
7211
7212 static void
7213 emit_return_into_block (bb, line_note)
7214 basic_block bb;
7215 rtx line_note;
7216 {
7217 rtx p, end;
7218
7219 p = NEXT_INSN (bb->end);
7220 end = emit_jump_insn_after (gen_return (), bb->end);
7221 if (line_note)
7222 emit_line_note_after (NOTE_SOURCE_FILE (line_note),
7223 NOTE_LINE_NUMBER (line_note), PREV_INSN (bb->end));
7224 }
7225 #endif /* HAVE_return */
7226
7227 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
7228
7229 /* These functions convert the epilogue into a variant that does not modify the
7230 stack pointer. This is used in cases where a function returns an object
7231 whose size is not known until it is computed. The called function leaves the
7232 object on the stack, leaves the stack depressed, and returns a pointer to
7233 the object.
7234
7235 What we need to do is track all modifications and references to the stack
7236 pointer, deleting the modifications and changing the references to point to
7237 the location the stack pointer would have pointed to had the modifications
7238 taken place.
7239
7240 These functions need to be portable so we need to make as few assumptions
7241 about the epilogue as we can. However, the epilogue basically contains
7242 three things: instructions to reset the stack pointer, instructions to
7243 reload registers, possibly including the frame pointer, and an
7244 instruction to return to the caller.
7245
7246 If we can't be sure of what a relevant epilogue insn is doing, we abort.
7247 We also make no attempt to validate the insns we make since if they are
7248 invalid, we probably can't do anything valid. The intent is that these
7249 routines get "smarter" as more and more machines start to use them and
7250 they try operating on different epilogues.
7251
7252 We use the following structure to track what the part of the epilogue that
7253 we've already processed has done. We keep two copies of the SP equivalence,
7254 one for use during the insn we are processing and one for use in the next
7255 insn. The difference is because one part of a PARALLEL may adjust SP
7256 and the other may use it. */
7257
7258 struct epi_info
7259 {
7260 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
7261 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
7262 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
7263 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
7264 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
7265 should be set to once we no longer need
7266 its value. */
7267 };
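/* Hedged worked example (register names are hypothetical): given an epilogue

     (set (reg sp) (plus (reg fp) (const_int 16)))
     (set (reg r3) (mem (plus (reg sp) (const_int 4))))
     (return)

   processing the first SET records SP_EQUIV_REG = fp and SP_OFFSET = 16
   without emitting any stack pointer modification; the load is then emitted
   with its SP reference rewritten relative to fp, and the RETURN becomes an
   indirect jump through the saved return address when
   INCOMING_RETURN_ADDR_RTX lives in memory. */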
7268
7269 static void handle_epilogue_set PARAMS ((rtx, struct epi_info *));
7270 static void emit_equiv_load PARAMS ((struct epi_info *));
7271
7272 /* Modify INSNS, a list of one or more insns that is part of the epilogue, so
7273 that it makes no modifications to the stack pointer. Return the new list of insns. */
7274
7275 static rtx
7276 keep_stack_depressed (insns)
7277 rtx insns;
7278 {
7279 int j;
7280 struct epi_info info;
7281 rtx insn, next;
7282
7283 /* If the epilogue is just a single instruction, it must be OK as is. */
7284
7285 if (NEXT_INSN (insns) == NULL_RTX)
7286 return insns;
7287
7288 /* Otherwise, start a sequence, initialize the information we have, and
7289 process all the insns we were given. */
7290 start_sequence ();
7291
7292 info.sp_equiv_reg = stack_pointer_rtx;
7293 info.sp_offset = 0;
7294 info.equiv_reg_src = 0;
7295
7296 insn = insns;
7297 next = NULL_RTX;
7298 while (insn != NULL_RTX)
7299 {
7300 next = NEXT_INSN (insn);
7301
7302 if (!INSN_P (insn))
7303 {
7304 add_insn (insn);
7305 insn = next;
7306 continue;
7307 }
7308
7309 /* If this insn references the register that SP is equivalent to and
7310 we have a pending load to that register, we must force out the load
7311 first and then indicate we no longer know what SP's equivalent is. */
7312 if (info.equiv_reg_src != 0
7313 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
7314 {
7315 emit_equiv_load (&info);
7316 info.sp_equiv_reg = 0;
7317 }
7318
7319 info.new_sp_equiv_reg = info.sp_equiv_reg;
7320 info.new_sp_offset = info.sp_offset;
7321
7322 /* If this is a (RETURN) and the return address is on the stack,
7323 update the address and change to an indirect jump. */
7324 if (GET_CODE (PATTERN (insn)) == RETURN
7325 || (GET_CODE (PATTERN (insn)) == PARALLEL
7326 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
7327 {
7328 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
7329 rtx base = 0;
7330 HOST_WIDE_INT offset = 0;
7331 rtx jump_insn, jump_set;
7332
7333 /* If the return address is in a register, we can emit the insn
7334 unchanged. Otherwise, it must be a MEM and we see what the
7335 base register and offset are. In any case, we have to emit any
7336 pending load to the equivalent reg of SP, if any. */
7337 if (GET_CODE (retaddr) == REG)
7338 {
7339 emit_equiv_load (&info);
7340 add_insn (insn);
7341 insn = next;
7342 continue;
7343 }
7344 else if (GET_CODE (retaddr) == MEM
7345 && GET_CODE (XEXP (retaddr, 0)) == REG)
7346 base = gen_rtx_REG (Pmode, REGNO (XEXP (retaddr, 0))), offset = 0;
7347 else if (GET_CODE (retaddr) == MEM
7348 && GET_CODE (XEXP (retaddr, 0)) == PLUS
7349 && GET_CODE (XEXP (XEXP (retaddr, 0), 0)) == REG
7350 && GET_CODE (XEXP (XEXP (retaddr, 0), 1)) == CONST_INT)
7351 {
7352 base = gen_rtx_REG (Pmode, REGNO (XEXP (XEXP (retaddr, 0), 0)));
7353 offset = INTVAL (XEXP (XEXP (retaddr, 0), 1));
7354 }
7355 else
7356 abort ();
7357
7358 /* If the base of the location containing the return pointer
7359 is SP, we must update it with the replacement address. Otherwise,
7360 just build the necessary MEM. */
7361 retaddr = plus_constant (base, offset);
7362 if (base == stack_pointer_rtx)
7363 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
7364 plus_constant (info.sp_equiv_reg,
7365 info.sp_offset));
7366
7367 retaddr = gen_rtx_MEM (Pmode, retaddr);
7368
7369 /* If there is a pending load to the equivalent register for SP
7370 and we reference that register, we must load our address into
7371 a scratch register and then do that load. */
7372 if (info.equiv_reg_src
7373 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
7374 {
7375 unsigned int regno;
7376 rtx reg;
7377
7378 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7379 if (HARD_REGNO_MODE_OK (regno, Pmode)
7380 && !fixed_regs[regno]
7381 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
7382 && !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
7383 regno)
7384 && !refers_to_regno_p (regno,
7385 regno + HARD_REGNO_NREGS (regno,
7386 Pmode),
7387 info.equiv_reg_src, NULL))
7388 break;
7389
7390 if (regno == FIRST_PSEUDO_REGISTER)
7391 abort ();
7392
7393 reg = gen_rtx_REG (Pmode, regno);
7394 emit_move_insn (reg, retaddr);
7395 retaddr = reg;
7396 }
7397
7398 emit_equiv_load (&info);
7399 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
7400
7401 /* Show that the SET in the above insn is a RETURN. */
7402 jump_set = single_set (jump_insn);
7403 if (jump_set == 0)
7404 abort ();
7405 else
7406 SET_IS_RETURN_P (jump_set) = 1;
7407 }
7408
7409 /* If SP is not mentioned in the pattern and its equivalent register, if
7410 any, is not modified, just emit it. Otherwise, if neither is set,
7411 replace the reference to SP and emit the insn. If none of those are
7412 true, handle each SET individually. */
7413 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
7414 && (info.sp_equiv_reg == stack_pointer_rtx
7415 || !reg_set_p (info.sp_equiv_reg, insn)))
7416 add_insn (insn);
7417 else if (! reg_set_p (stack_pointer_rtx, insn)
7418 && (info.sp_equiv_reg == stack_pointer_rtx
7419 || !reg_set_p (info.sp_equiv_reg, insn)))
7420 {
7421 if (! validate_replace_rtx (stack_pointer_rtx,
7422 plus_constant (info.sp_equiv_reg,
7423 info.sp_offset),
7424 insn))
7425 abort ();
7426
7427 add_insn (insn);
7428 }
7429 else if (GET_CODE (PATTERN (insn)) == SET)
7430 handle_epilogue_set (PATTERN (insn), &info);
7431 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7432 {
7433 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
7434 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
7435 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
7436 }
7437 else
7438 add_insn (insn);
7439
7440 info.sp_equiv_reg = info.new_sp_equiv_reg;
7441 info.sp_offset = info.new_sp_offset;
7442
7443 insn = next;
7444 }
7445
7446 insns = get_insns ();
7447 end_sequence ();
7448 return insns;
7449 }
7450
7451 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
7452 structure that contains information about what we've seen so far. We
7453 process this SET by either updating that data or by emitting one or
7454 more insns. */
7455
7456 static void
7457 handle_epilogue_set (set, p)
7458 rtx set;
7459 struct epi_info *p;
7460 {
7461 /* First handle the case where we are setting SP. Record what it is being
7462 set from. If unknown, abort. */
7463 if (reg_set_p (stack_pointer_rtx, set))
7464 {
7465 if (SET_DEST (set) != stack_pointer_rtx)
7466 abort ();
7467
7468 if (GET_CODE (SET_SRC (set)) == PLUS
7469 && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
7470 {
7471 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
7472 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
7473 }
7474 else
7475 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
7476
7477 /* If we are adjusting SP, we adjust from the old data. */
7478 if (p->new_sp_equiv_reg == stack_pointer_rtx)
7479 {
7480 p->new_sp_equiv_reg = p->sp_equiv_reg;
7481 p->new_sp_offset += p->sp_offset;
7482 }
7483
7484 if (p->new_sp_equiv_reg == 0 || GET_CODE (p->new_sp_equiv_reg) != REG)
7485 abort ();
7486
7487 return;
7488 }
7489
7490 /* Next handle the case where we are setting SP's equivalent register.
7491 If we already have a value to set it to, abort. We could update, but
7492 there seems little point in handling that case. Note that we have
7493 to allow for the case where we are setting the register set in
7494 the previous part of a PARALLEL inside a single insn. But use the
7495 old offset for any updates within this insn. */
7496 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
7497 {
7498 if (!rtx_equal_p (p->new_sp_equiv_reg, SET_DEST (set))
7499 || p->equiv_reg_src != 0)
7500 abort ();
7501 else
7502 p->equiv_reg_src
7503 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
7504 plus_constant (p->sp_equiv_reg,
7505 p->sp_offset));
7506 }
7507
7508 /* Otherwise, replace any references to SP in the insn with its new value
7509 and emit the insn. */
7510 else
7511 {
7512 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
7513 plus_constant (p->sp_equiv_reg,
7514 p->sp_offset));
7515 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
7516 plus_constant (p->sp_equiv_reg,
7517 p->sp_offset));
7518 emit_insn (set);
7519 }
7520 }
7521
7522 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
7523
7524 static void
7525 emit_equiv_load (p)
7526 struct epi_info *p;
7527 {
7528 if (p->equiv_reg_src != 0)
7529 emit_move_insn (p->sp_equiv_reg, p->equiv_reg_src);
7530
7531 p->equiv_reg_src = 0;
7532 }
7533 #endif
7534
7535 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
7536 this into place with notes indicating where the prologue ends and where
7537 the epilogue begins. Update the basic block information when possible. */
7538
7539 void
7540 thread_prologue_and_epilogue_insns (f)
7541 rtx f ATTRIBUTE_UNUSED;
7542 {
7543 int inserted = 0;
7544 edge e;
7545 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
7546 rtx seq;
7547 #endif
7548 #ifdef HAVE_prologue
7549 rtx prologue_end = NULL_RTX;
7550 #endif
7551 #if defined (HAVE_epilogue) || defined(HAVE_return)
7552 rtx epilogue_end = NULL_RTX;
7553 #endif
7554
7555 #ifdef HAVE_prologue
7556 if (HAVE_prologue)
7557 {
7558 start_sequence ();
7559 seq = gen_prologue ();
7560 emit_insn (seq);
7561
7562 /* Retain a map of the prologue insns. */
7563 record_insns (seq, &prologue);
7564 prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);
7565
7566 seq = get_insns ();
7567 end_sequence ();
7568
7569 /* Can't deal with multiple successors of the entry block
7570 at the moment. Function should always have at least one
7571 entry point. */
7572 if (!ENTRY_BLOCK_PTR->succ || ENTRY_BLOCK_PTR->succ->succ_next)
7573 abort ();
7574
7575 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
7576 inserted = 1;
7577 }
7578 #endif
7579
7580 /* If the exit block has no non-fake predecessors, we don't need
7581 an epilogue. */
7582 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7583 if ((e->flags & EDGE_FAKE) == 0)
7584 break;
7585 if (e == NULL)
7586 goto epilogue_done;
7587
7588 #ifdef HAVE_return
7589 if (optimize && HAVE_return)
7590 {
7591 /* If we're allowed to generate a simple return instruction,
7592 then by definition we don't need a full epilogue. Examine
7593 the block that falls through to EXIT. If it does not
7594 contain any code, examine its predecessors and try to
7595 emit (conditional) return instructions. */
7596
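/* Hedged example, with a made-up source function: for

     void f (int x) { if (x) g (); }

   the block falling through to EXIT may contain nothing but a label; an
   unconditional jump to that label can then be replaced by gen_return (),
   and a conditional jump can have its label operand replaced by a RETURN
   rtx, so no shared epilogue code is needed. */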
7597 basic_block last;
7598 edge e_next;
7599 rtx label;
7600
7601 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7602 if (e->flags & EDGE_FALLTHRU)
7603 break;
7604 if (e == NULL)
7605 goto epilogue_done;
7606 last = e->src;
7607
7608 /* Verify that there are no active instructions in the last block. */
7609 label = last->end;
7610 while (label && GET_CODE (label) != CODE_LABEL)
7611 {
7612 if (active_insn_p (label))
7613 break;
7614 label = PREV_INSN (label);
7615 }
7616
7617 if (last->head == label && GET_CODE (label) == CODE_LABEL)
7618 {
7619 rtx epilogue_line_note = NULL_RTX;
7620
7621 /* Locate the line number associated with the closing brace,
7622 if we can find one. */
7623 for (seq = get_last_insn ();
7624 seq && ! active_insn_p (seq);
7625 seq = PREV_INSN (seq))
7626 if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
7627 {
7628 epilogue_line_note = seq;
7629 break;
7630 }
7631
7632 for (e = last->pred; e; e = e_next)
7633 {
7634 basic_block bb = e->src;
7635 rtx jump;
7636
7637 e_next = e->pred_next;
7638 if (bb == ENTRY_BLOCK_PTR)
7639 continue;
7640
7641 jump = bb->end;
7642 if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
7643 continue;
7644
7645 /* If we have an unconditional jump, we can replace that
7646 with a simple return instruction. */
7647 if (simplejump_p (jump))
7648 {
7649 emit_return_into_block (bb, epilogue_line_note);
7650 delete_insn (jump);
7651 }
7652
7653 /* If we have a conditional jump, we can try to replace
7654 that with a conditional return instruction. */
7655 else if (condjump_p (jump))
7656 {
7657 rtx ret, *loc;
7658
7659 ret = SET_SRC (PATTERN (jump));
7660 if (GET_CODE (XEXP (ret, 1)) == LABEL_REF)
7661 loc = &XEXP (ret, 1);
7662 else
7663 loc = &XEXP (ret, 2);
7664 ret = gen_rtx_RETURN (VOIDmode);
7665
7666 if (! validate_change (jump, loc, ret, 0))
7667 continue;
7668 if (JUMP_LABEL (jump))
7669 LABEL_NUSES (JUMP_LABEL (jump))--;
7670
7671 /* If this block has only one successor, it both jumps
7672 and falls through to the fallthru block, so we can't
7673 delete the edge. */
7674 if (bb->succ->succ_next == NULL)
7675 continue;
7676 }
7677 else
7678 continue;
7679
7680 /* Fix up the CFG for the successful change we just made. */
7681 redirect_edge_succ (e, EXIT_BLOCK_PTR);
7682 }
7683
7684 /* Emit a return insn for the exit fallthru block. Whether
7685 this is still reachable will be determined later. */
7686
7687 emit_barrier_after (last->end);
7688 emit_return_into_block (last, epilogue_line_note);
7689 epilogue_end = last->end;
7690 last->succ->flags &= ~EDGE_FALLTHRU;
7691 goto epilogue_done;
7692 }
7693 }
7694 #endif
7695 #ifdef HAVE_epilogue
7696 if (HAVE_epilogue)
7697 {
7698 /* Find the edge that falls through to EXIT. Other edges may exist
7699 due to RETURN instructions, but those don't need epilogues.
7700 There really shouldn't be a mixture -- either all should have
7701 been converted or none, however... */
7702
7703 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7704 if (e->flags & EDGE_FALLTHRU)
7705 break;
7706 if (e == NULL)
7707 goto epilogue_done;
7708
7709 start_sequence ();
7710 epilogue_end = emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);
7711
7712 seq = gen_epilogue ();
7713
7714 #ifdef INCOMING_RETURN_ADDR_RTX
7715 /* If this function returns with the stack depressed and we can support
7716 it, massage the epilogue to actually do that. */
7717 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
7718 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
7719 seq = keep_stack_depressed (seq);
7720 #endif
7721
7722 emit_jump_insn (seq);
7723
7724 /* Retain a map of the epilogue insns. */
7725 record_insns (seq, &epilogue);
7726
7727 seq = get_insns ();
7728 end_sequence ();
7729
7730 insert_insn_on_edge (seq, e);
7731 inserted = 1;
7732 }
7733 #endif
7734 epilogue_done:
7735
7736 if (inserted)
7737 commit_edge_insertions ();
7738
7739 #ifdef HAVE_sibcall_epilogue
7740 /* Emit sibling epilogues before any sibling call sites. */
7741 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7742 {
7743 basic_block bb = e->src;
7744 rtx insn = bb->end;
7745 rtx i;
7746 rtx newinsn;
7747
7748 if (GET_CODE (insn) != CALL_INSN
7749 || ! SIBLING_CALL_P (insn))
7750 continue;
7751
7752 start_sequence ();
7753 seq = gen_sibcall_epilogue ();
7754 end_sequence ();
7755
7756 /* Retain a map of the epilogue insns. Used in life analysis to
7757 avoid getting rid of sibcall epilogue insns. Do this before we
7758 actually emit the sequence. */
7759 record_insns (seq, &sibcall_epilogue);
7760
7761 i = PREV_INSN (insn);
7762 newinsn = emit_insn_before (seq, insn);
7763 }
7764 #endif
7765
7766 #ifdef HAVE_prologue
7767 if (prologue_end)
7768 {
7769 rtx insn, prev;
7770
7771 /* GDB handles `break f' by setting a breakpoint on the first
7772 line note after the prologue. Which means (1) that if
7773 there are line number notes before where we inserted the
7774 prologue we should move them, and (2) we should generate a
7775 note before the end of the first basic block, if there isn't
7776 one already there.
7777
7778 ??? This behaviour is completely broken when dealing with
7779 multiple entry functions. We simply always place the note
7780 into the first basic block and let alternate entry points
7781 be missed.
7782 */
7783
7784 for (insn = prologue_end; insn; insn = prev)
7785 {
7786 prev = PREV_INSN (insn);
7787 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7788 {
7789 /* Note that we cannot reorder the first insn in the
7790 chain, since rest_of_compilation relies on that
7791 remaining constant. */
7792 if (prev == NULL)
7793 break;
7794 reorder_insns (insn, insn, prologue_end);
7795 }
7796 }
7797
7798 /* Find the last line number note in the first block. */
7799 for (insn = ENTRY_BLOCK_PTR->next_bb->end;
7800 insn != prologue_end && insn;
7801 insn = PREV_INSN (insn))
7802 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7803 break;
7804
7805 /* If we didn't find one, make a copy of the first line number
7806 we run across. */
7807 if (! insn)
7808 {
7809 for (insn = next_active_insn (prologue_end);
7810 insn;
7811 insn = PREV_INSN (insn))
7812 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7813 {
7814 emit_line_note_after (NOTE_SOURCE_FILE (insn),
7815 NOTE_LINE_NUMBER (insn),
7816 prologue_end);
7817 break;
7818 }
7819 }
7820 }
7821 #endif
7822 #ifdef HAVE_epilogue
7823 if (epilogue_end)
7824 {
7825 rtx insn, next;
7826
7827 /* Similarly, move any line notes that appear after the epilogue.
7828 There is no need, however, to be quite so anal about the existence
7829 of such a note. */
7830 for (insn = epilogue_end; insn; insn = next)
7831 {
7832 next = NEXT_INSN (insn);
7833 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7834 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
7835 }
7836 }
7837 #endif
7838 }
7839
7840 /* Reposition the prologue-end and epilogue-begin notes after instruction
7841 scheduling and delayed branch scheduling. */
7842
7843 void
7844 reposition_prologue_and_epilogue_notes (f)
7845 rtx f ATTRIBUTE_UNUSED;
7846 {
7847 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
7848 rtx insn, last, note;
7849 int len;
7850
7851 if ((len = VARRAY_SIZE (prologue)) > 0)
7852 {
7853 last = 0, note = 0;
7854
7855 /* Scan from the beginning until we reach the last prologue insn.
7856 We apparently can't depend on basic_block_{head,end} after
7857 reorg has run. */
7858 for (insn = f; insn; insn = NEXT_INSN (insn))
7859 {
7860 if (GET_CODE (insn) == NOTE)
7861 {
7862 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
7863 note = insn;
7864 }
7865 else if (contains (insn, prologue))
7866 {
7867 last = insn;
7868 if (--len == 0)
7869 break;
7870 }
7871 }
7872
7873 if (last)
7874 {
7875 rtx next;
7876
7877 /* Find the prologue-end note if we haven't already, and
7878 move it to just after the last prologue insn. */
7879 if (note == 0)
7880 {
7881 for (note = last; (note = NEXT_INSN (note));)
7882 if (GET_CODE (note) == NOTE
7883 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
7884 break;
7885 }
7886
7887 next = NEXT_INSN (note);
7888
7889 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
7890 if (GET_CODE (last) == CODE_LABEL)
7891 last = NEXT_INSN (last);
7892 reorder_insns (note, note, last);
7893 }
7894 }
7895
7896 if ((len = VARRAY_SIZE (epilogue)) > 0)
7897 {
7898 last = 0, note = 0;
7899
7900 /* Scan from the end until we reach the first epilogue insn.
7901 We apparently can't depend on basic_block_{head,end} after
7902 reorg has run. */
7903 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
7904 {
7905 if (GET_CODE (insn) == NOTE)
7906 {
7907 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
7908 note = insn;
7909 }
7910 else if (contains (insn, epilogue))
7911 {
7912 last = insn;
7913 if (--len == 0)
7914 break;
7915 }
7916 }
7917
7918 if (last)
7919 {
7920 /* Find the epilogue-begin note if we haven't already, and
7921 move it to just before the first epilogue insn. */
7922 if (note == 0)
7923 {
7924 for (note = insn; (note = PREV_INSN (note));)
7925 if (GET_CODE (note) == NOTE
7926 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
7927 break;
7928 }
7929
7930 if (PREV_INSN (last) != note)
7931 reorder_insns (note, note, PREV_INSN (last));
7932 }
7933 }
7934 #endif /* HAVE_prologue or HAVE_epilogue */
7935 }
7936
7937 /* Called once, at initialization, to initialize function.c. */
7938
7939 void
7940 init_function_once ()
7941 {
7942 VARRAY_INT_INIT (prologue, 0, "prologue");
7943 VARRAY_INT_INIT (epilogue, 0, "epilogue");
7944 VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
7945 }
7946
7947 #include "gt-function.h"