/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "toplev.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#include "langhooks.h"
#include "target.h"
#include "cfglayout.h"

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
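
/* Illustrative note (not in the original source): with a power-of-two
   ALIGN these macros reduce to bit masking, so e.g. FLOOR_ROUND (-13, 8)
   yields -16 and CEIL_ROUND (13, 8) yields 16, with no signed division
   involved even for negative frame offsets.  */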

/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static GTY(()) varray_type prologue;
static GTY(()) varray_type epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static GTY(()) varray_type sibcall_epilogue;
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;

  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
\f
/* Forward declarations.  */

static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
                                 struct function *);
static struct temp_slot *find_temp_slot_from_address (rtx);
static void instantiate_decls (tree, int);
static void instantiate_decls_1 (tree, int);
static void instantiate_decl (rtx, HOST_WIDE_INT, int);
static rtx instantiate_new_reg (rtx, HOST_WIDE_INT *);
static int instantiate_virtual_regs_1 (rtx *, rtx, int);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, varray_type *);
static void reorder_fix_fragments (tree);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, varray_type *) ATTRIBUTE_UNUSED;
static int contains (rtx, varray_type);
#ifdef HAVE_return
static void emit_return_into_block (basic_block, rtx);
#endif
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed (rtx);
#endif
static void prepare_function_start (tree);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void instantiate_virtual_regs_lossage (rtx);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (tree decl)
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  gcc_unreachable ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the enter_nested langhook to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (tree context)
{
  struct function *p;

  if (context)
    {
      if (context == current_function_decl)
        cfun->contains_functions = 1;
      else
        {
          struct function *containing = find_function_data (context);
          containing->contains_functions = 1;
        }
    }

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;

  lang_hooks.function.enter_nested (p);

  cfun = 0;
}

void
push_function_context (void)
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (tree context ATTRIBUTE_UNUSED)
{
  struct function *p = outer_function_chain;

  cfun = p;
  outer_function_chain = p->outer;

  current_function_decl = p->decl;
  reg_renumber = 0;

  restore_emit_status (p);

  lang_hooks.function.leave_nested (p);

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context (void)
{
  pop_function_context_from (current_function_decl);
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  lang_hooks.function.final (f);
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  f->eh = NULL;
  f->expr = NULL;
  f->emit = NULL;
  f->varasm = NULL;
  f->machine = NULL;

  f->x_avail_temp_slots = NULL;
  f->x_used_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_return_label = NULL;
  f->x_naked_return_label = NULL;
  f->x_stack_slot_list = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_parm_birth_insn = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->epilogue_delay_list = NULL;
}
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (struct function *f)
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */
HOST_WIDE_INT
get_frame_size (void)
{
  return get_func_frame_size (cfun);
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
                      struct function *function)
{
  rtx x, addr;
  int bigend_correction = 0;
  unsigned int alignment;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;
      else
        alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = lang_hooks.types.type_for_mode (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Ignore alignment we can't do with expected alignment of the boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  The default is
     to always honor requests to align the stack but a port may choose to
     do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
  if (STACK_ALIGNMENT_NEEDED
      || mode != BLKmode
      || size != 0)
    {
      /* We must be careful here, since FRAME_OFFSET might be negative and
         division with a negative dividend isn't as well defined as we might
         like.  So we instead assume that ALIGNMENT is a power of two and
         use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
      function->x_frame_offset
        = (FLOOR_ROUND (function->x_frame_offset - frame_phase,
                        (unsigned HOST_WIDE_INT) alignment)
           + frame_phase);
#else
      function->x_frame_offset
        = (CEIL_ROUND (function->x_frame_offset - frame_phase,
                       (unsigned HOST_WIDE_INT) alignment)
           + frame_phase);
#endif
    }

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          trunc_int_for_mode
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (function->x_frame_offset + bigend_correction,
                           Pmode));

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  return x;
}

/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
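
/* Illustrative sketch (not part of the original source): a caller that
   needs a word-sized scratch slot in the current function might write

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   where ALIGN == 0 requests the alignment implied by SImode, per the
   comment above assign_stack_local_1.  */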

\f
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  level++;

  if (!used_temp_slots)
    VARRAY_GENERIC_PTR_INIT (used_temp_slots, 3, "used_temp_slots");

  while (level >= (int) VARRAY_ACTIVE_SIZE (used_temp_slots))
    VARRAY_PUSH_GENERIC_PTR (used_temp_slots, NULL);

  return (struct temp_slot **) &VARRAY_GENERIC_PTR (used_temp_slots, level);
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VARRAY_ACTIVE_SIZE (used_temp_slots) - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused and will abort.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size, int keep,
                            tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  /* These are now unused.  */
  gcc_assert (keep <= 1);

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = avail_temp_slots; p; p = p->next)
    {
      if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
          && objects_must_conflict_p (p->type, type)
          && (best_p == 0 || best_p->size > p->size
              || (best_p->size == p->size && best_p->align > p->align)))
        {
          if (p->align == align && p->size == size)
            {
              selected = p;
              cut_slot_from_list (selected, &avail_temp_slots);
              best_p = 0;
              break;
            }
          best_p = p;
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = ggc_alloc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->align = best_p->align;
              p->address = 0;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->type = type;
  p->level = temp_slot_level;
  p->keep = keep;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
    }

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
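
/* Illustrative sketch (not part of the original source): statement
   expansion typically brackets temporary usage with the nesting-level
   primitives defined further below, e.g.

     push_temp_slots ();
     temp = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);
     ... emit RTL that stores into and reads TEMP ...
     preserve_temp_slots (result);   (only if RESULT may live in TEMP)
     free_temp_slots ();
     pop_temp_slots ();

   so that slots are recycled as soon as the statement is done.  */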
\f
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so it should be used in error messages.  In either case, we
   allocate an object of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      tree size_tree;
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
        size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);

      /* If we still haven't been able to get a size, see if the language
         can compute a maximum size.  */
      if (size == -1
          && (size_tree = lang_hooks.types.max_size (type)) != 0
          && host_integerp (size_tree, 1))
        size = tree_low_cst (size_tree, 1);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll abort in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("%Jsize of variable %qD is too large", decl, decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
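
/* Illustrative sketch (not part of the original source): for a tree type
   TYPE whose value must live in addressable memory, e.g. because its
   address is passed to a library routine, a caller might write

     rtx mem = assign_temp (type, 0, 1, 0);

   while MEMORY_REQUIRED == 0 lets small scalar types come back as a
   pseudo register from gen_reg_rtx instead.  */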
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     at high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (p->base_offset + p->full_size == q->base_offset)
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (q->base_offset + q->full_size == p->base_offset)
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
\f
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  rtx next;
  int i;

  for (i = max_slot_level (); i >= 0; i--)
    for (p = *temp_slots_at_level (i); p; p = p->next)
      {
        if (XEXP (p->slot, 0) == x
            || p->address == x
            || (GET_CODE (x) == PLUS
                && XEXP (x, 0) == virtual_stack_vars_rtx
                && GET_CODE (XEXP (x, 1)) == CONST_INT
                && INTVAL (XEXP (x, 1)) >= p->base_offset
                && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
          return p;

        else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
          for (next = p->address; next; next = XEXP (next, 1))
            if (XEXP (next, 0) == x)
              return p;
      }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (rtx old, rtx new)
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW
     is a register, see if one operand of the PLUS is a temporary
     location; if so, NEW points into it.  Otherwise, if both OLD and
     NEW are a PLUS and there is a register in common between them,
     try a recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
        return;

      if (REG_P (new))
        {
          update_temp_slot_address (XEXP (old, 0), new);
          update_temp_slot_address (XEXP (old, 1), new);
          return;
        }
      else if (GET_CODE (new) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
            {
              next = q->next;

              if (p != q && q->addr_taken)
                move_slot_to_level (q, temp_slot_level - 1);
            }

          move_slot_to_level (p, temp_slot_level - 1);
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        move_slot_to_level (p, temp_slot_level - 1);
    }
}

/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        make_slot_available (p);
    }

  combine_temp_slots ();
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
    }

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;
}
\f
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS \
  ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
 + (STACK_POINTER_OFFSET))

#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
 + (STACK_POINTER_OFFSET))
#endif
#endif

/* On most machines, the CFA coincides with the first incoming parm.  */

#ifndef ARG_POINTER_CFA_OFFSET
#define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
#endif

\f
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

void
instantiate_virtual_regs (void)
{
  rtx insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);

  /* Scan all variables and parameters of this function.  For each that is
     in memory, instantiate all virtual registers if the result is a valid
     address.  If not, we do it later.  That will handle most uses of virtual
     regs on many machines.  */
  instantiate_decls (current_function_decl, 1);

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
        || GET_CODE (insn) == CALL_INSN)
      {
        instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
        if (INSN_DELETED_P (insn))
          continue;
        instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
        /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
        if (GET_CODE (insn) == CALL_INSN)
          instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
                                      NULL_RTX, 0);

        /* Past this point all ASM statements should match.  Verify that
           to avoid failures later in the compilation process.  */
        if (asm_noperands (PATTERN (insn)) >= 0
            && ! check_asm_operands (PATTERN (insn)))
          instantiate_virtual_regs_lossage (insn);
      }

  /* Now instantiate the remaining register equivalences for debugging info.
     These will not be valid addresses.  */
  instantiate_decls (current_function_decl, 0);

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;
}

/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.

   If VALID_ONLY, do this only if the resulting address is still valid.
   Otherwise, always do it.  */

static void
instantiate_decls (tree fndecl, int valid_only)
{
  tree decl;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
      HOST_WIDE_INT size_rtl;

      instantiate_decl (DECL_RTL (decl), size, valid_only);

      /* If the parameter was promoted, then the incoming RTL mode may be
         larger than the declared type size.  We must use the larger of
         the two sizes.  */
      size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
      size = MAX (size_rtl, size);
      instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
}

/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let, int valid_only)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      instantiate_decl (DECL_RTL (t),
                        int_size_in_bytes (TREE_TYPE (t)),
                        valid_only);

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
    instantiate_decls_1 (t, valid_only);
}

/* Subroutine of the preceding procedures: Given RTL representing a
   decl and the size of the object, do any instantiation required.

   If VALID_ONLY is nonzero, it means that the RTL should only be
   changed if the new address is valid.  */

static void
instantiate_decl (rtx x, HOST_WIDE_INT size, int valid_only)
{
  enum machine_mode mode;
  rtx addr;

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */

  if (x == 0 || !MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
          && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
              || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  /* If we should only do this if the address is valid, copy the address.
     We need to do this so we can undo any changes that might make the
     address invalid.  This copy is unfortunate, but probably can't be
     avoided.  */

  if (valid_only)
    addr = copy_rtx (addr);

  instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);

  if (valid_only && size >= 0)
    {
      unsigned HOST_WIDE_INT decl_size = size;

      /* Now verify that the resulting address is valid for every integer or
         floating-point mode up to and including SIZE bytes long.  We do this
         since the object might be accessed in any mode and frame addresses
         are shared.  */

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
           mode = GET_MODE_WIDER_MODE (mode))
        if (! memory_address_p (mode, addr))
          return;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
           mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
           mode = GET_MODE_WIDER_MODE (mode))
        if (! memory_address_p (mode, addr))
          return;
    }

  /* Put back the address now that we have updated it and we either know
     it is valid or we don't care whether it is valid.  */

  XEXP (x, 0) = addr;
}
\f
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    new = arg_pointer_rtx, offset = in_arg_offset;
  else if (x == virtual_stack_vars_rtx)
    new = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    new = arg_pointer_rtx, offset = cfa_offset;
  else
    return 0;

  *poffset = offset;
  return new;
}
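
/* Illustrative note (not part of the original source): given the mapping
   above, an address such as

     (plus virtual_stack_vars_rtx (const_int 4))

   is rewritten by this function's callers into

     (plus frame_pointer_rtx (const_int 4 + var_offset))

   where var_offset was computed from STARTING_FRAME_OFFSET in
   instantiate_virtual_regs.  */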
\f

/* Called when instantiate_virtual_regs has failed to update the instruction.
   Usually this means that a non-matching instruction has been emitted;
   however, for asm statements the problem may lie in the constraints.  */
static void
instantiate_virtual_regs_lossage (rtx insn)
{
  gcc_assert (asm_noperands (PATTERN (insn)) >= 0);
  error_for_asm (insn, "impossible constraint in %<asm%>");
  delete_insn (insn);
}
6f086dfc
RS
1438/* Given a pointer to a piece of rtx and an optional pointer to the
1439 containing object, instantiate any virtual registers present in it.
1440
1441 If EXTRA_INSNS, we always do the replacement and generate
1442 any extra insns before OBJECT. If it zero, we do nothing if replacement
1443 is not valid.
1444
1445 Return 1 if we either had nothing to do or if we were able to do the
718fe406 1446 needed replacement. Return 0 otherwise; we only return zero if
6f086dfc
RS
1447 EXTRA_INSNS is zero.
1448
1449 We first try some simple transformations to avoid the creation of extra
1450 pseudos. */
1451
1452static int
fa8db1f7 1453instantiate_virtual_regs_1 (rtx *loc, rtx object, int extra_insns)
6f086dfc
RS
1454{
1455 rtx x;
1456 RTX_CODE code;
1457 rtx new = 0;
07444f1d 1458 HOST_WIDE_INT offset = 0;
6f086dfc
RS
1459 rtx temp;
1460 rtx seq;
1461 int i, j;
6f7d635c 1462 const char *fmt;
6f086dfc
RS
1463
1464 /* Re-start here to avoid recursion in common cases. */
1465 restart:
1466
1467 x = *loc;
1468 if (x == 0)
1469 return 1;
1470
5dc96d60
JH
1471 /* We may have detected and deleted invalid asm statements. */
1472 if (object && INSN_P (object) && INSN_DELETED_P (object))
1473 return 1;
1474
6f086dfc
RS
1475 code = GET_CODE (x);
1476
1477 /* Check for some special cases. */
1478 switch (code)
1479 {
1480 case CONST_INT:
1481 case CONST_DOUBLE:
69ef87e2 1482 case CONST_VECTOR:
6f086dfc
RS
1483 case CONST:
1484 case SYMBOL_REF:
1485 case CODE_LABEL:
1486 case PC:
1487 case CC0:
1488 case ASM_INPUT:
1489 case ADDR_VEC:
1490 case ADDR_DIFF_VEC:
1491 case RETURN:
1492 return 1;
1493
1494 case SET:
1495 /* We are allowed to set the virtual registers. This means that
38e01259 1496 the actual register should receive the source minus the
6f086dfc
RS
1497 appropriate offset. This is used, for example, in the handling
1498 of non-local gotos. */
d1405722 1499 if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
6f086dfc 1500 {
14a774a9
RK
1501 rtx src = SET_SRC (x);
1502
d1405722
RK
1503 /* We are setting the register, not using it, so the relevant
1504 offset is the negative of the offset to use were we using
1505 the register. */
1506 offset = - offset;
14a774a9
RK
1507 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
1508
6f086dfc
RS
1509 /* The only valid sources here are PLUS or REG. Just do
1510 the simplest possible thing to handle them. */
f8cfc6aa 1511 if (!REG_P (src) && GET_CODE (src) != PLUS)
5dc96d60
JH
1512 {
1513 instantiate_virtual_regs_lossage (object);
1514 return 1;
1515 }
6f086dfc
RS
1516
1517 start_sequence ();
f8cfc6aa 1518 if (!REG_P (src))
14a774a9 1519 temp = force_operand (src, NULL_RTX);
6f086dfc 1520 else
14a774a9 1521 temp = src;
5f4f0e22 1522 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
6f086dfc
RS
1523 seq = get_insns ();
1524 end_sequence ();
1525
2f937369 1526 emit_insn_before (seq, object);
6f086dfc
RS
1527 SET_DEST (x) = new;
1528
e9a25f70 1529 if (! validate_change (object, &SET_SRC (x), temp, 0)
6f086dfc 1530 || ! extra_insns)
5dc96d60 1531 instantiate_virtual_regs_lossage (object);
6f086dfc
RS
1532
1533 return 1;
1534 }
1535
1536 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
1537 loc = &SET_SRC (x);
1538 goto restart;
1539
1540 case PLUS:
1541 /* Handle special case of virtual register plus constant. */
1542 if (CONSTANT_P (XEXP (x, 1)))
1543 {
b1f82ccf 1544 rtx old, new_offset;
6f086dfc
RS
1545
1546 /* Check for (plus (plus VIRT foo) (const_int)) first. */
1547 if (GET_CODE (XEXP (x, 0)) == PLUS)
1548 {
d1405722
RK
1549 if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
1550 {
1551 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
1552 extra_insns);
1553 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
1554 }
6f086dfc
RS
1555 else
1556 {
1557 loc = &XEXP (x, 0);
1558 goto restart;
1559 }
6f086dfc
RS
1560 }
1561
d1405722
RK
1562#ifdef POINTERS_EXTEND_UNSIGNED
1563 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1564 we can commute the PLUS and SUBREG because pointers into the
1565 frame are well-behaved. */
1566 else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
1567 && GET_CODE (XEXP (x, 1)) == CONST_INT
1568 && 0 != (new
1569 = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
1570 &offset))
1571 && validate_change (object, loc,
1572 plus_constant (gen_lowpart (ptr_mode,
1573 new),
1574 offset
1575 + INTVAL (XEXP (x, 1))),
1576 0))
1577 return 1;
1578#endif
1579 else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
6f086dfc
RS
1580 {
1581 /* We know the second operand is a constant. Unless the
1582 first operand is a REG (which has already been checked),
1583 it needs to be checked. */
f8cfc6aa 1584 if (!REG_P (XEXP (x, 0)))
6f086dfc
RS
1585 {
1586 loc = &XEXP (x, 0);
1587 goto restart;
1588 }
1589 return 1;
1590 }
1591
b1f82ccf 1592 new_offset = plus_constant (XEXP (x, 1), offset);
6f086dfc 1593
b1f82ccf
DE
1594 /* If the new constant is zero, try to replace the sum with just
1595 the register. */
1596 if (new_offset == const0_rtx
1597 && validate_change (object, loc, new, 0))
6f086dfc
RS
1598 return 1;
1599
b1f82ccf
DE
1600 /* Next try to replace the register and new offset.
1601 There are two changes to validate here and we can't assume that
1602 in the case where the old offset equals the new one, just changing the register
1603 will yield a valid insn. In the interests of a little efficiency,
1604 however, we only call validate_change once (we don't queue up the
0f41302f 1605 changes and then call apply_change_group). */
b1f82ccf
DE
1606
1607 old = XEXP (x, 0);
1608 if (offset == 0
1609 ? ! validate_change (object, &XEXP (x, 0), new, 0)
1610 : (XEXP (x, 0) = new,
1611 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
6f086dfc
RS
1612 {
1613 if (! extra_insns)
1614 {
1615 XEXP (x, 0) = old;
1616 return 0;
1617 }
1618
1619 /* Otherwise copy the new constant into a register and replace
1620 the constant with that register. */
1621 temp = gen_reg_rtx (Pmode);
b1f82ccf 1622 XEXP (x, 0) = new;
6f086dfc 1623 if (validate_change (object, &XEXP (x, 1), temp, 0))
b1f82ccf 1624 emit_insn_before (gen_move_insn (temp, new_offset), object);
6f086dfc
RS
1625 else
1626 {
1627 /* If that didn't work, replace this expression with a
1628 register containing the sum. */
1629
6f086dfc 1630 XEXP (x, 0) = old;
38a448ca 1631 new = gen_rtx_PLUS (Pmode, new, new_offset);
6f086dfc
RS
1632
1633 start_sequence ();
5f4f0e22 1634 temp = force_operand (new, NULL_RTX);
6f086dfc
RS
1635 seq = get_insns ();
1636 end_sequence ();
1637
2f937369 1638 emit_insn_before (seq, object);
6f086dfc
RS
1639 if (! validate_change (object, loc, temp, 0)
1640 && ! validate_replace_rtx (x, temp, object))
5dc96d60
JH
1641 {
1642 instantiate_virtual_regs_lossage (object);
1643 return 1;
1644 }
6f086dfc
RS
1645 }
1646 }
1647
1648 return 1;
1649 }
1650
1651 /* Fall through to generic two-operand expression case. */
1652 case EXPR_LIST:
1653 case CALL:
1654 case COMPARE:
1655 case MINUS:
1656 case MULT:
1657 case DIV: case UDIV:
1658 case MOD: case UMOD:
1659 case AND: case IOR: case XOR:
45620ed4
RK
1660 case ROTATERT: case ROTATE:
1661 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
6f086dfc
RS
1662 case NE: case EQ:
1663 case GE: case GT: case GEU: case GTU:
1664 case LE: case LT: case LEU: case LTU:
1665 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
1666 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
1667 loc = &XEXP (x, 0);
1668 goto restart;
1669
1670 case MEM:
1671 /* Most cases of MEM that convert to valid addresses have already been
4fd796bb 1672 handled by our scan of decls. The only special handling we
6f086dfc 1673 need here is to make a copy of the rtx to ensure it isn't being
718fe406 1674 shared if we have to change it to a pseudo.
6f086dfc
RS
1675
1676 If the rtx is a simple reference to an address via a virtual register,
1677 it can potentially be shared. In such cases, first try to make it
1678 a valid address, which can also be shared. Otherwise, copy it and
718fe406 1679 proceed normally.
6f086dfc
RS
1680
1681 First check for common cases that need no processing. These are
1682 usually due to instantiation already being done on a previous instance
1683 of a shared rtx. */
1684
1685 temp = XEXP (x, 0);
1686 if (CONSTANT_ADDRESS_P (temp)
1687#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1688 || temp == arg_pointer_rtx
b37f453b
DE
1689#endif
1690#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1691 || temp == hard_frame_pointer_rtx
6f086dfc
RS
1692#endif
1693 || temp == frame_pointer_rtx)
1694 return 1;
1695
1696 if (GET_CODE (temp) == PLUS
1697 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
1698 && (XEXP (temp, 0) == frame_pointer_rtx
b37f453b
DE
1699#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1700 || XEXP (temp, 0) == hard_frame_pointer_rtx
1701#endif
6f086dfc
RS
1702#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1703 || XEXP (temp, 0) == arg_pointer_rtx
1704#endif
1705 ))
1706 return 1;
1707
1708 if (temp == virtual_stack_vars_rtx
1709 || temp == virtual_incoming_args_rtx
1710 || (GET_CODE (temp) == PLUS
1711 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
1712 && (XEXP (temp, 0) == virtual_stack_vars_rtx
1713 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
1714 {
1715 /* This MEM may be shared. If the substitution can be done without
1716 the need to generate new pseudos, we want to do it in place
1717 so all copies of the shared rtx benefit. The call below will
1718 only make substitutions if the resulting address is still
1719 valid.
1720
1721 Note that we cannot pass X as the object in the recursive call
1722 since the insn being processed may not allow all valid
6461be14
RS
1723 addresses. However, if we were not passed an object, we can
1724 only modify X without copying it if X will have a valid
1725 address.
6f086dfc 1726
6461be14
RS
1727 ??? Also note that this can still lose if OBJECT is an insn that
1728 has fewer restrictions on an address than some other insn.
1729 In that case, we will modify the shared address. This case
4fd796bb
RK
1730 doesn't seem very likely, though. One case where this could
1731 happen is in the case of a USE or CLOBBER reference, but we
1732 take care of that below. */
6461be14
RS
1733
1734 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
1735 object ? object : x, 0))
6f086dfc
RS
1736 return 1;
1737
1738 /* Otherwise make a copy and process that copy. We copy the entire
1739 RTL expression since it might be a PLUS which could also be
1740 shared. */
1741 *loc = x = copy_rtx (x);
1742 }
1743
1744 /* Fall through to generic unary operation case. */
21b8482a 1745 case PREFETCH:
6f086dfc
RS
1746 case SUBREG:
1747 case STRICT_LOW_PART:
1748 case NEG: case NOT:
1749 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
1750 case SIGN_EXTEND: case ZERO_EXTEND:
1751 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
1752 case FLOAT: case FIX:
1753 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
1754 case ABS:
1755 case SQRT:
1756 case FFS:
2928cd7a
RH
1757 case CLZ: case CTZ:
1758 case POPCOUNT: case PARITY:
6f086dfc
RS
1759 /* These cases either have just one operand or we know that we need not
1760 check the rest of the operands. */
1761 loc = &XEXP (x, 0);
1762 goto restart;
1763
4fd796bb
RK
1764 case USE:
1765 case CLOBBER:
1766 /* If the operand is a MEM, see if the change is a valid MEM. If not,
1767 go ahead and make the invalid one, but do it to a copy. For a REG,
718fe406 1768 just make the recursive call, since there's no chance of a problem. */
4fd796bb 1769
3c0cb5de 1770 if ((MEM_P (XEXP (x, 0))
4fd796bb
RK
1771 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
1772 0))
f8cfc6aa 1773 || (REG_P (XEXP (x, 0))
7694ce35 1774 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4fd796bb
RK
1775 return 1;
1776
1777 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
1778 loc = &XEXP (x, 0);
1779 goto restart;
1780
6f086dfc
RS
1781 case REG:
1782 /* Try to replace with a PLUS. If that doesn't work, compute the sum
1783 in front of this insn and substitute the temporary. */
d1405722 1784 if ((new = instantiate_new_reg (x, &offset)) != 0)
6f086dfc
RS
1785 {
1786 temp = plus_constant (new, offset);
1787 if (!validate_change (object, loc, temp, 0))
1788 {
1789 if (! extra_insns)
1790 return 0;
1791
1792 start_sequence ();
5f4f0e22 1793 temp = force_operand (temp, NULL_RTX);
6f086dfc
RS
1794 seq = get_insns ();
1795 end_sequence ();
1796
2f937369 1797 emit_insn_before (seq, object);
6f086dfc
RS
1798 if (! validate_change (object, loc, temp, 0)
1799 && ! validate_replace_rtx (x, temp, object))
5dc96d60 1800 instantiate_virtual_regs_lossage (object);
6f086dfc
RS
1801 }
1802 }
1803
1804 return 1;
e9a25f70 1805
e9a25f70
JL
1806 default:
1807 break;
6f086dfc
RS
1808 }
1809
1810 /* Scan all subexpressions. */
1811 fmt = GET_RTX_FORMAT (code);
1812 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1813 if (*fmt == 'e')
1814 {
1815 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
1816 return 0;
1817 }
1818 else if (*fmt == 'E')
1819 for (j = 0; j < XVECLEN (x, i); j++)
1820 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
1821 extra_insns))
1822 return 0;
1823
1824 return 1;
1825}
1826\f
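/* A standalone sketch of the rewrite performed by the recursion above.
   Every virtual register stands for a hard register plus a compile-time
   constant, so instantiation folds that constant into any existing
   constant term, as plus_constant does.  The struct and names below are
   hypothetical illustrations, not part of function.c.  */

struct virt_reg_sketch { int hard_regno; long offset; };

static long
fold_virtual_offset (const struct virt_reg_sketch *vr, long const_term)
{
  /* (plus VIRT N) becomes (plus HARD (N + offset)); when the folded
     constant is zero, the PLUS collapses to the bare register.  */
  return const_term + vr->offset;
}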
d181c154
RS
1827/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1828 This means a type for which function calls must pass an address to the
1829 function or get an address back from the function.
1830 EXP may be a type node or an expression (whose type is tested). */
6f086dfc
RS
1831
1832int
61f71b34 1833aggregate_value_p (tree exp, tree fntype)
6f086dfc 1834{
9d790a4f
RS
1835 int i, regno, nregs;
1836 rtx reg;
2f939d94
TP
1837
1838 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
d181c154 1839
61f71b34
DD
1840 if (fntype)
1841 switch (TREE_CODE (fntype))
1842 {
1843 case CALL_EXPR:
1844 fntype = get_callee_fndecl (fntype);
1845 fntype = fntype ? TREE_TYPE (fntype) : 0;
1846 break;
1847 case FUNCTION_DECL:
1848 fntype = TREE_TYPE (fntype);
1849 break;
1850 case FUNCTION_TYPE:
1851 case METHOD_TYPE:
1852 break;
1853 case IDENTIFIER_NODE:
1854 fntype = 0;
1855 break;
1856 default:
1857 /* We don't expect other tree codes here. */
0bccc606 1858 gcc_unreachable ();
61f71b34
DD
1859 }
1860
d7bf8ada
MM
1861 if (TREE_CODE (type) == VOID_TYPE)
1862 return 0;
cc77ae10
JM
1863 /* If the front end has decided that this needs to be passed by
1864 reference, do so. */
1865 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
1866 && DECL_BY_REFERENCE (exp))
1867 return 1;
61f71b34 1868 if (targetm.calls.return_in_memory (type, fntype))
6f086dfc 1869 return 1;
956d6950 1870 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
49a2e5b2
DE
1871 and thus can't be returned in registers. */
1872 if (TREE_ADDRESSABLE (type))
1873 return 1;
05e3bdb9 1874 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
6f086dfc 1875 return 1;
9d790a4f
RS
1876 /* Make sure we have suitable call-clobbered regs to return
1877 the value in; if not, we must return it in memory. */
4dc07bd7 1878 reg = hard_function_value (type, 0, 0);
e71f7aa5
JW
1879
1880 /* If we have something other than a REG (e.g. a PARALLEL), then assume
1881 it is OK. */
f8cfc6aa 1882 if (!REG_P (reg))
e71f7aa5
JW
1883 return 0;
1884
9d790a4f 1885 regno = REGNO (reg);
66fd46b6 1886 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
9d790a4f
RS
1887 for (i = 0; i < nregs; i++)
1888 if (! call_used_regs[regno + i])
1889 return 1;
6f086dfc
RS
1890 return 0;
1891}
1892\f
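/* A minimal standalone mirror of the decision chain above; the
   parameters are hypothetical stand-ins for the tree and target-hook
   queries that aggregate_value_p actually makes.  */

static int
returns_in_memory_sketch (int target_says_memory, int tree_addressable,
                          int pcc_aggregate, int value_regs_clobbered)
{
  if (target_says_memory)       /* targetm.calls.return_in_memory */
    return 1;
  if (tree_addressable)         /* TREE_ADDRESSABLE type */
    return 1;
  if (pcc_aggregate)            /* -fpcc-struct-return on an aggregate */
    return 1;
  if (! value_regs_clobbered)   /* return regs must be call-clobbered */
    return 1;
  return 0;
}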
8fff4fc1
RH
1893/* Return true if we should assign DECL a pseudo register; false if it
1894 should live on the local stack. */
1895
1896bool
1897use_register_for_decl (tree decl)
1898{
1899 /* Honor volatile. */
1900 if (TREE_SIDE_EFFECTS (decl))
1901 return false;
1902
1903 /* Honor addressability. */
1904 if (TREE_ADDRESSABLE (decl))
1905 return false;
1906
1907 /* Only register-like things go in registers. */
1908 if (DECL_MODE (decl) == BLKmode)
1909 return false;
1910
1911 /* If -ffloat-store specified, don't put explicit float variables
1912 into registers. */
1913 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1914 propagates values across these stores, and it probably shouldn't. */
1915 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1916 return false;
1917
1918 /* Compiler-generated temporaries can always go in registers. */
1919 if (DECL_ARTIFICIAL (decl))
1920 return true;
1921
1922#ifdef NON_SAVING_SETJMP
1923 /* Protect variables not declared "register" from setjmp. */
1924 if (NON_SAVING_SETJMP
1925 && current_function_calls_setjmp
1926 && !DECL_REGISTER (decl))
1927 return false;
1928#endif
1929
1930 return (optimize || DECL_REGISTER (decl));
1931}
1932
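/* A source-level illustration of the predicate above; a hypothetical
   example function, for exposition only.  */

static int
use_register_sketch (void)
{
  volatile int v = 0;   /* TREE_SIDE_EFFECTS: stays in memory */
  int a = 1;            /* eligible for a pseudo when optimizing */
  int *p = &a;          /* taking the address makes A addressable,
                           so it must live on the stack */
  return v + *p;
}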
0976078c
RH
1933/* Return true if TYPE should be passed by invisible reference. */
1934
1935bool
8cd5a4e0
RH
1936pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1937 tree type, bool named_arg)
0976078c
RH
1938{
1939 if (type)
1940 {
1941 /* If this type contains non-trivial constructors, then it is
1942 forbidden for the middle-end to create any new copies. */
1943 if (TREE_ADDRESSABLE (type))
1944 return true;
1945
d58247a3
RH
1946 /* GCC post 3.4 passes *all* variable sized types by reference. */
1947 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
0976078c
RH
1948 return true;
1949 }
1950
8cd5a4e0 1951 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
0976078c
RH
1952}
1953
6cdd5672
RH
1954/* Return true if TYPE, which is passed by reference, should be callee
1955 copied instead of caller copied. */
1956
1957bool
1958reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1959 tree type, bool named_arg)
1960{
1961 if (type && TREE_ADDRESSABLE (type))
1962 return false;
1963 return targetm.calls.callee_copies (ca, mode, type, named_arg);
1964}
1965
6071dc7f
RH
1966/* Structures to communicate between the subroutines of assign_parms.
1967 The first holds data persistent across all parameters, the second
1968 is cleared out for each parameter. */
6f086dfc 1969
6071dc7f 1970struct assign_parm_data_all
6f086dfc 1971{
6f086dfc 1972 CUMULATIVE_ARGS args_so_far;
6f086dfc 1973 struct args_size stack_args_size;
6071dc7f
RH
1974 tree function_result_decl;
1975 tree orig_fnargs;
1976 rtx conversion_insns;
1977 HOST_WIDE_INT pretend_args_size;
1978 HOST_WIDE_INT extra_pretend_bytes;
1979 int reg_parm_stack_space;
1980};
6f086dfc 1981
6071dc7f
RH
1982struct assign_parm_data_one
1983{
1984 tree nominal_type;
1985 tree passed_type;
1986 rtx entry_parm;
1987 rtx stack_parm;
1988 enum machine_mode nominal_mode;
1989 enum machine_mode passed_mode;
1990 enum machine_mode promoted_mode;
1991 struct locate_and_pad_arg_data locate;
1992 int partial;
1993 BOOL_BITFIELD named_arg : 1;
1994 BOOL_BITFIELD last_named : 1;
1995 BOOL_BITFIELD passed_pointer : 1;
1996 BOOL_BITFIELD on_stack : 1;
1997 BOOL_BITFIELD loaded_in_reg : 1;
1998};
ebb904cb 1999
6071dc7f 2000/* A subroutine of assign_parms. Initialize ALL. */
6f086dfc 2001
6071dc7f
RH
2002static void
2003assign_parms_initialize_all (struct assign_parm_data_all *all)
2004{
2005 tree fntype;
6f086dfc 2006
6071dc7f
RH
2007 memset (all, 0, sizeof (*all));
2008
2009 fntype = TREE_TYPE (current_function_decl);
2010
2011#ifdef INIT_CUMULATIVE_INCOMING_ARGS
2012 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
2013#else
2014 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
2015 current_function_decl, -1);
2016#endif
2017
2018#ifdef REG_PARM_STACK_SPACE
2019 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
2020#endif
2021}
6f086dfc 2022
6071dc7f
RH
2023/* If ARGS contains entries with complex types, split the entry into two
2024 entries of the component type. Return a new list if substitutions are
2025 needed, else the old list. */
2026
2027static tree
2028split_complex_args (tree args)
2029{
2030 tree p;
2031
2032 /* Before allocating memory, check for the common case of no complex. */
2033 for (p = args; p; p = TREE_CHAIN (p))
2034 {
2035 tree type = TREE_TYPE (p);
2036 if (TREE_CODE (type) == COMPLEX_TYPE
2037 && targetm.calls.split_complex_arg (type))
2038 goto found;
2039 }
2040 return args;
2041
2042 found:
2043 args = copy_list (args);
2044
2045 for (p = args; p; p = TREE_CHAIN (p))
2046 {
2047 tree type = TREE_TYPE (p);
2048 if (TREE_CODE (type) == COMPLEX_TYPE
2049 && targetm.calls.split_complex_arg (type))
2050 {
2051 tree decl;
2052 tree subtype = TREE_TYPE (type);
2053
2054 /* Rewrite the PARM_DECL's type with its component. */
2055 TREE_TYPE (p) = subtype;
2056 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2057 DECL_MODE (p) = VOIDmode;
2058 DECL_SIZE (p) = NULL;
2059 DECL_SIZE_UNIT (p) = NULL;
2060 layout_decl (p, 0);
2061
2062 /* Build a second synthetic decl. */
2063 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
2064 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2065 layout_decl (decl, 0);
2066
2067 /* Splice it in; skip the new decl. */
2068 TREE_CHAIN (decl) = TREE_CHAIN (p);
2069 TREE_CHAIN (p) = decl;
2070 p = decl;
2071 }
2072 }
2073
2074 return args;
2075}
2076
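/* A source-level sketch of the effect of the splitting above, assuming
   a target whose split_complex_arg hook returns true for _Complex
   double: a function declared as f (_Complex double z) is laid out as
   if it had been declared with two scalar components, the second being
   the anonymous synthetic PARM_DECL built above.  The function below
   is purely illustrative.  */

static double
split_view_sketch (double z_real, double z_imag)
{
  return z_real - z_imag;   /* body immaterial; only the ABI view matters */
}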
2077/* A subroutine of assign_parms. Adjust the parameter list to incorporate
2078 the hidden struct return argument, and (abi willing) complex args.
2079 Return the new parameter list. */
2080
2081static tree
2082assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2083{
2084 tree fndecl = current_function_decl;
2085 tree fntype = TREE_TYPE (fndecl);
2086 tree fnargs = DECL_ARGUMENTS (fndecl);
6f086dfc
RS
2087
2088 /* If struct value address is treated as the first argument, make it so. */
61f71b34 2089 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
6f086dfc 2090 && ! current_function_returns_pcc_struct
61f71b34 2091 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
6f086dfc 2092 {
f9f29478 2093 tree type = build_pointer_type (TREE_TYPE (fntype));
6071dc7f 2094 tree decl;
6f086dfc 2095
6071dc7f
RH
2096 decl = build_decl (PARM_DECL, NULL_TREE, type);
2097 DECL_ARG_TYPE (decl) = type;
2098 DECL_ARTIFICIAL (decl) = 1;
6f086dfc 2099
6071dc7f
RH
2100 TREE_CHAIN (decl) = fnargs;
2101 fnargs = decl;
2102 all->function_result_decl = decl;
6f086dfc 2103 }
718fe406 2104
6071dc7f 2105 all->orig_fnargs = fnargs;
ded9bf77 2106
42ba5130
RH
2107 /* If the target wants to split complex arguments into scalars, do so. */
2108 if (targetm.calls.split_complex_arg)
ded9bf77
AH
2109 fnargs = split_complex_args (fnargs);
2110
6071dc7f
RH
2111 return fnargs;
2112}
e7949876 2113
6071dc7f
RH
2114/* A subroutine of assign_parms. Examine PARM and pull out type and mode
2115 data for the parameter. Incorporate ABI specifics such as pass-by-
2116 reference and type promotion. */
6f086dfc 2117
6071dc7f
RH
2118static void
2119assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2120 struct assign_parm_data_one *data)
2121{
2122 tree nominal_type, passed_type;
2123 enum machine_mode nominal_mode, passed_mode, promoted_mode;
6f086dfc 2124
6071dc7f
RH
2125 memset (data, 0, sizeof (*data));
2126
2127 /* Set LAST_NAMED if this is the last named arg before any anonymous args. */
2128 if (current_function_stdarg)
6f086dfc 2129 {
6071dc7f
RH
2130 tree tem;
2131 for (tem = TREE_CHAIN (parm); tem; tem = TREE_CHAIN (tem))
2132 if (DECL_NAME (tem))
2133 break;
2134 if (tem == 0)
2135 data->last_named = true;
2136 }
108b7d3d 2137
6071dc7f
RH
2138 /* Set NAMED_ARG if this arg should be treated as a named arg. For
2139 most machines, if this is a varargs/stdarg function, then we treat
2140 the last named arg as if it were anonymous too. */
2141 if (targetm.calls.strict_argument_naming (&all->args_so_far))
2142 data->named_arg = 1;
2143 else
2144 data->named_arg = !data->last_named;
2145
2146 nominal_type = TREE_TYPE (parm);
2147 passed_type = DECL_ARG_TYPE (parm);
2148
2149 /* Look out for errors propagating this far. Also, if the parameter's
2150 type is void then its value doesn't matter. */
2151 if (TREE_TYPE (parm) == error_mark_node
2152 /* This can happen after weird syntax errors
2153 or if an enum type is defined among the parms. */
2154 || TREE_CODE (parm) != PARM_DECL
2155 || passed_type == NULL
2156 || VOID_TYPE_P (nominal_type))
2157 {
2158 nominal_type = passed_type = void_type_node;
2159 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2160 goto egress;
2161 }
108b7d3d 2162
6071dc7f
RH
2163 /* Find mode of arg as it is passed, and mode of arg as it should be
2164 during execution of this function. */
2165 passed_mode = TYPE_MODE (passed_type);
2166 nominal_mode = TYPE_MODE (nominal_type);
2167
2168 /* If the parm is to be passed as a transparent union, use the type of
2169 the first field for the tests below. We have already verified that
2170 the modes are the same. */
2171 if (DECL_TRANSPARENT_UNION (parm)
2172 || (TREE_CODE (passed_type) == UNION_TYPE
2173 && TYPE_TRANSPARENT_UNION (passed_type)))
2174 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2175
0976078c
RH
2176 /* See if this arg was passed by invisible reference. */
2177 if (pass_by_reference (&all->args_so_far, passed_mode,
2178 passed_type, data->named_arg))
6071dc7f
RH
2179 {
2180 passed_type = nominal_type = build_pointer_type (passed_type);
2181 data->passed_pointer = true;
2182 passed_mode = nominal_mode = Pmode;
2183 }
6f086dfc 2184
6071dc7f
RH
2185 /* Find mode as it is passed by the ABI. */
2186 promoted_mode = passed_mode;
2187 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2188 {
2189 int unsignedp = TYPE_UNSIGNED (passed_type);
2190 promoted_mode = promote_mode (passed_type, promoted_mode,
2191 &unsignedp, 1);
2192 }
6f086dfc 2193
6071dc7f
RH
2194 egress:
2195 data->nominal_type = nominal_type;
2196 data->passed_type = passed_type;
2197 data->nominal_mode = nominal_mode;
2198 data->passed_mode = passed_mode;
2199 data->promoted_mode = promoted_mode;
2200}
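/* A sketch of the mode triple computed above, assuming a typical
   32-bit target (an assumption, not a statement about any particular
   port): for a parameter declared `short s', nominal_mode and
   passed_mode are HImode, and with promote_function_args the ABI
   widens the value, so promoted_mode is SImode.  In source terms the
   callee sees something like this hypothetical function: */

static int
promoted_callee_sketch (int s_promoted)   /* arrives widened to int */
{
  short s = (short) s_promoted;           /* narrow back to nominal mode */
  return s + 1;
}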
16bae307 2201
6071dc7f 2202/* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
6f086dfc 2203
6071dc7f
RH
2204static void
2205assign_parms_setup_varargs (struct assign_parm_data_all *all,
2206 struct assign_parm_data_one *data, bool no_rtl)
2207{
2208 int varargs_pretend_bytes = 0;
2209
2210 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2211 data->promoted_mode,
2212 data->passed_type,
2213 &varargs_pretend_bytes, no_rtl);
2214
2215 /* If the back-end has requested extra stack space, record how much is
2216 needed. Do not change pretend_args_size otherwise since it may be
2217 nonzero from an earlier partial argument. */
2218 if (varargs_pretend_bytes > 0)
2219 all->pretend_args_size = varargs_pretend_bytes;
2220}
a53e14c0 2221
6071dc7f
RH
2222/* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2223 the incoming location of the current parameter. */
2224
2225static void
2226assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2227 struct assign_parm_data_one *data)
2228{
2229 HOST_WIDE_INT pretend_bytes = 0;
2230 rtx entry_parm;
2231 bool in_regs;
2232
2233 if (data->promoted_mode == VOIDmode)
2234 {
2235 data->entry_parm = data->stack_parm = const0_rtx;
2236 return;
2237 }
a53e14c0 2238
6f086dfc 2239#ifdef FUNCTION_INCOMING_ARG
6071dc7f
RH
2240 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2241 data->passed_type, data->named_arg);
6f086dfc 2242#else
6071dc7f
RH
2243 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2244 data->passed_type, data->named_arg);
6f086dfc
RS
2245#endif
2246
6071dc7f
RH
2247 if (entry_parm == 0)
2248 data->promoted_mode = data->passed_mode;
6f086dfc 2249
6071dc7f
RH
2250 /* Determine parm's home in the stack, in case it arrives in the stack
2251 or we should pretend it did. Compute the stack position and rtx where
2252 the argument arrives and its size.
6f086dfc 2253
6071dc7f
RH
2254 There is one complexity here: If this was a parameter that would
2255 have been passed in registers, but wasn't only because it is
2256 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2257 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2258 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2259 as it was the previous time. */
2260 in_regs = entry_parm != 0;
6f086dfc 2261#ifdef STACK_PARMS_IN_REG_PARM_AREA
6071dc7f 2262 in_regs = true;
e7949876 2263#endif
6071dc7f
RH
2264 if (!in_regs && !data->named_arg)
2265 {
2266 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
e7949876 2267 {
6071dc7f 2268 rtx tem;
6f086dfc 2269#ifdef FUNCTION_INCOMING_ARG
6071dc7f
RH
2270 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2271 data->passed_type, true);
6f086dfc 2272#else
6071dc7f
RH
2273 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2274 data->passed_type, true);
6f086dfc 2275#endif
6071dc7f 2276 in_regs = tem != NULL;
e7949876 2277 }
6071dc7f 2278 }
e7949876 2279
6071dc7f
RH
2280 /* If this parameter was passed both in registers and in the stack, use
2281 the copy on the stack. */
fe984136
RH
2282 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2283 data->passed_type))
6071dc7f 2284 entry_parm = 0;
e7949876 2285
6071dc7f
RH
2286 if (entry_parm)
2287 {
2288 int partial;
2289
2290 partial = FUNCTION_ARG_PARTIAL_NREGS (all->args_so_far,
2291 data->promoted_mode,
2292 data->passed_type,
2293 data->named_arg);
2294 data->partial = partial;
2295
2296 /* The caller might already have allocated stack space for the
2297 register parameters. */
2298 if (partial != 0 && all->reg_parm_stack_space == 0)
975f3818 2299 {
6071dc7f
RH
2300 /* Part of this argument is passed in registers and part
2301 is passed on the stack. Ask the prologue code to extend
2302 the stack part so that we can recreate the full value.
2303
2304 PRETEND_BYTES is the size of the registers we need to store.
2305 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2306 stack space that the prologue should allocate.
2307
2308 Internally, gcc assumes that the argument pointer is aligned
2309 to STACK_BOUNDARY bits. This is used both for alignment
2310 optimizations (see init_emit) and to locate arguments that are
2311 aligned to more than PARM_BOUNDARY bits. We must preserve this
2312 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2313 a stack boundary. */
2314
2315 /* We assume at most one partial arg, and it must be the first
2316 argument on the stack. */
0bccc606 2317 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
6071dc7f
RH
2318
2319 pretend_bytes = partial * UNITS_PER_WORD;
2320 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2321
2322 /* We want to align relative to the actual stack pointer, so
2323 don't include this in the stack size until later. */
2324 all->extra_pretend_bytes = all->pretend_args_size;
975f3818 2325 }
6071dc7f 2326 }
e7949876 2327
6071dc7f
RH
2328 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2329 entry_parm ? data->partial : 0, current_function_decl,
2330 &all->stack_args_size, &data->locate);
6f086dfc 2331
6071dc7f
RH
2332 /* Adjust offsets to include the pretend args. */
2333 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2334 data->locate.slot_offset.constant += pretend_bytes;
2335 data->locate.offset.constant += pretend_bytes;
ebca59c3 2336
6071dc7f
RH
2337 data->entry_parm = entry_parm;
2338}
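/* A worked example of the pretend-args arithmetic above, assuming
   UNITS_PER_WORD == 4 and STACK_BYTES == 16: an argument with
   partial == 3 registers gives pretend_bytes == 12, and
   CEIL_ROUND (12, 16) == 16, so the prologue allocates 16 bytes and
   the argument pointer keeps its STACK_BOUNDARY alignment.  The
   helper below is a hypothetical standalone restatement.  */

static long
pretend_size_sketch (int partial_regs, long units_per_word,
                     long stack_bytes)
{
  long pretend_bytes = partial_regs * units_per_word;
  /* CEIL_ROUND for a power-of-two boundary.  */
  return (pretend_bytes + stack_bytes - 1) & -stack_bytes;
}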
6f086dfc 2339
6071dc7f
RH
2340/* A subroutine of assign_parms. If there is actually space on the stack
2341 for this parm, count it in stack_args_size and return true. */
6f086dfc 2342
6071dc7f
RH
2343static bool
2344assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2345 struct assign_parm_data_one *data)
2346{
2e6ae27f 2347 /* Trivially true if we've no incoming register. */
6071dc7f
RH
2348 if (data->entry_parm == NULL)
2349 ;
2350 /* Also true if we're partially in registers and partially not,
2351 since we've arranged to drop the entire argument on the stack. */
2352 else if (data->partial != 0)
2353 ;
2354 /* Also true if the target says that it's passed in both registers
2355 and on the stack. */
2356 else if (GET_CODE (data->entry_parm) == PARALLEL
2357 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2358 ;
2359 /* Also true if the target says that there's stack allocated for
2360 all register parameters. */
2361 else if (all->reg_parm_stack_space > 0)
2362 ;
2363 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2364 else
2365 return false;
6f086dfc 2366
6071dc7f
RH
2367 all->stack_args_size.constant += data->locate.size.constant;
2368 if (data->locate.size.var)
2369 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
718fe406 2370
6071dc7f
RH
2371 return true;
2372}
0d1416c6 2373
6071dc7f
RH
2374/* A subroutine of assign_parms. Given that this parameter is allocated
2375 stack space by the ABI, find it. */
6f086dfc 2376
6071dc7f
RH
2377static void
2378assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2379{
2380 rtx offset_rtx, stack_parm;
2381 unsigned int align, boundary;
6f086dfc 2382
6071dc7f
RH
2383 /* If we're passing this arg using a reg, make its stack home the
2384 aligned stack slot. */
2385 if (data->entry_parm)
2386 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2387 else
2388 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2389
2390 stack_parm = current_function_internal_arg_pointer;
2391 if (offset_rtx != const0_rtx)
2392 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2393 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2394
2395 set_mem_attributes (stack_parm, parm, 1);
2396
2397 boundary = FUNCTION_ARG_BOUNDARY (data->promoted_mode, data->passed_type);
2398 align = 0;
2399
2400 /* If we're padding upward, we know that the alignment of the slot
2401 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2402 intentionally forcing upward padding. Otherwise we have to come
2403 up with a guess at the alignment based on OFFSET_RTX. */
2404 if (data->locate.where_pad == upward || data->entry_parm)
2405 align = boundary;
2406 else if (GET_CODE (offset_rtx) == CONST_INT)
2407 {
2408 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2409 align = align & -align;
2410 }
2411 if (align > 0)
2412 set_mem_align (stack_parm, align);
2413
2414 if (data->entry_parm)
2415 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2416
2417 data->stack_parm = stack_parm;
2418}
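/* A standalone illustration of the alignment guess above: OR'ing the
   bit offset with the boundary and then taking x & -x (which isolates
   the lowest set bit in two's complement) yields the largest power of
   two dividing both.  For example, a 12-byte offset (96 bits) against
   a 64-bit boundary gives 96 | 64 == 96 and 96 & -96 == 32, so the
   slot is known to be at least 32-bit aligned.  */

static unsigned int
known_alignment_sketch (unsigned int offset_bits, unsigned int boundary_bits)
{
  unsigned int a = offset_bits | boundary_bits;
  return a & -a;
}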
2419
2420/* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2421 always valid and contiguous. */
2422
2423static void
2424assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2425{
2426 rtx entry_parm = data->entry_parm;
2427 rtx stack_parm = data->stack_parm;
2428
2429 /* If this parm was passed part in regs and part in memory, pretend it
2430 arrived entirely in memory by pushing the register-part onto the stack.
2431 In the special case of a DImode or DFmode that is split, we could put
2432 it together in a pseudoreg directly, but for now that's not worth
2433 bothering with. */
2434 if (data->partial != 0)
2435 {
2436 /* Handle calls that pass values in multiple non-contiguous
2437 locations. The Irix 6 ABI has examples of this. */
2438 if (GET_CODE (entry_parm) == PARALLEL)
2439 emit_group_store (validize_mem (stack_parm), entry_parm,
2440 data->passed_type,
2441 int_size_in_bytes (data->passed_type));
6f086dfc 2442 else
6071dc7f
RH
2443 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2444 data->partial);
6f086dfc 2445
6071dc7f
RH
2446 entry_parm = stack_parm;
2447 }
6f086dfc 2448
6071dc7f
RH
2449 /* If we didn't decide this parm came in a register, by default it came
2450 on the stack. */
2451 else if (entry_parm == NULL)
2452 entry_parm = stack_parm;
2453
2454 /* When an argument is passed in multiple locations, we can't make use
2455 of this information, but we can save some copying if the whole argument
2456 is passed in a single register. */
2457 else if (GET_CODE (entry_parm) == PARALLEL
2458 && data->nominal_mode != BLKmode
2459 && data->passed_mode != BLKmode)
2460 {
2461 size_t i, len = XVECLEN (entry_parm, 0);
2462
2463 for (i = 0; i < len; i++)
2464 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2465 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2466 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2467 == data->passed_mode)
2468 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2469 {
2470 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2471 break;
2472 }
2473 }
e68a6ce1 2474
6071dc7f
RH
2475 data->entry_parm = entry_parm;
2476}
6f086dfc 2477
6071dc7f
RH
2478/* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2479 always valid and properly aligned. */
6f086dfc 2480
6f086dfc 2481
6071dc7f
RH
2482static void
2483assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2484{
2485 rtx stack_parm = data->stack_parm;
2486
2487 /* If we can't trust the parm stack slot to be aligned enough for its
2488 ultimate type, don't use that slot after entry. We'll make another
2489 stack slot, if we need one. */
2490 if (STRICT_ALIGNMENT && stack_parm
2491 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2492 stack_parm = NULL;
2493
2494 /* If parm was passed in memory, and we need to convert it on entry,
2495 don't store it back in that same slot. */
2496 else if (data->entry_parm == stack_parm
2497 && data->nominal_mode != BLKmode
2498 && data->nominal_mode != data->passed_mode)
2499 stack_parm = NULL;
2500
2501 data->stack_parm = stack_parm;
2502}
a0506b54 2503
6071dc7f
RH
2504/* A subroutine of assign_parms. Return true if the current parameter
2505 should be stored as a BLKmode in the current frame. */
2506
2507static bool
2508assign_parm_setup_block_p (struct assign_parm_data_one *data)
2509{
2510 if (data->nominal_mode == BLKmode)
2511 return true;
2512 if (GET_CODE (data->entry_parm) == PARALLEL)
2513 return true;
531547e9 2514
6e985040 2515#ifdef BLOCK_REG_PADDING
6071dc7f
RH
2516 if (data->locate.where_pad == (BYTES_BIG_ENDIAN ? upward : downward)
2517 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD)
2518 return true;
6e985040 2519#endif
6071dc7f
RH
2520
2521 return false;
2522}
2523
2524/* A subroutine of assign_parms. Arrange for the parameter to be
2525 present and valid in DATA->STACK_RTL. */
2526
2527static void
2528assign_parm_setup_block (tree parm, struct assign_parm_data_one *data)
2529{
2530 rtx entry_parm = data->entry_parm;
2531 rtx stack_parm = data->stack_parm;
2532
2533 /* If we've a non-block object that's nevertheless passed in parts,
2534 reconstitute it in register operations rather than on the stack. */
2535 if (GET_CODE (entry_parm) == PARALLEL
2536 && data->nominal_mode != BLKmode
2537 && XVECLEN (entry_parm, 0) > 1
469c26f1 2538 && use_register_for_decl (parm))
6071dc7f
RH
2539 {
2540 rtx parmreg = gen_reg_rtx (data->nominal_mode);
2541
2542 emit_group_store (parmreg, entry_parm, data->nominal_type,
2543 int_size_in_bytes (data->nominal_type));
2544 SET_DECL_RTL (parm, parmreg);
2545 return;
2546 }
2547
2548 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2549 calls that pass values in multiple non-contiguous locations. */
2550 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2551 {
2552 HOST_WIDE_INT size = int_size_in_bytes (data->passed_type);
2553 HOST_WIDE_INT size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2554 rtx mem;
2555
2556 /* Note that we will be storing an integral number of words.
2557 So we have to be careful to ensure that we allocate an
2558 integral number of words. We do this below in the
2559 assign_stack_local if space was not allocated in the argument
2560 list. If it was, this will not work if PARM_BOUNDARY is not
2561 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2562 if it becomes a problem. Exception is when BLKmode arrives
2563 with arguments not conforming to word_mode. */
2564
2565 if (stack_parm == 0)
6f086dfc 2566 {
6071dc7f
RH
2567 stack_parm = assign_stack_local (BLKmode, size_stored, 0);
2568 data->stack_parm = stack_parm;
2569 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2570 set_mem_attributes (stack_parm, parm, 1);
2571 }
2572 else if (GET_CODE (entry_parm) == PARALLEL)
2573 ;
0bccc606
NS
2574 else
2575 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
6f086dfc 2576
6071dc7f 2577 mem = validize_mem (stack_parm);
c6b97fac 2578
6071dc7f
RH
2579 /* Handle values in multiple non-contiguous locations. */
2580 if (GET_CODE (entry_parm) == PARALLEL)
2581 emit_group_store (mem, entry_parm, data->passed_type, size);
c6b97fac 2582
6071dc7f
RH
2583 else if (size == 0)
2584 ;
5c07bd7a 2585
6071dc7f
RH
2586 /* If SIZE is that of a mode no bigger than a word, just use
2587 that mode's store operation. */
2588 else if (size <= UNITS_PER_WORD)
2589 {
2590 enum machine_mode mode
2591 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
c6b97fac 2592
6071dc7f 2593 if (mode != BLKmode
6e985040 2594#ifdef BLOCK_REG_PADDING
6071dc7f
RH
2595 && (size == UNITS_PER_WORD
2596 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2597 != (BYTES_BIG_ENDIAN ? upward : downward)))
6e985040 2598#endif
6071dc7f
RH
2599 )
2600 {
2601 rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
2602 emit_move_insn (change_address (mem, mode, 0), reg);
2603 }
c6b97fac 2604
6071dc7f
RH
2605 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2606 machine must be aligned to the left before storing
2607 to memory. Note that the previous test doesn't
2608 handle all cases (e.g. SIZE == 3). */
2609 else if (size != UNITS_PER_WORD
6e985040 2610#ifdef BLOCK_REG_PADDING
6071dc7f
RH
2611 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2612 == downward)
6e985040 2613#else
6071dc7f 2614 && BYTES_BIG_ENDIAN
6e985040 2615#endif
6071dc7f
RH
2616 )
2617 {
2618 rtx tem, x;
2619 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2620 rtx reg = gen_rtx_REG (word_mode, REGNO (data->entry_parm));
2621
09b52670 2622 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
7d60be94 2623 build_int_cst (NULL_TREE, by),
4a90aeeb 2624 NULL_RTX, 1);
6071dc7f
RH
2625 tem = change_address (mem, word_mode, 0);
2626 emit_move_insn (tem, x);
6f086dfc 2627 }
6071dc7f
RH
2628 else
2629 move_block_from_reg (REGNO (data->entry_parm), mem,
2630 size_stored / UNITS_PER_WORD);
6f086dfc 2631 }
6071dc7f
RH
2632 else
2633 move_block_from_reg (REGNO (data->entry_parm), mem,
2634 size_stored / UNITS_PER_WORD);
2635 }
2636
2637 SET_DECL_RTL (parm, stack_parm);
2638}
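/* A worked sketch of the big-endian left-justification above, assuming
   UNITS_PER_WORD == 4 and BITS_PER_UNIT == 8: a 3-byte argument that
   arrives right-justified in a word register must be shifted left by
   (4 - 3) * 8 == 8 bits so its bytes land at the low addresses when
   the whole word is stored.  Hypothetical standalone restatement: */

static unsigned int
left_justify_sketch (unsigned int reg_value, int size_bytes)
{
  int by = (4 - size_bytes) * 8;   /* assumed word and byte sizes */
  return reg_value << by;
}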
2639
2640/* A subroutine of assign_parms. Allocate a pseudo to hold the current
2641 parameter. Get it there. Perform all ABI specified conversions. */
2642
2643static void
2644assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2645 struct assign_parm_data_one *data)
2646{
2647 rtx parmreg;
2648 enum machine_mode promoted_nominal_mode;
2649 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2650 bool did_conversion = false;
2651
2652 /* Store the parm in a pseudoregister during the function, but we may
2653 need to do it in a wider mode. */
2654
2655 promoted_nominal_mode
2656 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 0);
2657
2658 parmreg = gen_reg_rtx (promoted_nominal_mode);
2659
2660 if (!DECL_ARTIFICIAL (parm))
2661 mark_user_reg (parmreg);
2662
2663 /* If this was an item that we received a pointer to,
2664 set DECL_RTL appropriately. */
2665 if (data->passed_pointer)
2666 {
2667 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2668 set_mem_attributes (x, parm, 1);
2669 SET_DECL_RTL (parm, x);
2670 }
2671 else
389fdba0 2672 SET_DECL_RTL (parm, parmreg);
6071dc7f
RH
2673
2674 /* Copy the value into the register. */
2675 if (data->nominal_mode != data->passed_mode
2676 || promoted_nominal_mode != data->promoted_mode)
2677 {
2678 int save_tree_used;
2679
2680 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2681 mode, by the caller. We now have to convert it to
2682 NOMINAL_MODE, if different. However, PARMREG may be in
2683 a different mode than NOMINAL_MODE if it is being stored
2684 promoted.
2685
2686 If ENTRY_PARM is a hard register, it might be in a register
2687 not valid for operating in its mode (e.g., an odd-numbered
2688 register for a DFmode). In that case, moves are the only
2689 thing valid, so we can't do a convert from there. This
2690 occurs when the calling sequence allow such misaligned
2691 usages.
2692
2693 In addition, the conversion may involve a call, which could
2694 clobber parameters which haven't been copied to pseudo
2695 registers yet. Therefore, we must first copy the parm to
2696 a pseudo reg here, and save the conversion until after all
2697 parameters have been moved. */
2698
2699 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2700
2701 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2702
2703 push_to_sequence (all->conversion_insns);
2704 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2705
2706 if (GET_CODE (tempreg) == SUBREG
2707 && GET_MODE (tempreg) == data->nominal_mode
2708 && REG_P (SUBREG_REG (tempreg))
2709 && data->nominal_mode == data->passed_mode
2710 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2711 && GET_MODE_SIZE (GET_MODE (tempreg))
2712 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
6f086dfc 2713 {
6071dc7f
RH
2714 /* The argument is already sign/zero extended, so note it
2715 into the subreg. */
2716 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2717 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2718 }
00d8a4c1 2719
6071dc7f
RH
2720 /* TREE_USED gets set erroneously during expand_assignment. */
2721 save_tree_used = TREE_USED (parm);
2722 expand_assignment (parm, make_tree (data->nominal_type, tempreg), 0);
2723 TREE_USED (parm) = save_tree_used;
2724 all->conversion_insns = get_insns ();
2725 end_sequence ();
00d8a4c1 2726
6071dc7f
RH
2727 did_conversion = true;
2728 }
2729 else
2730 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2731
2732 /* If we were passed a pointer but the actual value can safely live
2733 in a register, put it in one. */
2734 if (data->passed_pointer
2735 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2736 /* If by-reference argument was promoted, demote it. */
2737 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2738 || use_register_for_decl (parm)))
2739 {
2740 /* We can't use nominal_mode, because it will have been set to
2741 Pmode above. We must use the actual mode of the parm. */
2742 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2743 mark_user_reg (parmreg);
cd5b3469 2744
6071dc7f
RH
2745 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2746 {
2747 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2748 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2749
2750 push_to_sequence (all->conversion_insns);
2751 emit_move_insn (tempreg, DECL_RTL (parm));
2752 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2753 emit_move_insn (parmreg, tempreg);
2754 all->conversion_insns = get_insns ();
2755 end_sequence ();
6f086dfc 2756
6071dc7f
RH
2757 did_conversion = true;
2758 }
2759 else
2760 emit_move_insn (parmreg, DECL_RTL (parm));
6f086dfc 2761
6071dc7f 2762 SET_DECL_RTL (parm, parmreg);
797a6ac1 2763
6071dc7f
RH
2764 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2765 now the parm. */
2766 data->stack_parm = NULL;
2767 }
ddef6bc7 2768
6071dc7f
RH
2769 /* If we are passed an arg by reference and it is our responsibility
2770 to make a copy, do it now.
2771 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
2772 original argument, so we must recreate them in the call to
2773 reference_callee_copied. */
2774 /* ??? Later add code to handle the case that if the argument isn't
2775 modified, don't do the copy. */
2776
b6448565 2777 else if (data->passed_pointer)
6071dc7f 2778 {
b6448565
RH
2779 tree type = TREE_TYPE (data->passed_type);
2780
6cdd5672
RH
2781 if (reference_callee_copied (&all->args_so_far, TYPE_MODE (type),
2782 type, data->named_arg))
b6448565
RH
2783 {
2784 rtx copy;
137a2a7b 2785
b6448565
RH
2786 /* This sequence may involve a library call perhaps clobbering
2787 registers that haven't been copied to pseudos yet. */
137a2a7b 2788
b6448565 2789 push_to_sequence (all->conversion_insns);
137a2a7b 2790
b6448565
RH
2791 if (!COMPLETE_TYPE_P (type)
2792 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2793 {
2794 /* This is a variable sized object. */
2795 copy = allocate_dynamic_stack_space (expr_size (parm), NULL_RTX,
2796 TYPE_ALIGN (type));
2797 copy = gen_rtx_MEM (BLKmode, copy);
2798 }
2799 else
2800 copy = assign_stack_temp (TYPE_MODE (type),
2801 int_size_in_bytes (type), 1);
2802 set_mem_attributes (copy, parm, 1);
6071dc7f 2803
b6448565
RH
2804 store_expr (parm, copy, 0);
2805 emit_move_insn (parmreg, XEXP (copy, 0));
2806 all->conversion_insns = get_insns ();
2807 end_sequence ();
6071dc7f 2808
b6448565
RH
2809 did_conversion = true;
2810 }
6071dc7f 2811 }
74bd77a8 2812
6071dc7f
RH
2813 /* Mark the register as eliminable if we did no conversion and it was
2814 copied from memory at a fixed offset, and the arg pointer was not
2815 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2816 offset formed an invalid address, such memory-equivalences as we
2817 make here would screw up life analysis for it. */
2818 if (data->nominal_mode == data->passed_mode
2819 && !did_conversion
2820 && data->stack_parm != 0
2821 && MEM_P (data->stack_parm)
2822 && data->locate.offset.var == 0
2823 && reg_mentioned_p (virtual_incoming_args_rtx,
2824 XEXP (data->stack_parm, 0)))
2825 {
2826 rtx linsn = get_last_insn ();
2827 rtx sinsn, set;
a03caf76 2828
6071dc7f
RH
2829 /* Mark complex types separately. */
2830 if (GET_CODE (parmreg) == CONCAT)
2831 {
2832 enum machine_mode submode
2833 = GET_MODE_INNER (GET_MODE (parmreg));
2834 int regnor = REGNO (gen_realpart (submode, parmreg));
2835 int regnoi = REGNO (gen_imagpart (submode, parmreg));
2836 rtx stackr = gen_realpart (submode, data->stack_parm);
2837 rtx stacki = gen_imagpart (submode, data->stack_parm);
2838
2839 /* Scan backwards for the set of the real and
2840 imaginary parts. */
2841 for (sinsn = linsn; sinsn != 0;
2842 sinsn = prev_nonnote_insn (sinsn))
2843 {
2844 set = single_set (sinsn);
2845 if (set == 0)
2846 continue;
2847
2848 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2849 REG_NOTES (sinsn)
2850 = gen_rtx_EXPR_LIST (REG_EQUIV, stacki,
2851 REG_NOTES (sinsn));
2852 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2853 REG_NOTES (sinsn)
2854 = gen_rtx_EXPR_LIST (REG_EQUIV, stackr,
2855 REG_NOTES (sinsn));
a03caf76 2856 }
6071dc7f
RH
2857 }
2858 else if ((set = single_set (linsn)) != 0
2859 && SET_DEST (set) == parmreg)
2860 REG_NOTES (linsn)
2861 = gen_rtx_EXPR_LIST (REG_EQUIV,
2862 data->stack_parm, REG_NOTES (linsn));
2863 }
2864
2865 /* For a pointer data type, suggest a pointer register. */
2866 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2867 mark_reg_pointer (parmreg,
2868 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2869}
2870
2871/* A subroutine of assign_parms. Allocate stack space to hold the current
2872 parameter. Get it there. Perform all ABI specified conversions. */
2873
2874static void
2875assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2876 struct assign_parm_data_one *data)
2877{
2878 /* Value must be stored in the stack slot STACK_PARM during function
2879 execution. */
2880
2881 if (data->promoted_mode != data->nominal_mode)
2882 {
2883 /* Conversion is required. */
2884 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
6f086dfc 2885
6071dc7f
RH
2886 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2887
2888 push_to_sequence (all->conversion_insns);
2889 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2890 TYPE_UNSIGNED (TREE_TYPE (parm)));
2891
2892 if (data->stack_parm)
2893 /* ??? This may need a big-endian conversion on sparc64. */
2894 data->stack_parm
2895 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2896
2897 all->conversion_insns = get_insns ();
2898 end_sequence ();
2899 }
2900
2901 if (data->entry_parm != data->stack_parm)
2902 {
2903 if (data->stack_parm == 0)
2904 {
2905 data->stack_parm
2906 = assign_stack_local (GET_MODE (data->entry_parm),
2907 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2908 0);
2909 set_mem_attributes (data->stack_parm, parm, 1);
6f086dfc 2910 }
6071dc7f
RH
2911
2912 if (data->promoted_mode != data->nominal_mode)
6f086dfc 2913 {
6071dc7f
RH
2914 push_to_sequence (all->conversion_insns);
2915 emit_move_insn (validize_mem (data->stack_parm),
2916 validize_mem (data->entry_parm));
2917 all->conversion_insns = get_insns ();
2918 end_sequence ();
2919 }
2920 else
2921 emit_move_insn (validize_mem (data->stack_parm),
2922 validize_mem (data->entry_parm));
2923 }
6f086dfc 2924
6071dc7f
RH
2925 SET_DECL_RTL (parm, data->stack_parm);
2926}
3412b298 2927
6071dc7f
RH
2928/* A subroutine of assign_parms. If the ABI splits complex arguments, then
2929 undo the frobbing that we did in assign_parms_augmented_arg_list. */
86f8eff3 2930
6071dc7f
RH
2931static void
2932assign_parms_unsplit_complex (tree orig_fnargs, tree fnargs)
2933{
2934 tree parm;
f4ef873c 2935
6071dc7f
RH
2936 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2937 {
2938 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2939 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2940 {
2941 rtx tmp, real, imag;
2942 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
6f086dfc 2943
6071dc7f
RH
2944 real = DECL_RTL (fnargs);
2945 imag = DECL_RTL (TREE_CHAIN (fnargs));
2946 if (inner != GET_MODE (real))
6f086dfc 2947 {
6071dc7f
RH
2948 real = gen_lowpart_SUBREG (inner, real);
2949 imag = gen_lowpart_SUBREG (inner, imag);
2950 }
2951 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2952 SET_DECL_RTL (parm, tmp);
7e41ffa2 2953
6071dc7f
RH
2954 real = DECL_INCOMING_RTL (fnargs);
2955 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2956 if (inner != GET_MODE (real))
2957 {
2958 real = gen_lowpart_SUBREG (inner, real);
2959 imag = gen_lowpart_SUBREG (inner, imag);
6f086dfc 2960 }
6071dc7f
RH
2961 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2962 set_decl_incoming_rtl (parm, tmp);
2963 fnargs = TREE_CHAIN (fnargs);
2964 }
2965 else
2966 {
2967 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2968 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));
6f086dfc 2969
6071dc7f
RH
2970 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2971 instead of the copy of decl, i.e. FNARGS. */
2972 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2973 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
6f086dfc 2974 }
6071dc7f
RH
2975
2976 fnargs = TREE_CHAIN (fnargs);
6f086dfc 2977 }
6071dc7f
RH
2978}
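/* A source-level analogue of the reconstitution above: the two scalar
   halves produced by split_complex_args are glued back into a single
   complex value (a CONCAT of real and imaginary parts).  The function
   below is purely illustrative and uses the GNU C imaginary-constant
   extension.  */

static double _Complex
reconstitute_sketch (double re, double im)
{
  return re + im * 1.0i;   /* GNU C imaginary constant */
}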
2979
2980/* Assign RTL expressions to the function's parameters. This may involve
2981 copying them into registers and using those registers as the DECL_RTL. */
2982
2983void
2984assign_parms (tree fndecl)
2985{
2986 struct assign_parm_data_all all;
2987 tree fnargs, parm;
2988 rtx internal_arg_pointer;
2989 int varargs_setup = 0;
6f086dfc 2990
6071dc7f
RH
2991 /* If the reg that the virtual arg pointer will be translated into is
2992 not a fixed reg or is the stack pointer, make a copy of the virtual
2993 arg pointer, and address parms via the copy. The frame pointer is
2994 considered fixed even though it is not marked as such.
2995
2996 The second time through, simply use ap to avoid generating rtx. */
2997
2998 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
2999 || ! (fixed_regs[ARG_POINTER_REGNUM]
3000 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
3001 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3002 else
3003 internal_arg_pointer = virtual_incoming_args_rtx;
3004 current_function_internal_arg_pointer = internal_arg_pointer;
3005
3006 assign_parms_initialize_all (&all);
3007 fnargs = assign_parms_augmented_arg_list (&all);
3008
3009 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
ded9bf77 3010 {
6071dc7f
RH
3011 struct assign_parm_data_one data;
3012
3013 /* Extract the type of PARM; adjust it according to ABI. */
3014 assign_parm_find_data_types (&all, parm, &data);
3015
3016 /* Early out for errors and void parameters. */
3017 if (data.passed_mode == VOIDmode)
ded9bf77 3018 {
6071dc7f
RH
3019 SET_DECL_RTL (parm, const0_rtx);
3020 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3021 continue;
3022 }
196c42cd 3023
6071dc7f
RH
3024 /* Handle stdargs. LAST_NAMED is a slight misnomer; it's also true
3025 for the unnamed dummy argument following the last named argument.
3026 See ABI silliness wrt strict_argument_naming and NAMED_ARG. So
3027 we only want to do this when we get to the actual last named
3028 argument, which will be the first time LAST_NAMED gets set. */
3029 if (data.last_named && !varargs_setup)
3030 {
3031 varargs_setup = true;
3032 assign_parms_setup_varargs (&all, &data, false);
3033 }
196c42cd 3034
6071dc7f
RH
3035 /* Find out where the parameter arrives in this function. */
3036 assign_parm_find_entry_rtl (&all, &data);
3037
3038 /* Find out where stack space for this parameter might be. */
3039 if (assign_parm_is_stack_parm (&all, &data))
3040 {
3041 assign_parm_find_stack_rtl (parm, &data);
3042 assign_parm_adjust_entry_rtl (&data);
ded9bf77 3043 }
6071dc7f
RH
3044
3045 /* Record permanently how this parm was passed. */
3046 set_decl_incoming_rtl (parm, data.entry_parm);
3047
3048 /* Update info on where next arg arrives in registers. */
3049 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3050 data.passed_type, data.named_arg);
3051
3052 assign_parm_adjust_stack_rtl (&data);
3053
3054 if (assign_parm_setup_block_p (&data))
3055 assign_parm_setup_block (parm, &data);
3056 else if (data.passed_pointer || use_register_for_decl (parm))
3057 assign_parm_setup_reg (&all, parm, &data);
3058 else
3059 assign_parm_setup_stack (&all, parm, &data);
ded9bf77
AH
3060 }
3061
6071dc7f
RH
3062 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3063 assign_parms_unsplit_complex (all.orig_fnargs, fnargs);
3064
3412b298
JW
3065 /* Output all parameter conversion instructions (possibly including calls)
3066 now that all parameters have been copied out of hard registers. */
6071dc7f 3067 emit_insn (all.conversion_insns);
3412b298 3068
b36a8cc2
OH
3069 /* If we are receiving a struct value address as the first argument, set up
3070 the RTL for the function result. As this might require code to convert
3071 the transmitted address to Pmode, we do this here to ensure that possible
3072 preliminary conversions of the address have been emitted already. */
6071dc7f 3073 if (all.function_result_decl)
b36a8cc2 3074 {
6071dc7f
RH
3075 tree result = DECL_RESULT (current_function_decl);
3076 rtx addr = DECL_RTL (all.function_result_decl);
b36a8cc2 3077 rtx x;
fa8db1f7 3078
cc77ae10
JM
3079 if (DECL_BY_REFERENCE (result))
3080 x = addr;
3081 else
3082 {
3083 addr = convert_memory_address (Pmode, addr);
3084 x = gen_rtx_MEM (DECL_MODE (result), addr);
3085 set_mem_attributes (x, result, 1);
3086 }
b36a8cc2
OH
3087 SET_DECL_RTL (result, x);
3088 }
3089
53c428d0 3090 /* We have aligned all the args, so add space for the pretend args. */
6071dc7f
RH
3091 current_function_pretend_args_size = all.pretend_args_size;
3092 all.stack_args_size.constant += all.extra_pretend_bytes;
3093 current_function_args_size = all.stack_args_size.constant;
6f086dfc
RS
3094
3095 /* Adjust function incoming argument size for alignment and
3096 minimum length. */
3097
3098#ifdef REG_PARM_STACK_SPACE
3099 current_function_args_size = MAX (current_function_args_size,
3100 REG_PARM_STACK_SPACE (fndecl));
6f90e075 3101#endif
6f086dfc 3102
4433e339
RH
3103 current_function_args_size
3104 = ((current_function_args_size + STACK_BYTES - 1)
3105 / STACK_BYTES) * STACK_BYTES;
4433e339 3106
6f086dfc
RS
3107#ifdef ARGS_GROW_DOWNWARD
3108 current_function_arg_offset_rtx
477eff96 3109 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
6071dc7f
RH
3110 : expand_expr (size_diffop (all.stack_args_size.var,
3111 size_int (-all.stack_args_size.constant)),
a57263bc 3112 NULL_RTX, VOIDmode, 0));
6f086dfc 3113#else
6071dc7f 3114 current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
6f086dfc
RS
3115#endif
3116
3117 /* See how many bytes, if any, of its args a function should try to pop
3118 on return. */
3119
64e6d9cc 3120 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
6f086dfc
RS
3121 current_function_args_size);
3122
3b69d50e
RK
3123 /* For a stdarg.h function, save info about
3124 the registers and stack space used by the named args. */
6f086dfc 3125
6071dc7f 3126 current_function_args_info = all.args_so_far;
6f086dfc
RS
3127
3128 /* Set the rtx used for the function return value. Put this in its
3129 own variable so any optimizers that need this information don't have
3130 to include tree.h. Do this here so it gets done when an inlined
3131 function gets output. */
3132
19e7881c
MM
3133 current_function_return_rtx
3134 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3135 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
ce5e43d0
JJ
3136
3137 /* If scalar return value was computed in a pseudo-reg, or was a named
3138 return value that got dumped to the stack, copy that to the hard
3139 return register. */
3140 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3141 {
3142 tree decl_result = DECL_RESULT (fndecl);
3143 rtx decl_rtl = DECL_RTL (decl_result);
3144
3145 if (REG_P (decl_rtl)
3146 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3147 : DECL_REGISTER (decl_result))
3148 {
3149 rtx real_decl_rtl;
3150
3151#ifdef FUNCTION_OUTGOING_VALUE
3152 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
3153 fndecl);
3154#else
3155 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
3156 fndecl);
3157#endif
3158 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3159 /* The delay slot scheduler assumes that current_function_return_rtx
3160 holds the hard register containing the return value, not a
3161 temporary pseudo. */
3162 current_function_return_rtx = real_decl_rtl;
3163 }
3164 }
6f086dfc
RS
3165}
3166\f
75dc3319
RK
3167/* Indicate whether REGNO is an incoming argument to the current function
3168 that was promoted to a wider mode. If so, return the RTX for the
3169 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
3170 that REGNO is promoted from and whether the promotion was signed or
3171 unsigned. */
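/* Illustrative example (added; not in the original sources): on a
   target whose PROMOTE_MODE widens QImode values to SImode, a
   "signed char" parameter arriving in hard register 3 would make
   promoted_input_arg (3, &mode, &unsignedp) return its SImode
   DECL_INCOMING_RTL, with *PMODE set to QImode and *PUNSIGNEDP to 0.  */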
3172
75dc3319 3173rtx
fa8db1f7 3174promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
75dc3319
RK
3175{
3176 tree arg;
3177
3178 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
3179 arg = TREE_CHAIN (arg))
f8cfc6aa 3180 if (REG_P (DECL_INCOMING_RTL (arg))
621061f4
RK
3181 && REGNO (DECL_INCOMING_RTL (arg)) == regno
3182 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
75dc3319
RK
3183 {
3184 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
8df83eae 3185 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));
75dc3319 3186
a5a52dbc 3187 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
75dc3319
RK
3188 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
3189 && mode != DECL_MODE (arg))
3190 {
3191 *pmode = DECL_MODE (arg);
3192 *punsignedp = unsignedp;
3193 return DECL_INCOMING_RTL (arg);
3194 }
3195 }
3196
3197 return 0;
3198}
3199
75dc3319 3200\f
6f086dfc
RS
3201/* Compute the size and offset from the start of the stacked arguments for a
3202 parm passed in mode PASSED_MODE and with type TYPE.
3203
3204 INITIAL_OFFSET_PTR points to the current offset into the stacked
3205 arguments.
3206
e7949876
AM
3207 The starting offset and size for this parm are returned in
3208 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3209 nonzero, the offset is that of the stack slot, which is returned in
3210 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3211 padding required from the initial offset ptr to the stack slot.
6f086dfc 3212
cc2902df 3213 IN_REGS is nonzero if the argument will be passed in registers. It will
6f086dfc
RS
3214 never be set if REG_PARM_STACK_SPACE is not defined.
3215
3216 FNDECL is the function in which the argument was defined.
3217
3218 There are two types of rounding that are done. The first, controlled by
3219 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3220 list to be aligned to the specific boundary (in bits). This rounding
3221 affects the initial and starting offsets, but not the argument size.
3222
3223 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3224 optionally rounds the size of the parm to PARM_BOUNDARY. The
3225 initial offset is not affected by this rounding, while the size always
3226 is and the starting offset may be. */
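/* Worked example (added, under assumed values): take PARM_BOUNDARY == 32
   and FUNCTION_ARG_BOUNDARY returning 64 for some parm.  A 5-byte BLKmode
   argument whose initial offset is 4 first has that offset rounded up to
   8 bytes by the first (64-bit boundary) rounding; the second rounding
   then pads its size from 5 up to 8 bytes, a multiple of PARM_BOUNDARY,
   without touching the initial offset.  */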
3227
e7949876
AM
3228/* LOCATE->OFFSET will be negative for the ARGS_GROW_DOWNWARD case;
3229 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
6f086dfc 3230 callers pass in the total size of args so far as
e7949876 3231 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
6f086dfc 3232
6f086dfc 3233void
fa8db1f7
AJ
3234locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3235 int partial, tree fndecl ATTRIBUTE_UNUSED,
3236 struct args_size *initial_offset_ptr,
3237 struct locate_and_pad_arg_data *locate)
6f086dfc 3238{
e7949876
AM
3239 tree sizetree;
3240 enum direction where_pad;
3241 int boundary;
3242 int reg_parm_stack_space = 0;
3243 int part_size_in_regs;
6f086dfc
RS
3244
3245#ifdef REG_PARM_STACK_SPACE
e7949876 3246 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
e7949876 3247
6f086dfc
RS
3248 /* If we have found a stack parm before we reach the end of the
3249 area reserved for registers, skip that area. */
3250 if (! in_regs)
3251 {
6f086dfc
RS
3252 if (reg_parm_stack_space > 0)
3253 {
3254 if (initial_offset_ptr->var)
3255 {
3256 initial_offset_ptr->var
3257 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
fed3cef0 3258 ssize_int (reg_parm_stack_space));
6f086dfc
RS
3259 initial_offset_ptr->constant = 0;
3260 }
3261 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3262 initial_offset_ptr->constant = reg_parm_stack_space;
3263 }
3264 }
3265#endif /* REG_PARM_STACK_SPACE */
3266
e7949876
AM
3267 part_size_in_regs = 0;
3268 if (reg_parm_stack_space == 0)
3269 part_size_in_regs = ((partial * UNITS_PER_WORD)
3270 / (PARM_BOUNDARY / BITS_PER_UNIT)
3271 * (PARM_BOUNDARY / BITS_PER_UNIT));
3272
3273 sizetree
3274 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3275 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3276 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
6e985040 3277 locate->where_pad = where_pad;
6f086dfc
RS
3278
3279#ifdef ARGS_GROW_DOWNWARD
e7949876 3280 locate->slot_offset.constant = -initial_offset_ptr->constant;
6f086dfc 3281 if (initial_offset_ptr->var)
e7949876
AM
3282 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3283 initial_offset_ptr->var);
9dff28ab 3284
e7949876
AM
3285 {
3286 tree s2 = sizetree;
3287 if (where_pad != none
3288 && (!host_integerp (sizetree, 1)
3289 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3290 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3291 SUB_PARM_SIZE (locate->slot_offset, s2);
3292 }
3293
3294 locate->slot_offset.constant += part_size_in_regs;
9dff28ab
JDA
3295
3296 if (!in_regs
3297#ifdef REG_PARM_STACK_SPACE
3298 || REG_PARM_STACK_SPACE (fndecl) > 0
3299#endif
3300 )
e7949876
AM
3301 pad_to_arg_alignment (&locate->slot_offset, boundary,
3302 &locate->alignment_pad);
9dff28ab 3303
e7949876
AM
3304 locate->size.constant = (-initial_offset_ptr->constant
3305 - locate->slot_offset.constant);
6f086dfc 3306 if (initial_offset_ptr->var)
e7949876
AM
3307 locate->size.var = size_binop (MINUS_EXPR,
3308 size_binop (MINUS_EXPR,
3309 ssize_int (0),
3310 initial_offset_ptr->var),
3311 locate->slot_offset.var);
3312
3313 /* Pad_below needs the pre-rounded size to know how much to pad
3314 below. */
3315 locate->offset = locate->slot_offset;
3316 if (where_pad == downward)
3317 pad_below (&locate->offset, passed_mode, sizetree);
9dff28ab 3318
6f086dfc 3319#else /* !ARGS_GROW_DOWNWARD */
832ea3b3
FS
3320 if (!in_regs
3321#ifdef REG_PARM_STACK_SPACE
3322 || REG_PARM_STACK_SPACE (fndecl) > 0
3323#endif
3324 )
e7949876
AM
3325 pad_to_arg_alignment (initial_offset_ptr, boundary,
3326 &locate->alignment_pad);
3327 locate->slot_offset = *initial_offset_ptr;
6f086dfc
RS
3328
3329#ifdef PUSH_ROUNDING
3330 if (passed_mode != BLKmode)
3331 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3332#endif
3333
d4b0a7a0
DE
3334 /* Pad_below needs the pre-rounded size to know how much to pad below
3335 so this must be done before rounding up. */
e7949876
AM
3336 locate->offset = locate->slot_offset;
3337 if (where_pad == downward)
3338 pad_below (&locate->offset, passed_mode, sizetree);
d4b0a7a0 3339
6f086dfc 3340 if (where_pad != none
1468899d
RK
3341 && (!host_integerp (sizetree, 1)
3342 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
6f086dfc
RS
3343 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3344
e7949876
AM
3345 ADD_PARM_SIZE (locate->size, sizetree);
3346
3347 locate->size.constant -= part_size_in_regs;
6f086dfc
RS
3348#endif /* ARGS_GROW_DOWNWARD */
3349}
3350
e16c591a
RS
3351/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3352 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
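/* Worked example (added, assuming STACK_POINTER_OFFSET == 0): with
   BOUNDARY == 64 bits, a constant offset of 12 becomes
   CEIL_ROUND (12, 8) == 16, or FLOOR_ROUND (12, 8) == 8 when
   ARGS_GROW_DOWNWARD; when BOUNDARY also exceeds PARM_BOUNDARY and
   STACK_BOUNDARY, the change is recorded in *ALIGNMENT_PAD.  */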
3353
6f086dfc 3354static void
fa8db1f7
AJ
3355pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3356 struct args_size *alignment_pad)
6f086dfc 3357{
a544cfd2
KG
3358 tree save_var = NULL_TREE;
3359 HOST_WIDE_INT save_constant = 0;
a751cd5b 3360 int boundary_in_bytes = boundary / BITS_PER_UNIT;
a594a19c
GK
3361 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3362
3363#ifdef SPARC_STACK_BOUNDARY_HACK
3364 /* The sparc port has a bug. It sometimes claims a STACK_BOUNDARY
3365 higher than the real alignment of %sp. However, when it does this,
3366 the alignment of %sp+STACK_POINTER_OFFSET will be STACK_BOUNDARY.
3367 This is a temporary hack while the sparc port is fixed. */
3368 if (SPARC_STACK_BOUNDARY_HACK)
3369 sp_offset = 0;
3370#endif
4fc026cd 3371
9399d5c6 3372 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
4fc026cd
CM
3373 {
3374 save_var = offset_ptr->var;
3375 save_constant = offset_ptr->constant;
3376 }
3377
3378 alignment_pad->var = NULL_TREE;
3379 alignment_pad->constant = 0;
4fc026cd 3380
6f086dfc
RS
3381 if (boundary > BITS_PER_UNIT)
3382 {
3383 if (offset_ptr->var)
3384 {
a594a19c
GK
3385 tree sp_offset_tree = ssize_int (sp_offset);
3386 tree offset = size_binop (PLUS_EXPR,
3387 ARGS_SIZE_TREE (*offset_ptr),
3388 sp_offset_tree);
6f086dfc 3389#ifdef ARGS_GROW_DOWNWARD
a594a19c 3390 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
6f086dfc 3391#else
a594a19c 3392 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
6f086dfc 3393#endif
a594a19c
GK
3394
3395 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
e7949876
AM
3396 /* ARGS_SIZE_TREE includes constant term. */
3397 offset_ptr->constant = 0;
dd3f0101
KH
3398 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3399 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
fed3cef0 3400 save_var);
6f086dfc
RS
3401 }
3402 else
718fe406 3403 {
a594a19c 3404 offset_ptr->constant = -sp_offset +
6f086dfc 3405#ifdef ARGS_GROW_DOWNWARD
a594a19c 3406 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
6f086dfc 3407#else
a594a19c 3408 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
6f086dfc 3409#endif
718fe406
KH
3410 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3411 alignment_pad->constant = offset_ptr->constant - save_constant;
3412 }
6f086dfc
RS
3413 }
3414}
3415
3416static void
fa8db1f7 3417pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
6f086dfc
RS
3418{
3419 if (passed_mode != BLKmode)
3420 {
3421 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3422 offset_ptr->constant
3423 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3424 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3425 - GET_MODE_SIZE (passed_mode));
3426 }
3427 else
3428 {
3429 if (TREE_CODE (sizetree) != INTEGER_CST
3430 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3431 {
3432 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
3433 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3434 /* Add it in. */
3435 ADD_PARM_SIZE (*offset_ptr, s2);
3436 SUB_PARM_SIZE (*offset_ptr, sizetree);
3437 }
3438 }
3439}
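/* Worked example for pad_below (added): with PARM_BOUNDARY == 32 and
   8-bit units, an HImode parm occupies 16 bits; rounding 16 up to 32
   bits gives 4 bytes, and subtracting the 2-byte mode size advances
   the offset by 2 bytes of padding below the value.  */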
6f086dfc
RS
3440\f
3441/* Walk the tree of blocks describing the binding levels within a function
6de9cd9a 3442 and warn about variables that might be killed by setjmp or vfork.
6f086dfc
RS
3443 This is done after calling flow_analysis and before global_alloc
3444 clobbers the pseudo-regs to hard regs. */
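/* A sketch of the situation being diagnosed (added; illustrative):

     jmp_buf env;
     int i = 0;
     if (setjmp (env) == 0)
       {
         i = 1;
         longjmp (env, 1);
       }
     use (i);

   If I is assigned a call-clobbered hard register, its value at USE
   is unpredictable after the longjmp, hence the warning below.  */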
3445
3446void
6de9cd9a 3447setjmp_vars_warning (tree block)
6f086dfc 3448{
b3694847 3449 tree decl, sub;
6de9cd9a 3450
6f086dfc
RS
3451 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3452 {
6de9cd9a 3453 if (TREE_CODE (decl) == VAR_DECL
bc41842b 3454 && DECL_RTL_SET_P (decl)
f8cfc6aa 3455 && REG_P (DECL_RTL (decl))
6f086dfc 3456 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
971801ff
JM
3457 warning ("%Jvariable %qD might be clobbered by %<longjmp%>"
3458 " or %<vfork%>",
ddd2d57e 3459 decl, decl);
6f086dfc 3460 }
6de9cd9a 3461
6f086dfc 3462 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
6de9cd9a 3463 setjmp_vars_warning (sub);
6f086dfc
RS
3464}
3465
6de9cd9a 3466/* Do the appropriate part of setjmp_vars_warning
6f086dfc
RS
3467 but for arguments instead of local variables. */
3468
3469void
fa8db1f7 3470setjmp_args_warning (void)
6f086dfc 3471{
b3694847 3472 tree decl;
6f086dfc
RS
3473 for (decl = DECL_ARGUMENTS (current_function_decl);
3474 decl; decl = TREE_CHAIN (decl))
3475 if (DECL_RTL (decl) != 0
f8cfc6aa 3476 && REG_P (DECL_RTL (decl))
6f086dfc 3477 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
971801ff 3478 warning ("%Jargument %qD might be clobbered by %<longjmp%> or %<vfork%>",
ddd2d57e 3479 decl, decl);
6f086dfc
RS
3480}
3481
6f086dfc 3482\f
a20612aa
RH
3483/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3484 and create duplicate blocks. */
3485/* ??? Need an option to either create block fragments or to create
3486 abstract origin duplicates of a source block. It really depends
3487 on what optimization has been performed. */
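/* Illustrative example (added): if basic-block reordering moves the
   cold half of a lexical block after the hot half, one BLOCK is now
   covered by two NOTE_INSN_BLOCK_BEG/END pairs; reorder_blocks_1
   below copies the BLOCK for the second region and chains the copy
   through BLOCK_FRAGMENT_CHAIN of the origin.  */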
467456d0 3488
116eebd6 3489void
fa8db1f7 3490reorder_blocks (void)
467456d0 3491{
116eebd6 3492 tree block = DECL_INITIAL (current_function_decl);
18c038b9 3493 varray_type block_stack;
467456d0 3494
1a4450c7 3495 if (block == NULL_TREE)
116eebd6 3496 return;
fc289cd1 3497
18c038b9
MM
3498 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
3499
a20612aa 3500 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
6de9cd9a 3501 clear_block_marks (block);
a20612aa 3502
116eebd6
MM
3503 /* Prune the old trees away, so that they don't get in the way. */
3504 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3505 BLOCK_CHAIN (block) = NULL_TREE;
fc289cd1 3506
a20612aa 3507 /* Recreate the block tree from the note nesting. */
116eebd6 3508 reorder_blocks_1 (get_insns (), block, &block_stack);
718fe406 3509 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
18c038b9 3510
a20612aa
RH
3511 /* Remove deleted blocks from the block fragment chains. */
3512 reorder_fix_fragments (block);
467456d0
RS
3513}
3514
a20612aa 3515/* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
0a1c58a2 3516
6de9cd9a
DN
3517void
3518clear_block_marks (tree block)
cc1fe44f 3519{
a20612aa 3520 while (block)
cc1fe44f 3521 {
a20612aa 3522 TREE_ASM_WRITTEN (block) = 0;
6de9cd9a 3523 clear_block_marks (BLOCK_SUBBLOCKS (block));
a20612aa 3524 block = BLOCK_CHAIN (block);
cc1fe44f
DD
3525 }
3526}
3527
0a1c58a2 3528static void
fa8db1f7 3529reorder_blocks_1 (rtx insns, tree current_block, varray_type *p_block_stack)
0a1c58a2
JL
3530{
3531 rtx insn;
3532
3533 for (insn = insns; insn; insn = NEXT_INSN (insn))
3534 {
4b4bf941 3535 if (NOTE_P (insn))
0a1c58a2
JL
3536 {
3537 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3538 {
3539 tree block = NOTE_BLOCK (insn);
a20612aa
RH
3540
3541 /* If we have seen this block before, that means it now
3542 spans multiple address regions. Create a new fragment. */
0a1c58a2
JL
3543 if (TREE_ASM_WRITTEN (block))
3544 {
a20612aa
RH
3545 tree new_block = copy_node (block);
3546 tree origin;
3547
3548 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3549 ? BLOCK_FRAGMENT_ORIGIN (block)
3550 : block);
3551 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3552 BLOCK_FRAGMENT_CHAIN (new_block)
3553 = BLOCK_FRAGMENT_CHAIN (origin);
3554 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3555
3556 NOTE_BLOCK (insn) = new_block;
3557 block = new_block;
0a1c58a2 3558 }
a20612aa 3559
0a1c58a2
JL
3560 BLOCK_SUBBLOCKS (block) = 0;
3561 TREE_ASM_WRITTEN (block) = 1;
339a28b9
ZW
3562 /* When there's only one block for the entire function,
3563 current_block == block and we mustn't do this; it
3564 would cause infinite recursion. */
3565 if (block != current_block)
3566 {
3567 BLOCK_SUPERCONTEXT (block) = current_block;
3568 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3569 BLOCK_SUBBLOCKS (current_block) = block;
3570 current_block = block;
3571 }
0a1c58a2
JL
3572 VARRAY_PUSH_TREE (*p_block_stack, block);
3573 }
3574 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3575 {
3576 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
3577 VARRAY_POP (*p_block_stack);
3578 BLOCK_SUBBLOCKS (current_block)
3579 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3580 current_block = BLOCK_SUPERCONTEXT (current_block);
3581 }
3582 }
0a1c58a2
JL
3583 }
3584}
3585
a20612aa
RH
3586/* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
3587 appears in the block tree, select one of the fragments to become
3588 the new origin block. */
3589
3590static void
fa8db1f7 3591reorder_fix_fragments (tree block)
a20612aa
RH
3592{
3593 while (block)
3594 {
3595 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
3596 tree new_origin = NULL_TREE;
3597
3598 if (dup_origin)
3599 {
3600 if (! TREE_ASM_WRITTEN (dup_origin))
3601 {
3602 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
797a6ac1 3603
a20612aa
RH
3604 /* Find the first of the remaining fragments. There must
3605 be at least one -- the current block. */
3606 while (! TREE_ASM_WRITTEN (new_origin))
3607 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
3608 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
3609 }
3610 }
3611 else if (! dup_origin)
3612 new_origin = block;
3613
3614 /* Re-root the rest of the fragments to the new origin. In the
3615 case that DUP_ORIGIN was null, that means BLOCK was the origin
3616 of a chain of fragments and we want to remove those fragments
3617 that didn't make it to the output. */
3618 if (new_origin)
3619 {
3620 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
3621 tree chain = *pp;
3622
3623 while (chain)
3624 {
3625 if (TREE_ASM_WRITTEN (chain))
3626 {
3627 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
3628 *pp = chain;
3629 pp = &BLOCK_FRAGMENT_CHAIN (chain);
3630 }
3631 chain = BLOCK_FRAGMENT_CHAIN (chain);
3632 }
3633 *pp = NULL_TREE;
3634 }
3635
3636 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
3637 block = BLOCK_CHAIN (block);
3638 }
3639}
3640
467456d0
RS
3641/* Reverse the order of elements in the chain T of blocks,
3642 and return the new head of the chain (old last element). */
3643
6de9cd9a 3644tree
fa8db1f7 3645blocks_nreverse (tree t)
467456d0 3646{
b3694847 3647 tree prev = 0, decl, next;
467456d0
RS
3648 for (decl = t; decl; decl = next)
3649 {
3650 next = BLOCK_CHAIN (decl);
3651 BLOCK_CHAIN (decl) = prev;
3652 prev = decl;
3653 }
3654 return prev;
3655}
3656
18c038b9
MM
3657/* Count the subblocks of the list starting with BLOCK. If VECTOR is
3658 non-NULL, list them all into VECTOR, in a depth-first preorder
3659 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
b2a59b15 3660 blocks. */
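/* Usage note (added): get_block_vector below calls this twice -- first
   with VECTOR == NULL merely to count the blocks, then with a freshly
   allocated array to record them in the same preorder.  */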
467456d0
RS
3661
3662static int
fa8db1f7 3663all_blocks (tree block, tree *vector)
467456d0 3664{
b2a59b15
MS
3665 int n_blocks = 0;
3666
a84efb51
JO
3667 while (block)
3668 {
3669 TREE_ASM_WRITTEN (block) = 0;
b2a59b15 3670
a84efb51
JO
3671 /* Record this block. */
3672 if (vector)
3673 vector[n_blocks] = block;
b2a59b15 3674
a84efb51 3675 ++n_blocks;
718fe406 3676
a84efb51
JO
3677 /* Record the subblocks, and their subblocks... */
3678 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3679 vector ? vector + n_blocks : 0);
3680 block = BLOCK_CHAIN (block);
3681 }
467456d0
RS
3682
3683 return n_blocks;
3684}
18c038b9
MM
3685
3686/* Return a vector containing all the blocks rooted at BLOCK. The
3687 number of elements in the vector is stored in N_BLOCKS_P. The
3688 vector is dynamically allocated; it is the caller's responsibility
3689 to call `free' on the pointer returned. */
718fe406 3690
18c038b9 3691static tree *
fa8db1f7 3692get_block_vector (tree block, int *n_blocks_p)
18c038b9
MM
3693{
3694 tree *block_vector;
3695
3696 *n_blocks_p = all_blocks (block, NULL);
703ad42b 3697 block_vector = xmalloc (*n_blocks_p * sizeof (tree));
18c038b9
MM
3698 all_blocks (block, block_vector);
3699
3700 return block_vector;
3701}
3702
f83b236e 3703static GTY(()) int next_block_index = 2;
18c038b9
MM
3704
3705/* Set BLOCK_NUMBER for all the blocks in FN. */
3706
3707void
fa8db1f7 3708number_blocks (tree fn)
18c038b9
MM
3709{
3710 int i;
3711 int n_blocks;
3712 tree *block_vector;
3713
3714 /* For SDB and XCOFF debugging output, we start numbering the blocks
3715 from 1 within each function, rather than keeping a running
3716 count. */
3717#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
b0e3a658
RK
3718 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3719 next_block_index = 1;
18c038b9
MM
3720#endif
3721
3722 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3723
3724 /* The top-level BLOCK isn't numbered at all. */
3725 for (i = 1; i < n_blocks; ++i)
3726 /* We number the blocks from two. */
3727 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3728
3729 free (block_vector);
3730
3731 return;
3732}
df8992f8
RH
3733
3734/* If VAR is present in a subblock of BLOCK, return the subblock. */
3735
3736tree
fa8db1f7 3737debug_find_var_in_block_tree (tree var, tree block)
df8992f8
RH
3738{
3739 tree t;
3740
3741 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3742 if (t == var)
3743 return block;
3744
3745 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3746 {
3747 tree ret = debug_find_var_in_block_tree (var, t);
3748 if (ret)
3749 return ret;
3750 }
3751
3752 return NULL_TREE;
3753}
467456d0 3754\f
3a70d621
RH
3755/* Allocate a function structure for FNDECL and set its contents
3756 to the defaults. */
7a80cf9a 3757
3a70d621
RH
3758void
3759allocate_struct_function (tree fndecl)
6f086dfc 3760{
3a70d621 3761 tree result;
6de9cd9a 3762 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
6f086dfc 3763
3a70d621 3764 cfun = ggc_alloc_cleared (sizeof (struct function));
b384405b 3765
3a70d621
RH
3766 cfun->stack_alignment_needed = STACK_BOUNDARY;
3767 cfun->preferred_stack_boundary = STACK_BOUNDARY;
6f086dfc 3768
3a70d621 3769 current_function_funcdef_no = funcdef_no++;
6f086dfc 3770
3a70d621 3771 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
6f086dfc 3772
3a70d621 3773 init_eh_for_function ();
6f086dfc 3774
ae2bcd98 3775 lang_hooks.function.init (cfun);
3a70d621
RH
3776 if (init_machine_status)
3777 cfun->machine = (*init_machine_status) ();
e2ecd91c 3778
3a70d621
RH
3779 if (fndecl == NULL)
3780 return;
a0871656 3781
1da326c3 3782 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3a70d621 3783 cfun->decl = fndecl;
6f086dfc 3784
3a70d621 3785 result = DECL_RESULT (fndecl);
61f71b34 3786 if (aggregate_value_p (result, fndecl))
3a70d621
RH
3787 {
3788#ifdef PCC_STATIC_STRUCT_RETURN
3789 current_function_returns_pcc_struct = 1;
3790#endif
3791 current_function_returns_struct = 1;
3792 }
6f086dfc 3793
3a70d621 3794 current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));
6f086dfc 3795
6de9cd9a
DN
3796 current_function_stdarg
3797 = (fntype
3798 && TYPE_ARG_TYPES (fntype) != 0
3799 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3800 != void_type_node));
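  /* Added note: for "int f (int n, ...)" the last TYPE_ARG_TYPES entry
     is not void_type_node, so current_function_stdarg is set above; a
     prototyped "int g (int n)" ends with void_type_node, and an
     unprototyped function has no TYPE_ARG_TYPES at all.  */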
3a70d621 3801}
6f086dfc 3802
3a70d621 3803/* Reset cfun and other non-struct-function variables to defaults as
2067c116 3804 appropriate for emitting rtl at the start of a function. */
6f086dfc 3805
3a70d621
RH
3806static void
3807prepare_function_start (tree fndecl)
3808{
1da326c3
SB
3809 if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
3810 cfun = DECL_STRUCT_FUNCTION (fndecl);
3a70d621
RH
3811 else
3812 allocate_struct_function (fndecl);
0de456a5
JH
3813 init_emit ();
3814 init_varasm_status (cfun);
3815 init_expr ();
6f086dfc 3816
3a70d621 3817 cse_not_expected = ! optimize;
6f086dfc 3818
3a70d621
RH
3819 /* Caller save not needed yet. */
3820 caller_save_needed = 0;
6f086dfc 3821
3a70d621
RH
3822 /* We haven't done register allocation yet. */
3823 reg_renumber = 0;
6f086dfc 3824
b384405b
BS
3825 /* Indicate that we have not instantiated virtual registers yet. */
3826 virtuals_instantiated = 0;
3827
1b3d8f8a
GK
3828 /* Indicate that we want CONCATs now. */
3829 generating_concat_p = 1;
3830
b384405b
BS
3831 /* Indicate we have no need of a frame pointer yet. */
3832 frame_pointer_needed = 0;
b384405b
BS
3833}
3834
3835/* Initialize the rtl expansion mechanism so that we can do simple things
3836 like generate sequences. This is used to provide a context during global
3837 initialization of some passes. */
3838void
fa8db1f7 3839init_dummy_function_start (void)
b384405b 3840{
3a70d621 3841 prepare_function_start (NULL);
b384405b
BS
3842}
3843
3844/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3845 and initialize static variables for generating RTL for the statements
3846 of the function. */
3847
3848void
fa8db1f7 3849init_function_start (tree subr)
b384405b 3850{
3a70d621 3851 prepare_function_start (subr);
b384405b 3852
ee6b0296
NS
3853 /* Prevent ever trying to delete the first instruction of a
3854 function. Also tell final how to output a linenum before the
3855 function prologue. Note linenums could be missing, e.g. when
3856 compiling a Java .class file. */
3c20847b 3857 if (! DECL_IS_BUILTIN (subr))
f31686a3 3858 emit_line_note (DECL_SOURCE_LOCATION (subr));
6f086dfc
RS
3859
3860 /* Make sure first insn is a note even if we don't want linenums.
3861 This makes sure the first insn will never be deleted.
3862 Also, final expects a note to appear there. */
2e040219 3863 emit_note (NOTE_INSN_DELETED);
6f086dfc 3864
6f086dfc
RS
3865 /* Warn if this function's return value is an aggregate type,
3866 regardless of which calling convention we are using for it. */
3867 if (warn_aggregate_return
05e3bdb9 3868 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6f086dfc 3869 warning ("function returns an aggregate");
49ad7cfa 3870}
5c7675e9 3871
49ad7cfa
BS
3872/* Make sure all values used by the optimization passes have sane
3873 defaults. */
3874void
fa8db1f7 3875init_function_for_compilation (void)
49ad7cfa
BS
3876{
3877 reg_renumber = 0;
0a1c58a2 3878
5c7675e9 3879 /* No prologue/epilogue insns yet. */
0a1c58a2
JL
3880 VARRAY_GROW (prologue, 0);
3881 VARRAY_GROW (epilogue, 0);
3882 VARRAY_GROW (sibcall_epilogue, 0);
6f086dfc
RS
3883}
3884
6f086dfc
RS
3885/* Expand a call to __main at the beginning of a possible main function. */
3886
e2fd1d94
JM
3887#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
3888#undef HAS_INIT_SECTION
3889#define HAS_INIT_SECTION
3890#endif
3891
6f086dfc 3892void
fa8db1f7 3893expand_main_function (void)
6f086dfc 3894{
1d482056
RH
3895#ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
3896 if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
3897 {
3898 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
8a723db2 3899 rtx tmp, seq;
1d482056 3900
8a723db2 3901 start_sequence ();
ef89d648 3902 /* Forcibly align the stack. */
1d482056 3903#ifdef STACK_GROWS_DOWNWARD
ef89d648
ZW
3904 tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT(-align),
3905 stack_pointer_rtx, 1, OPTAB_WIDEN);
1d482056 3906#else
ef89d648
ZW
3907 tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3908 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
3909 tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
3910 stack_pointer_rtx, 1, OPTAB_WIDEN);
1d482056
RH
3911#endif
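      /* Worked example (added): with align == 16, AND-ing the stack
         pointer with -16 (~15) rounds 0x1007 down to 0x1000 on a
         downward-growing stack; the PLUS/AND pair above rounds 0x1007
         up to 0x1010 when the stack grows upward.  */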
3912 if (tmp != stack_pointer_rtx)
3913 emit_move_insn (stack_pointer_rtx, tmp);
797a6ac1 3914
1d482056
RH
3915 /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
3916 tmp = force_reg (Pmode, const0_rtx);
3917 allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
2f937369 3918 seq = get_insns ();
8a723db2
DD
3919 end_sequence ();
3920
3921 for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
3922 if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
3923 break;
3924 if (tmp)
3925 emit_insn_before (seq, tmp);
3926 else
3927 emit_insn (seq);
1d482056
RH
3928 }
3929#endif
3930
3931#ifndef HAS_INIT_SECTION
68d28100 3932 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
1d482056 3933#endif
6f086dfc
RS
3934}
3935\f
1f731749
MM
3936/* The PENDING_SIZES represent the sizes of variable-sized types.
3937 Create RTL for the various sizes now (using temporary variables),
3938 so that we can refer to the sizes from the RTL we are generating
3939 for the current function. The PENDING_SIZES are a TREE_LIST. The
3940 TREE_VALUE of each node is a SAVE_EXPR. */
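/* Illustrative example (added): for

     void f (int n, int a[n][n]);

   the size of A's array type depends on N; it is wrapped in a SAVE_EXPR
   that lands on the pending-sizes list, and expand_pending_sizes below
   forces it into a temporary so later RTL can refer to it.  */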
3941
3942void
fa8db1f7 3943expand_pending_sizes (tree pending_sizes)
1f731749
MM
3944{
3945 tree tem;
3946
3947 /* Evaluate now the sizes of any types declared among the arguments. */
3948 for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
ad76cef8 3949 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
1f731749
MM
3950}
3951
6f086dfc
RS
3952/* Start the RTL for a new function, and set variables used for
3953 emitting RTL.
3954 SUBR is the FUNCTION_DECL node. */
3957
3958void
b79c5284 3959expand_function_start (tree subr)
6f086dfc 3960{
6f086dfc
RS
3961 /* Make sure volatile mem refs aren't considered
3962 valid operands of arithmetic insns. */
3963 init_recog_no_volatile ();
3964
70f4f91c
WC
3965 current_function_profile
3966 = (profile_flag
3967 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
3968
a157febd
GK
3969 current_function_limit_stack
3970 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
3971
52a11cbf
RH
3972 /* Make the label for return statements to jump to. Do not special
3973 case machines with special return instructions -- they will be
3974 handled later during jump, ifcvt, or epilogue creation. */
6f086dfc 3975 return_label = gen_label_rtx ();
6f086dfc
RS
3976
3977 /* Initialize rtx used to return the value. */
3978 /* Do this before assign_parms so that we copy the struct value address
3979 before any library calls that assign parms might generate. */
3980
3981 /* Decide whether to return the value in memory or in a register. */
61f71b34 3982 if (aggregate_value_p (DECL_RESULT (subr), subr))
6f086dfc
RS
3983 {
3984 /* Returning something that won't go in a register. */
b3694847 3985 rtx value_address = 0;
6f086dfc
RS
3986
3987#ifdef PCC_STATIC_STRUCT_RETURN
3988 if (current_function_returns_pcc_struct)
3989 {
3990 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
3991 value_address = assemble_static_space (size);
3992 }
3993 else
3994#endif
3995 {
61f71b34 3996 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 1);
6f086dfc
RS
3997 /* Expect to be passed the address of a place to store the value.
3998 If it is passed as an argument, assign_parms will take care of
3999 it. */
61f71b34 4000 if (sv)
6f086dfc
RS
4001 {
4002 value_address = gen_reg_rtx (Pmode);
61f71b34 4003 emit_move_insn (value_address, sv);
6f086dfc
RS
4004 }
4005 }
4006 if (value_address)
ccdecf58 4007 {
01c98570
JM
4008 rtx x = value_address;
4009 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4010 {
4011 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4012 set_mem_attributes (x, DECL_RESULT (subr), 1);
4013 }
abde42f7 4014 SET_DECL_RTL (DECL_RESULT (subr), x);
ccdecf58 4015 }
6f086dfc
RS
4016 }
4017 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4018 /* If return mode is void, this decl rtl should not be used. */
19e7881c 4019 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
d5bf1143 4020 else
a53e14c0 4021 {
d5bf1143
RH
4022 /* Compute the return values into a pseudo reg, which we will copy
4023 into the true return register after the cleanups are done. */
db3c0315
MM
4024
4025 /* In order to figure out what mode to use for the pseudo, we
4026 figure out what the mode of the eventual return register will
4027 actually be, and use that. */
4028 rtx hard_reg
4029 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
4030 subr, 1);
4031
80a480ca 4032 /* Structures that are returned in registers are not aggregate_value_p,
084a1106
JDA
4033 so we may see a PARALLEL or a REG. */
4034 if (REG_P (hard_reg))
4035 SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
80a480ca 4036 else
0bccc606
NS
4037 {
4038 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4039 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4040 }
a53e14c0 4041
084a1106
JDA
4042 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4043 result to the real return register(s). */
4044 DECL_REGISTER (DECL_RESULT (subr)) = 1;
a53e14c0 4045 }
6f086dfc
RS
4046
4047 /* Initialize rtx for parameters and local variables.
4048 In some cases this requires emitting insns. */
0d1416c6 4049 assign_parms (subr);
6f086dfc 4050
6de9cd9a
DN
4051 /* If function gets a static chain arg, store it. */
4052 if (cfun->static_chain_decl)
4053 {
7e140280
RH
4054 tree parm = cfun->static_chain_decl;
4055 rtx local = gen_reg_rtx (Pmode);
4056
4057 set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
4058 SET_DECL_RTL (parm, local);
7e140280 4059 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
6de9cd9a 4060
7e140280 4061 emit_move_insn (local, static_chain_incoming_rtx);
6de9cd9a
DN
4062 }
4063
4064 /* If the function receives a non-local goto, then store the
4065 bits we need to restore the frame pointer. */
4066 if (cfun->nonlocal_goto_save_area)
4067 {
4068 tree t_save;
4069 rtx r_save;
4070
4071 /* ??? We need to do this save early. Unfortunately, this point is
4072 reached before the frame variable gets declared. Help out... */
4073 expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));
4074
3244e67d
RS
4075 t_save = build4 (ARRAY_REF, ptr_type_node,
4076 cfun->nonlocal_goto_save_area,
4077 integer_zero_node, NULL_TREE, NULL_TREE);
6de9cd9a 4078 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
5e89a381 4079 r_save = convert_memory_address (Pmode, r_save);
f0c51a1e 4080
6de9cd9a
DN
4081 emit_move_insn (r_save, virtual_stack_vars_rtx);
4082 update_nonlocal_goto_save_area ();
4083 }
f0c51a1e 4084
6f086dfc
RS
4085 /* The following was moved from init_function_start.
4086 The move is supposed to make sdb output more accurate. */
4087 /* Indicate the beginning of the function body,
4088 as opposed to parm setup. */
2e040219 4089 emit_note (NOTE_INSN_FUNCTION_BEG);
6f086dfc 4090
4b4bf941 4091 if (!NOTE_P (get_last_insn ()))
2e040219 4092 emit_note (NOTE_INSN_DELETED);
6f086dfc
RS
4093 parm_birth_insn = get_last_insn ();
4094
70f4f91c 4095 if (current_function_profile)
f6f315fe 4096 {
f6f315fe 4097#ifdef PROFILE_HOOK
df696a75 4098 PROFILE_HOOK (current_function_funcdef_no);
411707f4 4099#endif
f6f315fe 4100 }
411707f4 4101
6f086dfc
RS
4102 /* After the display initializations is where the tail-recursion label
4103 should go, if we end up needing one. Ensure we have a NOTE here
4104 since some things (like trampolines) get placed before this. */
2e040219 4105 tail_recursion_reentry = emit_note (NOTE_INSN_DELETED);
6f086dfc
RS
4106
4107 /* Evaluate now the sizes of any types declared among the arguments. */
1f731749 4108 expand_pending_sizes (nreverse (get_pending_sizes ()));
6f086dfc
RS
4109
4110 /* Make sure there is a line number after the function entry setup code. */
4111 force_next_line_note ();
4112}
4113\f
49ad7cfa
BS
4114/* Undo the effects of init_dummy_function_start. */
4115void
fa8db1f7 4116expand_dummy_function_end (void)
49ad7cfa
BS
4117{
4118 /* End any sequences that failed to be closed due to syntax errors. */
4119 while (in_sequence_p ())
4120 end_sequence ();
4121
4122 /* Outside function body, can't compute type's actual size
4123 until next function's body starts. */
fa51b01b 4124
01d939e8
BS
4125 free_after_parsing (cfun);
4126 free_after_compilation (cfun);
01d939e8 4127 cfun = 0;
49ad7cfa
BS
4128}
4129
c13fde05
RH
4130/* Call DOIT for each hard register used as a return value from
4131 the current function. */
bd695e1e
RH
4132
4133void
fa8db1f7 4134diddle_return_value (void (*doit) (rtx, void *), void *arg)
bd695e1e 4135{
c13fde05
RH
4136 rtx outgoing = current_function_return_rtx;
4137
4138 if (! outgoing)
4139 return;
bd695e1e 4140
f8cfc6aa 4141 if (REG_P (outgoing))
c13fde05
RH
4142 (*doit) (outgoing, arg);
4143 else if (GET_CODE (outgoing) == PARALLEL)
4144 {
4145 int i;
bd695e1e 4146
c13fde05
RH
4147 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4148 {
4149 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4150
f8cfc6aa 4151 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
c13fde05 4152 (*doit) (x, arg);
bd695e1e
RH
4153 }
4154 }
4155}
4156
c13fde05 4157static void
fa8db1f7 4158do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
c13fde05
RH
4159{
4160 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
4161}
4162
4163void
fa8db1f7 4164clobber_return_register (void)
c13fde05
RH
4165{
4166 diddle_return_value (do_clobber_return_reg, NULL);
9c65bbf4
JH
4167
4168 /* In case we do use pseudo to return value, clobber it too. */
4169 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4170 {
4171 tree decl_result = DECL_RESULT (current_function_decl);
4172 rtx decl_rtl = DECL_RTL (decl_result);
4173 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4174 {
4175 do_clobber_return_reg (decl_rtl, NULL);
4176 }
4177 }
c13fde05
RH
4178}
4179
4180static void
fa8db1f7 4181do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
c13fde05
RH
4182{
4183 emit_insn (gen_rtx_USE (VOIDmode, reg));
4184}
4185
4186void
fa8db1f7 4187use_return_register (void)
c13fde05
RH
4188{
4189 diddle_return_value (do_use_return_reg, NULL);
4190}
4191
902edd36
JH
4192/* Possibly warn about unused parameters. */
4193void
4194do_warn_unused_parameter (tree fn)
4195{
4196 tree decl;
4197
4198 for (decl = DECL_ARGUMENTS (fn);
4199 decl; decl = TREE_CHAIN (decl))
4200 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4201 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
971801ff 4202 warning ("%Junused parameter %qD", decl, decl);
902edd36
JH
4203}
4204
e2500fed
GK
4205static GTY(()) rtx initial_trampoline;
4206
71c0e7fc 4207/* Generate RTL for the end of the current function. */
6f086dfc
RS
4208
4209void
fa8db1f7 4210expand_function_end (void)
6f086dfc 4211{
932f0847 4212 rtx clobber_after;
6f086dfc 4213
964be02f
RH
4214 /* If arg_pointer_save_area was referenced only from a nested
4215 function, we will not have initialized it yet. Do that now. */
4216 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4217 get_arg_pointer_save_area (cfun);
4218
11044f66
RK
4219 /* If we are doing stack checking and this function makes calls,
4220 do a stack probe at the start of the function to ensure we have enough
4221 space for another stack frame. */
4222 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4223 {
4224 rtx insn, seq;
4225
4226 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4b4bf941 4227 if (CALL_P (insn))
11044f66
RK
4228 {
4229 start_sequence ();
4230 probe_stack_range (STACK_CHECK_PROTECT,
4231 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4232 seq = get_insns ();
4233 end_sequence ();
2f937369 4234 emit_insn_before (seq, tail_recursion_reentry);
11044f66
RK
4235 break;
4236 }
4237 }
4238
902edd36
JH
4239 /* Possibly warn about unused parameters.
4240 When frontend does unit-at-a-time, the warning is already
4241 issued at finalization time. */
4242 if (warn_unused_parameter
4243 && !lang_hooks.callgraph.expand_function)
4244 do_warn_unused_parameter (current_function_decl);
6f086dfc 4245
6f086dfc
RS
4246 /* End any sequences that failed to be closed due to syntax errors. */
4247 while (in_sequence_p ())
5f4f0e22 4248 end_sequence ();
6f086dfc 4249
6f086dfc
RS
4250 clear_pending_stack_adjust ();
4251 do_pending_stack_adjust ();
4252
ffad84cd
AH
4253 /* @@@ This is a kludge. We want to ensure that instructions that
4254 may trap are not moved into the epilogue by scheduling, because
4255 we don't always emit unwind information for the epilogue.
4256 However, not all machine descriptions define a blockage insn, so
4257 emit an ASM_INPUT to act as one. */
4258 if (flag_non_call_exceptions)
4259 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
4260
6f086dfc
RS
4261 /* Mark the end of the function body.
4262 If control reaches this insn, the function can drop through
4263 without returning a value. */
2e040219 4264 emit_note (NOTE_INSN_FUNCTION_END);
6f086dfc 4265
82e415a3
DE
4266 /* Must mark the last line number note in the function, so that the test
4267 coverage code can avoid counting the last line twice. This just tells
4268 the code to ignore the immediately following line note, since there
4269 already exists a copy of this note somewhere above. This line number
4270 note is still needed for debugging though, so we can't delete it. */
4271 if (flag_test_coverage)
2e040219 4272 emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
82e415a3 4273
6f086dfc
RS
4274 /* Output a linenumber for the end of the function.
4275 SDB depends on this. */
0cea056b
NS
4276 force_next_line_note ();
4277 emit_line_note (input_location);
6f086dfc 4278
fbffc70a 4279 /* Before the return label (if any), clobber the return
a1f300c0 4280 registers so that they are not propagated live to the rest of
fbffc70a
GK
4281 the function. This can only happen with functions that drop
4282 through; if there had been a return statement, there would
932f0847
JH
4283 have either been a return rtx, or a jump to the return label.
4284
4285 We delay the actual code generation until after current_function_value_rtx
4286 is computed. */
4287 clobber_after = get_last_insn ();
fbffc70a 4288
6f086dfc
RS
4289 /* Output the label for the actual return from the function,
4290 if one is expected. This happens either because a function epilogue
4291 is used instead of a return instruction, or because a return was done
4292 with a goto in order to run local cleanups, or because of pcc-style
4293 structure returning. */
6f086dfc 4294 if (return_label)
fbffc70a 4295 emit_label (return_label);
6f086dfc 4296
52a11cbf
RH
4297 /* Let except.c know where it should emit the call to unregister
4298 the function context for sjlj exceptions. */
4299 if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
4300 sjlj_emit_function_exit_after (get_last_insn ());
4301
6f086dfc
RS
4302 /* If we had calls to alloca, and this machine needs
4303 an accurate stack pointer to exit the function,
4304 insert some code to save and restore the stack pointer. */
9d05bbce
KH
4305 if (! EXIT_IGNORE_STACK
4306 && current_function_calls_alloca)
4307 {
4308 rtx tem = 0;
59257ff7 4309
9d05bbce
KH
4310 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4311 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4312 }
6f086dfc 4313
3e4eac3f
RH
4314 /* If scalar return value was computed in a pseudo-reg, or was a named
4315 return value that got dumped to the stack, copy that to the hard
4316 return register. */
19e7881c 4317 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6f086dfc 4318 {
3e4eac3f
RH
4319 tree decl_result = DECL_RESULT (current_function_decl);
4320 rtx decl_rtl = DECL_RTL (decl_result);
4321
4322 if (REG_P (decl_rtl)
4323 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4324 : DECL_REGISTER (decl_result))
4325 {
ce5e43d0 4326 rtx real_decl_rtl = current_function_return_rtx;
6f086dfc 4327
ce5e43d0 4328 /* This should be set in assign_parms. */
0bccc606 4329 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
3e4eac3f
RH
4330
4331 /* If this is a BLKmode structure being returned in registers,
4332 then use the mode computed in expand_return. Note that if
797a6ac1 4333 decl_rtl is memory, then its mode may have been changed,
3e4eac3f
RH
4334 but that of current_function_return_rtx has not. */
4335 if (GET_MODE (real_decl_rtl) == BLKmode)
ce5e43d0 4336 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
3e4eac3f
RH
4337
4338 /* If a named return value dumped decl_return to memory, then
797a6ac1 4339 we may need to re-do the PROMOTE_MODE signed/unsigned
3e4eac3f
RH
4340 extension. */
4341 if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4342 {
8df83eae 4343 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
3e4eac3f 4344
61f71b34
DD
4345 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4346 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4347 &unsignedp, 1);
3e4eac3f
RH
4348
4349 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4350 }
aa570f54 4351 else if (GET_CODE (real_decl_rtl) == PARALLEL)
084a1106
JDA
4352 {
4353 /* If expand_function_start has created a PARALLEL for decl_rtl,
4354 move the result to the real return registers. Otherwise, do
4355 a group load from decl_rtl for a named return. */
4356 if (GET_CODE (decl_rtl) == PARALLEL)
4357 emit_group_move (real_decl_rtl, decl_rtl);
4358 else
4359 emit_group_load (real_decl_rtl, decl_rtl,
6e985040 4360 TREE_TYPE (decl_result),
084a1106
JDA
4361 int_size_in_bytes (TREE_TYPE (decl_result)));
4362 }
3e4eac3f
RH
4363 else
4364 emit_move_insn (real_decl_rtl, decl_rtl);
3e4eac3f 4365 }
6f086dfc
RS
4366 }
4367
4368 /* If returning a structure, arrange to return the address of the value
4369 in a place where debuggers expect to find it.
4370
4371 If returning a structure PCC style,
4372 the caller also depends on this value.
4373 And current_function_returns_pcc_struct is not necessarily set. */
4374 if (current_function_returns_struct
4375 || current_function_returns_pcc_struct)
4376 {
cc77ae10 4377 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
6f086dfc 4378 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
cc77ae10
JM
4379 rtx outgoing;
4380
4381 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4382 type = TREE_TYPE (type);
4383 else
4384 value_address = XEXP (value_address, 0);
4385
6f086dfc 4386#ifdef FUNCTION_OUTGOING_VALUE
cc77ae10
JM
4387 outgoing = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
4388 current_function_decl);
6f086dfc 4389#else
cc77ae10
JM
4390 outgoing = FUNCTION_VALUE (build_pointer_type (type),
4391 current_function_decl);
4392#endif
6f086dfc
RS
4393
4394 /* Mark this as a function return value so integrate will delete the
4395 assignment and USE below when inlining this function. */
4396 REG_FUNCTION_VALUE_P (outgoing) = 1;
4397
d1608933 4398 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5ae6cd0d
MM
4399 value_address = convert_memory_address (GET_MODE (outgoing),
4400 value_address);
d1608933 4401
6f086dfc 4402 emit_move_insn (outgoing, value_address);
d1608933
RK
4403
4404 /* Show the return register used to hold the result (in this case
4405 the address of the result). */
4406 current_function_return_rtx = outgoing;
6f086dfc
RS
4407 }
4408
52a11cbf
RH
4409 /* If this is an implementation of throw, do what's necessary to
4410 communicate between __builtin_eh_return and the epilogue. */
4411 expand_eh_return ();
4412
932f0847
JH
4413 /* Emit the actual code to clobber return register. */
4414 {
609c3937 4415 rtx seq;
797a6ac1 4416
932f0847
JH
4417 start_sequence ();
4418 clobber_return_register ();
609c3937 4419 expand_naked_return ();
2f937369 4420 seq = get_insns ();
932f0847
JH
4421 end_sequence ();
4422
609c3937 4423 emit_insn_after (seq, clobber_after);
932f0847
JH
4424 }
4425
609c3937
RH
4426 /* Output the label for the naked return from the function. */
4427 emit_label (naked_return_label);
6e3077c6 4428
c13fde05
RH
4429 /* ??? This should no longer be necessary since stupid is no longer with
4430 us, but there are some parts of the compiler (eg reload_combine, and
4431 sh mach_dep_reorg) that still try and compute their own lifetime info
4432 instead of using the general framework. */
4433 use_return_register ();
6f086dfc 4434}
278ed218
RH
4435
4436rtx
fa8db1f7 4437get_arg_pointer_save_area (struct function *f)
278ed218
RH
4438{
4439 rtx ret = f->x_arg_pointer_save_area;
4440
4441 if (! ret)
4442 {
278ed218
RH
4443 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
4444 f->x_arg_pointer_save_area = ret;
964be02f
RH
4445 }
4446
4447 if (f == cfun && ! f->arg_pointer_save_area_init)
4448 {
4449 rtx seq;
278ed218 4450
797a6ac1 4451 /* Save the arg pointer at the beginning of the function. The
964be02f 4452 generated stack slot may not be a valid memory address, so we
278ed218
RH
4453 have to check it and fix it if necessary. */
4454 start_sequence ();
4455 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
2f937369 4456 seq = get_insns ();
278ed218
RH
4457 end_sequence ();
4458
964be02f
RH
4459 push_topmost_sequence ();
4460 emit_insn_after (seq, get_insns ());
4461 pop_topmost_sequence ();
278ed218
RH
4462 }
4463
4464 return ret;
4465}
bdac5f58 4466\f
2f937369
DM
4467/* Extend a vector that records the INSN_UIDs of INSNS
4468 (a list of one or more insns). */
bdac5f58 4469
0a1c58a2 4470static void
fa8db1f7 4471record_insns (rtx insns, varray_type *vecp)
bdac5f58 4472{
2f937369
DM
4473 int i, len;
4474 rtx tmp;
0a1c58a2 4475
2f937369
DM
4476 tmp = insns;
4477 len = 0;
4478 while (tmp != NULL_RTX)
4479 {
4480 len++;
4481 tmp = NEXT_INSN (tmp);
bdac5f58 4482 }
2f937369
DM
4483
4484 i = VARRAY_SIZE (*vecp);
4485 VARRAY_GROW (*vecp, i + len);
4486 tmp = insns;
4487 while (tmp != NULL_RTX)
bdac5f58 4488 {
2f937369
DM
4489 VARRAY_INT (*vecp, i) = INSN_UID (tmp);
4490 i++;
4491 tmp = NEXT_INSN (tmp);
bdac5f58 4492 }
bdac5f58
TW
4493}
4494
589fe865 4495/* Set the locator of the insn chain starting at INSN to LOC. */
0435312e 4496static void
fa8db1f7 4497set_insn_locators (rtx insn, int loc)
0435312e
JH
4498{
4499 while (insn != NULL_RTX)
4500 {
4501 if (INSN_P (insn))
4502 INSN_LOCATOR (insn) = loc;
4503 insn = NEXT_INSN (insn);
4504 }
4505}
4506
2f937369
DM
4507/* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4508 be running after reorg, SEQUENCE rtl is possible. */
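/* Added note: after delay-slot filling an epilogue insn may sit inside
   a (sequence [...]) wrapper, so we must compare the UID of each
   SEQUENCE element rather than the UID of the wrapper itself.  */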
bdac5f58 4509
10914065 4510static int
fa8db1f7 4511contains (rtx insn, varray_type vec)
bdac5f58 4512{
b3694847 4513 int i, j;
bdac5f58 4514
4b4bf941 4515 if (NONJUMP_INSN_P (insn)
bdac5f58
TW
4516 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4517 {
10914065 4518 int count = 0;
bdac5f58 4519 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
0a1c58a2
JL
4520 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
4521 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
10914065
TW
4522 count++;
4523 return count;
bdac5f58
TW
4524 }
4525 else
4526 {
0a1c58a2
JL
4527 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
4528 if (INSN_UID (insn) == VARRAY_INT (vec, j))
10914065 4529 return 1;
bdac5f58
TW
4530 }
4531 return 0;
4532}
5c7675e9
RH
4533
4534int
fa8db1f7 4535prologue_epilogue_contains (rtx insn)
5c7675e9 4536{
0a1c58a2 4537 if (contains (insn, prologue))
5c7675e9 4538 return 1;
0a1c58a2 4539 if (contains (insn, epilogue))
5c7675e9
RH
4540 return 1;
4541 return 0;
4542}
bdac5f58 4543
0a1c58a2 4544int
fa8db1f7 4545sibcall_epilogue_contains (rtx insn)
0a1c58a2
JL
4546{
4547 if (sibcall_epilogue)
4548 return contains (insn, sibcall_epilogue);
4549 return 0;
4550}
4551
73ef99fb 4552#ifdef HAVE_return
69732dcb
RH
4553/* Insert gen_return at the end of block BB. This also means updating
4554 block_for_insn appropriately. */
4555
4556static void
fa8db1f7 4557emit_return_into_block (basic_block bb, rtx line_note)
69732dcb 4558{
a813c111 4559 emit_jump_insn_after (gen_return (), BB_END (bb));
86c82654 4560 if (line_note)
a813c111 4561 emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
69732dcb 4562}
73ef99fb 4563#endif /* HAVE_return */
69732dcb 4564
3258e996
RK
4565#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4566
4567/* These functions convert the epilogue into a variant that does not modify the
4568 stack pointer. This is used in cases where a function returns an object
d6a7951f 4569 whose size is not known until it is computed. The called function leaves the
3258e996
RK
4570 object on the stack, leaves the stack depressed, and returns a pointer to
4571 the object.
4572
4573 What we need to do is track all modifications and references to the stack
4574 pointer, deleting the modifications and changing the references to point to
4575 the location the stack pointer would have pointed to had the modifications
4576 taken place.
4577
4578 These functions need to be portable so we need to make as few assumptions
4579 about the epilogue as we can. However, the epilogue basically contains
4580 three things: instructions to reset the stack pointer, instructions to
4581 reload registers, possibly including the frame pointer, and an
4582 instruction to return to the caller.
4583
4584 If we can't be sure of what a relevant epilogue insn is doing, we abort.
4585 We also make no attempt to validate the insns we make since if they are
4586 invalid, we probably can't do anything valid. The intent is that these
4587 routines get "smarter" as more and more machines start to use them and
4588 they try operating on different epilogues.
4589
4590 We use the following structure to track what the part of the epilogue that
4591 we've already processed has done. We keep two copies of the SP equivalence,
4592 one for use during the insn we are processing and one for use in the next
4593 insn. The difference is because one part of a PARALLEL may adjust SP
4594 and the other may use it. */
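/* Illustrative sketch (added): if the epilogue contains
   (set (reg sp) (plus (reg fp) (const_int 16))), we record
   SP_EQUIV_REG == fp and SP_OFFSET == 16 rather than emitting the set,
   and a later (mem (plus (reg sp) (const_int 4))) is rewritten as
   (mem (plus (reg fp) (const_int 20))).  */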
4595
4596struct epi_info
4597{
4598 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
4599 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
3ef42a0c 4600 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
3258e996
RK
4601 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
4602 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
4603 should be set to once we no longer need
4604 its value. */
f285d67b
RK
4605 rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
4606 for registers. */
3258e996
RK
4607};
4608
fa8db1f7 4609static void handle_epilogue_set (rtx, struct epi_info *);
80fcc7bc 4610static void update_epilogue_consts (rtx, rtx, void *);
fa8db1f7 4611static void emit_equiv_load (struct epi_info *);
7393c642 4612
2f937369
DM
4613/* Modify INSN, a list of one or more insns that is part of the epilogue, so
4614 that it makes no modifications to the stack pointer. Return the new list of insns. */
7393c642 4615
3258e996 4616static rtx
fa8db1f7 4617keep_stack_depressed (rtx insns)
7393c642 4618{
2f937369 4619 int j;
3258e996 4620 struct epi_info info;
2f937369 4621 rtx insn, next;
7393c642 4622
f285d67b 4623 /* If the epilogue is just a single instruction, it must be OK as is. */
2f937369
DM
4624 if (NEXT_INSN (insns) == NULL_RTX)
4625 return insns;
7393c642 4626
3258e996
RK
4627 /* Otherwise, start a sequence, initialize the information we have, and
4628 process all the insns we were given. */
4629 start_sequence ();
4630
4631 info.sp_equiv_reg = stack_pointer_rtx;
4632 info.sp_offset = 0;
4633 info.equiv_reg_src = 0;
7393c642 4634
f285d67b
RK
4635 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
4636 info.const_equiv[j] = 0;
4637
2f937369
DM
4638 insn = insns;
4639 next = NULL_RTX;
4640 while (insn != NULL_RTX)
7393c642 4641 {
2f937369 4642 next = NEXT_INSN (insn);
7393c642 4643
3258e996
RK
4644 if (!INSN_P (insn))
4645 {
4646 add_insn (insn);
2f937369 4647 insn = next;
3258e996
RK
4648 continue;
4649 }
7393c642 4650
3258e996
RK
4651 /* If this insn references the register that SP is equivalent to and
4652 we have a pending load to that register, we must force out the load
4653 first and then indicate we no longer know what SP's equivalent is. */
4654 if (info.equiv_reg_src != 0
4655 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
7393c642 4656 {
3258e996
RK
4657 emit_equiv_load (&info);
4658 info.sp_equiv_reg = 0;
4659 }
7393c642 4660
3258e996
RK
4661 info.new_sp_equiv_reg = info.sp_equiv_reg;
4662 info.new_sp_offset = info.sp_offset;
7393c642 4663
3258e996
RK
4664 /* If this is a (RETURN) and the return address is on the stack,
4665 update the address and change to an indirect jump. */
4666 if (GET_CODE (PATTERN (insn)) == RETURN
4667 || (GET_CODE (PATTERN (insn)) == PARALLEL
4668 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
4669 {
4670 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
4671 rtx base = 0;
4672 HOST_WIDE_INT offset = 0;
4673 rtx jump_insn, jump_set;
4674
4675 /* If the return address is in a register, we can emit the insn
4676 unchanged. Otherwise, it must be a MEM and we see what the
4677 base register and offset are. In any case, we have to emit any
4678 pending load to the equivalent reg of SP, if any. */
f8cfc6aa 4679 if (REG_P (retaddr))
3258e996
RK
4680 {
4681 emit_equiv_load (&info);
4682 add_insn (insn);
2f937369 4683 insn = next;
3258e996
RK
4684 continue;
4685 }
0bccc606 4686 else
3258e996 4687 {
0bccc606
NS
4688 rtx ret_ptr;
4689 gcc_assert (MEM_P (retaddr));
4690
4691 ret_ptr = XEXP (retaddr, 0);
4692
4693 if (REG_P (ret_ptr))
4694 {
4695 base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
4696 offset = 0;
4697 }
4698 else
4699 {
4700 gcc_assert (GET_CODE (ret_ptr) == PLUS
4701 && REG_P (XEXP (ret_ptr, 0))
4702 && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
4703 base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
4704 offset = INTVAL (XEXP (ret_ptr, 1));
4705 }
3258e996 4706 }
3258e996
RK
4707
4708 /* If the base of the location containing the return pointer
4709 is SP, we must update it with the replacement address. Otherwise,
4710 just build the necessary MEM. */
4711 retaddr = plus_constant (base, offset);
4712 if (base == stack_pointer_rtx)
4713 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
4714 plus_constant (info.sp_equiv_reg,
4715 info.sp_offset));
4716
4717 retaddr = gen_rtx_MEM (Pmode, retaddr);
4718
4719 /* If there is a pending load to the equivalent register for SP
4720 and we reference that register, we must load our address into
4721 a scratch register and then do that load. */
4722 if (info.equiv_reg_src
4723 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
4724 {
4725 unsigned int regno;
4726 rtx reg;
4727
4728 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4729 if (HARD_REGNO_MODE_OK (regno, Pmode)
53b6fb26
RK
4730 && !fixed_regs[regno]
4731 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
b5ed05aa
RK
4732 && !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
4733 regno)
4734 && !refers_to_regno_p (regno,
66fd46b6
JH
4735 regno + hard_regno_nregs[regno]
4736 [Pmode],
f285d67b
RK
4737 info.equiv_reg_src, NULL)
4738 && info.const_equiv[regno] == 0)
3258e996
RK
4739 break;
4740
0bccc606 4741 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7393c642 4742
3258e996
RK
4743 reg = gen_rtx_REG (Pmode, regno);
4744 emit_move_insn (reg, retaddr);
4745 retaddr = reg;
4746 }
4747
4748 emit_equiv_load (&info);
4749 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
4750
4751 /* Show the SET in the above insn is a RETURN. */
4752 jump_set = single_set (jump_insn);
0bccc606
NS
4753 gcc_assert (jump_set);
4754 SET_IS_RETURN_P (jump_set) = 1;
7393c642 4755 }
3258e996
RK
4756
4757 /* If SP is not mentioned in the pattern and its equivalent register, if
4758 any, is not modified, just emit it. Otherwise, if neither is set,
4759 replace the reference to SP and emit the insn. If none of those are
4760 true, handle each SET individually. */
4761 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
4762 && (info.sp_equiv_reg == stack_pointer_rtx
4763 || !reg_set_p (info.sp_equiv_reg, insn)))
4764 add_insn (insn);
4765 else if (! reg_set_p (stack_pointer_rtx, insn)
4766 && (info.sp_equiv_reg == stack_pointer_rtx
4767 || !reg_set_p (info.sp_equiv_reg, insn)))
7393c642 4768 {
0bccc606
NS
4769 int changed;
4770
4771 changed = validate_replace_rtx (stack_pointer_rtx,
4772 plus_constant (info.sp_equiv_reg,
4773 info.sp_offset),
4774 insn);
4775 gcc_assert (changed);
7393c642 4776
3258e996
RK
4777 add_insn (insn);
4778 }
4779 else if (GET_CODE (PATTERN (insn)) == SET)
4780 handle_epilogue_set (PATTERN (insn), &info);
4781 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
4782 {
4783 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
4784 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
4785 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
4786 }
4787 else
4788 add_insn (insn);
4789
4790 info.sp_equiv_reg = info.new_sp_equiv_reg;
4791 info.sp_offset = info.new_sp_offset;
2f937369 4792
f285d67b
RK
4793 /* Now update any constants this insn sets. */
4794 note_stores (PATTERN (insn), update_epilogue_consts, &info);
2f937369 4795 insn = next;
3258e996
RK
4796 }
4797
2f937369 4798 insns = get_insns ();
3258e996 4799 end_sequence ();
2f937369 4800 return insns;
3258e996
RK
4801}
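
/* An illustrative sketch (invented registers and offsets, not from any
   real port): on a machine whose normal epilogue would be

     (set (reg sp) (plus (reg fp) (const_int 16)))
     (return)

   with INCOMING_RETURN_ADDR_RTX being (mem (reg sp)), keep_stack_depressed
   drops the SP adjustment and merely records that SP is equivalent to
   FP + 16.  The RETURN's address is then rewritten through that
   equivalence to (mem (plus (reg fp) (const_int 16))) and the RETURN is
   replaced by an indirect jump through it, with the jump's SET marked
   SET_IS_RETURN_P -- so the stack stays depressed on return.  */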

/* SET is a SET from an insn in the epilogue.  P is a pointer to the epi_info
   structure that contains information about what we've seen so far.  We
   process this SET by either updating that data or by emitting one or
   more insns.  */

static void
handle_epilogue_set (rtx set, struct epi_info *p)
{
  /* First handle the case where we are setting SP.  Record what it is being
     set from.  If unknown, abort.  */
  if (reg_set_p (stack_pointer_rtx, set))
    {
      gcc_assert (SET_DEST (set) == stack_pointer_rtx);

      if (GET_CODE (SET_SRC (set)) == PLUS)
        {
          p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
          if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
            p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
          else
            {
              gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
                          && (REGNO (XEXP (SET_SRC (set), 1))
                              < FIRST_PSEUDO_REGISTER)
                          && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
              p->new_sp_offset
                = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
            }
        }
      else
        p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;

      /* If we are adjusting SP, we adjust from the old data.  */
      if (p->new_sp_equiv_reg == stack_pointer_rtx)
        {
          p->new_sp_equiv_reg = p->sp_equiv_reg;
          p->new_sp_offset += p->sp_offset;
        }

      gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));

      return;
    }

  /* Next handle the case where we are setting SP's equivalent register.
     If we already have a value to set it to, abort.  We could update, but
     there seems little point in handling that case.  Note that we have
     to allow for the case where we are setting the register set in
     the previous part of a PARALLEL inside a single insn.  But use the
     old offset for any updates within this insn.  We must also allow for
     the case where the register is being set in a different (usually
     wider) mode than Pmode.  */
  else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
    {
      gcc_assert (!p->equiv_reg_src
                  && REG_P (p->new_sp_equiv_reg)
                  && REG_P (SET_DEST (set))
                  && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
                      <= BITS_PER_WORD)
                  && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
      p->equiv_reg_src
        = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
                                plus_constant (p->sp_equiv_reg,
                                               p->sp_offset));
    }

  /* Otherwise, replace any references to SP in the insn with its new
     value and emit the insn.  */
  else
    {
      SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
                                            plus_constant (p->sp_equiv_reg,
                                                           p->sp_offset));
      SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
                                             plus_constant (p->sp_equiv_reg,
                                                            p->sp_offset));
      emit_insn (set);
    }
}
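
/* Some invented examples of SETs this routine handles (illustrative only):

     (set (reg sp) (plus (reg sp) (const_int 12)))
        updates the tracked offset (NEW_SP_OFFSET grows by 12); nothing
        is emitted.

     (set (reg sp) (reg fp))
        records FP as the new SP equivalence, with an offset of zero.

     (set (reg fp) (mem (plus (reg sp) (const_int 4))))
        when FP is the current SP equivalence, queues the source (with SP
        rewritten via the equivalence) in EQUIV_REG_SRC instead of
        emitting the load immediately.  */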

/* Update the tracking information for registers set to constants.  */

static void
update_epilogue_consts (rtx dest, rtx x, void *data)
{
  struct epi_info *p = (struct epi_info *) data;
  rtx new;

  if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    return;

  /* If we are either clobbering a register or doing a partial set,
     show we don't know the value.  */
  else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
    p->const_equiv[REGNO (dest)] = 0;

  /* If we are setting it to a constant, record that constant.  */
  else if (GET_CODE (SET_SRC (x)) == CONST_INT)
    p->const_equiv[REGNO (dest)] = SET_SRC (x);

  /* If this is a binary operation between a register we have been tracking
     and a constant, see if we can compute a new constant value.  */
  else if (ARITHMETIC_P (SET_SRC (x))
           && REG_P (XEXP (SET_SRC (x), 0))
           && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
           && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
           && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
           && 0 != (new = simplify_binary_operation
                    (GET_CODE (SET_SRC (x)), GET_MODE (dest),
                     p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
                     XEXP (SET_SRC (x), 1)))
           && GET_CODE (new) == CONST_INT)
    p->const_equiv[REGNO (dest)] = new;

  /* Otherwise, we can't do anything with this value.  */
  else
    p->const_equiv[REGNO (dest)] = 0;
}
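
/* For instance (invented hard register number 3): after processing

     (set (reg r3) (const_int 8))
     (set (reg r3) (plus (reg r3) (const_int 4)))

   const_equiv[3] holds (const_int 12), which later allows
   handle_epilogue_set to treat (set (reg sp) (plus (reg sp) (reg r3)))
   as a known adjustment of 12.  A CLOBBER of r3, or a partial set of it,
   drops the equivalence again.  */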

/* Emit an insn to do the load shown in p->equiv_reg_src, if needed.  */

static void
emit_equiv_load (struct epi_info *p)
{
  if (p->equiv_reg_src != 0)
    {
      rtx dest = p->sp_equiv_reg;

      if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
        dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
                            REGNO (p->sp_equiv_reg));

      emit_move_insn (dest, p->equiv_reg_src);
      p->equiv_reg_src = 0;
    }
}
#endif

/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
{
  int inserted = 0;
  edge e;
#if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
  rtx seq;
#endif
#ifdef HAVE_prologue
  rtx prologue_end = NULL_RTX;
#endif
#if defined (HAVE_epilogue) || defined (HAVE_return)
  rtx epilogue_end = NULL_RTX;
#endif
  edge_iterator ei;

#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Retain a map of the prologue insns.  */
      record_insns (seq, &prologue);
      prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);

      seq = get_insns ();
      end_sequence ();
      set_insn_locators (seq, prologue_locator);

      /* We can't deal with multiple successors of the entry block at the
         moment.  The function should always have at least one entry
         point.  */
      gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);

      insert_insn_on_edge (seq, EDGE_SUCC (ENTRY_BLOCK_PTR, 0));
      inserted = 1;
    }
#endif

  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;

#ifdef HAVE_return
  if (optimize && HAVE_return)
    {
      /* If we're allowed to generate a simple return instruction, then by
         definition we don't need a full epilogue.  Examine the block that
         falls through to EXIT.  If it does not contain any code, examine
         its predecessors and try to emit (conditional) return
         instructions.  */

      basic_block last;
      rtx label;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        if (e->flags & EDGE_FALLTHRU)
          break;
      if (e == NULL)
        goto epilogue_done;
      last = e->src;

      /* Verify that there are no active instructions in the last block.  */
      label = BB_END (last);
      while (label && !LABEL_P (label))
        {
          if (active_insn_p (label))
            break;
          label = PREV_INSN (label);
        }

      if (BB_HEAD (last) == label && LABEL_P (label))
        {
          edge_iterator ei2;
          rtx epilogue_line_note = NULL_RTX;

          /* Locate the line number associated with the closing brace,
             if we can find one.  */
          for (seq = get_last_insn ();
               seq && ! active_insn_p (seq);
               seq = PREV_INSN (seq))
            if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0)
              {
                epilogue_line_note = seq;
                break;
              }

          for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
            {
              basic_block bb = e->src;
              rtx jump;

              if (bb == ENTRY_BLOCK_PTR)
                {
                  ei_next (&ei2);
                  continue;
                }

              jump = BB_END (bb);
              if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
                {
                  ei_next (&ei2);
                  continue;
                }

              /* If we have an unconditional jump, we can replace that
                 with a simple return instruction.  */
              if (simplejump_p (jump))
                {
                  emit_return_into_block (bb, epilogue_line_note);
                  delete_insn (jump);
                }

              /* If we have a conditional jump, we can try to replace
                 that with a conditional return instruction.  */
              else if (condjump_p (jump))
                {
                  if (! redirect_jump (jump, 0, 0))
                    {
                      ei_next (&ei2);
                      continue;
                    }

                  /* If this block has only one successor, it both jumps
                     and falls through to the fallthru block, so we can't
                     delete the edge.  */
                  if (EDGE_COUNT (bb->succs) == 1)
                    {
                      ei_next (&ei2);
                      continue;
                    }
                }
              else
                {
                  ei_next (&ei2);
                  continue;
                }

              /* Fix up the CFG for the successful change we just made.  */
              redirect_edge_succ (e, EXIT_BLOCK_PTR);
            }

          /* Emit a return insn for the exit fallthru block.  Whether
             this is still reachable will be determined later.  */

          emit_barrier_after (BB_END (last));
          emit_return_into_block (last, epilogue_line_note);
          epilogue_end = BB_END (last);
          EDGE_SUCC (last, 0)->flags &= ~EDGE_FALLTHRU;
          goto epilogue_done;
        }
    }
#endif

  /* Find the edge that falls through to EXIT.  Other edges may exist
     due to RETURN instructions, but those don't need epilogues.
     There really shouldn't be a mixture -- either all should have
     been converted or none, however...  */

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      break;
  if (e == NULL)
    goto epilogue_done;

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      start_sequence ();
      epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);

      seq = gen_epilogue ();

#ifdef INCOMING_RETURN_ADDR_RTX
      /* If this function returns with the stack depressed and we can support
         it, massage the epilogue to actually do that.  */
      if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
          && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
        seq = keep_stack_depressed (seq);
#endif

      emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      record_insns (seq, &epilogue);
      set_insn_locators (seq, epilogue_locator);

      seq = get_insns ();
      end_sequence ();

      insert_insn_on_edge (seq, e);
      inserted = 1;
    }
  else
#endif
    {
      basic_block cur_bb;

      if (! next_active_insn (BB_END (e->src)))
        goto epilogue_done;
      /* We have a fall-through edge to the exit block, the source is not
         at the end of the function, and there will be an assembler epilogue
         at the end of the function.
         We can't use force_nonfallthru here, because that would try to
         use return.  Inserting a jump 'by hand' is extremely messy, so
         we take advantage of cfg_layout_finalize using
         fixup_fallthru_exit_predecessor.  */
      cfg_layout_initialize (0);
      FOR_EACH_BB (cur_bb)
        if (cur_bb->index >= 0 && cur_bb->next_bb->index >= 0)
          cur_bb->rbi->next = cur_bb->next_bb;
      cfg_layout_finalize ();
    }
epilogue_done:

  if (inserted)
    commit_edge_insertions ();

#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      basic_block bb = e->src;
      rtx insn = BB_END (bb);
      rtx i;
      rtx newinsn;

      if (!CALL_P (insn)
          || ! SIBLING_CALL_P (insn))
        {
          ei_next (&ei);
          continue;
        }

      start_sequence ();
      emit_insn (gen_sibcall_epilogue ());
      seq = get_insns ();
      end_sequence ();

      /* Retain a map of the epilogue insns.  Used in life analysis to
         avoid getting rid of sibcall epilogue insns.  Do this before we
         actually emit the sequence.  */
      record_insns (seq, &sibcall_epilogue);
      set_insn_locators (seq, epilogue_locator);

      i = PREV_INSN (insn);
      newinsn = emit_insn_before (seq, insn);
      ei_next (&ei);
    }
#endif

#ifdef HAVE_prologue
  /* This is probably all useless now that we use locators.  */
  if (prologue_end)
    {
      rtx insn, prev;

      /* GDB handles `break f' by setting a breakpoint on the first line
         note after the prologue, which means (1) that if there are line
         number notes before where we inserted the prologue we should move
         them, and (2) that we should generate a note before the end of the
         first basic block if there isn't one already.

         ??? This behavior is completely broken when dealing with multiple
         entry functions.  We simply always place the note in the first
         basic block and let alternate entry points be missed.  */

      for (insn = prologue_end; insn; insn = prev)
        {
          prev = PREV_INSN (insn);
          if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
            {
              /* Note that we cannot reorder the first insn in the
                 chain, since rest_of_compilation relies on that
                 remaining constant.  */
              if (prev == NULL)
                break;
              reorder_insns (insn, insn, prologue_end);
            }
        }

      /* Find the last line number note in the first block.  */
      for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
           insn != prologue_end && insn;
           insn = PREV_INSN (insn))
        if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
          break;

      /* If we didn't find one, make a copy of the first line number
         we run across.  */
      if (! insn)
        {
          for (insn = next_active_insn (prologue_end);
               insn;
               insn = PREV_INSN (insn))
            if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
              {
                emit_note_copy_after (insn, prologue_end);
                break;
              }
        }
    }
#endif
#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Similarly, move any line notes that appear after the epilogue.
         There is no need, however, to be quite so anal about the existence
         of such a note.  Also move the NOTE_INSN_FUNCTION_END and (possibly)
         NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
         info generation.  */
      for (insn = epilogue_end; insn; insn = next)
        {
          next = NEXT_INSN (insn);
          if (NOTE_P (insn)
              && (NOTE_LINE_NUMBER (insn) > 0
                  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
                  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
            reorder_insns (insn, insn, PREV_INSN (epilogue_end));
        }
    }
#endif
}
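
/* The machinery above is driven by optional named patterns in the target's
   machine description; HAVE_prologue and friends are defined only when the
   corresponding pattern exists.  A minimal sketch of such a pattern (the
   expander function name is hypothetical, not from any real port):

     (define_expand "prologue"
       [(const_int 0)]
       ""
     {
       mytarget_expand_prologue ();
       DONE;
     })

   Similar "epilogue", "sibcall_epilogue", and "return" patterns enable the
   other paths through thread_prologue_and_epilogue_insns.  */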

/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  rtx insn, last, note;
  int len;

  if ((len = VARRAY_SIZE (prologue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the beginning until we reach the last prologue insn.
         We apparently can't depend on basic_block_{head,end} after
         reorg has run.  */
      for (insn = f; insn; insn = NEXT_INSN (insn))
        {
          if (NOTE_P (insn))
            {
              if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
                note = insn;
            }
          else if (contains (insn, prologue))
            {
              last = insn;
              if (--len == 0)
                break;
            }
        }

      if (last)
        {
          /* Find the prologue-end note if we haven't already, and
             move it to just after the last prologue insn.  */
          if (note == 0)
            {
              for (note = last; (note = NEXT_INSN (note));)
                if (NOTE_P (note)
                    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
                  break;
            }

          /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
          if (LABEL_P (last))
            last = NEXT_INSN (last);
          reorder_insns (note, note, last);
        }
    }

  if ((len = VARRAY_SIZE (epilogue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the end until we reach the first epilogue insn.
         We apparently can't depend on basic_block_{head,end} after
         reorg has run.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
        {
          if (NOTE_P (insn))
            {
              if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
                note = insn;
            }
          else if (contains (insn, epilogue))
            {
              last = insn;
              if (--len == 0)
                break;
            }
        }

      if (last)
        {
          /* Find the epilogue-begin note if we haven't already, and
             move it to just before the first epilogue insn.  */
          if (note == 0)
            {
              for (note = insn; (note = PREV_INSN (note));)
                if (NOTE_P (note)
                    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
                  break;
            }

          if (PREV_INSN (last) != note)
            reorder_insns (note, note, PREV_INSN (last));
        }
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}

/* Called once, at initialization, to initialize function.c.  */

void
init_function_once (void)
{
  VARRAY_INT_INIT (prologue, 0, "prologue");
  VARRAY_INT_INIT (epilogue, 0, "epilogue");
  VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
}

/* Reset the ib_boundaries_block array.  */

void
reset_block_changes (void)
{
  VARRAY_TREE_INIT (cfun->ib_boundaries_block, 100, "ib_boundaries_block");
  VARRAY_PUSH_TREE (cfun->ib_boundaries_block, NULL_TREE);
}

/* Record the boundary for BLOCK.  */

void
record_block_change (tree block)
{
  int i, n;
  tree last_block;

  if (!block)
    return;

  last_block = VARRAY_TOP_TREE (cfun->ib_boundaries_block);
  VARRAY_POP (cfun->ib_boundaries_block);
  n = get_max_uid ();
  for (i = VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block); i < n; i++)
    VARRAY_PUSH_TREE (cfun->ib_boundaries_block, last_block);

  VARRAY_PUSH_TREE (cfun->ib_boundaries_block, block);
}

/* Finish the record of boundaries.  */

void
finalize_block_changes (void)
{
  record_block_change (DECL_INITIAL (current_function_decl));
}

/* For INSN, set *BLOCK to the BLOCK it belongs to.  */

void
check_block_change (rtx insn, tree *block)
{
  unsigned uid = INSN_UID (insn);

  if (uid >= VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block))
    return;

  *block = VARRAY_TREE (cfun->ib_boundaries_block, uid);
}

/* Release the ib_boundaries_block records.  */

void
free_block_changes (void)
{
  cfun->ib_boundaries_block = NULL;
}
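
/* A sketch of the intended calling sequence (hypothetical caller; the
   helper used to fetch a statement's BLOCK is made up):

     reset_block_changes ();
     for each statement being expanded to RTL:
       record_block_change (block_of_stmt);
       ... emit the RTL for the statement ...
     finalize_block_changes ();

   Each record_block_change call attributes every insn UID created since
   the previous call to the previously recorded block; check_block_change
   can then map an insn back to its BLOCK during later passes.  */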

/* Returns the name of the current function.  */

const char *
current_function_name (void)
{
  return lang_hooks.decl_printable_name (cfun->decl, 2);
}

#include "gt-function.h"
e2500fed 5441#include "gt-function.h"