/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */

36#include "config.h"
670ee920 37#include "system.h"
4977bab6
ZW
38#include "coretypes.h"
39#include "tm.h"
6f086dfc
RS
40#include "rtl.h"
41#include "tree.h"
42#include "flags.h"
1ef08c63 43#include "except.h"
6f086dfc 44#include "function.h"
6f086dfc 45#include "expr.h"
c6b97fac 46#include "optabs.h"
e78d8e51 47#include "libfuncs.h"
6f086dfc
RS
48#include "regs.h"
49#include "hard-reg-set.h"
50#include "insn-config.h"
51#include "recog.h"
52#include "output.h"
bdac5f58 53#include "basic-block.h"
10f0ad3d 54#include "toplev.h"
e2500fed 55#include "hashtab.h"
87ff9c8e 56#include "ggc.h"
b1474bb7 57#include "tm_p.h"
c0e7830f 58#include "integrate.h"
7afff7cf 59#include "langhooks.h"
61f71b34 60#include "target.h"
623a66fa 61#include "cfglayout.h"
4744afba 62#include "tree-gimple.h"
ef330312 63#include "tree-pass.h"
7d69de61 64#include "predict.h"
6fb5fa3c
DB
65#include "df.h"
66#include "timevar.h"
e3df376d 67#include "vecprim.h"
7d69de61 68
#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))

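/* For illustration: with ALIGN == 8, FLOOR_ROUND (-9, 8) computes
   (-9) & ~7 == -16, and CEIL_ROUND (9, 8) computes (9 + 7) & ~7 == 16;
   i.e. the macros round toward the more-negative and more-positive
   multiple of 8 respectively, without ever dividing a possibly
   negative value.  */
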
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   pass_stack_ptr_mod has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static VEC(int,heap) *prologue;
static VEC(int,heap) *epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static VEC(int,heap) *sibcall_epilogue;
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;

  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
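
/* For illustration (hypothetical values): a 12-byte BLKmode temporary
   allocated with 64-bit alignment on a downward-growing frame might be
   recorded as size == 12, full_size == 16 (12 rounded up to the
   alignment), base_offset == -16, align == 64, and level equal to
   temp_slot_level at the time of allocation.  The reuse logic in
   assign_stack_temp_for_type and the merging in combine_temp_slots
   work entirely from these fields.  */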
\f
/* Forward declarations.  */

static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
                                 struct function *);
static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
static int contains (const_rtx, VEC(int,heap) **);
#ifdef HAVE_return
static void emit_return_into_block (basic_block);
#endif
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed (rtx);
#endif
static void prepare_function_start (tree);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (tree decl)
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  gcc_unreachable ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the enter_nested langhook to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (tree context ATTRIBUTE_UNUSED)
{
  struct function *p;

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;

  lang_hooks.function.enter_nested (p);

  cfun = 0;
}

void
push_function_context (void)
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (tree context ATTRIBUTE_UNUSED)
{
  struct function *p = outer_function_chain;

  cfun = p;
  outer_function_chain = p->outer;

  current_function_decl = p->decl;

  lang_hooks.function.leave_nested (p);

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context (void)
{
  pop_function_context_from (current_function_decl);
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  lang_hooks.function.final (f);
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  VEC_free (int, heap, prologue);
  VEC_free (int, heap, epilogue);
  VEC_free (int, heap, sibcall_epilogue);

  f->eh = NULL;
  f->expr = NULL;
  f->emit = NULL;
  f->varasm = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  f->x_avail_temp_slots = NULL;
  f->x_used_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_return_label = NULL;
  f->x_naked_return_label = NULL;
  f->x_stack_slot_list = NULL;
  f->x_stack_check_probe_note = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_parm_birth_insn = NULL;
  f->epilogue_delay_list = NULL;
}
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

static HOST_WIDE_INT
get_func_frame_size (struct function *f)
{
  if (FRAME_GROWS_DOWNWARD)
    return -f->x_frame_offset;
  else
    return f->x_frame_offset;
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  return get_func_frame_size (cfun);
}

/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetic for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
             /* Leave room for the fixed part of the frame.  */
             - 64 * UNITS_PER_WORD)
    {
      error ("%Jtotal size of local objects too large", func);
      return TRUE;
    }

  return FALSE;
}

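/* For illustration: on a hypothetical 32-bit target (GET_MODE_BITSIZE
   (Pmode) == 32, UNITS_PER_WORD == 4), the limit above works out to
   0x80000000 - 256 bytes; a frame at least that large is diagnosed
   once, and the caller then resets the frame offset to zero.  */
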
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
                      struct function *function)
{
  rtx x, addr;
  int bigend_correction = 0;
  unsigned int alignment;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;
      else
        alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = lang_hooks.types.type_for_mode (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  if (FRAME_GROWS_DOWNWARD)
    function->x_frame_offset -= size;

  /* Ignore alignment we can't do with expected alignment of the boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  The default is
     to always honor requests to align the stack but a port may choose to
     do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
  if (STACK_ALIGNMENT_NEEDED
      || mode != BLKmode
      || size != 0)
    {
      /* We must be careful here, since FRAME_OFFSET might be negative and
         division with a negative dividend isn't as well defined as we might
         like.  So we instead assume that ALIGNMENT is a power of two and
         use logical operations which are unambiguous.  */
      if (FRAME_GROWS_DOWNWARD)
        function->x_frame_offset
          = (FLOOR_ROUND (function->x_frame_offset - frame_phase,
                          (unsigned HOST_WIDE_INT) alignment)
             + frame_phase);
      else
        function->x_frame_offset
          = (CEIL_ROUND (function->x_frame_offset - frame_phase,
                         (unsigned HOST_WIDE_INT) alignment)
             + frame_phase);
    }

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          trunc_int_for_mode
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (function->x_frame_offset + bigend_correction,
                           Pmode));

  if (!FRAME_GROWS_DOWNWARD)
    function->x_frame_offset += size;

  x = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (x) = 1;

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  if (frame_offset_overflow (function->x_frame_offset, function->decl))
    function->x_frame_offset = 0;

  return x;
}

/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
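
/* For illustration, a typical use from expansion code would be

     rtx mem = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   which reserves a word-sized slot aligned according to SImode and
   yields a MEM rtx addressed off virtual_stack_vars_rtx (or off the
   frame pointer once virtuals_instantiated is set).  */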

\f
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
    VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);

  return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VEC_length (temp_slot_p, used_temp_slots) - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
                            int keep, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  /* These are now unused.  */
  gcc_assert (keep <= 1);

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
        {
          if (p->align >= align && p->size >= size
              && GET_MODE (p->slot) == mode
              && objects_must_conflict_p (p->type, type)
              && (best_p == 0 || best_p->size > p->size
                  || (best_p->size == p->size && best_p->align > p->align)))
            {
              if (p->align == align && p->size == size)
                {
                  selected = p;
                  cut_slot_from_list (selected, &avail_temp_slots);
                  best_p = 0;
                  break;
                }
              best_p = p;
            }
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = ggc_alloc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
              p->align = best_p->align;
              p->address = 0;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
        p->size = frame_offset_old - frame_offset;
      else
        p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
        {
          p->base_offset = frame_offset;
          p->full_size = frame_offset_old - frame_offset;
        }
      else
        {
          p->base_offset = frame_offset_old;
          p->full_size = frame_offset - frame_offset_old;
        }
      p->address = 0;

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->type = type;
  p->level = temp_slot_level;
  p->keep = keep;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, (AGGREGATE_TYPE_P (type)
                                  || TREE_CODE (type) == COMPLEX_TYPE));
    }
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
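
/* For illustration, expansion code that needs a scratch word on the
   stack can write

     rtx tmp = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);

   KEEP == 0 means the slot is returned to the free list (and possibly
   combined with its neighbors) by the next call to free_temp_slots.  */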
\f
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in registers
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we can find a fixed upper limit on
         the size, so try that instead.  */
      else if (size == -1)
        size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("size of variable %q+D is too large", decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     at high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (p->base_offset + p->full_size == q->base_offset)
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (q->base_offset + q->full_size == p->base_offset)
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
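
/* For illustration (hypothetical offsets): two freed BLKmode slots
   with p->base_offset == -32, p->full_size == 16 and q->base_offset ==
   -16, q->full_size == 16 satisfy p->base_offset + p->full_size ==
   q->base_offset, so they are merged into a single 32-byte slot that a
   later assign_stack_temp_for_type request can reuse.  */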
\f
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  rtx next;
  int i;

  for (i = max_slot_level (); i >= 0; i--)
    for (p = *temp_slots_at_level (i); p; p = p->next)
      {
        if (XEXP (p->slot, 0) == x
            || p->address == x
            || (GET_CODE (x) == PLUS
                && XEXP (x, 0) == virtual_stack_vars_rtx
                && GET_CODE (XEXP (x, 1)) == CONST_INT
                && INTVAL (XEXP (x, 1)) >= p->base_offset
                && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
          return p;

        else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
          for (next = p->address; next; next = XEXP (next, 1))
            if (XEXP (next, 0) == x)
              return p;
      }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (rtx old, rtx new)
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW
     is a register, see if one operand of the PLUS is a temporary
     location.  If so, NEW points into it.  Otherwise, if both OLD and
     NEW are a PLUS and there is a register in common between them,
     try a recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
        return;

      if (REG_P (new))
        {
          update_temp_slot_address (XEXP (old, 0), new);
          update_temp_slot_address (XEXP (old, 1), new);
          return;
        }
      else if (GET_CODE (new) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
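
/* For illustration: if a temporary lives at (plus virtual_stack_vars_rtx
   (const_int -16)) and later rewriting produces the equivalent address
   (plus frame_pointer_rtx (const_int -8)), update_temp_slot_address
   records the new form in p->address so find_temp_slot_from_address
   still recognizes references to the slot.  The offsets here are made
   up; actual values depend on STARTING_FRAME_OFFSET and the target.  */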

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
            {
              next = q->next;

              if (p != q && q->addr_taken)
                move_slot_to_level (q, temp_slot_level - 1);
            }

          move_slot_to_level (p, temp_slot_level - 1);
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        move_slot_to_level (p, temp_slot_level - 1);
    }
}
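
/* For illustration: in a statement expression such as

     s = ({ struct S tmp = f (); tmp; });

   the value of the grouping may live in a temporary of the inner
   nesting level; preserve_temp_slots effectively re-parents that slot
   to the enclosing level so that popping the inner level does not free
   it while the value is still needed.  */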

/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        make_slot_available (p);
    }

  combine_temp_slots ();
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
    }

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;
}
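
/* For illustration, the expected calling pattern around a statement is

     push_temp_slots ();
     ... expand the statement, allocating temporaries ...
     free_temp_slots ();
     pop_temp_slots ();

   where free_temp_slots releases the non-kept slots of the current
   level and pop_temp_slots releases everything else at that level.
   preserve_temp_slots can be called before popping to keep a result
   that is still live in one of the slots.  */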
\f
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* On most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS \
  ? (current_function_outgoing_args_size \
     + (OUTGOING_REG_PARM_STACK_SPACE ? 0 : REG_PARM_STACK_SPACE (FNDECL))) \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
 + (STACK_POINTER_OFFSET))
#endif
#endif
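
/* For illustration: on a target that accumulates outgoing arguments
   and defines neither REG_PARM_STACK_SPACE nor STACK_POINTER_OFFSET,
   STACK_DYNAMIC_OFFSET (fndecl) reduces to
   current_function_outgoing_args_size + 0, i.e. dynamic allocations
   start just above the outgoing argument block.  */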
4fa48eae 1227
659e47fb 1228\f
bbf9b913
RH
1229/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1230 is a virtual register, return the equivalent hard register and set the
1231 offset indirectly through the pointer. Otherwise, return 0. */
6f086dfc 1232
bbf9b913
RH
1233static rtx
1234instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
6f086dfc 1235{
bbf9b913
RH
1236 rtx new;
1237 HOST_WIDE_INT offset;
6f086dfc 1238
bbf9b913
RH
1239 if (x == virtual_incoming_args_rtx)
1240 new = arg_pointer_rtx, offset = in_arg_offset;
1241 else if (x == virtual_stack_vars_rtx)
1242 new = frame_pointer_rtx, offset = var_offset;
1243 else if (x == virtual_stack_dynamic_rtx)
1244 new = stack_pointer_rtx, offset = dynamic_offset;
1245 else if (x == virtual_outgoing_args_rtx)
1246 new = stack_pointer_rtx, offset = out_arg_offset;
1247 else if (x == virtual_cfa_rtx)
f6672e8e
RH
1248 {
1249#ifdef FRAME_POINTER_CFA_OFFSET
1250 new = frame_pointer_rtx;
1251#else
1252 new = arg_pointer_rtx;
1253#endif
1254 offset = cfa_offset;
1255 }
bbf9b913
RH
1256 else
1257 return NULL_RTX;
6f086dfc 1258
bbf9b913
RH
1259 *poffset = offset;
1260 return new;
6f086dfc
RS
1261}
1262
bbf9b913
RH
1263/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1264 Instantiate any virtual registers present inside of *LOC. The expression
1265 is simplified, as much as possible, but is not to be considered "valid"
1266 in any sense implied by the target. If any change is made, set CHANGED
1267 to true. */
6f086dfc 1268
bbf9b913
RH
1269static int
1270instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
6f086dfc 1271{
bbf9b913
RH
1272 HOST_WIDE_INT offset;
1273 bool *changed = (bool *) data;
1274 rtx x, new;
6f086dfc 1275
bbf9b913
RH
1276 x = *loc;
1277 if (x == 0)
1278 return 0;
1279
1280 switch (GET_CODE (x))
6f086dfc 1281 {
bbf9b913
RH
1282 case REG:
1283 new = instantiate_new_reg (x, &offset);
1284 if (new)
1285 {
1286 *loc = plus_constant (new, offset);
1287 if (changed)
1288 *changed = true;
1289 }
1290 return -1;
1291
1292 case PLUS:
1293 new = instantiate_new_reg (XEXP (x, 0), &offset);
1294 if (new)
1295 {
1296 new = plus_constant (new, offset);
1297 *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
1298 if (changed)
1299 *changed = true;
1300 return -1;
1301 }
e5e809f4 1302
bbf9b913
RH
1303 /* FIXME -- from old code */
1304 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1305 we can commute the PLUS and SUBREG because pointers into the
1306 frame are well-behaved. */
1307 break;
ce717ce4 1308
bbf9b913
RH
1309 default:
1310 break;
6f086dfc
RS
1311 }
1312
bbf9b913 1313 return 0;
6f086dfc
RS
1314}
1315
bbf9b913
RH
1316/* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1317 matches the predicate for insn CODE operand OPERAND. */
6f086dfc 1318
bbf9b913
RH
1319static int
1320safe_insn_predicate (int code, int operand, rtx x)
6f086dfc 1321{
bbf9b913 1322 const struct insn_operand_data *op_data;
6f086dfc 1323
bbf9b913
RH
1324 if (code < 0)
1325 return true;
6f086dfc 1326
bbf9b913
RH
1327 op_data = &insn_data[code].operand[operand];
1328 if (op_data->predicate == NULL)
1329 return true;
5a73491b 1330
bbf9b913
RH
1331 return op_data->predicate (x, op_data->mode);
1332}
5a73491b 1333
bbf9b913
RH
1334/* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1335 registers present inside of insn. The result will be a valid insn. */
5a73491b
RK
1336
1337static void
bbf9b913 1338instantiate_virtual_regs_in_insn (rtx insn)
5a73491b 1339{
bbf9b913
RH
1340 HOST_WIDE_INT offset;
1341 int insn_code, i;
9325973e 1342 bool any_change = false;
bbf9b913 1343 rtx set, new, x, seq;
32e66afd 1344
bbf9b913
RH
1345 /* There are some special cases to be handled first. */
1346 set = single_set (insn);
1347 if (set)
32e66afd 1348 {
bbf9b913
RH
1349 /* We're allowed to assign to a virtual register. This is interpreted
1350 to mean that the underlying register gets assigned the inverse
1351 transformation. This is used, for example, in the handling of
1352 non-local gotos. */
1353 new = instantiate_new_reg (SET_DEST (set), &offset);
1354 if (new)
1355 {
1356 start_sequence ();
32e66afd 1357
bbf9b913
RH
1358 for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
1359 x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
1360 GEN_INT (-offset));
1361 x = force_operand (x, new);
1362 if (x != new)
1363 emit_move_insn (new, x);
5a73491b 1364
bbf9b913
RH
1365 seq = get_insns ();
1366 end_sequence ();
5a73491b 1367
bbf9b913
RH
1368 emit_insn_before (seq, insn);
1369 delete_insn (insn);
1370 return;
1371 }
5a73491b 1372
bbf9b913
RH
1373 /* Handle a straight copy from a virtual register by generating a
1374 new add insn. The difference between this and falling through
1375 to the generic case is avoiding a new pseudo and eliminating a
1376 move insn in the initial rtl stream. */
1377 new = instantiate_new_reg (SET_SRC (set), &offset);
1378 if (new && offset != 0
1379 && REG_P (SET_DEST (set))
1380 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1381 {
1382 start_sequence ();
5a73491b 1383
bbf9b913
RH
1384 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
1385 new, GEN_INT (offset), SET_DEST (set),
1386 1, OPTAB_LIB_WIDEN);
1387 if (x != SET_DEST (set))
1388 emit_move_insn (SET_DEST (set), x);
770ae6cc 1389
bbf9b913
RH
1390 seq = get_insns ();
1391 end_sequence ();
87ce34d6 1392
bbf9b913
RH
1393 emit_insn_before (seq, insn);
1394 delete_insn (insn);
87ce34d6 1395 return;
bbf9b913 1396 }
5a73491b 1397
bbf9b913 1398 extract_insn (insn);
9325973e 1399 insn_code = INSN_CODE (insn);
5a73491b 1400
bbf9b913
RH
1401 /* Handle a plus involving a virtual register by determining if the
1402 operands remain valid if they're modified in place. */
1403 if (GET_CODE (SET_SRC (set)) == PLUS
1404 && recog_data.n_operands >= 3
1405 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1406 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1407 && GET_CODE (recog_data.operand[2]) == CONST_INT
1408 && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
1409 {
1410 offset += INTVAL (recog_data.operand[2]);
5a73491b 1411
bbf9b913 1412 /* If the sum is zero, then replace with a plain move. */
9325973e
RH
1413 if (offset == 0
1414 && REG_P (SET_DEST (set))
1415 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
bbf9b913
RH
1416 {
1417 start_sequence ();
1418 emit_move_insn (SET_DEST (set), new);
1419 seq = get_insns ();
1420 end_sequence ();
d1405722 1421
bbf9b913
RH
1422 emit_insn_before (seq, insn);
1423 delete_insn (insn);
1424 return;
1425 }
d1405722 1426
bbf9b913 1427 x = gen_int_mode (offset, recog_data.operand_mode[2]);
bbf9b913
RH
1428
1429 /* Using validate_change and apply_change_group here leaves
1430 recog_data in an invalid state. Since we know exactly what
1431 we want to check, do those two by hand. */
1432 if (safe_insn_predicate (insn_code, 1, new)
1433 && safe_insn_predicate (insn_code, 2, x))
1434 {
1435 *recog_data.operand_loc[1] = recog_data.operand[1] = new;
1436 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1437 any_change = true;
9325973e
RH
1438
1439 /* Fall through into the regular operand fixup loop in
1440 order to take care of operands other than 1 and 2. */
bbf9b913
RH
1441 }
1442 }
1443 }
d1405722 1444 else
9325973e
RH
1445 {
1446 extract_insn (insn);
1447 insn_code = INSN_CODE (insn);
1448 }
5dc96d60 1449
bbf9b913
RH
1450 /* In the general case, we expect virtual registers to appear only in
1451 operands, and then only as either bare registers or inside memories. */
1452 for (i = 0; i < recog_data.n_operands; ++i)
1453 {
1454 x = recog_data.operand[i];
1455 switch (GET_CODE (x))
1456 {
1457 case MEM:
1458 {
1459 rtx addr = XEXP (x, 0);
1460 bool changed = false;
1461
1462 for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1463 if (!changed)
1464 continue;
1465
1466 start_sequence ();
1467 x = replace_equiv_address (x, addr);
1468 seq = get_insns ();
1469 end_sequence ();
1470 if (seq)
1471 emit_insn_before (seq, insn);
1472 }
1473 break;
1474
1475 case REG:
1476 new = instantiate_new_reg (x, &offset);
1477 if (new == NULL)
1478 continue;
1479 if (offset == 0)
1480 x = new;
1481 else
1482 {
1483 start_sequence ();
6f086dfc 1484
bbf9b913
RH
1485 /* Careful, special mode predicates may have stuff in
1486 insn_data[insn_code].operand[i].mode that isn't useful
1487 to us for computing a new value. */
1488 /* ??? Recognize address_operand and/or "p" constraints
1489 to see if (plus new offset) is a valid before we put
1490 this through expand_simple_binop. */
1491 x = expand_simple_binop (GET_MODE (x), PLUS, new,
1492 GEN_INT (offset), NULL_RTX,
1493 1, OPTAB_LIB_WIDEN);
1494 seq = get_insns ();
1495 end_sequence ();
1496 emit_insn_before (seq, insn);
1497 }
1498 break;
6f086dfc 1499
bbf9b913
RH
1500 case SUBREG:
1501 new = instantiate_new_reg (SUBREG_REG (x), &offset);
1502 if (new == NULL)
1503 continue;
1504 if (offset != 0)
1505 {
1506 start_sequence ();
1507 new = expand_simple_binop (GET_MODE (new), PLUS, new,
1508 GEN_INT (offset), NULL_RTX,
1509 1, OPTAB_LIB_WIDEN);
1510 seq = get_insns ();
1511 end_sequence ();
1512 emit_insn_before (seq, insn);
1513 }
fbdd0b09
RH
1514 x = simplify_gen_subreg (recog_data.operand_mode[i], new,
1515 GET_MODE (new), SUBREG_BYTE (x));
bbf9b913 1516 break;
6f086dfc 1517
bbf9b913
RH
1518 default:
1519 continue;
1520 }
6f086dfc 1521
bbf9b913
RH
1522 /* At this point, X contains the new value for the operand.
1523 Validate the new value vs the insn predicate. Note that
1524 asm insns will have insn_code -1 here. */
1525 if (!safe_insn_predicate (insn_code, i, x))
6ba1bd36
JM
1526 {
1527 start_sequence ();
1528 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1529 seq = get_insns ();
1530 end_sequence ();
1531 if (seq)
1532 emit_insn_before (seq, insn);
1533 }
6f086dfc 1534
bbf9b913
RH
1535 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1536 any_change = true;
1537 }
6f086dfc 1538
bbf9b913
RH
1539 if (any_change)
1540 {
1541 /* Propagate operand changes into the duplicates. */
1542 for (i = 0; i < recog_data.n_dups; ++i)
1543 *recog_data.dup_loc[i]
3e916873 1544 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
5dc96d60 1545
bbf9b913
RH
1546 /* Force re-recognition of the instruction for validation. */
1547 INSN_CODE (insn) = -1;
1548 }
6f086dfc 1549
bbf9b913 1550 if (asm_noperands (PATTERN (insn)) >= 0)
6f086dfc 1551 {
bbf9b913 1552 if (!check_asm_operands (PATTERN (insn)))
6f086dfc 1553 {
bbf9b913
RH
1554 error_for_asm (insn, "impossible constraint in %<asm%>");
1555 delete_insn (insn);
1556 }
1557 }
1558 else
1559 {
1560 if (recog_memoized (insn) < 0)
1561 fatal_insn_not_found (insn);
1562 }
1563}
14a774a9 1564
bbf9b913
RH
1565/* Subroutine of instantiate_decls. Given RTL representing a decl,
1566 do any instantiation required. */
14a774a9 1567
bbf9b913
RH
1568static void
1569instantiate_decl (rtx x)
1570{
1571 rtx addr;
6f086dfc 1572
bbf9b913
RH
1573 if (x == 0)
1574 return;
6f086dfc 1575
bbf9b913
RH
1576 /* If this is a CONCAT, recurse for the pieces. */
1577 if (GET_CODE (x) == CONCAT)
1578 {
1579 instantiate_decl (XEXP (x, 0));
1580 instantiate_decl (XEXP (x, 1));
1581 return;
1582 }
6f086dfc 1583
bbf9b913
RH
1584 /* If this is not a MEM, no need to do anything. Similarly if the
1585 address is a constant or a register that is not a virtual register. */
1586 if (!MEM_P (x))
1587 return;
6f086dfc 1588
bbf9b913
RH
1589 addr = XEXP (x, 0);
1590 if (CONSTANT_P (addr)
1591 || (REG_P (addr)
1592 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1593 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1594 return;
6f086dfc 1595
bbf9b913
RH
1596 for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1597}
6f086dfc 1598
434eba35
JJ
1599/* Helper for instantiate_decls called via walk_tree: Process all decls
1600 in the given DECL_VALUE_EXPR. */
1601
1602static tree
1603instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1604{
1605 tree t = *tp;
07beea0d 1606 if (! EXPR_P (t) && ! GIMPLE_STMT_P (t))
434eba35
JJ
1607 {
1608 *walk_subtrees = 0;
1609 if (DECL_P (t) && DECL_RTL_SET_P (t))
1610 instantiate_decl (DECL_RTL (t));
1611 }
1612 return NULL;
1613}
1614
bbf9b913
RH
1615/* Subroutine of instantiate_decls: Process all decls in the given
1616 BLOCK node and all its subblocks. */
6f086dfc 1617
bbf9b913
RH
1618static void
1619instantiate_decls_1 (tree let)
1620{
1621 tree t;
6f086dfc 1622
bbf9b913 1623 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
434eba35
JJ
1624 {
1625 if (DECL_RTL_SET_P (t))
1626 instantiate_decl (DECL_RTL (t));
1627 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1628 {
1629 tree v = DECL_VALUE_EXPR (t);
1630 walk_tree (&v, instantiate_expr, NULL, NULL);
1631 }
1632 }
6f086dfc 1633
bbf9b913
RH
1634 /* Process all subblocks. */
1635 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1636 instantiate_decls_1 (t);
1637}
6f086dfc 1638
bbf9b913
RH
1639/* Scan all decls in FNDECL (both variables and parameters) and instantiate
1640 all virtual registers in their DECL_RTL's. */
6f086dfc 1641
bbf9b913
RH
1642static void
1643instantiate_decls (tree fndecl)
1644{
1645 tree decl;
6f086dfc 1646
bbf9b913
RH
1647 /* Process all parameters of the function. */
1648 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1649 {
1650 instantiate_decl (DECL_RTL (decl));
1651 instantiate_decl (DECL_INCOMING_RTL (decl));
434eba35
JJ
1652 if (DECL_HAS_VALUE_EXPR_P (decl))
1653 {
1654 tree v = DECL_VALUE_EXPR (decl);
1655 walk_tree (&v, instantiate_expr, NULL, NULL);
1656 }
bbf9b913 1657 }
4fd796bb 1658
bbf9b913
RH
1659 /* Now process all variables defined in the function or its subblocks. */
1660 instantiate_decls_1 (DECL_INITIAL (fndecl));
1661}
6f086dfc 1662
bbf9b913
RH
1663/* Pass through the INSNS of function FNDECL and convert virtual register
1664 references to hard register references. */
6f086dfc 1665
c2924966 1666static unsigned int
bbf9b913
RH
1667instantiate_virtual_regs (void)
1668{
1669 rtx insn;
6f086dfc 1670
bbf9b913
RH
1671 /* Compute the offsets to use for this function. */
1672 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1673 var_offset = STARTING_FRAME_OFFSET;
1674 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1675 out_arg_offset = STACK_POINTER_OFFSET;
f6672e8e
RH
1676#ifdef FRAME_POINTER_CFA_OFFSET
1677 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1678#else
bbf9b913 1679 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
f6672e8e 1680#endif
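/* For illustration only (the values are target-dependent assumptions,
   not fixed by this code): on a typical FRAME_GROWS_DOWNWARD target one
   might see

     in_arg_offset  = FIRST_PARM_OFFSET (fndecl)  e.g.  0
     var_offset     = STARTING_FRAME_OFFSET       e.g. -16
     out_arg_offset = STACK_POINTER_OFFSET        e.g.  0

   so in the scan below virtual-incoming-args becomes arg-pointer + 0
   and virtual-stack-vars becomes frame-pointer + -16.  */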
e9a25f70 1681
bbf9b913
RH
1682 /* Initialize recognition, indicating that volatile is OK. */
1683 init_recog ();
6f086dfc 1684
bbf9b913
RH
1685 /* Scan through all the insns, instantiating every virtual register still
1686 present. */
1687 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1688 if (INSN_P (insn))
6f086dfc 1689 {
bbf9b913
RH
1690 /* These patterns in the instruction stream can never be recognized.
1691 Fortunately, they shouldn't contain virtual registers either. */
1692 if (GET_CODE (PATTERN (insn)) == USE
1693 || GET_CODE (PATTERN (insn)) == CLOBBER
1694 || GET_CODE (PATTERN (insn)) == ADDR_VEC
1695 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1696 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1697 continue;
1698
1699 instantiate_virtual_regs_in_insn (insn);
1700
1701 if (INSN_DELETED_P (insn))
1702 continue;
1703
1704 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1705
1706 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1707 if (GET_CODE (insn) == CALL_INSN)
1708 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1709 instantiate_virtual_regs_in_rtx, NULL);
6f086dfc 1710 }
6f086dfc 1711
bbf9b913
RH
1712 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1713 instantiate_decls (current_function_decl);
1714
1715 /* Indicate that, from now on, assign_stack_local should use
1716 frame_pointer_rtx. */
1717 virtuals_instantiated = 1;
c2924966 1718 return 0;
6f086dfc 1719}
ef330312
PB
1720
1721struct tree_opt_pass pass_instantiate_virtual_regs =
1722{
defb77dc 1723 "vregs", /* name */
ef330312
PB
1724 NULL, /* gate */
1725 instantiate_virtual_regs, /* execute */
1726 NULL, /* sub */
1727 NULL, /* next */
1728 0, /* static_pass_number */
1729 0, /* tv_id */
1730 0, /* properties_required */
1731 0, /* properties_provided */
1732 0, /* properties_destroyed */
1733 0, /* todo_flags_start */
defb77dc 1734 TODO_dump_func, /* todo_flags_finish */
ef330312
PB
1735 0 /* letter */
1736};
1737
6f086dfc 1738\f
d181c154
RS
1739/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1740 This means a type for which function calls must pass an address to the
1741 function or get an address back from the function.
1742 EXP may be a type node or an expression (whose type is tested). */
6f086dfc
RS
1743
1744int
586de218 1745aggregate_value_p (const_tree exp, const_tree fntype)
6f086dfc 1746{
9d790a4f
RS
1747 int i, regno, nregs;
1748 rtx reg;
2f939d94 1749
586de218 1750 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
d181c154 1751
500c353d
OH
1752 /* DECL node associated with FNTYPE when relevant, which we might need to
1753 check for by-invisible-reference returns, typically for CALL_EXPR input
1754 EXPressions. */
586de218 1755 const_tree fndecl = NULL_TREE;
500c353d 1756
61f71b34
DD
1757 if (fntype)
1758 switch (TREE_CODE (fntype))
1759 {
1760 case CALL_EXPR:
500c353d
OH
1761 fndecl = get_callee_fndecl (fntype);
1762 fntype = fndecl ? TREE_TYPE (fndecl) : 0;
61f71b34
DD
1763 break;
1764 case FUNCTION_DECL:
500c353d
OH
1765 fndecl = fntype;
1766 fntype = TREE_TYPE (fndecl);
61f71b34
DD
1767 break;
1768 case FUNCTION_TYPE:
1769 case METHOD_TYPE:
1770 break;
1771 case IDENTIFIER_NODE:
1772 fntype = 0;
1773 break;
1774 default:
1775 /* We don't expect other tree codes here. */
0bccc606 1776 gcc_unreachable ();
61f71b34
DD
1777 }
1778
d7bf8ada
MM
1779 if (TREE_CODE (type) == VOID_TYPE)
1780 return 0;
500c353d 1781
cc77ae10
JM
1782 /* If the front end has decided that this needs to be passed by
1783 reference, do so. */
1784 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
1785 && DECL_BY_REFERENCE (exp))
1786 return 1;
500c353d
OH
1787
1788 /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
1789 called function RESULT_DECL, meaning the function returns in memory by
1790 invisible reference. This check lets front-ends not set TREE_ADDRESSABLE
1791 on the function type, which used to be the way to request such a return
1792 mechanism but might now be causing troubles at gimplification time if
1793 temporaries with the function type need to be created. */
1794 if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
1795 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
1796 return 1;
1797
61f71b34 1798 if (targetm.calls.return_in_memory (type, fntype))
6f086dfc 1799 return 1;
956d6950 1800 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
49a2e5b2
DE
1801 and thus can't be returned in registers. */
1802 if (TREE_ADDRESSABLE (type))
1803 return 1;
05e3bdb9 1804 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
6f086dfc 1805 return 1;
9d790a4f
RS
1806 /* Make sure we have suitable call-clobbered regs to return
1807 the value in; if not, we must return it in memory. */
1d636cc6 1808 reg = hard_function_value (type, 0, fntype, 0);
e71f7aa5
JW
1809
1810 /* If we have something other than a REG (e.g. a PARALLEL), then assume
1811 it is OK. */
f8cfc6aa 1812 if (!REG_P (reg))
e71f7aa5
JW
1813 return 0;
1814
9d790a4f 1815 regno = REGNO (reg);
66fd46b6 1816 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
9d790a4f
RS
1817 for (i = 0; i < nregs; i++)
1818 if (! call_used_regs[regno + i])
1819 return 1;
6f086dfc
RS
1820 return 0;
1821}
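/* Editorial example of the tests above, hedged: with
   -fpcc-struct-return in effect, or when targetm.calls.return_in_memory
   says so, a declaration such as

     struct pair { int a, b; };
     struct pair f (void);

   makes aggregate_value_p return 1, and callers pass the address of a
   temporary for f to fill in; a plain 'int f (void)' returns 0 and uses
   the ordinary register return convention checked at the end.  */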
1822\f
8fff4fc1
RH
1823/* Return true if we should assign DECL a pseudo register; false if it
1824 should live on the local stack. */
1825
1826bool
fa233e34 1827use_register_for_decl (const_tree decl)
8fff4fc1
RH
1828{
1829 /* Honor volatile. */
1830 if (TREE_SIDE_EFFECTS (decl))
1831 return false;
1832
1833 /* Honor addressability. */
1834 if (TREE_ADDRESSABLE (decl))
1835 return false;
1836
1837 /* Only register-like things go in registers. */
1838 if (DECL_MODE (decl) == BLKmode)
1839 return false;
1840
1841 /* If -ffloat-store specified, don't put explicit float variables
1842 into registers. */
1843 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1844 propagates values across these stores, and it probably shouldn't. */
1845 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1846 return false;
1847
78e0d62b
RH
1848 /* If we're not interested in tracking debugging information for
1849 this decl, then we can certainly put it in a register. */
1850 if (DECL_IGNORED_P (decl))
8fff4fc1
RH
1851 return true;
1852
8fff4fc1
RH
1853 return (optimize || DECL_REGISTER (decl));
1854}
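/* For illustration (hypothetical decls): given

     volatile int v;          -- TREE_SIDE_EFFECTS, stays on the stack
     int x;  ... &x ...       -- TREE_ADDRESSABLE, stays on the stack
     int i;                   -- gets a pseudo when optimizing

   only the last is given a pseudo register by the tests above.  */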
1855
0976078c
RH
1856/* Return true if TYPE should be passed by invisible reference. */
1857
1858bool
8cd5a4e0
RH
1859pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1860 tree type, bool named_arg)
0976078c
RH
1861{
1862 if (type)
1863 {
1864 /* If this type contains non-trivial constructors, then it is
1865 forbidden for the middle-end to create any new copies. */
1866 if (TREE_ADDRESSABLE (type))
1867 return true;
1868
d58247a3
RH
1869 /* GCC post 3.4 passes *all* variable sized types by reference. */
1870 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
0976078c
RH
1871 return true;
1872 }
1873
8cd5a4e0 1874 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
0976078c
RH
1875}
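/* A hedged illustration of the early-outs above: a C++ class with a
   non-trivial copy constructor is marked TREE_ADDRESSABLE by the front
   end, so it is always passed by invisible reference, as is any type
   whose TYPE_SIZE is not a compile-time INTEGER_CST; a fixed-size POD
   struct falls through to targetm.calls.pass_by_reference.  */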
1876
6cdd5672
RH
1877/* Return true if TYPE, which is passed by reference, should be callee
1878 copied instead of caller copied. */
1879
1880bool
1881reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1882 tree type, bool named_arg)
1883{
1884 if (type && TREE_ADDRESSABLE (type))
1885 return false;
1886 return targetm.calls.callee_copies (ca, mode, type, named_arg);
1887}
1888
6071dc7f
RH
1889/* Structures to communicate between the subroutines of assign_parms.
1890 The first holds data persistent across all parameters, the second
1891 is cleared out for each parameter. */
6f086dfc 1892
6071dc7f 1893struct assign_parm_data_all
6f086dfc 1894{
6f086dfc 1895 CUMULATIVE_ARGS args_so_far;
6f086dfc 1896 struct args_size stack_args_size;
6071dc7f
RH
1897 tree function_result_decl;
1898 tree orig_fnargs;
bb27eeda
SE
1899 rtx first_conversion_insn;
1900 rtx last_conversion_insn;
6071dc7f
RH
1901 HOST_WIDE_INT pretend_args_size;
1902 HOST_WIDE_INT extra_pretend_bytes;
1903 int reg_parm_stack_space;
1904};
6f086dfc 1905
6071dc7f
RH
1906struct assign_parm_data_one
1907{
1908 tree nominal_type;
1909 tree passed_type;
1910 rtx entry_parm;
1911 rtx stack_parm;
1912 enum machine_mode nominal_mode;
1913 enum machine_mode passed_mode;
1914 enum machine_mode promoted_mode;
1915 struct locate_and_pad_arg_data locate;
1916 int partial;
1917 BOOL_BITFIELD named_arg : 1;
6071dc7f
RH
1918 BOOL_BITFIELD passed_pointer : 1;
1919 BOOL_BITFIELD on_stack : 1;
1920 BOOL_BITFIELD loaded_in_reg : 1;
1921};
ebb904cb 1922
6071dc7f 1923/* A subroutine of assign_parms. Initialize ALL. */
6f086dfc 1924
6071dc7f
RH
1925static void
1926assign_parms_initialize_all (struct assign_parm_data_all *all)
1927{
1928 tree fntype;
6f086dfc 1929
6071dc7f
RH
1930 memset (all, 0, sizeof (*all));
1931
1932 fntype = TREE_TYPE (current_function_decl);
1933
1934#ifdef INIT_CUMULATIVE_INCOMING_ARGS
1935 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
1936#else
1937 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
1938 current_function_decl, -1);
1939#endif
1940
1941#ifdef REG_PARM_STACK_SPACE
1942 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
1943#endif
1944}
6f086dfc 1945
6071dc7f
RH
1946/* If ARGS contains entries with complex types, split the entry into two
1947 entries of the component type. Return a new list if substitutions are
1948 needed, else the old list. */
1949
1950static tree
1951split_complex_args (tree args)
1952{
1953 tree p;
1954
1955 /* Before allocating memory, check for the common case of no complex args. */
1956 for (p = args; p; p = TREE_CHAIN (p))
1957 {
1958 tree type = TREE_TYPE (p);
1959 if (TREE_CODE (type) == COMPLEX_TYPE
1960 && targetm.calls.split_complex_arg (type))
1961 goto found;
1962 }
1963 return args;
1964
1965 found:
1966 args = copy_list (args);
1967
1968 for (p = args; p; p = TREE_CHAIN (p))
1969 {
1970 tree type = TREE_TYPE (p);
1971 if (TREE_CODE (type) == COMPLEX_TYPE
1972 && targetm.calls.split_complex_arg (type))
1973 {
1974 tree decl;
1975 tree subtype = TREE_TYPE (type);
6ccd356e 1976 bool addressable = TREE_ADDRESSABLE (p);
6071dc7f
RH
1977
1978 /* Rewrite the PARM_DECL's type with its component. */
1979 TREE_TYPE (p) = subtype;
1980 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
1981 DECL_MODE (p) = VOIDmode;
1982 DECL_SIZE (p) = NULL;
1983 DECL_SIZE_UNIT (p) = NULL;
6ccd356e
AM
1984 /* If this arg must go in memory, put it in a pseudo here.
1985 We can't allow it to go in memory as per normal parms,
1986 because the usual place might not have the imag part
1987 adjacent to the real part. */
1988 DECL_ARTIFICIAL (p) = addressable;
1989 DECL_IGNORED_P (p) = addressable;
1990 TREE_ADDRESSABLE (p) = 0;
6071dc7f
RH
1991 layout_decl (p, 0);
1992
1993 /* Build a second synthetic decl. */
1994 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
1995 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
6ccd356e
AM
1996 DECL_ARTIFICIAL (decl) = addressable;
1997 DECL_IGNORED_P (decl) = addressable;
6071dc7f
RH
1998 layout_decl (decl, 0);
1999
2000 /* Splice it in; skip the new decl. */
2001 TREE_CHAIN (decl) = TREE_CHAIN (p);
2002 TREE_CHAIN (p) = decl;
2003 p = decl;
2004 }
2005 }
2006
2007 return args;
2008}
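/* Editorial sketch of the effect of split_complex_args, assuming the
   target's split_complex_arg hook returns true for complex double:

     before:  void f (_Complex double z, int i)
     after:   void f (double z_real, double z_imag, int i)

   i.e. the original PARM_DECL is narrowed to the component type and a
   second synthetic PARM_DECL is spliced in directly after it.  The
   names shown are illustrative; the new decl is actually anonymous.  */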
2009
2010/* A subroutine of assign_parms. Adjust the parameter list to incorporate
2011 the hidden struct return argument, and (abi willing) complex args.
2012 Return the new parameter list. */
2013
2014static tree
2015assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2016{
2017 tree fndecl = current_function_decl;
2018 tree fntype = TREE_TYPE (fndecl);
2019 tree fnargs = DECL_ARGUMENTS (fndecl);
6f086dfc
RS
2020
2021 /* If struct value address is treated as the first argument, make it so. */
61f71b34 2022 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
6f086dfc 2023 && ! current_function_returns_pcc_struct
61f71b34 2024 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
6f086dfc 2025 {
f9f29478 2026 tree type = build_pointer_type (TREE_TYPE (fntype));
6071dc7f 2027 tree decl;
6f086dfc 2028
6071dc7f
RH
2029 decl = build_decl (PARM_DECL, NULL_TREE, type);
2030 DECL_ARG_TYPE (decl) = type;
2031 DECL_ARTIFICIAL (decl) = 1;
78e0d62b 2032 DECL_IGNORED_P (decl) = 1;
6f086dfc 2033
6071dc7f
RH
2034 TREE_CHAIN (decl) = fnargs;
2035 fnargs = decl;
2036 all->function_result_decl = decl;
6f086dfc 2037 }
718fe406 2038
6071dc7f 2039 all->orig_fnargs = fnargs;
ded9bf77 2040
42ba5130
RH
2041 /* If the target wants to split complex arguments into scalars, do so. */
2042 if (targetm.calls.split_complex_arg)
ded9bf77
AH
2043 fnargs = split_complex_args (fnargs);
2044
6071dc7f
RH
2045 return fnargs;
2046}
e7949876 2047
6071dc7f
RH
2048/* A subroutine of assign_parms. Examine PARM and pull out type and mode
2049 data for the parameter. Incorporate ABI specifics such as pass-by-
2050 reference and type promotion. */
6f086dfc 2051
6071dc7f
RH
2052static void
2053assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2054 struct assign_parm_data_one *data)
2055{
2056 tree nominal_type, passed_type;
2057 enum machine_mode nominal_mode, passed_mode, promoted_mode;
6f086dfc 2058
6071dc7f
RH
2059 memset (data, 0, sizeof (*data));
2060
8117c488
NS
2061 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2062 if (!current_function_stdarg)
2063 data->named_arg = 1; /* No variadic parms. */
2064 else if (TREE_CHAIN (parm))
2065 data->named_arg = 1; /* Not the last non-variadic parm. */
2066 else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2067 data->named_arg = 1; /* Only variadic ones are unnamed. */
6071dc7f 2068 else
8117c488 2069 data->named_arg = 0; /* Treat as variadic. */
6071dc7f
RH
2070
2071 nominal_type = TREE_TYPE (parm);
2072 passed_type = DECL_ARG_TYPE (parm);
2073
2074 /* Look out for errors propagating this far. Also, if the parameter's
2075 type is void then its value doesn't matter. */
2076 if (TREE_TYPE (parm) == error_mark_node
2077 /* This can happen after weird syntax errors
2078 or if an enum type is defined among the parms. */
2079 || TREE_CODE (parm) != PARM_DECL
2080 || passed_type == NULL
2081 || VOID_TYPE_P (nominal_type))
2082 {
2083 nominal_type = passed_type = void_type_node;
2084 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2085 goto egress;
2086 }
108b7d3d 2087
6071dc7f
RH
2088 /* Find mode of arg as it is passed, and mode of arg as it should be
2089 during execution of this function. */
2090 passed_mode = TYPE_MODE (passed_type);
2091 nominal_mode = TYPE_MODE (nominal_type);
2092
2093 /* If the parm is to be passed as a transparent union, use the type of
2094 the first field for the tests below. We have already verified that
2095 the modes are the same. */
52dd234b
RH
2096 if (TREE_CODE (passed_type) == UNION_TYPE
2097 && TYPE_TRANSPARENT_UNION (passed_type))
6071dc7f
RH
2098 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2099
0976078c
RH
2100 /* See if this arg was passed by invisible reference. */
2101 if (pass_by_reference (&all->args_so_far, passed_mode,
2102 passed_type, data->named_arg))
6071dc7f
RH
2103 {
2104 passed_type = nominal_type = build_pointer_type (passed_type);
2105 data->passed_pointer = true;
2106 passed_mode = nominal_mode = Pmode;
2107 }
6f086dfc 2108
6071dc7f
RH
2109 /* Find mode as it is passed by the ABI. */
2110 promoted_mode = passed_mode;
2111 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2112 {
2113 int unsignedp = TYPE_UNSIGNED (passed_type);
2114 promoted_mode = promote_mode (passed_type, promoted_mode,
2115 &unsignedp, 1);
2116 }
6f086dfc 2117
6071dc7f
RH
2118 egress:
2119 data->nominal_type = nominal_type;
2120 data->passed_type = passed_type;
2121 data->nominal_mode = nominal_mode;
2122 data->passed_mode = passed_mode;
2123 data->promoted_mode = promoted_mode;
2124}
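/* Worked example (the promotion behavior is a per-target assumption):
   on a target whose PROMOTE_MODE widens sub-word integers,

     void f (short s);

   yields nominal_mode = passed_mode = HImode but promoted_mode = SImode,
   so the ABI moves an SImode value while the body of f sees an HImode
   variable; pass-by-invisible-reference instead forces Pmode on both
   modes, as set just above.  */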
16bae307 2125
6071dc7f 2126/* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
6f086dfc 2127
6071dc7f
RH
2128static void
2129assign_parms_setup_varargs (struct assign_parm_data_all *all,
2130 struct assign_parm_data_one *data, bool no_rtl)
2131{
2132 int varargs_pretend_bytes = 0;
2133
2134 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2135 data->promoted_mode,
2136 data->passed_type,
2137 &varargs_pretend_bytes, no_rtl);
2138
2139 /* If the back-end has requested extra stack space, record how much is
2140 needed. Do not change pretend_args_size otherwise since it may be
2141 nonzero from an earlier partial argument. */
2142 if (varargs_pretend_bytes > 0)
2143 all->pretend_args_size = varargs_pretend_bytes;
2144}
a53e14c0 2145
6071dc7f
RH
2146/* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2147 the incoming location of the current parameter. */
2148
2149static void
2150assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2151 struct assign_parm_data_one *data)
2152{
2153 HOST_WIDE_INT pretend_bytes = 0;
2154 rtx entry_parm;
2155 bool in_regs;
2156
2157 if (data->promoted_mode == VOIDmode)
2158 {
2159 data->entry_parm = data->stack_parm = const0_rtx;
2160 return;
2161 }
a53e14c0 2162
6f086dfc 2163#ifdef FUNCTION_INCOMING_ARG
6071dc7f
RH
2164 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2165 data->passed_type, data->named_arg);
6f086dfc 2166#else
6071dc7f
RH
2167 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2168 data->passed_type, data->named_arg);
6f086dfc
RS
2169#endif
2170
6071dc7f
RH
2171 if (entry_parm == 0)
2172 data->promoted_mode = data->passed_mode;
6f086dfc 2173
6071dc7f
RH
2174 /* Determine parm's home in the stack, in case it arrives in the stack
2175 or we should pretend it did. Compute the stack position and rtx where
2176 the argument arrives and its size.
6f086dfc 2177
6071dc7f
RH
2178 There is one complexity here: If this was a parameter that would
2179 have been passed in registers, but wasn't only because it is
2180 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2181 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2182 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2183 as it was the previous time. */
2184 in_regs = entry_parm != 0;
6f086dfc 2185#ifdef STACK_PARMS_IN_REG_PARM_AREA
6071dc7f 2186 in_regs = true;
e7949876 2187#endif
6071dc7f
RH
2188 if (!in_regs && !data->named_arg)
2189 {
2190 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
e7949876 2191 {
6071dc7f 2192 rtx tem;
6f086dfc 2193#ifdef FUNCTION_INCOMING_ARG
6071dc7f
RH
2194 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2195 data->passed_type, true);
6f086dfc 2196#else
6071dc7f
RH
2197 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2198 data->passed_type, true);
6f086dfc 2199#endif
6071dc7f 2200 in_regs = tem != NULL;
e7949876 2201 }
6071dc7f 2202 }
e7949876 2203
6071dc7f
RH
2204 /* If this parameter was passed both in registers and in the stack, use
2205 the copy on the stack. */
fe984136
RH
2206 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2207 data->passed_type))
6071dc7f 2208 entry_parm = 0;
e7949876 2209
6071dc7f
RH
2210 if (entry_parm)
2211 {
2212 int partial;
2213
78a52f11
RH
2214 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2215 data->promoted_mode,
2216 data->passed_type,
2217 data->named_arg);
6071dc7f
RH
2218 data->partial = partial;
2219
2220 /* The caller might already have allocated stack space for the
2221 register parameters. */
2222 if (partial != 0 && all->reg_parm_stack_space == 0)
975f3818 2223 {
6071dc7f
RH
2224 /* Part of this argument is passed in registers and part
2225 is passed on the stack. Ask the prologue code to extend
2226 the stack part so that we can recreate the full value.
2227
2228 PRETEND_BYTES is the size of the registers we need to store.
2229 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2230 stack space that the prologue should allocate.
2231
2232 Internally, gcc assumes that the argument pointer is aligned
2233 to STACK_BOUNDARY bits. This is used both for alignment
2234 optimizations (see init_emit) and to locate arguments that are
2235 aligned to more than PARM_BOUNDARY bits. We must preserve this
2236 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2237 a stack boundary. */
2238
2239 /* We assume at most one partial arg, and it must be the first
2240 argument on the stack. */
0bccc606 2241 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
6071dc7f 2242
78a52f11 2243 pretend_bytes = partial;
6071dc7f
RH
2244 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2245
2246 /* We want to align relative to the actual stack pointer, so
2247 don't include this in the stack size until later. */
2248 all->extra_pretend_bytes = all->pretend_args_size;
975f3818 2249 }
6071dc7f 2250 }
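/* Illustrative arithmetic only, with assumed target values: if 12
   bytes of a partially-passed argument arrive in registers and
   STACK_BYTES is 16, then

     pretend_bytes          = 12
     all->pretend_args_size = CEIL_ROUND (12, 16) = 16

   so the prologue allocates 16 bytes, covering the register part while
   keeping the argument pointer aligned to STACK_BOUNDARY.  */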
e7949876 2251
6071dc7f
RH
2252 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2253 entry_parm ? data->partial : 0, current_function_decl,
2254 &all->stack_args_size, &data->locate);
6f086dfc 2255
6071dc7f
RH
2256 /* Adjust offsets to include the pretend args. */
2257 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2258 data->locate.slot_offset.constant += pretend_bytes;
2259 data->locate.offset.constant += pretend_bytes;
ebca59c3 2260
6071dc7f
RH
2261 data->entry_parm = entry_parm;
2262}
6f086dfc 2263
6071dc7f
RH
2264/* A subroutine of assign_parms. If there is actually space on the stack
2265 for this parm, count it in stack_args_size and return true. */
6f086dfc 2266
6071dc7f
RH
2267static bool
2268assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2269 struct assign_parm_data_one *data)
2270{
2e6ae27f 2271 /* Trivially true if we've no incoming register. */
6071dc7f
RH
2272 if (data->entry_parm == NULL)
2273 ;
2274 /* Also true if we're partially in registers and partially not,
2275 since we've arranged to drop the entire argument on the stack. */
2276 else if (data->partial != 0)
2277 ;
2278 /* Also true if the target says that it's passed in both registers
2279 and on the stack. */
2280 else if (GET_CODE (data->entry_parm) == PARALLEL
2281 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2282 ;
2283 /* Also true if the target says that there's stack allocated for
2284 all register parameters. */
2285 else if (all->reg_parm_stack_space > 0)
2286 ;
2287 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2288 else
2289 return false;
6f086dfc 2290
6071dc7f
RH
2291 all->stack_args_size.constant += data->locate.size.constant;
2292 if (data->locate.size.var)
2293 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
718fe406 2294
6071dc7f
RH
2295 return true;
2296}
0d1416c6 2297
6071dc7f
RH
2298/* A subroutine of assign_parms. Given that this parameter is allocated
2299 stack space by the ABI, find it. */
6f086dfc 2300
6071dc7f
RH
2301static void
2302assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2303{
2304 rtx offset_rtx, stack_parm;
2305 unsigned int align, boundary;
6f086dfc 2306
6071dc7f
RH
2307 /* If we're passing this arg using a reg, make its stack home the
2308 aligned stack slot. */
2309 if (data->entry_parm)
2310 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2311 else
2312 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2313
2314 stack_parm = current_function_internal_arg_pointer;
2315 if (offset_rtx != const0_rtx)
2316 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2317 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2318
2319 set_mem_attributes (stack_parm, parm, 1);
2320
bfc45551
AM
2321 boundary = data->locate.boundary;
2322 align = BITS_PER_UNIT;
6071dc7f
RH
2323
2324 /* If we're padding upward, we know that the alignment of the slot
2325 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2326 intentionally forcing upward padding. Otherwise we have to come
2327 up with a guess at the alignment based on OFFSET_RTX. */
bfc45551 2328 if (data->locate.where_pad != downward || data->entry_parm)
6071dc7f
RH
2329 align = boundary;
2330 else if (GET_CODE (offset_rtx) == CONST_INT)
2331 {
2332 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2333 align = align & -align;
2334 }
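/* The expression above isolates the largest power of two dividing the
   slot's offset.  Illustrative numbers: with boundary = 64 and a byte
   offset of 4, align = (4 * 8) | 64 = 96, and 96 & -96 = 32, so the
   slot is assumed 32-bit aligned; OR-ing in boundary caps the guess at
   the known outer alignment (an offset of 0 yields boundary itself).  */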
bfc45551 2335 set_mem_align (stack_parm, align);
6071dc7f
RH
2336
2337 if (data->entry_parm)
2338 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2339
2340 data->stack_parm = stack_parm;
2341}
2342
2343/* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2344 always valid and contiguous. */
2345
2346static void
2347assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2348{
2349 rtx entry_parm = data->entry_parm;
2350 rtx stack_parm = data->stack_parm;
2351
2352 /* If this parm was passed part in regs and part in memory, pretend it
2353 arrived entirely in memory by pushing the register-part onto the stack.
2354 In the special case of a DImode or DFmode that is split, we could put
2355 it together in a pseudoreg directly, but for now that's not worth
2356 bothering with. */
2357 if (data->partial != 0)
2358 {
2359 /* Handle calls that pass values in multiple non-contiguous
2360 locations. The Irix 6 ABI has examples of this. */
2361 if (GET_CODE (entry_parm) == PARALLEL)
2362 emit_group_store (validize_mem (stack_parm), entry_parm,
2363 data->passed_type,
2364 int_size_in_bytes (data->passed_type));
6f086dfc 2365 else
78a52f11
RH
2366 {
2367 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2368 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2369 data->partial / UNITS_PER_WORD);
2370 }
6f086dfc 2371
6071dc7f
RH
2372 entry_parm = stack_parm;
2373 }
6f086dfc 2374
6071dc7f
RH
2375 /* If we didn't decide this parm came in a register, by default it came
2376 on the stack. */
2377 else if (entry_parm == NULL)
2378 entry_parm = stack_parm;
2379
2380 /* When an argument is passed in multiple locations, we can't make use
2381 of this information, but we can save some copying if the whole argument
2382 is passed in a single register. */
2383 else if (GET_CODE (entry_parm) == PARALLEL
2384 && data->nominal_mode != BLKmode
2385 && data->passed_mode != BLKmode)
2386 {
2387 size_t i, len = XVECLEN (entry_parm, 0);
2388
2389 for (i = 0; i < len; i++)
2390 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2391 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2392 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2393 == data->passed_mode)
2394 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2395 {
2396 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2397 break;
2398 }
2399 }
e68a6ce1 2400
6071dc7f
RH
2401 data->entry_parm = entry_parm;
2402}
6f086dfc 2403
6071dc7f
RH
2404/* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2405 always valid and properly aligned. */
6f086dfc 2406
6071dc7f
RH
2407static void
2408assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2409{
2410 rtx stack_parm = data->stack_parm;
2411
2412 /* If we can't trust the parm stack slot to be aligned enough for its
2413 ultimate type, don't use that slot after entry. We'll make another
2414 stack slot, if we need one. */
bfc45551
AM
2415 if (stack_parm
2416 && ((STRICT_ALIGNMENT
2417 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2418 || (data->nominal_type
2419 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2420 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
6071dc7f
RH
2421 stack_parm = NULL;
2422
2423 /* If parm was passed in memory, and we need to convert it on entry,
2424 don't store it back in that same slot. */
2425 else if (data->entry_parm == stack_parm
2426 && data->nominal_mode != BLKmode
2427 && data->nominal_mode != data->passed_mode)
2428 stack_parm = NULL;
2429
7d69de61
RH
2430 /* If stack protection is in effect for this function, don't leave any
2431 pointers in their passed stack slots. */
2432 else if (cfun->stack_protect_guard
2433 && (flag_stack_protect == 2
2434 || data->passed_pointer
2435 || POINTER_TYPE_P (data->nominal_type)))
2436 stack_parm = NULL;
2437
6071dc7f
RH
2438 data->stack_parm = stack_parm;
2439}
a0506b54 2440
6071dc7f
RH
2441/* A subroutine of assign_parms. Return true if the current parameter
2442 should be stored as a BLKmode in the current frame. */
2443
2444static bool
2445assign_parm_setup_block_p (struct assign_parm_data_one *data)
2446{
2447 if (data->nominal_mode == BLKmode)
2448 return true;
2449 if (GET_CODE (data->entry_parm) == PARALLEL)
2450 return true;
531547e9 2451
6e985040 2452#ifdef BLOCK_REG_PADDING
ae8c9754
RS
2453 /* Only assign_parm_setup_block knows how to deal with register arguments
2454 that are padded at the least significant end. */
2455 if (REG_P (data->entry_parm)
2456 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2457 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2458 == (BYTES_BIG_ENDIAN ? upward : downward)))
6071dc7f 2459 return true;
6e985040 2460#endif
6071dc7f
RH
2461
2462 return false;
2463}
2464
2465/* A subroutine of assign_parms. Arrange for the parameter to be
2466 present and valid in DATA->STACK_RTL. */
2467
2468static void
27e29549
RH
2469assign_parm_setup_block (struct assign_parm_data_all *all,
2470 tree parm, struct assign_parm_data_one *data)
6071dc7f
RH
2471{
2472 rtx entry_parm = data->entry_parm;
2473 rtx stack_parm = data->stack_parm;
bfc45551
AM
2474 HOST_WIDE_INT size;
2475 HOST_WIDE_INT size_stored;
17284759 2476 rtx orig_entry_parm = entry_parm;
6071dc7f 2477
27e29549
RH
2478 if (GET_CODE (entry_parm) == PARALLEL)
2479 entry_parm = emit_group_move_into_temps (entry_parm);
2480
6071dc7f
RH
2481 /* If we've a non-block object that's nevertheless passed in parts,
2482 reconstitute it in register operations rather than on the stack. */
2483 if (GET_CODE (entry_parm) == PARALLEL
640019aa 2484 && data->nominal_mode != BLKmode)
6071dc7f 2485 {
17284759 2486 rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);
6071dc7f 2487
640019aa
AH
2488 if ((XVECLEN (entry_parm, 0) > 1
2489 || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
2490 && use_register_for_decl (parm))
2491 {
2492 rtx parmreg = gen_reg_rtx (data->nominal_mode);
27e29549 2493
bb27eeda
SE
2494 push_to_sequence2 (all->first_conversion_insn,
2495 all->last_conversion_insn);
4af46a32 2496
640019aa
AH
2497 /* For values returned in multiple registers, handle possible
2498 incompatible calls to emit_group_store.
4af46a32 2499
640019aa
AH
2500 For example, the following would be invalid, and would have to
2501 be fixed by the conditional below:
4af46a32 2502
640019aa
AH
2503 emit_group_store ((reg:SF), (parallel:DF))
2504 emit_group_store ((reg:SI), (parallel:DI))
2505
2506 An example of this are doubles in e500 v2:
2507 (parallel:DF (expr_list (reg:SI) (const_int 0))
2508 (expr_list (reg:SI) (const_int 4))). */
2509 if (data->nominal_mode != data->passed_mode)
2510 {
2511 rtx t = gen_reg_rtx (GET_MODE (entry_parm));
2512 emit_group_store (t, entry_parm, NULL_TREE,
2513 GET_MODE_SIZE (GET_MODE (entry_parm)));
2514 convert_move (parmreg, t, 0);
2515 }
2516 else
2517 emit_group_store (parmreg, entry_parm, data->nominal_type,
2518 int_size_in_bytes (data->nominal_type));
27e29549 2519
bb27eeda
SE
2520 all->first_conversion_insn = get_insns ();
2521 all->last_conversion_insn = get_last_insn ();
640019aa 2522 end_sequence ();
27e29549 2523
640019aa
AH
2524 SET_DECL_RTL (parm, parmreg);
2525 return;
2526 }
6071dc7f
RH
2527 }
2528
bfc45551
AM
2529 size = int_size_in_bytes (data->passed_type);
2530 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2531 if (stack_parm == 0)
2532 {
a561d88b 2533 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
bfc45551 2534 stack_parm = assign_stack_local (BLKmode, size_stored,
a561d88b 2535 DECL_ALIGN (parm));
bfc45551
AM
2536 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2537 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2538 set_mem_attributes (stack_parm, parm, 1);
2539 }
2540
6071dc7f
RH
2541 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2542 calls that pass values in multiple non-contiguous locations. */
2543 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2544 {
6071dc7f
RH
2545 rtx mem;
2546
2547 /* Note that we will be storing an integral number of words.
2548 So we have to be careful to ensure that we allocate an
bfc45551 2549 integral number of words. We do this above when we call
6071dc7f
RH
2550 assign_stack_local if space was not allocated in the argument
2551 list. If it was, this will not work if PARM_BOUNDARY is not
2552 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2553 if it becomes a problem. Exception is when BLKmode arrives
2554 with arguments not conforming to word_mode. */
2555
bfc45551
AM
2556 if (data->stack_parm == 0)
2557 ;
6071dc7f
RH
2558 else if (GET_CODE (entry_parm) == PARALLEL)
2559 ;
0bccc606
NS
2560 else
2561 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
6f086dfc 2562
6071dc7f 2563 mem = validize_mem (stack_parm);
c6b97fac 2564
6071dc7f
RH
2565 /* Handle values in multiple non-contiguous locations. */
2566 if (GET_CODE (entry_parm) == PARALLEL)
27e29549 2567 {
bb27eeda
SE
2568 push_to_sequence2 (all->first_conversion_insn,
2569 all->last_conversion_insn);
27e29549 2570 emit_group_store (mem, entry_parm, data->passed_type, size);
bb27eeda
SE
2571 all->first_conversion_insn = get_insns ();
2572 all->last_conversion_insn = get_last_insn ();
27e29549
RH
2573 end_sequence ();
2574 }
c6b97fac 2575
6071dc7f
RH
2576 else if (size == 0)
2577 ;
5c07bd7a 2578
6071dc7f
RH
2579 /* If SIZE is that of a mode no bigger than a word, just use
2580 that mode's store operation. */
2581 else if (size <= UNITS_PER_WORD)
2582 {
2583 enum machine_mode mode
2584 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
c6b97fac 2585
6071dc7f 2586 if (mode != BLKmode
6e985040 2587#ifdef BLOCK_REG_PADDING
6071dc7f
RH
2588 && (size == UNITS_PER_WORD
2589 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2590 != (BYTES_BIG_ENDIAN ? upward : downward)))
6e985040 2591#endif
6071dc7f
RH
2592 )
2593 {
2594 rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
2595 emit_move_insn (change_address (mem, mode, 0), reg);
2596 }
c6b97fac 2597
6071dc7f
RH
2598 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2599 machine must be aligned to the left before storing
2600 to memory. Note that the previous test doesn't
2601 handle all cases (e.g. SIZE == 3). */
2602 else if (size != UNITS_PER_WORD
6e985040 2603#ifdef BLOCK_REG_PADDING
6071dc7f
RH
2604 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2605 == downward)
6e985040 2606#else
6071dc7f 2607 && BYTES_BIG_ENDIAN
6e985040 2608#endif
6071dc7f
RH
2609 )
2610 {
2611 rtx tem, x;
2612 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
65c844e2 2613 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
6071dc7f 2614
09b52670 2615 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
7d60be94 2616 build_int_cst (NULL_TREE, by),
4a90aeeb 2617 NULL_RTX, 1);
6071dc7f
RH
2618 tem = change_address (mem, word_mode, 0);
2619 emit_move_insn (tem, x);
6f086dfc 2620 }
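/* Worked example of the left-justification above, assuming
   UNITS_PER_WORD == 4 on a big-endian target: a 3-byte argument
   arrives right-justified in its register, so by = (4 - 3) * 8 = 8
   and the value is shifted left 8 bits before the word_mode store,
   placing the 3 meaningful bytes at the low addresses of the slot.  */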
6071dc7f 2621 else
27e29549 2622 move_block_from_reg (REGNO (entry_parm), mem,
6071dc7f 2623 size_stored / UNITS_PER_WORD);
6f086dfc 2624 }
6071dc7f 2625 else
27e29549 2626 move_block_from_reg (REGNO (entry_parm), mem,
6071dc7f
RH
2627 size_stored / UNITS_PER_WORD);
2628 }
bfc45551
AM
2629 else if (data->stack_parm == 0)
2630 {
bb27eeda 2631 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
bfc45551
AM
2632 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2633 BLOCK_OP_NORMAL);
bb27eeda
SE
2634 all->first_conversion_insn = get_insns ();
2635 all->last_conversion_insn = get_last_insn ();
bfc45551
AM
2636 end_sequence ();
2637 }
6071dc7f 2638
bfc45551 2639 data->stack_parm = stack_parm;
6071dc7f
RH
2640 SET_DECL_RTL (parm, stack_parm);
2641}
2642
2643/* A subroutine of assign_parms. Allocate a pseudo to hold the current
2644 parameter. Get it there. Perform all ABI specified conversions. */
2645
2646static void
2647assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2648 struct assign_parm_data_one *data)
2649{
2650 rtx parmreg;
2651 enum machine_mode promoted_nominal_mode;
2652 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2653 bool did_conversion = false;
2654
2655 /* Store the parm in a pseudoregister during the function, but we may
2656 need to do it in a wider mode. */
2657
3f9e6aed
PB
2658 /* This is not really promoting for a call. However we need to be
2659 consistent with assign_parm_find_data_types and expand_expr_real_1. */
6071dc7f 2660 promoted_nominal_mode
3f9e6aed 2661 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
6071dc7f
RH
2662
2663 parmreg = gen_reg_rtx (promoted_nominal_mode);
2664
2665 if (!DECL_ARTIFICIAL (parm))
2666 mark_user_reg (parmreg);
2667
2668 /* If this was an item that we received a pointer to,
2669 set DECL_RTL appropriately. */
2670 if (data->passed_pointer)
2671 {
2672 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2673 set_mem_attributes (x, parm, 1);
2674 SET_DECL_RTL (parm, x);
2675 }
2676 else
389fdba0 2677 SET_DECL_RTL (parm, parmreg);
6071dc7f
RH
2678
2679 /* Copy the value into the register. */
2680 if (data->nominal_mode != data->passed_mode
2681 || promoted_nominal_mode != data->promoted_mode)
2682 {
2683 int save_tree_used;
2684
2685 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2686 mode, by the caller. We now have to convert it to
2687 NOMINAL_MODE, if different. However, PARMREG may be in
2688 a different mode than NOMINAL_MODE if it is being stored
2689 promoted.
2690
2691 If ENTRY_PARM is a hard register, it might be in a register
2692 not valid for operating in its mode (e.g., an odd-numbered
2693 register for a DFmode). In that case, moves are the only
2694 thing valid, so we can't do a convert from there. This
2695 occurs when the calling sequence allows such misaligned
2696 usages.
2697
2698 In addition, the conversion may involve a call, which could
2699 clobber parameters which haven't been copied to pseudo
2700 registers yet. Therefore, we must first copy the parm to
2701 a pseudo reg here, and save the conversion until after all
2702 parameters have been moved. */
2703
2704 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2705
2706 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2707
bb27eeda 2708 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
6071dc7f
RH
2709 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2710
2711 if (GET_CODE (tempreg) == SUBREG
2712 && GET_MODE (tempreg) == data->nominal_mode
2713 && REG_P (SUBREG_REG (tempreg))
2714 && data->nominal_mode == data->passed_mode
2715 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2716 && GET_MODE_SIZE (GET_MODE (tempreg))
2717 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
6f086dfc 2718 {
6071dc7f
RH
2719 /* The argument is already sign/zero extended, so note it
2720 into the subreg. */
2721 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2722 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2723 }
00d8a4c1 2724
6071dc7f
RH
2725 /* TREE_USED gets set erroneously during expand_assignment. */
2726 save_tree_used = TREE_USED (parm);
79f5e442 2727 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
6071dc7f 2728 TREE_USED (parm) = save_tree_used;
bb27eeda
SE
2729 all->first_conversion_insn = get_insns ();
2730 all->last_conversion_insn = get_last_insn ();
6071dc7f 2731 end_sequence ();
00d8a4c1 2732
6071dc7f
RH
2733 did_conversion = true;
2734 }
2735 else
2736 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2737
2738 /* If we were passed a pointer but the actual value can safely live
2739 in a register, put it in one. */
2740 if (data->passed_pointer
2741 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2742 /* If by-reference argument was promoted, demote it. */
2743 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2744 || use_register_for_decl (parm)))
2745 {
2746 /* We can't use nominal_mode, because it will have been set to
2747 Pmode above. We must use the actual mode of the parm. */
2748 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2749 mark_user_reg (parmreg);
cd5b3469 2750
6071dc7f
RH
2751 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2752 {
2753 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2754 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2755
bb27eeda
SE
2756 push_to_sequence2 (all->first_conversion_insn,
2757 all->last_conversion_insn);
6071dc7f
RH
2758 emit_move_insn (tempreg, DECL_RTL (parm));
2759 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2760 emit_move_insn (parmreg, tempreg);
bb27eeda
SE
2761 all->first_conversion_insn = get_insns ();
2762 all->last_conversion_insn = get_last_insn ();
6071dc7f 2763 end_sequence ();
6f086dfc 2764
6071dc7f
RH
2765 did_conversion = true;
2766 }
2767 else
2768 emit_move_insn (parmreg, DECL_RTL (parm));
6f086dfc 2769
6071dc7f 2770 SET_DECL_RTL (parm, parmreg);
797a6ac1 2771
6071dc7f
RH
2772 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2773 now the parm. */
2774 data->stack_parm = NULL;
2775 }
ddef6bc7 2776
6071dc7f
RH
2777 /* Mark the register as eliminable if we did no conversion and it was
2778 copied from memory at a fixed offset, and the arg pointer was not
2779 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2780 offset formed an invalid address, such memory-equivalences as we
2781 make here would screw up life analysis for it. */
2782 if (data->nominal_mode == data->passed_mode
2783 && !did_conversion
2784 && data->stack_parm != 0
2785 && MEM_P (data->stack_parm)
2786 && data->locate.offset.var == 0
2787 && reg_mentioned_p (virtual_incoming_args_rtx,
2788 XEXP (data->stack_parm, 0)))
2789 {
2790 rtx linsn = get_last_insn ();
2791 rtx sinsn, set;
a03caf76 2792
6071dc7f
RH
2793 /* Mark complex types separately. */
2794 if (GET_CODE (parmreg) == CONCAT)
2795 {
2796 enum machine_mode submode
2797 = GET_MODE_INNER (GET_MODE (parmreg));
1466e387
RH
2798 int regnor = REGNO (XEXP (parmreg, 0));
2799 int regnoi = REGNO (XEXP (parmreg, 1));
2800 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2801 rtx stacki = adjust_address_nv (data->stack_parm, submode,
2802 GET_MODE_SIZE (submode));
6071dc7f
RH
2803
2804 /* Scan backwards for the set of the real and
2805 imaginary parts. */
2806 for (sinsn = linsn; sinsn != 0;
2807 sinsn = prev_nonnote_insn (sinsn))
2808 {
2809 set = single_set (sinsn);
2810 if (set == 0)
2811 continue;
2812
2813 if (SET_DEST (set) == regno_reg_rtx [regnoi])
a31830a7 2814 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
6071dc7f 2815 else if (SET_DEST (set) == regno_reg_rtx [regnor])
a31830a7 2816 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
a03caf76 2817 }
6071dc7f
RH
2818 }
2819 else if ((set = single_set (linsn)) != 0
2820 && SET_DEST (set) == parmreg)
a31830a7 2821 set_unique_reg_note (linsn, REG_EQUIV, data->stack_parm);
6071dc7f
RH
2822 }
2823
2824 /* For pointer data type, suggest pointer register. */
2825 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2826 mark_reg_pointer (parmreg,
2827 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2828}
2829
2830/* A subroutine of assign_parms. Allocate stack space to hold the current
2831 parameter. Get it there. Perform all ABI specified conversions. */
2832
2833static void
2834assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2835 struct assign_parm_data_one *data)
2836{
2837 /* Value must be stored in the stack slot STACK_PARM during function
2838 execution. */
bfc45551 2839 bool to_conversion = false;
6071dc7f
RH
2840
2841 if (data->promoted_mode != data->nominal_mode)
2842 {
2843 /* Conversion is required. */
2844 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
6f086dfc 2845
6071dc7f
RH
2846 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2847
bb27eeda 2848 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
bfc45551
AM
2849 to_conversion = true;
2850
6071dc7f
RH
2851 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2852 TYPE_UNSIGNED (TREE_TYPE (parm)));
2853
2854 if (data->stack_parm)
2855 /* ??? This may need a big-endian conversion on sparc64. */
2856 data->stack_parm
2857 = adjust_address (data->stack_parm, data->nominal_mode, 0);
6071dc7f
RH
2858 }
2859
2860 if (data->entry_parm != data->stack_parm)
2861 {
bfc45551
AM
2862 rtx src, dest;
2863
6071dc7f
RH
2864 if (data->stack_parm == 0)
2865 {
2866 data->stack_parm
2867 = assign_stack_local (GET_MODE (data->entry_parm),
2868 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
bfc45551 2869 TYPE_ALIGN (data->passed_type));
6071dc7f 2870 set_mem_attributes (data->stack_parm, parm, 1);
6f086dfc 2871 }
6071dc7f 2872
bfc45551
AM
2873 dest = validize_mem (data->stack_parm);
2874 src = validize_mem (data->entry_parm);
2875
2876 if (MEM_P (src))
6f086dfc 2877 {
bfc45551
AM
2878 /* Use a block move to handle potentially misaligned entry_parm. */
2879 if (!to_conversion)
bb27eeda
SE
2880 push_to_sequence2 (all->first_conversion_insn,
2881 all->last_conversion_insn);
bfc45551
AM
2882 to_conversion = true;
2883
2884 emit_block_move (dest, src,
2885 GEN_INT (int_size_in_bytes (data->passed_type)),
2886 BLOCK_OP_NORMAL);
6071dc7f
RH
2887 }
2888 else
bfc45551
AM
2889 emit_move_insn (dest, src);
2890 }
2891
2892 if (to_conversion)
2893 {
bb27eeda
SE
2894 all->first_conversion_insn = get_insns ();
2895 all->last_conversion_insn = get_last_insn ();
bfc45551 2896 end_sequence ();
6071dc7f 2897 }
6f086dfc 2898
6071dc7f
RH
2899 SET_DECL_RTL (parm, data->stack_parm);
2900}
3412b298 2901
6071dc7f
RH
2902/* A subroutine of assign_parms. If the ABI splits complex arguments, then
2903 undo the frobbing that we did in assign_parms_augmented_arg_list. */
86f8eff3 2904
6071dc7f 2905static void
6ccd356e 2906assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
6071dc7f
RH
2907{
2908 tree parm;
6ccd356e 2909 tree orig_fnargs = all->orig_fnargs;
f4ef873c 2910
6071dc7f
RH
2911 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2912 {
2913 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2914 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2915 {
2916 rtx tmp, real, imag;
2917 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
6f086dfc 2918
6071dc7f
RH
2919 real = DECL_RTL (fnargs);
2920 imag = DECL_RTL (TREE_CHAIN (fnargs));
2921 if (inner != GET_MODE (real))
6f086dfc 2922 {
6071dc7f
RH
2923 real = gen_lowpart_SUBREG (inner, real);
2924 imag = gen_lowpart_SUBREG (inner, imag);
2925 }
6ccd356e
AM
2926
2927 if (TREE_ADDRESSABLE (parm))
2928 {
2929 rtx rmem, imem;
2930 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
2931
2932 /* split_complex_arg put the real and imag parts in
2933 pseudos. Move them to memory. */
bfc45551
AM
2934 tmp = assign_stack_local (DECL_MODE (parm), size,
2935 TYPE_ALIGN (TREE_TYPE (parm)));
6ccd356e
AM
2936 set_mem_attributes (tmp, parm, 1);
2937 rmem = adjust_address_nv (tmp, inner, 0);
2938 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
bb27eeda
SE
2939 push_to_sequence2 (all->first_conversion_insn,
2940 all->last_conversion_insn);
6ccd356e
AM
2941 emit_move_insn (rmem, real);
2942 emit_move_insn (imem, imag);
bb27eeda
SE
2943 all->first_conversion_insn = get_insns ();
2944 all->last_conversion_insn = get_last_insn ();
6ccd356e
AM
2945 end_sequence ();
2946 }
2947 else
2948 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
6071dc7f 2949 SET_DECL_RTL (parm, tmp);
7e41ffa2 2950
6071dc7f
RH
2951 real = DECL_INCOMING_RTL (fnargs);
2952 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2953 if (inner != GET_MODE (real))
2954 {
2955 real = gen_lowpart_SUBREG (inner, real);
2956 imag = gen_lowpart_SUBREG (inner, imag);
6f086dfc 2957 }
6071dc7f
RH
2958 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2959 set_decl_incoming_rtl (parm, tmp);
2960 fnargs = TREE_CHAIN (fnargs);
2961 }
2962 else
2963 {
2964 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2965 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));
6f086dfc 2966
6071dc7f
RH
2967 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2968 instead of the copy of decl, i.e. FNARGS. */
2969 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2970 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
6f086dfc 2971 }
6071dc7f
RH
2972
2973 fnargs = TREE_CHAIN (fnargs);
6f086dfc 2974 }
6071dc7f
RH
2975}
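/* Editorial sketch, mirroring the split example earlier: for a parm of
   type _Complex double that was split into two double halves, the two
   halves' DECL_RTLs are glued back together as

     (concat:DC (reg:DF real) (reg:DF imag))

   and that CONCAT becomes the parm's DECL_RTL; if the parm was
   addressable, the halves are instead stored into a single stack
   temporary whose MEM serves as the DECL_RTL.  The register numbers
   shown are placeholders.  */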
2976
2977/* Assign RTL expressions to the function's parameters. This may involve
2978 copying them into registers and using those registers as the DECL_RTL. */
2979
6fe79279 2980static void
6071dc7f
RH
2981assign_parms (tree fndecl)
2982{
2983 struct assign_parm_data_all all;
2984 tree fnargs, parm;
6f086dfc 2985
150cdc9e
RH
2986 current_function_internal_arg_pointer
2987 = targetm.calls.internal_arg_pointer ();
6071dc7f
RH
2988
2989 assign_parms_initialize_all (&all);
2990 fnargs = assign_parms_augmented_arg_list (&all);
2991
2992 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
ded9bf77 2993 {
6071dc7f
RH
2994 struct assign_parm_data_one data;
2995
2996 /* Extract the type of PARM; adjust it according to ABI. */
2997 assign_parm_find_data_types (&all, parm, &data);
2998
2999 /* Early out for errors and void parameters. */
3000 if (data.passed_mode == VOIDmode)
ded9bf77 3001 {
6071dc7f
RH
3002 SET_DECL_RTL (parm, const0_rtx);
3003 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3004 continue;
3005 }
196c42cd 3006
8117c488
NS
3007 if (current_function_stdarg && !TREE_CHAIN (parm))
3008 assign_parms_setup_varargs (&all, &data, false);
196c42cd 3009
6071dc7f
RH
3010 /* Find out where the parameter arrives in this function. */
3011 assign_parm_find_entry_rtl (&all, &data);
3012
3013 /* Find out where stack space for this parameter might be. */
3014 if (assign_parm_is_stack_parm (&all, &data))
3015 {
3016 assign_parm_find_stack_rtl (parm, &data);
3017 assign_parm_adjust_entry_rtl (&data);
ded9bf77 3018 }
6071dc7f
RH
3019
3020 /* Record permanently how this parm was passed. */
3021 set_decl_incoming_rtl (parm, data.entry_parm);
3022
3023 /* Update info on where next arg arrives in registers. */
3024 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3025 data.passed_type, data.named_arg);
3026
3027 assign_parm_adjust_stack_rtl (&data);
3028
3029 if (assign_parm_setup_block_p (&data))
27e29549 3030 assign_parm_setup_block (&all, parm, &data);
6071dc7f
RH
3031 else if (data.passed_pointer || use_register_for_decl (parm))
3032 assign_parm_setup_reg (&all, parm, &data);
3033 else
3034 assign_parm_setup_stack (&all, parm, &data);
ded9bf77
AH
3035 }
3036
6071dc7f 3037 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
6ccd356e 3038 assign_parms_unsplit_complex (&all, fnargs);
6071dc7f 3039
3412b298
JW
3040 /* Output all parameter conversion instructions (possibly including calls)
3041 now that all parameters have been copied out of hard registers. */
bb27eeda 3042 emit_insn (all.first_conversion_insn);
3412b298 3043
b36a8cc2
OH
3044 /* If we are receiving a struct value address as the first argument, set up
3045 the RTL for the function result. As this might require code to convert
3046 the transmitted address to Pmode, we do this here to ensure that possible
3047 preliminary conversions of the address have been emitted already. */
6071dc7f 3048 if (all.function_result_decl)
b36a8cc2 3049 {
6071dc7f
RH
3050 tree result = DECL_RESULT (current_function_decl);
3051 rtx addr = DECL_RTL (all.function_result_decl);
b36a8cc2 3052 rtx x;
fa8db1f7 3053
cc77ae10
JM
3054 if (DECL_BY_REFERENCE (result))
3055 x = addr;
3056 else
3057 {
3058 addr = convert_memory_address (Pmode, addr);
3059 x = gen_rtx_MEM (DECL_MODE (result), addr);
3060 set_mem_attributes (x, result, 1);
3061 }
b36a8cc2
OH
3062 SET_DECL_RTL (result, x);
3063 }
3064
53c428d0 3065 /* We have aligned all the args, so add space for the pretend args. */
6071dc7f
RH
3066 current_function_pretend_args_size = all.pretend_args_size;
3067 all.stack_args_size.constant += all.extra_pretend_bytes;
3068 current_function_args_size = all.stack_args_size.constant;
6f086dfc
RS
3069
3070 /* Adjust function incoming argument size for alignment and
3071 minimum length. */
3072
3073#ifdef REG_PARM_STACK_SPACE
3074 current_function_args_size = MAX (current_function_args_size,
3075 REG_PARM_STACK_SPACE (fndecl));
6f90e075 3076#endif
6f086dfc 3077
53366450
PB
3078 current_function_args_size = CEIL_ROUND (current_function_args_size,
3079 PARM_BOUNDARY / BITS_PER_UNIT);
4433e339 3080
6f086dfc
RS
3081#ifdef ARGS_GROW_DOWNWARD
3082 current_function_arg_offset_rtx
477eff96 3083 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
6071dc7f
RH
3084 : expand_expr (size_diffop (all.stack_args_size.var,
3085 size_int (-all.stack_args_size.constant)),
a57263bc 3086 NULL_RTX, VOIDmode, 0));
6f086dfc 3087#else
6071dc7f 3088 current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
6f086dfc
RS
3089#endif
3090
3091 /* See how many bytes, if any, of its args a function should try to pop
3092 on return. */
3093
64e6d9cc 3094 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
6f086dfc
RS
3095 current_function_args_size);
3096
3b69d50e
RK
3097 /* For stdarg.h function, save info about
3098 regs and stack space used by the named args. */
6f086dfc 3099
6071dc7f 3100 current_function_args_info = all.args_so_far;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  current_function_return_rtx
    = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
       ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      tree decl_result = DECL_RESULT (fndecl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl;

	  real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
							fndecl, true);
	  REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
	  /* The delay slot scheduler assumes that current_function_return_rtx
	     holds the hard register containing the return value, not a
	     temporary pseudo.  */
	  current_function_return_rtx = real_decl_rtl;
	}
    }
}

/* A subroutine of gimplify_parameters, invoked via walk_tree.
   For all seen types, gimplify their sizes.  */

static tree
gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  *walk_subtrees = 0;
  if (TYPE_P (t))
    {
      if (POINTER_TYPE_P (t))
	*walk_subtrees = 1;
      else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
	       && !TYPE_SIZES_GIMPLIFIED (t))
	{
	  gimplify_type_sizes (t, (tree *) data);
	  *walk_subtrees = 1;
	}
    }

  return NULL;
}

/* Gimplify the parameter list for current_function_decl.  This involves
   evaluating SAVE_EXPRs of variable sized parameters and generating code
   to implement callee-copied reference parameters.  Returns a list of
   statements to add to the beginning of the function, or NULL if nothing
   to do.  */

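/* For instance (illustrative): for a parameter declared "int a[n]",
   whose type size hides SAVE_EXPRs, the loop below gimplifies those
   sizes; and for a variable-sized argument that is passed by reference
   but callee-copied under the ABI, it emits an alloca of
   DECL_SIZE_UNIT bytes plus a copy from the incoming reference.  */
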
tree
gimplify_parameters (void)
{
  struct assign_parm_data_all all;
  tree fnargs, parm, stmts = NULL;

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
	continue;

      /* Update info on where next arg arrives in registers.  */
      FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
			    data.passed_type, data.named_arg);

      /* ??? Once upon a time variable_size stuffed parameter list
	 SAVE_EXPRs (amongst others) onto a pending sizes list.  This
	 turned out to be less than manageable in the gimple world.
	 Now we have to hunt them down ourselves.  */
      walk_tree_without_duplicates (&data.passed_type,
				    gimplify_parm_type, &stmts);

      if (!TREE_CONSTANT (DECL_SIZE (parm)))
	{
	  gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
	}

      if (data.passed_pointer)
	{
	  tree type = TREE_TYPE (data.passed_type);
	  if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
				       type, data.named_arg))
	    {
	      tree local, t;

	      /* For constant sized objects, this is trivial; for
		 variable-sized objects, we have to play games.  */
	      if (TREE_CONSTANT (DECL_SIZE (parm)))
		{
		  local = create_tmp_var (type, get_name (parm));
		  DECL_IGNORED_P (local) = 0;
		}
	      else
		{
		  tree ptr_type, addr;

		  ptr_type = build_pointer_type (type);
		  addr = create_tmp_var (ptr_type, get_name (parm));
		  DECL_IGNORED_P (addr) = 0;
		  local = build_fold_indirect_ref (addr);

		  t = built_in_decls[BUILT_IN_ALLOCA];
		  t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
		  t = fold_convert (ptr_type, t);
		  t = build_gimple_modify_stmt (addr, t);
		  gimplify_and_add (t, &stmts);
		}

	      t = build_gimple_modify_stmt (local, parm);
	      gimplify_and_add (t, &stmts);

	      SET_DECL_VALUE_EXPR (parm, local);
	      DECL_HAS_VALUE_EXPR_P (parm) = 1;
	    }
	}
    }

  return stmts;
}
\f
/* Compute the size and offset from the start of the stacked arguments for a
   parm passed in mode PASSED_MODE and with type TYPE.

   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.

   The starting offset and size for this parm are returned in
   LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
   nonzero, the offset is that of the stack slot, which is returned in
   LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
   padding required from the initial offset ptr to the stack slot.

   IN_REGS is nonzero if the argument will be passed in registers.  It will
   never be set if REG_PARM_STACK_SPACE is not defined.

   FNDECL is the function in which the argument was defined.

   There are two types of rounding that are done.  The first, controlled by
   FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
   list to be aligned to the specified boundary (in bits).  This rounding
   affects the initial and starting offsets, but not the argument size.

   The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
   initial offset is not affected by this rounding, while the size always
   is and the starting offset may be.  */

/* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
   INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
   callers pass in the total size of args so far as
   INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */

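/* A worked example (illustrative): on a target where arguments grow
   upward, with PARM_BOUNDARY == 32 and FUNCTION_ARG_BOUNDARY returning
   64, an initial offset of 4 bytes is first padded to 8 (the padding
   is recorded in LOCATE->ALIGNMENT_PAD), and a 6-byte argument that is
   padded is then rounded up to a size of 8 bytes, the next multiple of
   the 4-byte PARM_BOUNDARY unit.  */
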
void
locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
		     int partial, tree fndecl ATTRIBUTE_UNUSED,
		     struct args_size *initial_offset_ptr,
		     struct locate_and_pad_arg_data *locate)
{
  tree sizetree;
  enum direction where_pad;
  unsigned int boundary;
  int reg_parm_stack_space = 0;
  int part_size_in_regs;

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);

  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      if (reg_parm_stack_space > 0)
	{
	  if (initial_offset_ptr->var)
	    {
	      initial_offset_ptr->var
		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
			      ssize_int (reg_parm_stack_space));
	      initial_offset_ptr->constant = 0;
	    }
	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
	    initial_offset_ptr->constant = reg_parm_stack_space;
	}
    }
#endif /* REG_PARM_STACK_SPACE */

  part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);

  sizetree
    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
  where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
  boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
  locate->where_pad = where_pad;
  locate->boundary = boundary;

  /* Remember if the outgoing parameter requires extra alignment on the
     calling function side.  */
  if (boundary > PREFERRED_STACK_BOUNDARY)
    boundary = PREFERRED_STACK_BOUNDARY;
  if (cfun->stack_alignment_needed < boundary)
    cfun->stack_alignment_needed = boundary;

#ifdef ARGS_GROW_DOWNWARD
  locate->slot_offset.constant = -initial_offset_ptr->constant;
  if (initial_offset_ptr->var)
    locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
					  initial_offset_ptr->var);

  {
    tree s2 = sizetree;
    if (where_pad != none
	&& (!host_integerp (sizetree, 1)
	    || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
      s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
    SUB_PARM_SIZE (locate->slot_offset, s2);
  }

  locate->slot_offset.constant += part_size_in_regs;

  if (!in_regs
#ifdef REG_PARM_STACK_SPACE
      || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
      )
    pad_to_arg_alignment (&locate->slot_offset, boundary,
			  &locate->alignment_pad);

  locate->size.constant = (-initial_offset_ptr->constant
			   - locate->slot_offset.constant);
  if (initial_offset_ptr->var)
    locate->size.var = size_binop (MINUS_EXPR,
				   size_binop (MINUS_EXPR,
					       ssize_int (0),
					       initial_offset_ptr->var),
				   locate->slot_offset.var);

  /* Pad_below needs the pre-rounded size to know how much to pad
     below.  */
  locate->offset = locate->slot_offset;
  if (where_pad == downward)
    pad_below (&locate->offset, passed_mode, sizetree);

#else /* !ARGS_GROW_DOWNWARD */
  if (!in_regs
#ifdef REG_PARM_STACK_SPACE
      || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
      )
    pad_to_arg_alignment (initial_offset_ptr, boundary,
			  &locate->alignment_pad);
  locate->slot_offset = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
  if (passed_mode != BLKmode)
    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

  /* Pad_below needs the pre-rounded size to know how much to pad below
     so this must be done before rounding up.  */
  locate->offset = locate->slot_offset;
  if (where_pad == downward)
    pad_below (&locate->offset, passed_mode, sizetree);

  if (where_pad != none
      && (!host_integerp (sizetree, 1)
	  || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);

  ADD_PARM_SIZE (locate->size, sizetree);

  locate->size.constant -= part_size_in_regs;
#endif /* ARGS_GROW_DOWNWARD */
}

/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */

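/* For instance (illustrative): with BOUNDARY == 64, a zero
   STACK_POINTER_OFFSET and a constant offset of 20 bytes,
   CEIL_ROUND (20, 8) gives 24, so the offset becomes 24 and 4 bytes
   of padding are recorded in *ALIGNMENT_PAD.  */
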
static void
pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
		      struct args_size *alignment_pad)
{
  tree save_var = NULL_TREE;
  HOST_WIDE_INT save_constant = 0;
  int boundary_in_bytes = boundary / BITS_PER_UNIT;
  HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;

#ifdef SPARC_STACK_BOUNDARY_HACK
  /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
     the real alignment of %sp.  However, when it does this, the
     alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
  if (SPARC_STACK_BOUNDARY_HACK)
    sp_offset = 0;
#endif

  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
	{
	  tree sp_offset_tree = ssize_int (sp_offset);
	  tree offset = size_binop (PLUS_EXPR,
				    ARGS_SIZE_TREE (*offset_ptr),
				    sp_offset_tree);
#ifdef ARGS_GROW_DOWNWARD
	  tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
#else
	  tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
#endif

	  offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
	  /* ARGS_SIZE_TREE includes constant term.  */
	  offset_ptr->constant = 0;
	  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
					     save_var);
	}
      else
	{
	  offset_ptr->constant = -sp_offset +
#ifdef ARGS_GROW_DOWNWARD
	    FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#else
	    CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#endif
	  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
	    alignment_pad->constant = offset_ptr->constant - save_constant;
	}
    }
}

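/* Account in *OFFSET_PTR for the padding placed below an argument that
   is padded downward: add the amount by which the argument's size,
   rounded up to PARM_BOUNDARY, exceeds its actual size.  */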
static void
pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
{
  if (passed_mode != BLKmode)
    {
      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
	offset_ptr->constant
	  += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
	       / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
	      - GET_MODE_SIZE (passed_mode));
    }
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
	  || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
	{
	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
	  tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
	  /* Add it in.  */
	  ADD_PARM_SIZE (*offset_ptr, s2);
	  SUB_PARM_SIZE (*offset_ptr, sizetree);
	}
    }
}
\f

/* True if register REGNO was alive at a place where `setjmp' was
   called and was set more than once or is an argument.  Such regs may
   be clobbered by `longjmp'.  */

static bool
regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
{
  /* There appear to be cases where some local vars never reach the
     backend but have bogus regnos.  */
  if (regno >= max_reg_num ())
    return false;

  return ((REG_N_SETS (regno) > 1
	   || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
	  && REGNO_REG_SET_P (setjmp_crosses, regno));
}

/* Walk the tree of blocks describing the binding levels within a
   function and warn about variables that might be killed by setjmp or
   vfork.  This is done after flow analysis, but before register
   allocation, since register allocation will clobber the pseudo-regs
   to hard regs.  */

static void
setjmp_vars_warning (bitmap setjmp_crosses, tree block)
{
  tree decl, sub;

  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    {
      if (TREE_CODE (decl) == VAR_DECL
	  && DECL_RTL_SET_P (decl)
	  && REG_P (DECL_RTL (decl))
	  && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
	warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
		 " %<longjmp%> or %<vfork%>", decl);
    }

  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    setjmp_vars_warning (setjmp_crosses, sub);
}

/* Do the appropriate part of setjmp_vars_warning
   but for arguments instead of local variables.  */

static void
setjmp_args_warning (bitmap setjmp_crosses)
{
  tree decl;
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = TREE_CHAIN (decl))
    if (DECL_RTL (decl) != 0
	&& REG_P (DECL_RTL (decl))
	&& regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
      warning (OPT_Wclobbered,
	       "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
	       decl);
}

/* Generate warning messages for variables live across setjmp.  */

void
generate_setjmp_warnings (void)
{
  bitmap setjmp_crosses = regstat_get_setjmp_crosses ();

  if (n_basic_blocks == NUM_FIXED_BLOCKS
      || bitmap_empty_p (setjmp_crosses))
    return;

  setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
  setjmp_args_warning (setjmp_crosses);
}

\f
/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  */
/* ??? Need an option to either create block fragments or to create
   abstract origin duplicates of a source block.  It really depends
   on what optimization has been performed.  */

void
reorder_blocks (void)
{
  tree block = DECL_INITIAL (current_function_decl);
  VEC(tree,heap) *block_stack;

  if (block == NULL_TREE)
    return;

  block_stack = VEC_alloc (tree, heap, 10);

  /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
  clear_block_marks (block);

  /* Prune the old trees away, so that they don't get in the way.  */
  BLOCK_SUBBLOCKS (block) = NULL_TREE;
  BLOCK_CHAIN (block) = NULL_TREE;

  /* Recreate the block tree from the note nesting.  */
  reorder_blocks_1 (get_insns (), block, &block_stack);
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  VEC_free (tree, heap, block_stack);
}

/* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */

void
clear_block_marks (tree block)
{
  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;
      clear_block_marks (BLOCK_SUBBLOCKS (block));
      block = BLOCK_CHAIN (block);
    }
}

static void
reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
{
  rtx insn;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	{
	  if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
	    {
	      tree block = NOTE_BLOCK (insn);
	      tree origin;

	      origin = (BLOCK_FRAGMENT_ORIGIN (block)
			? BLOCK_FRAGMENT_ORIGIN (block)
			: block);

	      /* If we have seen this block before, that means it now
		 spans multiple address regions.  Create a new fragment.  */
	      if (TREE_ASM_WRITTEN (block))
		{
		  tree new_block = copy_node (block);

		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
		  BLOCK_FRAGMENT_CHAIN (new_block)
		    = BLOCK_FRAGMENT_CHAIN (origin);
		  BLOCK_FRAGMENT_CHAIN (origin) = new_block;

		  NOTE_BLOCK (insn) = new_block;
		  block = new_block;
		}

	      BLOCK_SUBBLOCKS (block) = 0;
	      TREE_ASM_WRITTEN (block) = 1;
	      /* When there's only one block for the entire function,
		 current_block == block and we mustn't do this, as it
		 will cause infinite recursion.  */
	      if (block != current_block)
		{
		  if (block != origin)
		    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);

		  BLOCK_SUPERCONTEXT (block) = current_block;
		  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
		  BLOCK_SUBBLOCKS (current_block) = block;
		  current_block = origin;
		}
	      VEC_safe_push (tree, heap, *p_block_stack, block);
	    }
	  else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
	    {
	      NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
	      BLOCK_SUBBLOCKS (current_block)
		= blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
	      current_block = BLOCK_SUPERCONTEXT (current_block);
	    }
	}
    }
}

/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

tree
blocks_nreverse (tree t)
{
  tree prev = 0, decl, next;
  for (decl = t; decl; decl = next)
    {
      next = BLOCK_CHAIN (decl);
      BLOCK_CHAIN (decl) = prev;
      prev = decl;
    }
  return prev;
}

/* Count the subblocks of the list starting with BLOCK.  If VECTOR is
   non-NULL, list them all into VECTOR, in a depth-first preorder
   traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (tree block, tree *vector)
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
	vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
			      vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}

/* Return a vector containing all the blocks rooted at BLOCK.  The
   number of elements in the vector is stored in N_BLOCKS_P.  The
   vector is dynamically allocated; it is the caller's responsibility
   to call `free' on the pointer returned.  */

static tree *
get_block_vector (tree block, int *n_blocks_p)
{
  tree *block_vector;

  *n_blocks_p = all_blocks (block, NULL);
  block_vector = XNEWVEC (tree, *n_blocks_p);
  all_blocks (block, block_vector);

  return block_vector;
}

static GTY(()) int next_block_index = 2;

/* Set BLOCK_NUMBER for all the blocks in FN.  */

void
number_blocks (tree fn)
{
  int i;
  int n_blocks;
  tree *block_vector;

  /* For SDB and XCOFF debugging output, we start numbering the blocks
     from 1 within each function, rather than keeping a running
     count.  */
#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
  if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
    next_block_index = 1;
#endif

  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);

  /* The top-level BLOCK isn't numbered at all.  */
  for (i = 1; i < n_blocks; ++i)
    /* We number the blocks from two.  */
    BLOCK_NUMBER (block_vector[i]) = next_block_index++;

  free (block_vector);

  return;
}

/* If VAR is present in a subblock of BLOCK, return the subblock.  */

tree
debug_find_var_in_block_tree (tree var, tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (t == var)
      return block;

  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree ret = debug_find_var_in_block_tree (var, t);
      if (ret)
	return ret;
    }

  return NULL_TREE;
}
\f

/* Return the current funcdef number, and increment it for the next
   function.  */
int
get_next_funcdef_no (void)
{
  return funcdef_no++;
}

/* Allocate a function structure for FNDECL and set its contents
   to the defaults.  */

void
allocate_struct_function (tree fndecl)
{
  tree result;
  tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;

  cfun = ggc_alloc_cleared (sizeof (struct function));

  cfun->stack_alignment_needed = STACK_BOUNDARY;
  cfun->preferred_stack_boundary = STACK_BOUNDARY;

  current_function_funcdef_no = get_next_funcdef_no ();

  cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;

  init_eh_for_function ();

  lang_hooks.function.init (cfun);
  if (init_machine_status)
    cfun->machine = (*init_machine_status) ();

  if (fndecl == NULL)
    return;

  DECL_STRUCT_FUNCTION (fndecl) = cfun;
  cfun->decl = fndecl;

  result = DECL_RESULT (fndecl);
  if (aggregate_value_p (result, fndecl))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      current_function_returns_pcc_struct = 1;
#endif
      current_function_returns_struct = 1;
    }

  current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));

  current_function_stdarg
    = (fntype
       && TYPE_ARG_TYPES (fntype) != 0
       && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
	   != void_type_node));
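
  /* That is, the function is stdarg when its prototype has at least one
     parameter and does not end with the void_type_node that terminates
     a fixed argument list: "int f (int, ...)" qualifies, while
     "int f (int)" and the unprototyped "int f ()" do not.  */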

  /* Assume all registers in stdarg functions need to be saved.  */
  cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
  cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
}

/* Reset cfun, and other non-struct-function variables to defaults as
   appropriate for emitting rtl at the start of a function.  */

static void
prepare_function_start (tree fndecl)
{
  if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
    cfun = DECL_STRUCT_FUNCTION (fndecl);
  else
    allocate_struct_function (fndecl);
  init_emit ();
  init_varasm_status (cfun);
  init_expr ();

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate that we want CONCATs now.  */
  generating_concat_p = 1;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;
}

/* Initialize the rtl expansion mechanism so that we can do simple things
   like generate sequences.  This is used to provide a context during global
   initialization of some passes.  */
void
init_dummy_function_start (void)
{
  prepare_function_start (NULL);
}

/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (tree subr)
{
  prepare_function_start (subr);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning (OPT_Waggregate_return, "function returns an aggregate");
}

/* Make sure all values used by the optimization passes have sane
   defaults.  */
unsigned int
init_function_for_compilation (void)
{
  reg_renumber = 0;

  /* No prologue/epilogue insns yet.  Make sure that these vectors are
     empty.  */
  gcc_assert (VEC_length (int, prologue) == 0);
  gcc_assert (VEC_length (int, epilogue) == 0);
  gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
  return 0;
}

struct tree_opt_pass pass_init_function =
{
  NULL,					/* name */
  NULL,					/* gate */
  init_function_for_compilation,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
  0					/* letter */
};

void
expand_main_function (void)
{
#if (defined(INVOKE__main)			\
     || (!defined(HAS_INIT_SECTION)		\
	 && !defined(INIT_SECTION_ASM_OP)	\
	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
#endif
}
\f
/* Expand code to initialize the stack_protect_guard.  This is invoked at
   the beginning of a function to be protected.  */

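/* Conceptually the emitted sequence is simply "<guard slot> = <guard>";
   the target's stack_protect_set pattern, when available, performs the
   copy without exposing the guard value in a register.  */
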
#ifndef HAVE_stack_protect_set
# define HAVE_stack_protect_set		0
# define gen_stack_protect_set(x,y)	(gcc_unreachable (), NULL_RTX)
#endif

void
stack_protect_prologue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx x, y;

  /* Avoid expand_expr here, because we don't want guard_decl pulled
     into registers unless absolutely necessary.  And we know that
     cfun->stack_protect_guard is a local stack slot, so this skips
     all the fluff.  */
  x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
  y = validize_mem (DECL_RTL (guard_decl));

  /* Allow the target to copy from Y to X without leaking Y into a
     register.  */
  if (HAVE_stack_protect_set)
    {
      rtx insn = gen_stack_protect_set (x, y);
      if (insn)
	{
	  emit_insn (insn);
	  return;
	}
    }

  /* Otherwise do a straight move.  */
  emit_move_insn (x, y);
}

/* Expand code to verify the stack_protect_guard.  This is invoked at
   the end of a function to be protected.  */

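/* Conceptually this emits "if (<guard slot> != <guard>) fail ();", where
   the failure path runs the expression supplied by
   targetm.stack_protect_fail (typically a call such as __stack_chk_fail;
   the exact callee is target- and libc-dependent).  */
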
#ifndef HAVE_stack_protect_test
# define HAVE_stack_protect_test		0
# define gen_stack_protect_test(x, y, z)	(gcc_unreachable (), NULL_RTX)
#endif

void
stack_protect_epilogue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx label = gen_label_rtx ();
  rtx x, y, tmp;

  /* Avoid expand_expr here, because we don't want guard_decl pulled
     into registers unless absolutely necessary.  And we know that
     cfun->stack_protect_guard is a local stack slot, so this skips
     all the fluff.  */
  x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
  y = validize_mem (DECL_RTL (guard_decl));

  /* Allow the target to compare Y with X without leaking either into
     a register.  */
  switch (HAVE_stack_protect_test != 0)
    {
    case 1:
      tmp = gen_stack_protect_test (x, y, label);
      if (tmp)
	{
	  emit_insn (tmp);
	  break;
	}
      /* FALLTHRU */

    default:
      emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
      break;
    }

  /* The noreturn predictor has been moved to the tree level.  The rtl-level
     predictors estimate this branch about 20%, which isn't enough to get
     things moved out of line.  Since this is the only extant case of adding
     a noreturn function at the rtl level, it doesn't seem worth doing
     anything except adding the prediction by hand.  */
  tmp = get_last_insn ();
  if (JUMP_P (tmp))
    predict_insn_def (tmp, PRED_NORETURN, TAKEN);

  expand_expr_stmt (targetm.stack_protect_fail ());
  emit_label (label);
}
\f
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.  */

void
expand_function_start (tree subr)
{
  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  current_function_profile
    = (profile_flag
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  current_function_limit_stack
    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));

  /* Make the label for return statements to jump to.  Do not special
     case machines with special return instructions -- they will be
     handled later during jump, ifcvt, or epilogue creation.  */
  return_label = gen_label_rtx ();

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr), subr))
    {
      /* Returning something that won't go in a register.  */
      rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (current_function_returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (sv)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, sv);
	    }
	}
      if (value_address)
	{
	  rtx x = value_address;
	  if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
	    {
	      x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
	      set_mem_attributes (x, DECL_RESULT (subr), 1);
	    }
	  SET_DECL_RTL (DECL_RESULT (subr), x);
	}
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
  else
    {
      /* Compute the return values into a pseudo reg, which we will copy
	 into the true return register after the cleanups are done.  */
      tree return_type = TREE_TYPE (DECL_RESULT (subr));
      if (TYPE_MODE (return_type) != BLKmode
	  && targetm.calls.return_in_msb (return_type))
	/* expand_function_end will insert the appropriate padding in
	   this case.  Use the return value's natural (unpadded) mode
	   within the function proper.  */
	SET_DECL_RTL (DECL_RESULT (subr),
		      gen_reg_rtx (TYPE_MODE (return_type)));
      else
	{
	  /* In order to figure out what mode to use for the pseudo, we
	     figure out what the mode of the eventual return register will
	     actually be, and use that.  */
	  rtx hard_reg = hard_function_value (return_type, subr, 0, 1);

	  /* Structures that are returned in registers are not
	     aggregate_value_p, so we may see a PARALLEL or a REG.  */
	  if (REG_P (hard_reg))
	    SET_DECL_RTL (DECL_RESULT (subr),
			  gen_reg_rtx (GET_MODE (hard_reg)));
	  else
	    {
	      gcc_assert (GET_CODE (hard_reg) == PARALLEL);
	      SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
	    }
	}

      /* Set DECL_REGISTER flag so that expand_function_end will copy the
	 result to the real return register(s).  */
      DECL_REGISTER (DECL_RESULT (subr)) = 1;
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */
  assign_parms (subr);

  /* If the function gets a static chain arg, store it.  */
  if (cfun->static_chain_decl)
    {
      tree parm = cfun->static_chain_decl;
      rtx local = gen_reg_rtx (Pmode);

      set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
      SET_DECL_RTL (parm, local);
      mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));

      emit_move_insn (local, static_chain_incoming_rtx);
    }

  /* If the function receives a non-local goto, then store the
     bits we need to restore the frame pointer.  */
  if (cfun->nonlocal_goto_save_area)
    {
      tree t_save;
      rtx r_save;

      /* ??? We need to do this save early.  Unfortunately here is
	 before the frame variable gets declared.  Help out...  */
      expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));

      t_save = build4 (ARRAY_REF, ptr_type_node,
		       cfun->nonlocal_goto_save_area,
		       integer_zero_node, NULL_TREE, NULL_TREE);
      r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
      r_save = convert_memory_address (Pmode, r_save);

      emit_move_insn (r_save, virtual_stack_vars_rtx);
      update_nonlocal_goto_save_area ();
    }

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NOTE_INSN_FUNCTION_BEG);

  gcc_assert (NOTE_P (get_last_insn ()));

  parm_birth_insn = get_last_insn ();

  if (current_function_profile)
    {
#ifdef PROFILE_HOOK
      PROFILE_HOOK (current_function_funcdef_no);
#endif
    }

  /* After the display initializations is where the stack checking
     probe should go.  */
  if (flag_stack_check)
    stack_check_probe_note = emit_note (NOTE_INSN_DELETED);

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
\f
/* Undo the effects of init_dummy_function_start.  */
void
expand_dummy_function_end (void)
{
  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  cfun = 0;
}

/* Call DOIT for each hard register used as a return value from
   the current function.  */
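/* clobber_return_register and use_return_register below use this to
   apply do_clobber_return_reg and do_use_return_reg, respectively, to
   every hard register in the return value's REG or PARALLEL rtx.  */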

void
diddle_return_value (void (*doit) (rtx, void *), void *arg)
{
  rtx outgoing = current_function_return_rtx;

  if (! outgoing)
    return;

  if (REG_P (outgoing))
    (*doit) (outgoing, arg);
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
	{
	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

	  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	    (*doit) (x, arg);
	}
    }
}

static void
do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
}

void
clobber_return_register (void)
{
  diddle_return_value (do_clobber_return_reg, NULL);

  /* In case we do use a pseudo to return the value, clobber it too.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);
      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
	{
	  do_clobber_return_reg (decl_rtl, NULL);
	}
    }
}

static void
do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_insn (gen_rtx_USE (VOIDmode, reg));
}

static void
use_return_register (void)
{
  diddle_return_value (do_use_return_reg, NULL);
}

/* Possibly warn about unused parameters.  */
void
do_warn_unused_parameter (tree fn)
{
  tree decl;

  for (decl = DECL_ARGUMENTS (fn);
       decl; decl = TREE_CHAIN (decl))
    if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
	&& DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
	&& !TREE_NO_WARNING (decl))
      warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
}

static GTY(()) rtx initial_trampoline;

/* Generate RTL for the end of the current function.  */

void
expand_function_end (void)
{
  rtx clobber_after;

  /* If arg_pointer_save_area was referenced only from a nested
     function, we will not have initialized it yet.  Do that now.  */
  if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
    get_arg_pointer_save_area (cfun);

  /* If we are doing stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (CALL_P (insn))
	  {
	    start_sequence ();
	    probe_stack_range (STACK_CHECK_PROTECT,
			       GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
	    seq = get_insns ();
	    end_sequence ();
	    emit_insn_before (seq, stack_check_probe_note);
	    break;
	  }
    }

  /* Possibly warn about unused parameters.
     When the frontend does unit-at-a-time, the warning is already
     issued at finalization time.  */
  if (warn_unused_parameter
      && !lang_hooks.callgraph.expand_function)
    do_warn_unused_parameter (current_function_decl);

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Output a line number for the end of the function.
     SDB depends on this.  */
  force_next_line_note ();
  set_curr_insn_source_location (input_location);

  /* Before the return label (if any), clobber the return
     registers so that they are not propagated live to the rest of
     the function.  This can only happen with functions that drop
     through; if there had been a return statement, there would
     have either been a return rtx, or a jump to the return label.

     We delay actual code generation until after the
     current_function_value_rtx is computed.  */
  clobber_after = get_last_insn ();

  /* Output the label for the actual return from the function.  */
  emit_label (return_label);

  if (USING_SJLJ_EXCEPTIONS)
    {
      /* Let except.c know where it should emit the call to unregister
	 the function context for sjlj exceptions.  */
      if (flag_exceptions)
	sjlj_emit_function_exit_after (get_last_insn ());
    }
  else
    {
      /* We want to ensure that instructions that may trap are not
	 moved into the epilogue by scheduling, because we don't
	 always emit unwind information for the epilogue.  */
      if (flag_non_call_exceptions)
	emit_insn (gen_blockage ());
    }

  /* If this is an implementation of throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl = current_function_return_rtx;

	  /* This should be set in assign_parms.  */
	  gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));

	  /* If this is a BLKmode structure being returned in registers,
	     then use the mode computed in expand_return.  Note that if
	     decl_rtl is memory, then its mode may have been changed,
	     but that current_function_return_rtx has not.  */
	  if (GET_MODE (real_decl_rtl) == BLKmode)
	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));

	  /* If a non-BLKmode return value should be padded at the least
	     significant end of the register, shift it left by the appropriate
	     amount.  BLKmode results are handled using the group load/store
	     machinery.  */
	  if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
	      && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
	    {
	      emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
					   REGNO (real_decl_rtl)),
			      decl_rtl);
	      shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
	    }
	  /* If a named return value dumped decl_return to memory, then
	     we may need to re-do the PROMOTE_MODE signed/unsigned
	     extension.  */
	  else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
	    {
	      int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));

	      if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
		promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
			      &unsignedp, 1);

	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
	    }
	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
	    {
	      /* If expand_function_start has created a PARALLEL for decl_rtl,
		 move the result to the real return registers.  Otherwise, do
		 a group load from decl_rtl for a named return.  */
	      if (GET_CODE (decl_rtl) == PARALLEL)
		emit_group_move (real_decl_rtl, decl_rtl);
	      else
		emit_group_load (real_decl_rtl, decl_rtl,
				 TREE_TYPE (decl_result),
				 int_size_in_bytes (TREE_TYPE (decl_result)));
	    }
	  /* In the case of complex integer modes smaller than a word, we'll
	     need to generate some non-trivial bitfield insertions.  Do that
	     on a pseudo and not the hard register.  */
	  else if (GET_CODE (decl_rtl) == CONCAT
		   && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
		   && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
	    {
	      int old_generating_concat_p;
	      rtx tmp;

	      old_generating_concat_p = generating_concat_p;
	      generating_concat_p = 0;
	      tmp = gen_reg_rtx (GET_MODE (decl_rtl));
	      generating_concat_p = old_generating_concat_p;

	      emit_move_insn (tmp, decl_rtl);
	      emit_move_insn (real_decl_rtl, tmp);
	    }
	  else
	    emit_move_insn (real_decl_rtl, decl_rtl);
	}
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      rtx outgoing;

      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
	type = TREE_TYPE (type);
      else
	value_address = XEXP (value_address, 0);

      outgoing = targetm.calls.function_value (build_pointer_type (type),
					       current_function_decl, true);

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
      value_address = convert_memory_address (GET_MODE (outgoing),
					      value_address);

      emit_move_insn (outgoing, value_address);

      /* Show the return register used to hold the result (in this case
	 the address of the result).  */
      current_function_return_rtx = outgoing;
    }

  /* Emit the actual code to clobber return register.  */
  {
    rtx seq;

    start_sequence ();
    clobber_return_register ();
    expand_naked_return ();
    seq = get_insns ();
    end_sequence ();

    emit_insn_after (seq, clobber_after);
  }

  /* Output the label for the naked return from the function.  */
  emit_label (naked_return_label);

  /* @@@ This is a kludge.  We want to ensure that instructions that
     may trap are not moved into the epilogue by scheduling, because
     we don't always emit unwind information for the epilogue.  */
  if (! USING_SJLJ_EXCEPTIONS && flag_non_call_exceptions)
    emit_insn (gen_blockage ());

  /* If stack protection is enabled for this function, check the guard.  */
  if (cfun->stack_protect_guard)
    stack_protect_epilogue ();

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
  if (! EXIT_IGNORE_STACK
      && current_function_calls_alloca)
    {
      rtx tem = 0;

      emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
      emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
    }

  /* ??? This should no longer be necessary since stupid is no longer with
     us, but there are some parts of the compiler (eg reload_combine, and
     sh mach_dep_reorg) that still try and compute their own lifetime info
     instead of using the general framework.  */
  use_return_register ();
}

rtx
get_arg_pointer_save_area (struct function *f)
{
  rtx ret = f->x_arg_pointer_save_area;

  if (! ret)
    {
      ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
      f->x_arg_pointer_save_area = ret;
    }

  if (f == cfun && ! f->arg_pointer_save_area_init)
    {
      rtx seq;

      /* Save the arg pointer at the beginning of the function.  The
	 generated stack slot may not be a valid memory address, so we
	 have to check it and fix it if necessary.  */
      start_sequence ();
      emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
      seq = get_insns ();
      end_sequence ();

      push_topmost_sequence ();
      emit_insn_after (seq, entry_of_function ());
      pop_topmost_sequence ();
    }

  return ret;
}
\f
/* Extend a vector that records the INSN_UIDs of INSNS
   (a list of one or more insns).  */

static void
record_insns (rtx insns, VEC(int,heap) **vecp)
{
  rtx tmp;

  for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
    VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
}

/* Set the locator of the insn chain starting at INSN to LOC.  */
static void
set_insn_locators (rtx insn, int loc)
{
  while (insn != NULL_RTX)
    {
      if (INSN_P (insn))
	INSN_LOCATOR (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}

/* Determine how many INSN_UIDs in VEC are part of INSN.  Because we can
   be running after reorg, SEQUENCE rtl is possible.  */
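/* For example, a delay-slot SEQUENCE produced by reorg counts once for
   each of its member insns whose UID appears in VEC.  */
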
static int
contains (const_rtx insn, VEC(int,heap) **vec)
{
  int i, j;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
	      == VEC_index (int, *vec, j))
	    count++;
      return count;
    }
  else
    {
      for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
	if (INSN_UID (insn) == VEC_index (int, *vec, j))
	  return 1;
    }
  return 0;
}

int
prologue_epilogue_contains (const_rtx insn)
{
  if (contains (insn, &prologue))
    return 1;
  if (contains (insn, &epilogue))
    return 1;
  return 0;
}

int
sibcall_epilogue_contains (const_rtx insn)
{
  if (sibcall_epilogue)
    return contains (insn, &sibcall_epilogue);
  return 0;
}

#ifdef HAVE_return
/* Insert gen_return at the end of block BB.  This also means updating
   block_for_insn appropriately.  */

static void
emit_return_into_block (basic_block bb)
{
  emit_jump_insn_after (gen_return (), BB_END (bb));
}
#endif /* HAVE_return */

#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)

/* These functions convert the epilogue into a variant that does not
   modify the stack pointer.  This is used in cases where a function
   returns an object whose size is not known until it is computed.
   The called function leaves the object on the stack, leaves the
   stack depressed, and returns a pointer to the object.

   What we need to do is track all modifications and references to the
   stack pointer, deleting the modifications and changing the
   references to point to the location the stack pointer would have
   pointed to had the modifications taken place.

   These functions need to be portable so we need to make as few
   assumptions about the epilogue as we can.  However, the epilogue
   basically contains three things: instructions to reset the stack
   pointer, instructions to reload registers, possibly including the
   frame pointer, and an instruction to return to the caller.

   We must be sure of what a relevant epilogue insn is doing.  We also
   make no attempt to validate the insns we make since if they are
   invalid, we probably can't do anything valid.  The intent is that
   these routines get "smarter" as more and more machines start to use
   them and they try operating on different epilogues.

   We use the following structure to track what the part of the
   epilogue that we've already processed has done.  We keep two copies
   of the SP equivalence, one for use during the insn we are
   processing and one for use in the next insn.  The difference is
   because one part of a PARALLEL may adjust SP and the other may use
   it.  */

struct epi_info
{
  rtx sp_equiv_reg;             /* REG that SP is set from, perhaps SP.  */
  HOST_WIDE_INT sp_offset;      /* Offset from SP_EQUIV_REG of present SP.  */
  rtx new_sp_equiv_reg;         /* REG to be used at end of insn.  */
  HOST_WIDE_INT new_sp_offset;  /* Offset to be used at end of insn.  */
  rtx equiv_reg_src;            /* If nonzero, the value that SP_EQUIV_REG
                                   should be set to once we no longer need
                                   its value.  */
  rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
                                             for registers.  */
};

static void handle_epilogue_set (rtx, struct epi_info *);
static void update_epilogue_consts (rtx, const_rtx, void *);
static void emit_equiv_load (struct epi_info *);
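
/* A worked example of the tracking above, using a hypothetical epilogue
   (not taken from any particular target):

       (set (reg r1) (plus (reg sp) (const_int 16)))
       (set (reg sp) (reg r1))

   The first insn only references SP, so it is emitted essentially
   unchanged.  For the second, handle_epilogue_set records r1 as the new
   SP equivalence with offset 0 and emits nothing; a later reference such
   as (mem (plus (reg sp) (const_int 8))) can then be rewritten as
   (mem (plus (reg r1) (const_int 8))).  */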

/* Modify INSNS, a list of one or more insns that is part of the epilogue,
   so that it makes no modifications to the stack pointer.  Return the new
   list of insns.  */

static rtx
keep_stack_depressed (rtx insns)
{
  int j;
  struct epi_info info;
  rtx insn, next;

  /* If the epilogue is just a single instruction, it must be OK as is.  */
  if (NEXT_INSN (insns) == NULL_RTX)
    return insns;

  /* Otherwise, start a sequence, initialize the information we have, and
     process all the insns we were given.  */
  start_sequence ();

  info.sp_equiv_reg = stack_pointer_rtx;
  info.sp_offset = 0;
  info.equiv_reg_src = 0;

  for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
    info.const_equiv[j] = 0;

  insn = insns;
  next = NULL_RTX;
  while (insn != NULL_RTX)
    {
      next = NEXT_INSN (insn);

      if (!INSN_P (insn))
        {
          add_insn (insn);
          insn = next;
          continue;
        }

      /* If this insn references the register that SP is equivalent to and
         we have a pending load to that register, we must force out the load
         first and then indicate we no longer know what SP's equivalent is.  */
      if (info.equiv_reg_src != 0
          && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
        {
          emit_equiv_load (&info);
          info.sp_equiv_reg = 0;
        }

      info.new_sp_equiv_reg = info.sp_equiv_reg;
      info.new_sp_offset = info.sp_offset;

      /* If this is a (RETURN) and the return address is on the stack,
         update the address and change to an indirect jump.  */
      if (GET_CODE (PATTERN (insn)) == RETURN
          || (GET_CODE (PATTERN (insn)) == PARALLEL
              && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
        {
          rtx retaddr = INCOMING_RETURN_ADDR_RTX;
          rtx base = 0;
          HOST_WIDE_INT offset = 0;
          rtx jump_insn, jump_set;

          /* If the return address is in a register, we can emit the insn
             unchanged.  Otherwise, it must be a MEM and we see what the
             base register and offset are.  In any case, we have to emit any
             pending load to the equivalent reg of SP, if any.  */
          if (REG_P (retaddr))
            {
              emit_equiv_load (&info);
              add_insn (insn);
              insn = next;
              continue;
            }
          else
            {
              rtx ret_ptr;
              gcc_assert (MEM_P (retaddr));

              ret_ptr = XEXP (retaddr, 0);

              if (REG_P (ret_ptr))
                {
                  base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
                  offset = 0;
                }
              else
                {
                  gcc_assert (GET_CODE (ret_ptr) == PLUS
                              && REG_P (XEXP (ret_ptr, 0))
                              && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
                  base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
                  offset = INTVAL (XEXP (ret_ptr, 1));
                }
            }

          /* If the base of the location containing the return pointer
             is SP, we must update it with the replacement address.  Otherwise,
             just build the necessary MEM.  */
          retaddr = plus_constant (base, offset);
          if (base == stack_pointer_rtx)
            retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
                                            plus_constant (info.sp_equiv_reg,
                                                           info.sp_offset));

          retaddr = gen_rtx_MEM (Pmode, retaddr);
          MEM_NOTRAP_P (retaddr) = 1;

          /* If there is a pending load to the equivalent register for SP
             and we reference that register, we must load our address into
             a scratch register and then do that load.  */
          if (info.equiv_reg_src
              && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
            {
              unsigned int regno;
              rtx reg;

              for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
                if (HARD_REGNO_MODE_OK (regno, Pmode)
                    && !fixed_regs[regno]
                    && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
                    && !REGNO_REG_SET_P
                         (DF_LR_IN (EXIT_BLOCK_PTR), regno)
                    && !refers_to_regno_p (regno,
                                           end_hard_regno (Pmode, regno),
                                           info.equiv_reg_src, NULL)
                    && info.const_equiv[regno] == 0)
                  break;

              gcc_assert (regno < FIRST_PSEUDO_REGISTER);

              reg = gen_rtx_REG (Pmode, regno);
              emit_move_insn (reg, retaddr);
              retaddr = reg;
            }

          emit_equiv_load (&info);
          jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));

          /* Show the SET in the above insn is a RETURN.  */
          jump_set = single_set (jump_insn);
          gcc_assert (jump_set);
          SET_IS_RETURN_P (jump_set) = 1;
        }

      /* If SP is not mentioned in the pattern and its equivalent register, if
         any, is not modified, just emit it.  Otherwise, if neither is set,
         replace the reference to SP and emit the insn.  If none of those are
         true, handle each SET individually.  */
      else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
               && (info.sp_equiv_reg == stack_pointer_rtx
                   || !reg_set_p (info.sp_equiv_reg, insn)))
        add_insn (insn);
      else if (! reg_set_p (stack_pointer_rtx, insn)
               && (info.sp_equiv_reg == stack_pointer_rtx
                   || !reg_set_p (info.sp_equiv_reg, insn)))
        {
          int changed;

          changed = validate_replace_rtx (stack_pointer_rtx,
                                          plus_constant (info.sp_equiv_reg,
                                                         info.sp_offset),
                                          insn);
          gcc_assert (changed);

          add_insn (insn);
        }
      else if (GET_CODE (PATTERN (insn)) == SET)
        handle_epilogue_set (PATTERN (insn), &info);
      else if (GET_CODE (PATTERN (insn)) == PARALLEL)
        {
          for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
            if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
              handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
        }
      else
        add_insn (insn);

      info.sp_equiv_reg = info.new_sp_equiv_reg;
      info.sp_offset = info.new_sp_offset;

      /* Now update any constants this insn sets.  */
      note_stores (PATTERN (insn), update_epilogue_consts, &info);
      insn = next;
    }

  insns = get_insns ();
  end_sequence ();
  return insns;
}

/* SET is a SET from an insn in the epilogue.  P is a pointer to the epi_info
   structure that contains information about what we've seen so far.  We
   process this SET by either updating that data or by emitting one or
   more insns.  */

static void
handle_epilogue_set (rtx set, struct epi_info *p)
{
  /* First handle the case where we are setting SP.  Record what it is being
     set from, which we must be able to determine.  */
  if (reg_set_p (stack_pointer_rtx, set))
    {
      gcc_assert (SET_DEST (set) == stack_pointer_rtx);

      if (GET_CODE (SET_SRC (set)) == PLUS)
        {
          p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
          if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
            p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
          else
            {
              gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
                          && (REGNO (XEXP (SET_SRC (set), 1))
                              < FIRST_PSEUDO_REGISTER)
                          && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
              p->new_sp_offset
                = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
            }
        }
      else
        p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;

      /* If we are adjusting SP, we adjust from the old data.  */
      if (p->new_sp_equiv_reg == stack_pointer_rtx)
        {
          p->new_sp_equiv_reg = p->sp_equiv_reg;
          p->new_sp_offset += p->sp_offset;
        }

      gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));

      return;
    }

  /* Next handle the case where we are setting SP's equivalent
     register.  We must not already have a value to set it to.  We
     could update, but there seems little point in handling that case.
     Note that we have to allow for the case where we are setting the
     register set in the previous part of a PARALLEL inside a single
     insn.  But use the old offset for any updates within this insn.
     We must allow for the case where the register is being set in a
     different (usually wider) mode than Pmode.  */
  else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
    {
      gcc_assert (!p->equiv_reg_src
                  && REG_P (p->new_sp_equiv_reg)
                  && REG_P (SET_DEST (set))
                  && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
                      <= BITS_PER_WORD)
                  && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
      p->equiv_reg_src
        = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
                                plus_constant (p->sp_equiv_reg,
                                               p->sp_offset));
    }

  /* Otherwise, replace any references to SP in the insn to its new value
     and emit the insn.  */
  else
    {
      SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
                                            plus_constant (p->sp_equiv_reg,
                                                           p->sp_offset));
      SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
                                             plus_constant (p->sp_equiv_reg,
                                                            p->sp_offset));
      emit_insn (set);
    }
}
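
/* For instance, a plain stack-pointer adjustment such as

       (set (reg sp) (plus (reg sp) (const_int 16)))

   takes the first branch above: SET_SRC is a PLUS with a CONST_INT, and
   because NEW_SP_EQUIV_REG comes out as SP itself, the "adjusting SP"
   case folds in the old equivalence, so the net effect is just
   NEW_SP_OFFSET += 16 and no insn is emitted.  */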

/* Update the tracking information for registers set to constants.  */

static void
update_epilogue_consts (rtx dest, const_rtx x, void *data)
{
  struct epi_info *p = (struct epi_info *) data;
  rtx new;

  if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    return;

  /* If we are either clobbering a register or doing a partial set,
     show we don't know the value.  */
  else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
    p->const_equiv[REGNO (dest)] = 0;

  /* If we are setting it to a constant, record that constant.  */
  else if (GET_CODE (SET_SRC (x)) == CONST_INT)
    p->const_equiv[REGNO (dest)] = SET_SRC (x);

  /* If this is a binary operation between a register we have been tracking
     and a constant, see if we can compute a new constant value.  */
  else if (ARITHMETIC_P (SET_SRC (x))
           && REG_P (XEXP (SET_SRC (x), 0))
           && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
           && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
           && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
           && 0 != (new = simplify_binary_operation
                    (GET_CODE (SET_SRC (x)), GET_MODE (dest),
                     p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
                     XEXP (SET_SRC (x), 1)))
           && GET_CODE (new) == CONST_INT)
    p->const_equiv[REGNO (dest)] = new;

  /* Otherwise, we can't do anything with this value.  */
  else
    p->const_equiv[REGNO (dest)] = 0;
}
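
/* For example, with a hypothetical hard register r3 (REGNO 3):

       (set (reg r3) (const_int 8))    records const_equiv[3] = (const_int 8)
       (set (reg r3) (plus (reg r3) (const_int 4)))
                                       folds to const_equiv[3] = (const_int 12)

   whereas a CLOBBER of r3, or a set of only part of it, resets
   const_equiv[3] to 0.  */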

/* Emit an insn to do the load shown in p->equiv_reg_src, if needed.  */

static void
emit_equiv_load (struct epi_info *p)
{
  if (p->equiv_reg_src != 0)
    {
      rtx dest = p->sp_equiv_reg;

      if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
        dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
                            REGNO (p->sp_equiv_reg));

      emit_move_insn (dest, p->equiv_reg_src);
      p->equiv_reg_src = 0;
    }
}
#endif

/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

static void
thread_prologue_and_epilogue_insns (void)
{
  int inserted = 0;
  edge e;
#if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
  rtx seq;
#endif
#if defined (HAVE_epilogue) || defined(HAVE_return)
  rtx epilogue_end = NULL_RTX;
#endif
  edge_iterator ei;

#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Insert an explicit USE for the frame pointer
         if profiling is on and the frame pointer is required.  */
      if (current_function_profile && frame_pointer_needed)
        emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

      /* Retain a map of the prologue insns.  */
      record_insns (seq, &prologue);
      emit_note (NOTE_INSN_PROLOGUE_END);

#ifndef PROFILE_BEFORE_PROLOGUE
      /* Ensure that instructions are not moved into the prologue when
         profiling is on.  The call to the profiling routine can be
         emitted within the live range of a call-clobbered register.  */
      if (current_function_profile)
        emit_insn (gen_blockage ());
#endif

      seq = get_insns ();
      end_sequence ();
      set_insn_locators (seq, prologue_locator);

      /* We can't deal with multiple successors of the entry block at the
         moment; the function should always have at least one entry point.  */
      gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));

      insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
      inserted = 1;
    }
#endif

  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;

#ifdef HAVE_return
  if (optimize && HAVE_return)
    {
      /* If we're allowed to generate a simple return instruction, then by
         definition we don't need a full epilogue.  Examine the block that
         falls through to EXIT.  If it does not contain any code, examine its
         predecessors and try to emit (conditional) return instructions.  */

      basic_block last;
      rtx label;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        if (e->flags & EDGE_FALLTHRU)
          break;
      if (e == NULL)
        goto epilogue_done;
      last = e->src;

      /* Verify that there are no active instructions in the last block.  */
      label = BB_END (last);
      while (label && !LABEL_P (label))
        {
          if (active_insn_p (label))
            break;
          label = PREV_INSN (label);
        }

      if (BB_HEAD (last) == label && LABEL_P (label))
        {
          edge_iterator ei2;

          for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
            {
              basic_block bb = e->src;
              rtx jump;

              if (bb == ENTRY_BLOCK_PTR)
                {
                  ei_next (&ei2);
                  continue;
                }

              jump = BB_END (bb);
              if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
                {
                  ei_next (&ei2);
                  continue;
                }

              /* If we have an unconditional jump, we can replace that
                 with a simple return instruction.  */
              if (simplejump_p (jump))
                {
                  emit_return_into_block (bb);
                  delete_insn (jump);
                }

              /* If we have a conditional jump, we can try to replace
                 that with a conditional return instruction.  */
              else if (condjump_p (jump))
                {
                  if (! redirect_jump (jump, 0, 0))
                    {
                      ei_next (&ei2);
                      continue;
                    }

                  /* If this block has only one successor, it both jumps
                     and falls through to the fallthru block, so we can't
                     delete the edge.  */
                  if (single_succ_p (bb))
                    {
                      ei_next (&ei2);
                      continue;
                    }
                }
              else
                {
                  ei_next (&ei2);
                  continue;
                }

              /* Fix up the CFG for the successful change we just made.  */
              redirect_edge_succ (e, EXIT_BLOCK_PTR);
            }

          /* Emit a return insn for the exit fallthru block.  Whether
             this is still reachable will be determined later.  */

          emit_barrier_after (BB_END (last));
          emit_return_into_block (last);
          epilogue_end = BB_END (last);
          single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
          goto epilogue_done;
        }
    }
#endif
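
/* As an illustrative sketch of the HAVE_return case above: when the block
   falling through to EXIT is just a label with no active insns, each
   unconditional jump to that label is replaced by a bare return insn, and
   each conditional jump is redirected (via redirect_jump with a null
   label) into a conditional return where the target supports one.  */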

  /* Find the edge that falls through to EXIT.  Other edges may exist
     due to RETURN instructions, but those don't need epilogues.
     There really shouldn't be a mixture -- either all should have
     been converted or none, however...  */

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      break;
  if (e == NULL)
    goto epilogue_done;

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      start_sequence ();
      epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);

      seq = gen_epilogue ();

#ifdef INCOMING_RETURN_ADDR_RTX
      /* If this function returns with the stack depressed and we can support
         it, massage the epilogue to actually do that.  */
      if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
          && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
        seq = keep_stack_depressed (seq);
#endif

      emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      record_insns (seq, &epilogue);
      set_insn_locators (seq, epilogue_locator);

      seq = get_insns ();
      end_sequence ();

      insert_insn_on_edge (seq, e);
      inserted = 1;
    }
  else
#endif
    {
      basic_block cur_bb;

      if (! next_active_insn (BB_END (e->src)))
        goto epilogue_done;
      /* We have a fall-through edge to the exit block, the source is not
         at the end of the function, and there will be an assembler epilogue
         at the end of the function.
         We can't use force_nonfallthru here, because that would try to
         use return.  Inserting a jump 'by hand' is extremely messy, so
         we take advantage of cfg_layout_finalize using
         fixup_fallthru_exit_predecessor.  */
      cfg_layout_initialize (0);
      FOR_EACH_BB (cur_bb)
        if (cur_bb->index >= NUM_FIXED_BLOCKS
            && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
          cur_bb->aux = cur_bb->next_bb;
      cfg_layout_finalize ();
    }
epilogue_done:

  if (inserted)
    {
      commit_edge_insertions ();

      /* The epilogue insns we inserted may cause the exit edge to no longer
         be fallthru.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          if (((e->flags & EDGE_FALLTHRU) != 0)
              && returnjump_p (BB_END (e->src)))
            e->flags &= ~EDGE_FALLTHRU;
        }
    }

#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      basic_block bb = e->src;
      rtx insn = BB_END (bb);

      if (!CALL_P (insn)
          || ! SIBLING_CALL_P (insn))
        {
          ei_next (&ei);
          continue;
        }

      start_sequence ();
      emit_insn (gen_sibcall_epilogue ());
      seq = get_insns ();
      end_sequence ();

      /* Retain a map of the epilogue insns.  Used in life analysis to
         avoid getting rid of sibcall epilogue insns.  Do this before we
         actually emit the sequence.  */
      record_insns (seq, &sibcall_epilogue);
      set_insn_locators (seq, epilogue_locator);

      emit_insn_before (seq, insn);
      ei_next (&ei);
    }
#endif

#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Move any NOTE_INSN_FUNCTION_BEG notes that appear after the
         start of the epilogue back before it, as such notes can be
         relevant for debug info generation.  There is no need, however,
         to be overly strict about the existence of such a note.  */
      for (insn = epilogue_end; insn; insn = next)
        {
          next = NEXT_INSN (insn);
          if (NOTE_P (insn)
              && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
            reorder_insns (insn, insn, PREV_INSN (epilogue_end));
        }
    }
#endif

  /* Threading the prologue and epilogue changes the artificial refs
     in the entry and exit blocks.  */
  epilogue_completed = 1;
  df_update_entry_exit_and_calls ();
}

/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (void)
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  rtx insn, last, note;
  int len;

  if ((len = VEC_length (int, prologue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the beginning until we reach the last prologue insn.
         We apparently can't depend on basic_block_{head,end} after
         reorg has run.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          if (NOTE_P (insn))
            {
              if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
                note = insn;
            }
          else if (contains (insn, &prologue))
            {
              last = insn;
              if (--len == 0)
                break;
            }
        }

      if (last)
        {
          /* Find the prologue-end note if we haven't already, and
             move it to just after the last prologue insn.  */
          if (note == 0)
            {
              for (note = last; (note = NEXT_INSN (note));)
                if (NOTE_P (note)
                    && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
                  break;
            }

          /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
          if (LABEL_P (last))
            last = NEXT_INSN (last);
          reorder_insns (note, note, last);
        }
    }

  if ((len = VEC_length (int, epilogue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the end until we reach the first epilogue insn.
         We apparently can't depend on basic_block_{head,end} after
         reorg has run.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
        {
          if (NOTE_P (insn))
            {
              if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
                note = insn;
            }
          else if (contains (insn, &epilogue))
            {
              last = insn;
              if (--len == 0)
                break;
            }
        }

      if (last)
        {
          /* Find the epilogue-begin note if we haven't already, and
             move it to just before the first epilogue insn.  */
          if (note == 0)
            {
              for (note = insn; (note = PREV_INSN (note));)
                if (NOTE_P (note)
                    && NOTE_KIND (note) == NOTE_INSN_EPILOGUE_BEG)
                  break;
            }

          if (PREV_INSN (last) != note)
            reorder_insns (note, note, PREV_INSN (last));
        }
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
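
/* Concretely: if delayed branch scheduling pulls a body insn into a delay
   slot among the epilogue insns, NOTE_INSN_EPILOGUE_BEG can end up in the
   middle of the actual epilogue; the backward scan above finds the first
   recorded epilogue insn and re-anchors the note just before it.  */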

/* Returns the name of the current function.  */
const char *
current_function_name (void)
{
  return lang_hooks.decl_printable_name (cfun->decl, 2);
}

/* Returns the raw (mangled) name of the current function.  */
const char *
current_function_assembler_name (void)
{
  return IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (cfun->decl));
}
\f

static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  current_function_uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}

/* Insert a TYPE into the used types hash table of CFUN.  */
static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      void **slot;

      if (func->used_types_hash == NULL)
        func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
                                                 htab_eq_pointer, NULL);
      slot = htab_find_slot (func->used_types_hash, type, INSERT);
      if (*slot == NULL)
        *slot = type;
    }
}

/* Given a type, insert it into the used hash table in cfun.  */
void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    t = TREE_TYPE (t);
  t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    used_types_insert_helper (t, cfun);
}
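
/* For example, for a declaration of type "struct foo **" (hypothetical),
   used_types_insert strips both pointer layers and records the main
   variant of struct foo in cfun->used_types_hash -- and does so only
   when debug info generation is enabled.  */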

struct tree_opt_pass pass_leaf_regs =
{
  NULL,                                 /* name */
  NULL,                                 /* gate */
  rest_of_handle_check_leaf_regs,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};

static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE);
  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */

  thread_prologue_and_epilogue_insns ();
  return 0;
}

struct tree_opt_pass pass_thread_prologue_and_epilogue =
{
  "pro_and_epilogue",                   /* name */
  NULL,                                 /* gate */
  rest_of_handle_thread_prologue_and_epilogue, /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_THREAD_PROLOGUE_AND_EPILOGUE,      /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  TODO_verify_flow,                     /* todo_flags_start */
  TODO_dump_func |
  TODO_df_verify |
  TODO_df_finish |
  TODO_ggc_collect,                     /* todo_flags_finish */
  'w'                                   /* letter */
};
\f

/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     orig = inout;
     asm ("": "+mr" (inout));
     use (orig);

   which is transformed very early to use explicit output and match operands:

     orig = inout;
     asm ("": "=mr" (inout) : "0" (inout));
     use (orig);

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));
     use (inout_1);

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads
   or, alternatively it's also a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     inout_2 = inout_1;
     asm ("": "=mr" (inout_2) : "0" (inout_2));  */

static void
match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);

  for (i = 0; i < ninputs; i++)
    {
      rtx input, output, insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match;

      match = strtoul (constraint, &end, 10);
      if (end == constraint)
        continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      if (rtx_equal_p (output, input)
          || (GET_MODE (input) != VOIDmode
              && GET_MODE (input) != GET_MODE (output)))
        continue;

      start_sequence ();
      emit_move_insn (copy_rtx (output), input);
      RTVEC_ELT (inputs, i) = copy_rtx (output);
      insns = get_insns ();
      end_sequence ();

      emit_insn_before (insns, insn);
      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}

static unsigned
rest_of_match_asm_constraints (void)
{
  basic_block bb;
  rtx insn, pat, *p_sets;
  int noutputs;

  if (!cfun->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS (bb, insn)
        {
          if (!INSN_P (insn))
            continue;

          pat = PATTERN (insn);
          if (GET_CODE (pat) == PARALLEL)
            p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
          else if (GET_CODE (pat) == SET)
            p_sets = &PATTERN (insn), noutputs = 1;
          else
            continue;

          if (GET_CODE (*p_sets) == SET
              && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
            match_asm_constraints_1 (insn, p_sets, noutputs);
        }
    }

  return TODO_df_finish;
}

struct tree_opt_pass pass_match_asm_constraints =
{
  "asmcons",                            /* name */
  NULL,                                 /* gate */
  rest_of_match_asm_constraints,        /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};

#include "gt-function.h"