/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"
#include "hash.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value down to the nearest multiple of the required alignment.
   Avoid using division in case the value is negative.  Assume the
   alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple of the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))

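/* Illustrative worked example (not part of the original file): with
   ALIGN == 8, CEIL_ROUND (13, 8) == (13 + 7) & ~7 == 16, while
   FLOOR_ROUND (-13, 8) == -13 & ~7 == -16.  Division-based rounding
   would truncate -13/8 toward zero and yield -8, which is why the
   masking forms are used for possibly-negative frame offsets.  */
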
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't contain any instructions
   that can throw an exception.  This is set prior to final.  */

int current_function_nothrow;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
void (*init_machine_status) PARAMS ((struct function *));
void (*free_machine_status) PARAMS ((struct function *));
/* This variable holds a pointer to a function to register any
   data items in the target specific, per-function data structure
   that will need garbage collection.  */
void (*mark_machine_status) PARAMS ((struct function *));

/* Likewise, but for language-specific data.  */
void (*init_lang_status) PARAMS ((struct function *));
void (*save_lang_status) PARAMS ((struct function *));
void (*restore_lang_status) PARAMS ((struct function *));
void (*mark_lang_status) PARAMS ((struct function *));
void (*free_lang_status) PARAMS ((struct function *));

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static varray_type prologue;
static varray_type epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static varray_type sibcall_epilogue;
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
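
/* Illustrative example (not part of the original file): in a GNU
   statement expression such as

     s = ({ struct S tmp = f (); tmp; });

   the value of the grouping may live in a temp_slot created at the
   inner nesting level; it is preserved by pretending it was allocated
   one level up, so free_temp_slots at the inner level does not
   release it.  See preserve_temp_slots below.  */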
\f
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

struct insns_for_mem_entry
{
  /* The KEY in HE will be a MEM.  */
  struct hash_entry he;
  /* These are the INSNs which reference the MEM.  */
  rtx insns;
};

/* Forward declarations.  */

static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
					 int, struct function *));
static rtx assign_stack_temp_for_type PARAMS ((enum machine_mode,
					       HOST_WIDE_INT, int, tree));
static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
					enum machine_mode, enum machine_mode,
					int, unsigned int, int,
					struct hash_table *));
static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
					     enum machine_mode,
					     struct hash_table *));
static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int,
				    struct hash_table *));
static struct fixup_replacement
  *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PARAMS ((rtx, rtx, enum machine_mode,
					  int, int));
static void fixup_var_refs_insns_with_hash
  PARAMS ((struct hash_table *, rtx,
	   enum machine_mode, int));
static void fixup_var_refs_insn PARAMS ((rtx, rtx, enum machine_mode,
					 int, int));
static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
				      struct fixup_replacement **));
static rtx fixup_memory_subreg PARAMS ((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, int));
static rtx fixup_stack_1 PARAMS ((rtx, rtx));
static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
static void instantiate_decls PARAMS ((tree, int));
static void instantiate_decls_1 PARAMS ((tree, int));
static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
static rtx instantiate_new_reg PARAMS ((rtx, HOST_WIDE_INT *));
static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
static void delete_handlers PARAMS ((void));
static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
					  struct args_size *));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PARAMS ((struct args_size *, enum machine_mode,
			       tree));
#endif
static rtx round_trampoline_addr PARAMS ((rtx));
static rtx adjust_trampoline_addr PARAMS ((rtx));
static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
static void reorder_blocks_0 PARAMS ((tree));
static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
static void reorder_fix_fragments PARAMS ((tree));
static tree blocks_nreverse PARAMS ((tree));
static int all_blocks PARAMS ((tree, tree *));
static tree *get_block_vector PARAMS ((tree, int *));
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
static int contains PARAMS ((rtx, varray_type));
#ifdef HAVE_return
static void emit_return_into_block PARAMS ((basic_block, rtx));
#endif
static void put_addressof_into_stack PARAMS ((rtx, struct hash_table *));
static bool purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
				       struct hash_table *));
static void purge_single_hard_subreg_set PARAMS ((rtx));
#ifdef HAVE_epilogue
static void keep_stack_depressed PARAMS ((rtx));
#endif
static int is_addressof PARAMS ((rtx *, void *));
static struct hash_entry *insns_for_mem_newfunc PARAMS ((struct hash_entry *,
							 struct hash_table *,
							 hash_table_key));
static unsigned long insns_for_mem_hash PARAMS ((hash_table_key));
static bool insns_for_mem_comp PARAMS ((hash_table_key, hash_table_key));
static int insns_for_mem_walk PARAMS ((rtx *, void *));
static void compute_insns_for_mem PARAMS ((rtx, rtx, struct hash_table *));
static void mark_function_status PARAMS ((struct function *));
static void maybe_mark_struct_function PARAMS ((void *));
static void prepare_function_start PARAMS ((void));
static void do_clobber_return_reg PARAMS ((rtx, void *));
static void do_use_return_reg PARAMS ((rtx, void *));
\f
/* Pointer to chain of `struct function' for containing functions.  */
static struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the save_lang_status callback to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p;

  if (context)
    {
      if (context == current_function_decl)
	cfun->contains_functions = 1;
      else
	{
	  struct function *containing = find_function_data (context);
	  containing->contains_functions = 1;
	}
    }

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;
  p->fixup_var_refs_queue = 0;

  if (save_lang_status)
    (*save_lang_status) (p);

  cfun = 0;
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context ATTRIBUTE_UNUSED;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  cfun = p;
  outer_function_chain = p->outer;

  current_function_decl = p->decl;
  reg_renumber = 0;

  restore_emit_status (p);
  restore_varasm_status (p);

  if (restore_lang_status)
    (*restore_lang_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
    fixup_var_refs (queue->modified, queue->promoted_mode,
		    queue->unsignedp, 0);

  p->fixup_var_refs_queue = 0;

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (f)
     struct function *f;
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  if (free_lang_status)
    (*free_lang_status) (f);
  free_stmt_status (f);
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (f)
     struct function *f;
{
  free_eh_status (f);
  free_expr_status (f);
  free_emit_status (f);
  free_varasm_status (f);

  if (free_machine_status)
    (*free_machine_status) (f);

  if (f->x_parm_reg_stack_loc)
    free (f->x_parm_reg_stack_loc);

  f->x_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_labels = NULL;
  f->x_nonlocal_goto_handler_slots = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_nonlocal_goto_stack_level = NULL;
  f->x_cleanup_label = NULL;
  f->x_return_label = NULL;
  f->x_save_expr_regs = NULL;
  f->x_stack_slot_list = NULL;
  f->x_rtl_expr_chain = NULL;
  f->x_tail_recursion_label = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_clobber_return_insn = NULL;
  f->x_context_display = NULL;
  f->x_trampoline_list = NULL;
  f->x_parm_birth_insn = NULL;
  f->x_last_parm_insn = NULL;
  f->x_parm_reg_stack_loc = NULL;
  f->fixup_var_refs_queue = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->inl_last_parm_insn = NULL;
  f->epilogue_delay_list = NULL;
}
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */
HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (cfun);
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT;
      else
	alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
	 stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
	alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Ignore any alignment request beyond what the preferred stack
     boundary can provide.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
#else
  function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  function->x_frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  return x;
}

/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
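
/* Illustrative usage (not part of the original file): a caller that
   wants a word-sized slot at the mode's natural alignment would write

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   Passing align == -1 instead requests BIGGEST_ALIGNMENT and rounds
   the size up to a multiple of it; a positive value names the
   alignment boundary in bits explicitly.  */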
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */

static rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  int align;
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = type_for_mode (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
	&& ! p->in_use
	&& objects_must_conflict_p (p->type, type)
	&& (best_p == 0 || best_p->size > p->size
	    || (best_p->size == p->size && best_p->align > p->align)))
      {
	if (p->align == align && p->size == size)
	  {
	    best_p = 0;
	    break;
	  }
	best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = gen_rtx_MEM (BLKmode,
				     plus_constant (XEXP (best_p->slot, 0),
						    rounded_size));
	      p->align = best_p->align;
	      p->address = 0;
	      p->rtl_expr = 0;
	      p->type = best_p->type;
	      p->next = temp_slots;
	      temp_slots = p;

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
	abort ();
      p->slot = assign_stack_local (mode,
				    (mode == BLKmode
				     ? CEIL_ROUND (size, align / BITS_PER_UNIT)
				     : size),
				    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;
  p->type = type;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  MEM_SCALAR_P (p->slot) = 0;
  MEM_VOLATILE_P (p->slot) = 0;

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (p->slot, type ? get_alias_set (type) : 0);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      RTX_UNCHANGING_P (p->slot) = TYPE_READONLY (type);
      MEM_VOLATILE_P (p->slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (p->slot, AGGREGATE_TYPE_P (type));
    }

  return p->slot;
}
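
/* Illustrative example of the reuse policy above (not part of the
   original file): with two free slots of sizes 16 and 32 and equal
   alignment, a request for 12 bytes reuses the 16-byte slot, since it
   is the smallest one that fits.  Had the 32-byte BLKmode slot been
   chosen for a 16-byte request, the 16 leftover bytes would be split
   off into a new free temp_slot rather than wasted.  */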

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
\f
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote ATTRIBUTE_UNUSED;
{
  enum machine_mode mode = TYPE_MODE (type);
#ifndef PROMOTE_FOR_CALL_ONLY
  int unsignedp = TREE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
	 problems with allocating the stack space.  */
      if (size == 0)
	size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we have a fixed upper limit on
	 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
	 instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
	  && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
	  && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
	size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
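
/* Illustrative usage (not part of the original file): expanding a
   call that returns a structure in memory might request

     rtx target = assign_temp (TREE_TYPE (exp), 0, 1, 1);

   which forces an addressable stack slot; for a scalar type with
   MEMORY_REQUIRED == 0 the function may instead return a fresh
   pseudo register in the (possibly promoted) mode.  */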
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
	for (q = p->next, prev_q = p; q; q = prev_q->next)
	  {
	    int delete_q = 0;
	    if (! q->in_use && GET_MODE (q->slot) == BLKmode)
	      {
		if (p->base_offset + p->full_size == q->base_offset)
		  {
		    /* Q comes after P; combine Q into P.  */
		    p->size += q->size;
		    p->full_size += q->full_size;
		    delete_q = 1;
		  }
		else if (q->base_offset + q->full_size == p->base_offset)
		  {
		    /* P comes after Q; combine P into Q.  */
		    q->size += p->size;
		    q->full_size += p->full_size;
		    delete_p = 1;
		    break;
		  }
	      }
	    /* Either delete Q or advance past it.  */
	    if (delete_q)
	      prev_q->next = q->next;
	    else
	      prev_q = q;
	  }
      /* Either delete P or advance past it.  */
      if (delete_p)
	{
	  if (prev_p)
	    prev_p->next = p->next;
	  else
	    temp_slots = p->next;
	}
      else
	prev_p = p;
    }
}
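
/* Illustrative example (not part of the original file): two free
   BLKmode slots with { base_offset = 0, full_size = 16 } and
   { base_offset = 16, full_size = 8 } are adjacent, so the second is
   merged into the first, leaving one free slot with full_size == 24
   that a later request can reuse or re-split.  */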
\f
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
	continue;

      else if (XEXP (p->slot, 0) == x
	       || p->address == x
	       || (GET_CODE (x) == PLUS
		   && XEXP (x, 0) == virtual_stack_vars_rtx
		   && GET_CODE (XEXP (x, 1)) == CONST_INT
		   && INTVAL (XEXP (x, 1)) >= p->base_offset
		   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
	return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
	for (next = p->address; next; next = XEXP (next, 1))
	  if (XEXP (next, 0) == x)
	    return p;
    }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW
     is a register, see if one operand of the PLUS is a temporary
     location.  If so, NEW points into it.  Otherwise, if both OLD and
     NEW are a PLUS and there is a register in common between them,
     try a recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
	return;

      if (GET_CODE (new) == REG)
	{
	  update_temp_slot_address (XEXP (old, 0), new);
	  update_temp_slot_address (XEXP (old, 1), new);
	  return;
	}
      else if (GET_CODE (new) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
	p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
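
/* Illustrative example (not part of the original file): if a slot's
   recorded address (plus virtual_stack_vars_rtx (const_int 8)) is
   later rewritten in terms of the hard frame pointer, passing the old
   and new forms to update_temp_slot_address records the new form as
   an alias, so find_temp_slot_from_address succeeds with either.  */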

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = temp_slots; q; q = q->next)
	    if (q != p && q->addr_taken && q->level == p->level)
	      q->level--;

	  p->level--;
	  p->addr_taken = 0;
	}
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
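
/* Illustrative note (not part of the original file): "preserving"
   is just p->level--, so a slot created at level 3 that holds a
   statement-expression result is treated as if allocated at level 2
   and survives the free_temp_slots call that cleans up level 3.  */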

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
	&& p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      {
	/* If this slot is below the current TEMP_SLOT_LEVEL, then it
	   needs to be preserved.  This can happen if a temporary in
	   the RTL_EXPR was addressed; preserve_temp_slots will move
	   the temporary into a higher level.  */
	if (temp_slot_level <= p->level)
	  p->in_use = 0;
	else
	  p->rtl_expr = NULL_TREE;
      }

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

#if 0
void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}
#endif

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
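
/* Illustrative pairing (not part of the original file): callers
   typically bracket the expansion of one statement with

     push_temp_slots ();
     ... expand the expression ...
     preserve_temp_slots (result);
     free_temp_slots ();
     pop_temp_slots ();

   so temporaries made during expansion are released unless the
   result turned out to live in one of them.  */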
bc0ebdf9 RK |
1286 | |
1287 | /* Initialize temporary slots. */ | |
1288 | ||
1289 | void | |
1290 | init_temp_slots () | |
1291 | { | |
1292 | /* We have not allocated any temporaries yet. */ | |
1293 | temp_slots = 0; | |
1294 | temp_slot_level = 0; | |
e5e809f4 | 1295 | var_temp_slot_level = 0; |
bc0ebdf9 RK |
1296 | target_temp_slot_level = 0; |
1297 | } | |
6f086dfc RS |
1298 | \f |
1299 | /* Retroactively move an auto variable from a register to a stack slot. | |
1300 | This is done when an address-reference to the variable is seen. */ | |
1301 | ||
1302 | void | |
1303 | put_var_into_stack (decl) | |
1304 | tree decl; | |
1305 | { | |
b3694847 | 1306 | rtx reg; |
00d8a4c1 | 1307 | enum machine_mode promoted_mode, decl_mode; |
6f086dfc | 1308 | struct function *function = 0; |
c20bf1f3 | 1309 | tree context; |
e9a25f70 | 1310 | int can_use_addressof; |
c357082f RK |
1311 | int volatilep = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl); |
1312 | int usedp = (TREE_USED (decl) | |
1313 | || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0)); | |
c20bf1f3 | 1314 | |
c20bf1f3 | 1315 | context = decl_function_context (decl); |
6f086dfc | 1316 | |
9ec36da5 | 1317 | /* Get the current rtl used for this object and its original mode. */ |
19e7881c MM |
1318 | reg = (TREE_CODE (decl) == SAVE_EXPR |
1319 | ? SAVE_EXPR_RTL (decl) | |
1320 | : DECL_RTL_IF_SET (decl)); | |
2baccce2 RS |
1321 | |
1322 | /* No need to do anything if decl has no rtx yet | |
1323 | since in that case caller is setting TREE_ADDRESSABLE | |
1324 | and a stack slot will be assigned when the rtl is made. */ | |
1325 | if (reg == 0) | |
1326 | return; | |
00d8a4c1 RK |
1327 | |
1328 | /* Get the declared mode for this object. */ | |
1329 | decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl)) | |
1330 | : DECL_MODE (decl)); | |
2baccce2 RS |
1331 | /* Get the mode it's actually stored in. */ |
1332 | promoted_mode = GET_MODE (reg); | |
6f086dfc | 1333 | |
eb3ae3e1 ZW |
1334 | /* If this variable comes from an outer function, find that |
1335 | function's saved context. Don't use find_function_data here, | |
1336 | because it might not be in any active function. | |
1337 | FIXME: Is that really supposed to happen? | |
1338 | It does in ObjC at least. */ | |
4ac74fb8 | 1339 | if (context != current_function_decl && context != inline_function_decl) |
eb3ae3e1 | 1340 | for (function = outer_function_chain; function; function = function->outer) |
6f086dfc RS |
1341 | if (function->decl == context) |
1342 | break; | |
1343 | ||
6f086dfc RS |
1344 | /* If this is a variable-size object with a pseudo to address it, |
1345 | put that pseudo into the stack, if the var is nonlocal. */ | |
c357082f | 1346 | if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl) |
6f086dfc RS |
1347 | && GET_CODE (reg) == MEM |
1348 | && GET_CODE (XEXP (reg, 0)) == REG | |
1349 | && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER) | |
4cdb3e78 RS |
1350 | { |
1351 | reg = XEXP (reg, 0); | |
1352 | decl_mode = promoted_mode = GET_MODE (reg); | |
1353 | } | |
e15762df | 1354 | |
e9a25f70 JL |
1355 | can_use_addressof |
1356 | = (function == 0 | |
e5e809f4 | 1357 | && optimize > 0 |
e9a25f70 JL |
1358 | /* FIXME make it work for promoted modes too */ |
1359 | && decl_mode == promoted_mode | |
1360 | #ifdef NON_SAVING_SETJMP | |
1361 | && ! (NON_SAVING_SETJMP && current_function_calls_setjmp) | |
1362 | #endif | |
1363 | ); | |
1364 | ||
1365 | /* If we can't use ADDRESSOF, make sure we see through one we already | |
1366 | generated. */ | |
1367 | if (! can_use_addressof && GET_CODE (reg) == MEM | |
1368 | && GET_CODE (XEXP (reg, 0)) == ADDRESSOF) | |
1369 | reg = XEXP (XEXP (reg, 0), 0); | |
1370 | ||
293e3de4 RS |
1371 | /* Now we should have a value that resides in one or more pseudo regs. */ |
1372 | ||
1373 | if (GET_CODE (reg) == REG) | |
e9a25f70 JL |
1374 | { |
1375 | /* If this variable lives in the current function and we don't need | |
1376 | to put things in the stack for the sake of setjmp, try to keep it | |
1377 | in a register until we know we actually need the address. */ | |
1378 | if (can_use_addressof) | |
1379 | gen_mem_addressof (reg, decl); | |
1380 | else | |
c357082f RK |
1381 | put_reg_into_stack (function, reg, TREE_TYPE (decl), promoted_mode, |
1382 | decl_mode, volatilep, 0, usedp, 0); | |
e9a25f70 | 1383 | } |
293e3de4 RS |
1384 | else if (GET_CODE (reg) == CONCAT) |
1385 | { | |
1386 | /* A CONCAT contains two pseudos; put them both in the stack. | |
018577e4 R |
1387 | We do it so they end up consecutive. |
1388 | We fix up references to the parts only after we fix up references | |
1389 | to the whole CONCAT, lest we do double fixups for the latter | |
1390 | references. */ | |
293e3de4 | 1391 | enum machine_mode part_mode = GET_MODE (XEXP (reg, 0)); |
c3b247b4 | 1392 | tree part_type = type_for_mode (part_mode, 0); |
018577e4 R |
1393 | rtx lopart = XEXP (reg, 0); |
1394 | rtx hipart = XEXP (reg, 1); | |
4738c10d | 1395 | #ifdef FRAME_GROWS_DOWNWARD |
293e3de4 | 1396 | /* Since part 0 should have a lower address, do it second. */ |
018577e4 R |
1397 | put_reg_into_stack (function, hipart, part_type, part_mode, |
1398 | part_mode, volatilep, 0, 0, 0); | |
1399 | put_reg_into_stack (function, lopart, part_type, part_mode, | |
1400 | part_mode, volatilep, 0, 0, 0); | |
293e3de4 | 1401 | #else |
018577e4 R |
1402 | put_reg_into_stack (function, lopart, part_type, part_mode, |
1403 | part_mode, volatilep, 0, 0, 0); | |
1404 | put_reg_into_stack (function, hipart, part_type, part_mode, | |
1405 | part_mode, volatilep, 0, 0, 0); | |
293e3de4 RS |
1406 | #endif |
1407 | ||
1408 | /* Change the CONCAT into a combined MEM for both parts. */ | |
1409 | PUT_CODE (reg, MEM); | |
173b24b9 | 1410 | MEM_ATTRS (reg) = 0; |
abde42f7 JH |
1411 | |
1412 | /* set_mem_attributes uses DECL_RTL to avoid re-generating of | |
1413 | already computed alias sets. Here we want to re-generate. */ | |
1414 | if (DECL_P (decl)) | |
1415 | SET_DECL_RTL (decl, NULL); | |
c357082f | 1416 | set_mem_attributes (reg, decl, 1); |
abde42f7 JH |
1417 | if (DECL_P (decl)) |
1418 | SET_DECL_RTL (decl, reg); | |
0006e95b | 1419 | |
293e3de4 RS |
1420 | /* The two parts are in memory order already. |
1421 | Use the lower part's address as ours. | |
1422 | XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0); | |
1423 | /* Prevent sharing of rtl that might lose. */ | |
1424 | if (GET_CODE (XEXP (reg, 0)) == PLUS) | |
1425 | XEXP (reg, 0) = copy_rtx (XEXP (reg, 0)); | |
018577e4 R |
1426 | if (usedp) |
1427 | { | |
1428 | schedule_fixup_var_refs (function, reg, TREE_TYPE (decl), | |
1429 | promoted_mode, 0); | |
1430 | schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0); | |
1431 | schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0); | |
1432 | } | |
293e3de4 | 1433 | } |
86fa911a RK |
1434 | else |
1435 | return; | |
718fe406 | 1436 | |
7d384cc0 | 1437 | if (current_function_check_memory_usage) |
ebb1b59a BS |
1438 | emit_library_call (chkr_set_right_libfunc, LCT_CONST_MAKE_BLOCK, VOIDmode, |
1439 | 3, XEXP (reg, 0), Pmode, | |
86fa911a RK |
1440 | GEN_INT (GET_MODE_SIZE (GET_MODE (reg))), |
1441 | TYPE_MODE (sizetype), | |
956d6950 JL |
1442 | GEN_INT (MEMORY_USE_RW), |
1443 | TYPE_MODE (integer_type_node)); | |
293e3de4 RS |
1444 | } |
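/* Illustrative sketch (hypothetical, not GCC code): why the CONCAT case
   above stacks the high part first when FRAME_GROWS_DOWNWARD.  With an
   allocator that hands out descending addresses, the slot allocated
   second receives the lower address, so doing hipart then lopart leaves
   the two parts consecutive and in memory order, part 0 lowest.  */
#if 0 /* example only */
#include <stdio.h>

static unsigned long frame_top = 0x1000; /* pretend frame; grows downward */

static unsigned long
alloc_downward (unsigned long size)
{
  frame_top -= size;
  return frame_top;
}

int
main (void)
{
  unsigned long hipart = alloc_downward (4); /* allocated first ... */
  unsigned long lopart = alloc_downward (4); /* ... so this is lower */
  printf ("lopart at 0x%lx, hipart at 0x%lx\n", lopart, hipart);
  return 0;
}
#endif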
1445 | ||
1446 | /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG | |
1447 | into the stack frame of FUNCTION (0 means the current function). | |
1448 | DECL_MODE is the machine mode of the user-level data type. | |
0006e95b | 1449 | PROMOTED_MODE is the machine mode of the register. |
e5e809f4 JL |
1450 | VOLATILE_P is nonzero if this is for a "volatile" decl. |
1451 | USED_P is nonzero if this reg might have already been used in an insn. TYPE, if nonzero, supplies the MEM's alias set; ORIGINAL_REGNO, if nonzero, is used instead of REGNO (REG) to find a parm's preallocated stack slot; HT, if nonzero, records all uses of REG. */ | |
293e3de4 RS |
1452 | |
1453 | static void | |
e9a25f70 | 1454 | put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p, |
fe9b4957 | 1455 | original_regno, used_p, ht) |
293e3de4 RS |
1456 | struct function *function; |
1457 | rtx reg; | |
1458 | tree type; | |
1459 | enum machine_mode promoted_mode, decl_mode; | |
0006e95b | 1460 | int volatile_p; |
770ae6cc | 1461 | unsigned int original_regno; |
e5e809f4 | 1462 | int used_p; |
fe9b4957 | 1463 | struct hash_table *ht; |
293e3de4 | 1464 | { |
01d939e8 | 1465 | struct function *func = function ? function : cfun; |
293e3de4 | 1466 | rtx new = 0; |
770ae6cc | 1467 | unsigned int regno = original_regno; |
e9a25f70 JL |
1468 | |
1469 | if (regno == 0) | |
1470 | regno = REGNO (reg); | |
6f086dfc | 1471 | |
e2ecd91c BS |
1472 | if (regno < func->x_max_parm_reg) |
1473 | new = func->x_parm_reg_stack_loc[regno]; | |
770ae6cc | 1474 | |
e2ecd91c BS |
1475 | if (new == 0) |
1476 | new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func); | |
6f086dfc | 1477 | |
ef178af3 | 1478 | PUT_CODE (reg, MEM); |
0006e95b | 1479 | PUT_MODE (reg, decl_mode); |
6f086dfc | 1480 | XEXP (reg, 0) = XEXP (new, 0); |
173b24b9 | 1481 | MEM_ATTRS (reg) = 0; |
6f086dfc | 1482 | /* `volatil' bit means one thing for MEMs, another entirely for REGs. */ |
0006e95b | 1483 | MEM_VOLATILE_P (reg) = volatile_p; |
6f086dfc RS |
1484 | |
1485 | /* If this is a memory ref that contains aggregate components, | |
bdd3e6ab JW |
1486 | mark it as such for cse and loop optimize. If we are reusing a |
1487 | previously generated stack slot, then we need to copy the bit in | |
1488 | case it was set for other reasons. For instance, it is set for | |
1489 | __builtin_va_alist. */ | |
8b4944fb RH |
1490 | if (type) |
1491 | { | |
1492 | MEM_SET_IN_STRUCT_P (reg, | |
1493 | AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new)); | |
ba4828e0 | 1494 | set_mem_alias_set (reg, get_alias_set (type)); |
8b4944fb | 1495 | } |
173b24b9 | 1496 | |
018577e4 R |
1497 | if (used_p) |
1498 | schedule_fixup_var_refs (function, reg, type, promoted_mode, ht); | |
1499 | } | |
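/* Illustrative sketch (hypothetical types, not GCC's rtx): the key trick
   in put_reg_into_stack is that REG is rewritten *in place* with
   PUT_CODE/PUT_MODE/XEXP.  Because every insn that mentioned the pseudo
   shares the same node, one in-place rewrite retargets all of them
   without walking the references.  */
#if 0 /* example only */
#include <stdio.h>

enum kind { REG_NODE, MEM_NODE };

struct node { enum kind kind; int where; };

int
main (void)
{
  struct node var = { REG_NODE, 7 };        /* pseudo register 7 */
  struct node *use1 = &var, *use2 = &var;   /* two shared references */

  var.kind = MEM_NODE;   /* rewrite the node in place ... */
  var.where = -16;       /* ... now a frame offset */

  printf ("use1: %s at %d\n",
          use1->kind == MEM_NODE ? "mem" : "reg", use1->where);
  printf ("use2: %s at %d\n",
          use2->kind == MEM_NODE ? "mem" : "reg", use2->where);
  return 0;
}
#endif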
6f086dfc | 1500 | |
018577e4 R |
1501 | /* Make sure that all refs to the variable, previously made |
1502 | when it was a register, are fixed up to be valid again. | |
1503 | See function above for meaning of arguments. */ | |
1da68f56 | 1504 | |
018577e4 R |
1505 | static void |
1506 | schedule_fixup_var_refs (function, reg, type, promoted_mode, ht) | |
1507 | struct function *function; | |
1508 | rtx reg; | |
1509 | tree type; | |
1510 | enum machine_mode promoted_mode; | |
1511 | struct hash_table *ht; | |
1512 | { | |
8b4944fb RH |
1513 | int unsigned_p = type ? TREE_UNSIGNED (type) : 0; |
1514 | ||
018577e4 | 1515 | if (function != 0) |
6f086dfc RS |
1516 | { |
1517 | struct var_refs_queue *temp; | |
1518 | ||
6f086dfc | 1519 | temp |
7a80cf9a | 1520 | = (struct var_refs_queue *) ggc_alloc (sizeof (struct var_refs_queue)); |
6f086dfc | 1521 | temp->modified = reg; |
00d8a4c1 | 1522 | temp->promoted_mode = promoted_mode; |
8b4944fb | 1523 | temp->unsignedp = unsigned_p; |
6f086dfc RS |
1524 | temp->next = function->fixup_var_refs_queue; |
1525 | function->fixup_var_refs_queue = temp; | |
6f086dfc | 1526 | } |
018577e4 | 1527 | else |
6f086dfc | 1528 | /* Variable is local; fix it up now. */ |
8b4944fb | 1529 | fixup_var_refs (reg, promoted_mode, unsigned_p, ht); |
6f086dfc RS |
1530 | } |
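/* Illustrative sketch (hypothetical names): the deferral above is a
   plain work queue.  A fixup for a variable of an enclosing function,
   whose insns are not currently accessible, is recorded on that
   function's queue and replayed later; a fixup for the current function
   runs at once.  */
#if 0 /* example only */
#include <stdio.h>
#include <stdlib.h>

struct work { int payload; struct work *next; };

static struct work *queue;

static void
do_fixup (int payload)
{
  printf ("fixing %d\n", payload);
}

static void
schedule_fixup (int payload, int deferred)
{
  if (deferred)
    {
      struct work *w = malloc (sizeof *w);
      w->payload = payload;
      w->next = queue;
      queue = w;
    }
  else
    do_fixup (payload);
}

int
main (void)
{
  schedule_fixup (1, 0);   /* current function: runs now */
  schedule_fixup (2, 1);   /* outer function: queued */
  while (queue)            /* replayed when the outer function resumes */
    {
      struct work *w = queue;
      queue = w->next;
      do_fixup (w->payload);
      free (w);
    }
  return 0;
}
#endif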
1531 | \f | |
1532 | static void | |
fe9b4957 | 1533 | fixup_var_refs (var, promoted_mode, unsignedp, ht) |
6f086dfc | 1534 | rtx var; |
00d8a4c1 RK |
1535 | enum machine_mode promoted_mode; |
1536 | int unsignedp; | |
fe9b4957 | 1537 | struct hash_table *ht; |
6f086dfc RS |
1538 | { |
1539 | tree pending; | |
1540 | rtx first_insn = get_insns (); | |
49ad7cfa | 1541 | struct sequence_stack *stack = seq_stack; |
6f086dfc RS |
1542 | tree rtl_exps = rtl_expr_chain; |
1543 | ||
fe9b4957 MM |
1544 | /* If there's a hash table, it must record all uses of VAR. */ |
1545 | if (ht) | |
a42a5f59 ZW |
1546 | { |
1547 | if (stack != 0) | |
1548 | abort (); | |
1549 | fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp); | |
1550 | return; | |
1551 | } | |
1552 | ||
1553 | fixup_var_refs_insns (first_insn, var, promoted_mode, unsignedp, | |
1554 | stack == 0); | |
6f086dfc RS |
1555 | |
1556 | /* Scan all pending sequences too. */ | |
1557 | for (; stack; stack = stack->next) | |
1558 | { | |
b05467dc | 1559 | push_to_full_sequence (stack->first, stack->last); |
a42a5f59 ZW |
1560 | fixup_var_refs_insns (stack->first, var, promoted_mode, unsignedp, |
1561 | stack->next != 0); | |
6f086dfc RS |
1562 | /* Update remembered end of sequence |
1563 | in case we added an insn at the end. */ | |
1564 | stack->last = get_last_insn (); | |
1565 | end_sequence (); | |
1566 | } | |
1567 | ||
1568 | /* Scan all waiting RTL_EXPRs too. */ | |
1569 | for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending)) | |
1570 | { | |
1571 | rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending)); | |
1572 | if (seq != const0_rtx && seq != 0) | |
1573 | { | |
1574 | push_to_sequence (seq); | |
a42a5f59 | 1575 | fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0); |
6f086dfc RS |
1576 | end_sequence (); |
1577 | } | |
1578 | } | |
1579 | } | |
1580 | \f | |
e15679f8 | 1581 | /* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries, and X is
6f086dfc | 1582 | some part of an insn. Return a struct fixup_replacement whose OLD |
0f41302f | 1583 | value is equal to X. Allocate a new structure if no such entry exists. */ |
6f086dfc RS |
1584 | |
1585 | static struct fixup_replacement * | |
2740a678 | 1586 | find_fixup_replacement (replacements, x) |
6f086dfc RS |
1587 | struct fixup_replacement **replacements; |
1588 | rtx x; | |
1589 | { | |
1590 | struct fixup_replacement *p; | |
1591 | ||
1592 | /* See if we have already replaced this. */ | |
c5c76735 | 1593 | for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next) |
6f086dfc RS |
1594 | ; |
1595 | ||
1596 | if (p == 0) | |
1597 | { | |
1f8f4a0b | 1598 | p = (struct fixup_replacement *) xmalloc (sizeof (struct fixup_replacement)); |
6f086dfc RS |
1599 | p->old = x; |
1600 | p->new = 0; | |
1601 | p->next = *replacements; | |
1602 | *replacements = p; | |
1603 | } | |
1604 | ||
1605 | return p; | |
1606 | } | |
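/* Illustrative sketch (hypothetical): find_fixup_replacement is the
   classic look-up-or-insert on a singly linked list.  The memoization
   is what guarantees that identical MEMs within one insn all receive
   the same replacement, as the MATCH_DUP remarks below require.  */
#if 0 /* example only */
#include <stdio.h>
#include <stdlib.h>

struct repl { int old; int new_val; struct repl *next; };

static struct repl *
find_repl (struct repl **head, int old)
{
  struct repl *p;

  for (p = *head; p != 0 && p->old != old; p = p->next)
    ;
  if (p == 0)
    {
      p = malloc (sizeof *p);
      p->old = old;
      p->new_val = 0;
      p->next = *head;
      *head = p;
    }
  return p;
}

int
main (void)
{
  struct repl *head = 0;
  struct repl *a = find_repl (&head, 42);
  struct repl *b = find_repl (&head, 42);

  printf ("same entry: %s\n", a == b ? "yes" : "no"); /* prints yes */
  return 0;
}
#endif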
1607 | ||
1608 | /* Scan the insn-chain starting with INSN for refs to VAR | |
1609 | and fix them up. TOPLEVEL is nonzero if this chain is the | |
1610 | main chain of insns for the current function. */ | |
1611 | ||
1612 | static void | |
a42a5f59 ZW |
1613 | fixup_var_refs_insns (insn, var, promoted_mode, unsignedp, toplevel) |
1614 | rtx insn; | |
6f086dfc | 1615 | rtx var; |
00d8a4c1 RK |
1616 | enum machine_mode promoted_mode; |
1617 | int unsignedp; | |
6f086dfc | 1618 | int toplevel; |
a42a5f59 ZW |
1619 | { |
1620 | while (insn) | |
1621 | { | |
1622 | /* fixup_var_refs_insn might modify insn, so save its next | |
1623 | pointer now. */ | |
1624 | rtx next = NEXT_INSN (insn); | |
1625 | ||
1626 | /* CALL_PLACEHOLDERs are special; we have to switch into each of | |
1627 | the three sequences they (potentially) contain, and process | |
1628 | them recursively. The CALL_INSN itself is not interesting. */ | |
1629 | ||
1630 | if (GET_CODE (insn) == CALL_INSN | |
1631 | && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER) | |
1632 | { | |
1633 | int i; | |
1634 | ||
1635 | /* Look at the normal call, sibling call, and tail recursion | |
1636 | sequences attached to the CALL_PLACEHOLDER. */ | |
1637 | for (i = 0; i < 3; i++) | |
1638 | { | |
1639 | rtx seq = XEXP (PATTERN (insn), i); | |
1640 | if (seq) | |
1641 | { | |
1642 | push_to_sequence (seq); | |
1643 | fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0); | |
1644 | XEXP (PATTERN (insn), i) = get_insns (); | |
1645 | end_sequence (); | |
1646 | } | |
1647 | } | |
1648 | } | |
1649 | ||
1650 | else if (INSN_P (insn)) | |
1651 | fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel); | |
1652 | ||
1653 | insn = next; | |
1654 | } | |
1655 | } | |
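/* Illustrative sketch: the loop above captures NEXT_INSN (insn) before
   the body runs because the body may delete or rewrite the current
   insn.  The same save-the-link-first pattern on an ordinary list
   (hypothetical types):  */
#if 0 /* example only */
#include <stdio.h>
#include <stdlib.h>

struct node { int v; struct node *next; };

static void
free_all (struct node *head)
{
  while (head)
    {
      struct node *next = head->next;  /* read before the node is freed */
      free (head);
      head = next;                     /* safe: saved in advance */
    }
}

int
main (void)
{
  struct node *head = 0;
  int i;

  for (i = 0; i < 3; i++)
    {
      struct node *n = malloc (sizeof *n);
      n->v = i;
      n->next = head;
      head = n;
    }
  free_all (head);
  printf ("done\n");
  return 0;
}
#endif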
1656 | ||
1657 | /* Look up the insns which reference VAR in HT and fix them up. Other | |
1658 | arguments are the same as fixup_var_refs_insns. | |
1659 | ||
1660 | N.B. No need for special processing of CALL_PLACEHOLDERs here, | |
1661 | because the hash table will point straight to the interesting insn | |
1662 | (inside the CALL_PLACEHOLDER). */ | |
7a80cf9a | 1663 | |
a42a5f59 ZW |
1664 | static void |
1665 | fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp) | |
fe9b4957 | 1666 | struct hash_table *ht; |
a42a5f59 ZW |
1667 | rtx var; |
1668 | enum machine_mode promoted_mode; | |
1669 | int unsignedp; | |
6f086dfc | 1670 | { |
a42a5f59 ZW |
1671 | struct insns_for_mem_entry *ime = (struct insns_for_mem_entry *) |
1672 | hash_lookup (ht, var, /*create=*/0, /*copy=*/0); | |
1673 | rtx insn_list = ime->insns; | |
fe9b4957 | 1674 | |
a42a5f59 | 1675 | while (insn_list) |
fe9b4957 | 1676 | { |
a42a5f59 ZW |
1677 | rtx insn = XEXP (insn_list, 0); |
1678 | ||
1679 | if (INSN_P (insn)) | |
43e72072 | 1680 | fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, 1); |
a42a5f59 | 1681 | |
fe9b4957 MM |
1682 | insn_list = XEXP (insn_list, 1); |
1683 | } | |
a42a5f59 | 1684 | } |
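/* Illustrative sketch (hypothetical): the hash table variant trades a
   rescan of the whole insn chain for a precomputed index from each
   variable to the insns that mention it.  In miniature: an index from
   key to recorded positions, consulted instead of scanning everything. */
#if 0 /* example only */
#include <stdio.h>

#define NKEYS 4
#define MAXREFS 8

static int uses[NKEYS][MAXREFS];
static int nuses[NKEYS];

static void
record_use (int key, int pos)
{
  uses[key][nuses[key]++] = pos;
}

int
main (void)
{
  int i;

  record_use (2, 10);
  record_use (2, 42);   /* two "insns" reference key 2 */
  record_use (3, 7);
  for (i = 0; i < nuses[2]; i++)
    printf ("key 2 used at insn %d\n", uses[2][i]);
  return 0;
}
#endif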
02a10449 | 1685 | |
a42a5f59 ZW |
1686 | |
1687 | /* Per-insn processing by fixup_var_refs_insns(_with_hash). INSN is | |
1688 | the insn under examination, VAR is the variable to fix up | |
1689 | references to, PROMOTED_MODE and UNSIGNEDP describe VAR, and | |
1690 | TOPLEVEL is nonzero if this is the main insn chain for this | |
1691 | function. */ | |
7a80cf9a | 1692 | |
a42a5f59 ZW |
1693 | static void |
1694 | fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel) | |
1695 | rtx insn; | |
1696 | rtx var; | |
1697 | enum machine_mode promoted_mode; | |
1698 | int unsignedp; | |
1699 | int toplevel; | |
1700 | { | |
1701 | rtx call_dest = 0; | |
1702 | rtx set, prev, prev_set; | |
1703 | rtx note; | |
1704 | ||
1705 | /* Remember the notes in case we delete the insn. */ | |
1706 | note = REG_NOTES (insn); | |
1707 | ||
1708 | /* If this is a CLOBBER of VAR, delete it. | |
1709 | ||
1710 | If it has a REG_LIBCALL note, delete the REG_LIBCALL | |
1711 | and REG_RETVAL notes too. */ | |
1712 | if (GET_CODE (PATTERN (insn)) == CLOBBER | |
1713 | && (XEXP (PATTERN (insn), 0) == var | |
1714 | || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT | |
1715 | && (XEXP (XEXP (PATTERN (insn), 0), 0) == var | |
1716 | || XEXP (XEXP (PATTERN (insn), 0), 1) == var)))) | |
6f086dfc | 1717 | { |
a42a5f59 ZW |
1718 | if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0) |
1719 | /* The REG_LIBCALL note will go away since we are going to | |
1720 | turn INSN into a NOTE, so just delete the | |
1721 | corresponding REG_RETVAL note. */ | |
1722 | remove_note (XEXP (note, 0), | |
1723 | find_reg_note (XEXP (note, 0), REG_RETVAL, | |
1724 | NULL_RTX)); | |
1725 | ||
ca6c03ca | 1726 | delete_insn (insn); |
a42a5f59 | 1727 | } |
e5e809f4 | 1728 | |
a42a5f59 ZW |
1729 | /* The insn to load VAR from a home in the arglist |
1730 | is now a no-op. When we see it, just delete it. | |
1731 | Similarly if this is storing VAR from a register from which | |
1732 | it was loaded in the previous insn. This will occur | |
1733 | when an ADDRESSOF was made for an arglist slot. */ | |
1734 | else if (toplevel | |
1735 | && (set = single_set (insn)) != 0 | |
1736 | && SET_DEST (set) == var | |
1737 | /* If this represents the result of an insn group, | |
1738 | don't delete the insn. */ | |
1739 | && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0 | |
1740 | && (rtx_equal_p (SET_SRC (set), var) | |
1741 | || (GET_CODE (SET_SRC (set)) == REG | |
1742 | && (prev = prev_nonnote_insn (insn)) != 0 | |
1743 | && (prev_set = single_set (prev)) != 0 | |
1744 | && SET_DEST (prev_set) == SET_SRC (set) | |
1745 | && rtx_equal_p (SET_SRC (prev_set), var)))) | |
1746 | { | |
ca6c03ca | 1747 | delete_insn (insn); |
a42a5f59 ZW |
1748 | } |
1749 | else | |
1750 | { | |
1751 | struct fixup_replacement *replacements = 0; | |
1752 | rtx next_insn = NEXT_INSN (insn); | |
1753 | ||
1754 | if (SMALL_REGISTER_CLASSES) | |
6f086dfc | 1755 | { |
a42a5f59 ZW |
1756 | /* If the insn that copies the results of a CALL_INSN |
1757 | into a pseudo now references VAR, we have to use an | |
1758 | intermediate pseudo since we want the life of the | |
1759 | return value register to be only a single insn. | |
1760 | ||
1761 | If we don't use an intermediate pseudo, such things as | |
1762 | address computations to make the address of VAR valid | |
1763 | if it is not can be placed between the CALL_INSN and INSN. | |
1764 | ||
1765 | To make sure this doesn't happen, we record the destination | |
1766 | of the CALL_INSN and see if the next insn uses both that | |
1767 | and VAR. */ | |
1768 | ||
1769 | if (call_dest != 0 && GET_CODE (insn) == INSN | |
1770 | && reg_mentioned_p (var, PATTERN (insn)) | |
1771 | && reg_mentioned_p (call_dest, PATTERN (insn))) | |
63770d6a | 1772 | { |
a42a5f59 | 1773 | rtx temp = gen_reg_rtx (GET_MODE (call_dest)); |
63770d6a | 1774 | |
a42a5f59 ZW |
1775 | emit_insn_before (gen_move_insn (temp, call_dest), insn); |
1776 | ||
1777 | PATTERN (insn) = replace_rtx (PATTERN (insn), | |
1778 | call_dest, temp); | |
6f086dfc | 1779 | } |
02a10449 | 1780 | |
a42a5f59 ZW |
1781 | if (GET_CODE (insn) == CALL_INSN |
1782 | && GET_CODE (PATTERN (insn)) == SET) | |
1783 | call_dest = SET_DEST (PATTERN (insn)); | |
1784 | else if (GET_CODE (insn) == CALL_INSN | |
1785 | && GET_CODE (PATTERN (insn)) == PARALLEL | |
1786 | && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET) | |
1787 | call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0)); | |
1788 | else | |
1789 | call_dest = 0; | |
1790 | } | |
02a10449 | 1791 | |
a42a5f59 ZW |
1792 | /* See if we have to do anything to INSN now that VAR is in |
1793 | memory. If it needs to be loaded into a pseudo, use a single | |
1794 | pseudo for the entire insn in case there is a MATCH_DUP | |
1795 | between two operands. We pass a pointer to the head of | |
1796 | a list of struct fixup_replacements. If fixup_var_refs_1 | |
1797 | needs to allocate pseudos or replacement MEMs (for SUBREGs), | |
1798 | it will record them in this list. | |
02a10449 | 1799 | |
a42a5f59 ZW |
1800 | If it allocated a pseudo for any replacement, we copy into |
1801 | it here. */ | |
718fe406 | 1802 | |
a42a5f59 ZW |
1803 | fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn, |
1804 | &replacements); | |
02a10449 | 1805 | |
a42a5f59 ZW |
1806 | /* If this is last_parm_insn, and any instructions were output |
1807 | after it to fix it up, then we must set last_parm_insn to | |
1808 | the last such instruction emitted. */ | |
1809 | if (insn == last_parm_insn) | |
1810 | last_parm_insn = PREV_INSN (next_insn); | |
718fe406 | 1811 | |
a42a5f59 ZW |
1812 | while (replacements) |
1813 | { | |
1814 | struct fixup_replacement *next; | |
6f086dfc | 1815 | |
a42a5f59 ZW |
1816 | if (GET_CODE (replacements->new) == REG) |
1817 | { | |
1818 | rtx insert_before; | |
1819 | rtx seq; | |
6f086dfc | 1820 | |
a42a5f59 ZW |
1821 | /* OLD might be a (subreg (mem)). */ |
1822 | if (GET_CODE (replacements->old) == SUBREG) | |
1823 | replacements->old | |
1824 | = fixup_memory_subreg (replacements->old, insn, 0); | |
1825 | else | |
1826 | replacements->old | |
1827 | = fixup_stack_1 (replacements->old, insn); | |
77121fee | 1828 | |
a42a5f59 | 1829 | insert_before = insn; |
1f8f4a0b | 1830 | |
a42a5f59 ZW |
1831 | /* If we are changing the mode, do a conversion. |
1832 | This might be wasteful, but combine.c will | |
1833 | eliminate much of the waste. */ | |
6f086dfc | 1834 | |
a42a5f59 ZW |
1835 | if (GET_MODE (replacements->new) |
1836 | != GET_MODE (replacements->old)) | |
1837 | { | |
1838 | start_sequence (); | |
1839 | convert_move (replacements->new, | |
1840 | replacements->old, unsignedp); | |
1841 | seq = gen_sequence (); | |
1842 | end_sequence (); | |
6f086dfc | 1843 | } |
a42a5f59 ZW |
1844 | else |
1845 | seq = gen_move_insn (replacements->new, | |
1846 | replacements->old); | |
6f086dfc | 1847 | |
a42a5f59 | 1848 | emit_insn_before (seq, insert_before); |
ef178af3 | 1849 | } |
fe9b4957 | 1850 | |
a42a5f59 ZW |
1851 | next = replacements->next; |
1852 | free (replacements); | |
1853 | replacements = next; | |
fe9b4957 | 1854 | } |
a42a5f59 ZW |
1855 | } |
1856 | ||
1857 | /* Also fix up any invalid exprs in the REG_NOTES of this insn. | |
1858 | But don't touch other insns referred to by reg-notes; | |
1859 | we will get them elsewhere. */ | |
1860 | while (note) | |
1861 | { | |
1862 | if (GET_CODE (note) != INSN_LIST) | |
1863 | XEXP (note, 0) | |
1864 | = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1); | |
1865 | note = XEXP (note, 1); | |
6f086dfc RS |
1866 | } |
1867 | } | |
1868 | \f | |
00d8a4c1 | 1869 | /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE. |
718fe406 | 1870 | See if the rtx expression at *LOC in INSN needs to be changed. |
6f086dfc RS |
1871 | |
1872 | REPLACEMENTS is a pointer to a list head that starts out zero, but may | |
1873 | contain a list of original rtx's and replacements. If we find that we need | |
1874 | to modify this insn by replacing a memory reference with a pseudo or by | |
1875 | making a new MEM to implement a SUBREG, we consult that list to see if | |
1876 | we have already chosen a replacement. If none has already been allocated, | |
a42a5f59 | 1877 | we allocate it and update the list. fixup_var_refs_insn will copy VAR |
6f086dfc RS |
1878 | or the SUBREG, as appropriate, to the pseudo. */ |
1879 | ||
1880 | static void | |
00d8a4c1 | 1881 | fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements) |
b3694847 | 1882 | rtx var; |
00d8a4c1 | 1883 | enum machine_mode promoted_mode; |
b3694847 | 1884 | rtx *loc; |
6f086dfc RS |
1885 | rtx insn; |
1886 | struct fixup_replacement **replacements; | |
1887 | { | |
b3694847 SS |
1888 | int i; |
1889 | rtx x = *loc; | |
6f086dfc | 1890 | RTX_CODE code = GET_CODE (x); |
b3694847 SS |
1891 | const char *fmt; |
1892 | rtx tem, tem1; | |
6f086dfc RS |
1893 | struct fixup_replacement *replacement; |
1894 | ||
1895 | switch (code) | |
1896 | { | |
e9a25f70 JL |
1897 | case ADDRESSOF: |
1898 | if (XEXP (x, 0) == var) | |
1899 | { | |
956d6950 JL |
1900 | /* Prevent sharing of rtl that might lose. */ |
1901 | rtx sub = copy_rtx (XEXP (var, 0)); | |
1902 | ||
956d6950 JL |
1903 | if (! validate_change (insn, loc, sub, 0)) |
1904 | { | |
5f98f7c4 RH |
1905 | rtx y = gen_reg_rtx (GET_MODE (sub)); |
1906 | rtx seq, new_insn; | |
1907 | ||
1908 | /* We should be able to replace with a register or all is lost. | |
1909 | Note that we can't use validate_change to verify this, since | |
1910 | we're not caring for replacing all dups simultaneously. */ | |
1911 | if (! validate_replace_rtx (*loc, y, insn)) | |
1912 | abort (); | |
1913 | ||
1914 | /* Careful! First try to recognize a direct move of the | |
1915 | value, mimicking how things are done in gen_reload wrt | |
1916 | PLUS. Consider what happens when insn is a conditional | |
1917 | move instruction and addsi3 clobbers flags. */ | |
1918 | ||
1919 | start_sequence (); | |
1920 | new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub)); | |
1921 | seq = gen_sequence (); | |
1922 | end_sequence (); | |
1923 | ||
1924 | if (recog_memoized (new_insn) < 0) | |
1925 | { | |
1926 | /* That failed. Fall back on force_operand and hope. */ | |
956d6950 | 1927 | |
5f98f7c4 | 1928 | start_sequence (); |
e2a5f96b R |
1929 | sub = force_operand (sub, y); |
1930 | if (sub != y) | |
1931 | emit_insn (gen_move_insn (y, sub)); | |
5f98f7c4 RH |
1932 | seq = gen_sequence (); |
1933 | end_sequence (); | |
1934 | } | |
956d6950 | 1935 | |
5f98f7c4 RH |
1936 | #ifdef HAVE_cc0 |
1937 | /* Don't separate setter from user. */ | |
1938 | if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn))) | |
1939 | insn = PREV_INSN (insn); | |
1940 | #endif | |
1941 | ||
1942 | emit_insn_before (seq, insn); | |
1943 | } | |
e9a25f70 JL |
1944 | } |
1945 | return; | |
1946 | ||
6f086dfc RS |
1947 | case MEM: |
1948 | if (var == x) | |
1949 | { | |
718fe406 | 1950 | /* If we already have a replacement, use it. Otherwise, |
6f086dfc RS |
1951 | try to fix up this address in case it is invalid. */ |
1952 | ||
2740a678 | 1953 | replacement = find_fixup_replacement (replacements, var); |
6f086dfc RS |
1954 | if (replacement->new) |
1955 | { | |
1956 | *loc = replacement->new; | |
1957 | return; | |
1958 | } | |
1959 | ||
1960 | *loc = replacement->new = x = fixup_stack_1 (x, insn); | |
1961 | ||
00d8a4c1 RK |
1962 | /* Unless we are forcing memory to register or we changed the mode, |
1963 | we can leave things the way they are if the insn is valid. */ | |
718fe406 | 1964 | |
6f086dfc | 1965 | INSN_CODE (insn) = -1; |
00d8a4c1 RK |
1966 | if (! flag_force_mem && GET_MODE (x) == promoted_mode |
1967 | && recog_memoized (insn) >= 0) | |
6f086dfc RS |
1968 | return; |
1969 | ||
00d8a4c1 | 1970 | *loc = replacement->new = gen_reg_rtx (promoted_mode); |
6f086dfc RS |
1971 | return; |
1972 | } | |
1973 | ||
1974 | /* If X contains VAR, we need to unshare it here so that we update | |
1975 | each occurrence separately. But all identical MEMs in one insn | |
1976 | must be replaced with the same rtx because of the possibility of | |
1977 | MATCH_DUPs. */ | |
1978 | ||
1979 | if (reg_mentioned_p (var, x)) | |
1980 | { | |
2740a678 | 1981 | replacement = find_fixup_replacement (replacements, x); |
6f086dfc RS |
1982 | if (replacement->new == 0) |
1983 | replacement->new = copy_most_rtx (x, var); | |
1984 | ||
1985 | *loc = x = replacement->new; | |
3f546a53 | 1986 | code = GET_CODE (x); |
6f086dfc RS |
1987 | } |
1988 | break; | |
1989 | ||
1990 | case REG: | |
1991 | case CC0: | |
1992 | case PC: | |
1993 | case CONST_INT: | |
1994 | case CONST: | |
1995 | case SYMBOL_REF: | |
1996 | case LABEL_REF: | |
1997 | case CONST_DOUBLE: | |
1998 | return; | |
1999 | ||
2000 | case SIGN_EXTRACT: | |
2001 | case ZERO_EXTRACT: | |
2002 | /* Note that in some cases those types of expressions are altered | |
2003 | by optimize_bit_field, and do not survive to get here. */ | |
2004 | if (XEXP (x, 0) == var | |
2005 | || (GET_CODE (XEXP (x, 0)) == SUBREG | |
2006 | && SUBREG_REG (XEXP (x, 0)) == var)) | |
2007 | { | |
2008 | /* Get TEM as a valid MEM in the mode presently in the insn. | |
2009 | ||
2010 | We don't worry about the possibility of MATCH_DUP here; it | |
2011 | is highly unlikely and would be tricky to handle. */ | |
2012 | ||
2013 | tem = XEXP (x, 0); | |
2014 | if (GET_CODE (tem) == SUBREG) | |
0e09cc26 RK |
2015 | { |
2016 | if (GET_MODE_BITSIZE (GET_MODE (tem)) | |
2017 | > GET_MODE_BITSIZE (GET_MODE (var))) | |
2018 | { | |
2019 | replacement = find_fixup_replacement (replacements, var); | |
2020 | if (replacement->new == 0) | |
2021 | replacement->new = gen_reg_rtx (GET_MODE (var)); | |
2022 | SUBREG_REG (tem) = replacement->new; | |
226ed43f JW |
2023 | |
2024 | /* The following code works only if we have a MEM, so we | |
2025 | need to handle the subreg here. We directly substitute | |
2026 | it assuming that a subreg must be OK here. We already | |
2027 | scheduled a replacement to copy the mem into the | |
2028 | subreg. */ | |
2029 | XEXP (x, 0) = tem; | |
2030 | return; | |
0e09cc26 | 2031 | } |
ef933d26 RK |
2032 | else |
2033 | tem = fixup_memory_subreg (tem, insn, 0); | |
0e09cc26 RK |
2034 | } |
2035 | else | |
2036 | tem = fixup_stack_1 (tem, insn); | |
6f086dfc RS |
2037 | |
2038 | /* Unless we want to load from memory, get TEM into the proper mode | |
2039 | for an extract from memory. This can only be done if the | |
2040 | extract is at a constant position and length. */ | |
2041 | ||
2042 | if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT | |
2043 | && GET_CODE (XEXP (x, 2)) == CONST_INT | |
2044 | && ! mode_dependent_address_p (XEXP (tem, 0)) | |
2045 | && ! MEM_VOLATILE_P (tem)) | |
2046 | { | |
2047 | enum machine_mode wanted_mode = VOIDmode; | |
2048 | enum machine_mode is_mode = GET_MODE (tem); | |
e5e809f4 | 2049 | HOST_WIDE_INT pos = INTVAL (XEXP (x, 2)); |
6f086dfc | 2050 | |
6f086dfc | 2051 | if (GET_CODE (x) == ZERO_EXTRACT) |
0d8e55d8 | 2052 | { |
da920570 ZW |
2053 | enum machine_mode new_mode |
2054 | = mode_for_extraction (EP_extzv, 1); | |
2055 | if (new_mode != MAX_MACHINE_MODE) | |
2056 | wanted_mode = new_mode; | |
0d8e55d8 | 2057 | } |
da920570 | 2058 | else if (GET_CODE (x) == SIGN_EXTRACT) |
0d8e55d8 | 2059 | { |
da920570 ZW |
2060 | enum machine_mode new_mode |
2061 | = mode_for_extraction (EP_extv, 1); | |
2062 | if (new_mode != MAX_MACHINE_MODE) | |
2063 | wanted_mode = new_mode; | |
0d8e55d8 | 2064 | } |
da920570 | 2065 | |
6dc42e49 | 2066 | /* If we have a narrower mode, we can do something. */ |
6f086dfc RS |
2067 | if (wanted_mode != VOIDmode |
2068 | && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode)) | |
2069 | { | |
e5e809f4 | 2070 | HOST_WIDE_INT offset = pos / BITS_PER_UNIT; |
6f086dfc RS |
2071 | rtx old_pos = XEXP (x, 2); |
2072 | rtx newmem; | |
2073 | ||
2074 | /* If the bytes and bits are counted differently, we | |
2075 | must adjust the offset. */ | |
f76b9db2 ILT |
2076 | if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN) |
2077 | offset = (GET_MODE_SIZE (is_mode) | |
2078 | - GET_MODE_SIZE (wanted_mode) - offset); | |
6f086dfc RS |
2079 | |
2080 | pos %= GET_MODE_BITSIZE (wanted_mode); | |
2081 | ||
f1ec5147 | 2082 | newmem = adjust_address_nv (tem, wanted_mode, offset); |
6f086dfc RS |
2083 | |
2084 | /* Make the change and see if the insn remains valid. */ | |
2085 | INSN_CODE (insn) = -1; | |
2086 | XEXP (x, 0) = newmem; | |
5f4f0e22 | 2087 | XEXP (x, 2) = GEN_INT (pos); |
6f086dfc RS |
2088 | |
2089 | if (recog_memoized (insn) >= 0) | |
2090 | return; | |
2091 | ||
2092 | /* Otherwise, restore old position. XEXP (x, 0) will be | |
2093 | restored later. */ | |
2094 | XEXP (x, 2) = old_pos; | |
2095 | } | |
2096 | } | |
2097 | ||
2098 | /* If we get here, the bitfield extract insn can't accept a memory | |
2099 | reference. Copy the input into a register. */ | |
2100 | ||
2101 | tem1 = gen_reg_rtx (GET_MODE (tem)); | |
2102 | emit_insn_before (gen_move_insn (tem1, tem), insn); | |
2103 | XEXP (x, 0) = tem1; | |
2104 | return; | |
2105 | } | |
2106 | break; | |
718fe406 | 2107 | |
6f086dfc RS |
2108 | case SUBREG: |
2109 | if (SUBREG_REG (x) == var) | |
2110 | { | |
00d8a4c1 RK |
2111 | /* If this is a special SUBREG made because VAR was promoted |
2112 | from a wider mode, replace it with VAR and call ourself | |
2113 | recursively, this time saying that the object previously | |
2114 | had its current mode (by virtue of the SUBREG). */ | |
2115 | ||
2116 | if (SUBREG_PROMOTED_VAR_P (x)) | |
2117 | { | |
2118 | *loc = var; | |
2119 | fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements); | |
2120 | return; | |
2121 | } | |
2122 | ||
6f086dfc | 2123 | /* If this SUBREG makes VAR wider, it has become a paradoxical |
718fe406 | 2124 | SUBREG with VAR in memory, but these aren't allowed at this |
6f086dfc RS |
2125 | stage of the compilation. So load VAR into a pseudo and take |
2126 | a SUBREG of that pseudo. */ | |
2127 | if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var))) | |
2128 | { | |
2740a678 | 2129 | replacement = find_fixup_replacement (replacements, var); |
6f086dfc | 2130 | if (replacement->new == 0) |
6349ad24 | 2131 | replacement->new = gen_reg_rtx (promoted_mode); |
6f086dfc RS |
2132 | SUBREG_REG (x) = replacement->new; |
2133 | return; | |
2134 | } | |
2135 | ||
2136 | /* See if we have already found a replacement for this SUBREG. | |
2137 | If so, use it. Otherwise, make a MEM and see if the insn | |
2138 | is recognized. If not, or if we should force MEM into a register, | |
2139 | make a pseudo for this SUBREG. */ | |
2740a678 | 2140 | replacement = find_fixup_replacement (replacements, x); |
6f086dfc RS |
2141 | if (replacement->new) |
2142 | { | |
2143 | *loc = replacement->new; | |
2144 | return; | |
2145 | } | |
718fe406 | 2146 | |
6f086dfc RS |
2147 | replacement->new = *loc = fixup_memory_subreg (x, insn, 0); |
2148 | ||
f898f031 | 2149 | INSN_CODE (insn) = -1; |
6f086dfc RS |
2150 | if (! flag_force_mem && recog_memoized (insn) >= 0) |
2151 | return; | |
2152 | ||
2153 | *loc = replacement->new = gen_reg_rtx (GET_MODE (x)); | |
2154 | return; | |
2155 | } | |
2156 | break; | |
2157 | ||
2158 | case SET: | |
2159 | /* First do special simplification of bit-field references. */ | |
2160 | if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT | |
2161 | || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT) | |
2162 | optimize_bit_field (x, insn, 0); | |
2163 | if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT | |
2164 | || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT) | |
6496a589 | 2165 | optimize_bit_field (x, insn, 0); |
6f086dfc | 2166 | |
0e09cc26 RK |
2167 | /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object |
2168 | into a register and then store it back out. */ | |
2169 | if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT | |
2170 | && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG | |
2171 | && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var | |
2172 | && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0))) | |
2173 | > GET_MODE_SIZE (GET_MODE (var)))) | |
2174 | { | |
2175 | replacement = find_fixup_replacement (replacements, var); | |
2176 | if (replacement->new == 0) | |
2177 | replacement->new = gen_reg_rtx (GET_MODE (var)); | |
2178 | ||
2179 | SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new; | |
2180 | emit_insn_after (gen_move_insn (var, replacement->new), insn); | |
2181 | } | |
2182 | ||
6f086dfc | 2183 | /* If SET_DEST is now a paradoxical SUBREG, put the result of this |
0f41302f | 2184 | insn into a pseudo and store the low part of the pseudo into VAR. */ |
6f086dfc RS |
2185 | if (GET_CODE (SET_DEST (x)) == SUBREG |
2186 | && SUBREG_REG (SET_DEST (x)) == var | |
2187 | && (GET_MODE_SIZE (GET_MODE (SET_DEST (x))) | |
2188 | > GET_MODE_SIZE (GET_MODE (var)))) | |
2189 | { | |
2190 | SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x))); | |
2191 | emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var), | |
2192 | tem)), | |
2193 | insn); | |
2194 | break; | |
2195 | } | |
718fe406 | 2196 | |
6f086dfc RS |
2197 | { |
2198 | rtx dest = SET_DEST (x); | |
2199 | rtx src = SET_SRC (x); | |
2200 | rtx outerdest = dest; | |
2201 | ||
2202 | while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART | |
2203 | || GET_CODE (dest) == SIGN_EXTRACT | |
2204 | || GET_CODE (dest) == ZERO_EXTRACT) | |
2205 | dest = XEXP (dest, 0); | |
2206 | ||
2207 | if (GET_CODE (src) == SUBREG) | |
ddef6bc7 | 2208 | src = SUBREG_REG (src); |
6f086dfc RS |
2209 | |
2210 | /* If VAR does not appear at the top level of the SET | |
2211 | just scan the lower levels of the tree. */ | |
2212 | ||
718fe406 | 2213 | if (src != var && dest != var) |
6f086dfc RS |
2214 | break; |
2215 | ||
2216 | /* We will need to rerecognize this insn. */ | |
2217 | INSN_CODE (insn) = -1; | |
2218 | ||
da920570 ZW |
2219 | if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var |
2220 | && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE) | |
6f086dfc RS |
2221 | { |
2222 | /* Since this case will return, ensure we fixup all the | |
2223 | operands here. */ | |
00d8a4c1 RK |
2224 | fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1), |
2225 | insn, replacements); | |
2226 | fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2), | |
2227 | insn, replacements); | |
2228 | fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x), | |
2229 | insn, replacements); | |
6f086dfc RS |
2230 | |
2231 | tem = XEXP (outerdest, 0); | |
2232 | ||
2233 | /* Clean up (SUBREG:SI (MEM:mode ...) 0) | |
2234 | that may appear inside a ZERO_EXTRACT. | |
2235 | This was legitimate when the MEM was a REG. */ | |
2236 | if (GET_CODE (tem) == SUBREG | |
2237 | && SUBREG_REG (tem) == var) | |
0e09cc26 | 2238 | tem = fixup_memory_subreg (tem, insn, 0); |
6f086dfc RS |
2239 | else |
2240 | tem = fixup_stack_1 (tem, insn); | |
2241 | ||
2242 | if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT | |
2243 | && GET_CODE (XEXP (outerdest, 2)) == CONST_INT | |
2244 | && ! mode_dependent_address_p (XEXP (tem, 0)) | |
2245 | && ! MEM_VOLATILE_P (tem)) | |
2246 | { | |
0d8e55d8 | 2247 | enum machine_mode wanted_mode; |
6f086dfc | 2248 | enum machine_mode is_mode = GET_MODE (tem); |
e5e809f4 | 2249 | HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2)); |
6f086dfc | 2250 | |
da920570 | 2251 | wanted_mode = mode_for_extraction (EP_insv, 0); |
0d8e55d8 | 2252 | |
6dc42e49 | 2253 | /* If we have a narrower mode, we can do something. */ |
6f086dfc RS |
2254 | if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode)) |
2255 | { | |
e5e809f4 | 2256 | HOST_WIDE_INT offset = pos / BITS_PER_UNIT; |
6f086dfc RS |
2257 | rtx old_pos = XEXP (outerdest, 2); |
2258 | rtx newmem; | |
2259 | ||
f76b9db2 ILT |
2260 | if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN) |
2261 | offset = (GET_MODE_SIZE (is_mode) | |
2262 | - GET_MODE_SIZE (wanted_mode) - offset); | |
6f086dfc RS |
2263 | |
2264 | pos %= GET_MODE_BITSIZE (wanted_mode); | |
2265 | ||
f1ec5147 | 2266 | newmem = adjust_address_nv (tem, wanted_mode, offset); |
6f086dfc RS |
2267 | |
2268 | /* Make the change and see if the insn remains valid. */ | |
2269 | INSN_CODE (insn) = -1; | |
2270 | XEXP (outerdest, 0) = newmem; | |
5f4f0e22 | 2271 | XEXP (outerdest, 2) = GEN_INT (pos); |
718fe406 | 2272 | |
6f086dfc RS |
2273 | if (recog_memoized (insn) >= 0) |
2274 | return; | |
718fe406 | 2275 | |
6f086dfc RS |
2276 | /* Otherwise, restore old position. XEXP (x, 0) will be |
2277 | restored later. */ | |
2278 | XEXP (outerdest, 2) = old_pos; | |
2279 | } | |
2280 | } | |
2281 | ||
2282 | /* If we get here, the bit-field store doesn't allow memory | |
2283 | or isn't located at a constant position. Load the value into | |
2284 | a register, do the store, and put it back into memory. */ | |
2285 | ||
2286 | tem1 = gen_reg_rtx (GET_MODE (tem)); | |
2287 | emit_insn_before (gen_move_insn (tem1, tem), insn); | |
2288 | emit_insn_after (gen_move_insn (tem, tem1), insn); | |
2289 | XEXP (outerdest, 0) = tem1; | |
2290 | return; | |
2291 | } | |
6f086dfc RS |
2292 | |
2293 | /* STRICT_LOW_PART is a no-op on memory references | |
2294 | and it can cause combinations to be unrecognizable, | |
2295 | so eliminate it. */ | |
2296 | ||
2297 | if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART) | |
2298 | SET_DEST (x) = XEXP (SET_DEST (x), 0); | |
2299 | ||
2300 | /* A valid insn to copy VAR into or out of a register | |
2301 | must be left alone, to avoid an infinite loop here. | |
2302 | If the reference to VAR is by a subreg, fix that up, | |
2303 | since SUBREG is not valid for a memref. | |
e15762df RK |
2304 | Also fix up the address of the stack slot. |
2305 | ||
2306 | Note that we must not try to recognize the insn until | |
2307 | after we know that we have valid addresses and no | |
2308 | (subreg (mem ...) ...) constructs, since these interfere | |
2309 | with determining the validity of the insn. */ | |
6f086dfc RS |
2310 | |
2311 | if ((SET_SRC (x) == var | |
2312 | || (GET_CODE (SET_SRC (x)) == SUBREG | |
2313 | && SUBREG_REG (SET_SRC (x)) == var)) | |
2314 | && (GET_CODE (SET_DEST (x)) == REG | |
2315 | || (GET_CODE (SET_DEST (x)) == SUBREG | |
2316 | && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG)) | |
1d273bf5 | 2317 | && GET_MODE (var) == promoted_mode |
c46722a7 | 2318 | && x == single_set (insn)) |
6f086dfc | 2319 | { |
cc0cbae1 | 2320 | rtx pat, last; |
e15762df | 2321 | |
2740a678 | 2322 | replacement = find_fixup_replacement (replacements, SET_SRC (x)); |
6f086dfc | 2323 | if (replacement->new) |
6f086dfc | 2324 | SET_SRC (x) = replacement->new; |
6f086dfc RS |
2325 | else if (GET_CODE (SET_SRC (x)) == SUBREG) |
2326 | SET_SRC (x) = replacement->new | |
2327 | = fixup_memory_subreg (SET_SRC (x), insn, 0); | |
2328 | else | |
2329 | SET_SRC (x) = replacement->new | |
2330 | = fixup_stack_1 (SET_SRC (x), insn); | |
e15762df RK |
2331 | |
2332 | if (recog_memoized (insn) >= 0) | |
2333 | return; | |
2334 | ||
2335 | /* INSN is not valid, but we know that we want to | |
2336 | copy SET_SRC (x) to SET_DEST (x) in some way. So | |
2337 | we generate the move and see whether it requires more | |
2338 | than one insn. If it does, we emit those insns and | |
718fe406 | 2339 | delete INSN. Otherwise, we can just replace the pattern
e15762df RK |
2340 | of INSN; we have already verified above that INSN has |
2341 | no other function than to do X. */ | |
2342 | ||
2343 | pat = gen_move_insn (SET_DEST (x), SET_SRC (x)); | |
2344 | if (GET_CODE (pat) == SEQUENCE) | |
2345 | { | |
cc0cbae1 JW |
2346 | last = emit_insn_before (pat, insn); |
2347 | ||
2348 | /* INSN might have REG_RETVAL or other important notes, so | |
2349 | we need to store the pattern of the last insn in the | |
2350 | sequence into INSN similarly to the normal case. LAST | |
2351 | should not have REG_NOTES, but we allow them if INSN has | |
2352 | no REG_NOTES. */ | |
2353 | if (REG_NOTES (last) && REG_NOTES (insn)) | |
2354 | abort (); | |
2355 | if (REG_NOTES (last)) | |
2356 | REG_NOTES (insn) = REG_NOTES (last); | |
2357 | PATTERN (insn) = PATTERN (last); | |
2358 | ||
ca6c03ca | 2359 | delete_insn (last); |
e15762df RK |
2360 | } |
2361 | else | |
2362 | PATTERN (insn) = pat; | |
2363 | ||
6f086dfc RS |
2364 | return; |
2365 | } | |
2366 | ||
2367 | if ((SET_DEST (x) == var | |
2368 | || (GET_CODE (SET_DEST (x)) == SUBREG | |
2369 | && SUBREG_REG (SET_DEST (x)) == var)) | |
2370 | && (GET_CODE (SET_SRC (x)) == REG | |
2371 | || (GET_CODE (SET_SRC (x)) == SUBREG | |
2372 | && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG)) | |
1d273bf5 | 2373 | && GET_MODE (var) == promoted_mode |
c46722a7 | 2374 | && x == single_set (insn)) |
6f086dfc | 2375 | { |
cc0cbae1 | 2376 | rtx pat, last; |
e15762df | 2377 | |
6f086dfc RS |
2378 | if (GET_CODE (SET_DEST (x)) == SUBREG) |
2379 | SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0); | |
2380 | else | |
2381 | SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn); | |
e15762df RK |
2382 | |
2383 | if (recog_memoized (insn) >= 0) | |
2384 | return; | |
2385 | ||
2386 | pat = gen_move_insn (SET_DEST (x), SET_SRC (x)); | |
2387 | if (GET_CODE (pat) == SEQUENCE) | |
2388 | { | |
cc0cbae1 JW |
2389 | last = emit_insn_before (pat, insn); |
2390 | ||
2391 | /* INSN might have REG_RETVAL or other important notes, so | |
2392 | we need to store the pattern of the last insn in the | |
2393 | sequence into INSN similarly to the normal case. LAST | |
2394 | should not have REG_NOTES, but we allow them if INSN has | |
2395 | no REG_NOTES. */ | |
2396 | if (REG_NOTES (last) && REG_NOTES (insn)) | |
2397 | abort (); | |
2398 | if (REG_NOTES (last)) | |
2399 | REG_NOTES (insn) = REG_NOTES (last); | |
2400 | PATTERN (insn) = PATTERN (last); | |
2401 | ||
ca6c03ca | 2402 | delete_insn (last); |
e15762df RK |
2403 | } |
2404 | else | |
2405 | PATTERN (insn) = pat; | |
2406 | ||
6f086dfc RS |
2407 | return; |
2408 | } | |
2409 | ||
2410 | /* Otherwise, storing into VAR must be handled specially | |
2411 | by storing into a temporary and copying that into VAR | |
00d8a4c1 RK |
2412 | with a new insn after this one. Note that this case |
2413 | will be used when storing into a promoted scalar since | |
2414 | the insn will now have different modes on the input | |
2415 | and output and hence will be invalid (except for the case | |
2416 | of setting it to a constant, which does not need any | |
2417 | change if it is valid). We generate extra code in that case, | |
2418 | but combine.c will eliminate it. */ | |
6f086dfc RS |
2419 | |
2420 | if (dest == var) | |
2421 | { | |
2422 | rtx temp; | |
00d8a4c1 RK |
2423 | rtx fixeddest = SET_DEST (x); |
2424 | ||
6f086dfc | 2425 | /* STRICT_LOW_PART can be discarded around a MEM. */ |
00d8a4c1 RK |
2426 | if (GET_CODE (fixeddest) == STRICT_LOW_PART) |
2427 | fixeddest = XEXP (fixeddest, 0); | |
6f086dfc | 2428 | /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */ |
00d8a4c1 | 2429 | if (GET_CODE (fixeddest) == SUBREG) |
926d1ca5 RK |
2430 | { |
2431 | fixeddest = fixup_memory_subreg (fixeddest, insn, 0); | |
2432 | promoted_mode = GET_MODE (fixeddest); | |
2433 | } | |
6f086dfc | 2434 | else |
00d8a4c1 RK |
2435 | fixeddest = fixup_stack_1 (fixeddest, insn); |
2436 | ||
926d1ca5 | 2437 | temp = gen_reg_rtx (promoted_mode); |
00d8a4c1 RK |
2438 | |
2439 | emit_insn_after (gen_move_insn (fixeddest, | |
2440 | gen_lowpart (GET_MODE (fixeddest), | |
2441 | temp)), | |
2442 | insn); | |
6f086dfc | 2443 | |
6f086dfc RS |
2444 | SET_DEST (x) = temp; |
2445 | } | |
2446 | } | |
e9a25f70 JL |
2447 | |
2448 | default: | |
2449 | break; | |
6f086dfc RS |
2450 | } |
2451 | ||
2452 | /* Nothing special about this RTX; fix its operands. */ | |
2453 | ||
2454 | fmt = GET_RTX_FORMAT (code); | |
2455 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
2456 | { | |
2457 | if (fmt[i] == 'e') | |
00d8a4c1 | 2458 | fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements); |
d4757e6a | 2459 | else if (fmt[i] == 'E') |
6f086dfc | 2460 | { |
b3694847 | 2461 | int j; |
6f086dfc | 2462 | for (j = 0; j < XVECLEN (x, i); j++) |
00d8a4c1 RK |
2463 | fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j), |
2464 | insn, replacements); | |
6f086dfc RS |
2465 | } |
2466 | } | |
2467 | } | |
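/* Illustrative sketch of the narrowing arithmetic used twice above for
   SIGN_EXTRACT/ZERO_EXTRACT (hedged: the sizes are made up, not target
   values).  From a bit position POS within an IS_SIZE-byte container,
   derive the byte OFFSET of a narrower WANTED_SIZE-byte access and the
   residual bit position inside it, mirroring the offset when bytes and
   bits are numbered from opposite ends.  */
#if 0 /* example only */
#include <stdio.h>

#define BITS_PER_UNIT 8

static void
narrow (int pos, int is_size, int wanted_size, int flip_endian)
{
  int offset = pos / BITS_PER_UNIT;

  if (flip_endian)  /* the BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN case */
    offset = is_size - wanted_size - offset;
  pos %= wanted_size * BITS_PER_UNIT;
  printf ("byte offset %d, bit pos %d\n", offset, pos);
}

int
main (void)
{
  /* Bit 17 of a 4-byte word, narrowed to a 1-byte access.  */
  narrow (17, 4, 1, 0);   /* same numbering:    offset 2, bit 1 */
  narrow (17, 4, 1, 1);   /* flipped numbering: offset 1, bit 1 */
  return 0;
}
#endif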
2468 | \f | |
2469 | /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)), | |
2470 | return an rtx (MEM:m1 newaddr) which is equivalent. | |
2471 | If any insns must be emitted to compute NEWADDR, put them before INSN. | |
2472 | ||
2473 | UNCRITICAL nonzero means accept paradoxical subregs. | |
0f41302f | 2474 | This is used for subregs found inside REG_NOTES. */ |
6f086dfc RS |
2475 | |
2476 | static rtx | |
2477 | fixup_memory_subreg (x, insn, uncritical) | |
2478 | rtx x; | |
2479 | rtx insn; | |
2480 | int uncritical; | |
2481 | { | |
ddef6bc7 | 2482 | int offset = SUBREG_BYTE (x); |
6f086dfc RS |
2483 | rtx addr = XEXP (SUBREG_REG (x), 0); |
2484 | enum machine_mode mode = GET_MODE (x); | |
29a82058 | 2485 | rtx result; |
6f086dfc RS |
2486 | |
2487 | /* Paradoxical SUBREGs are usually invalid during RTL generation. */ | |
2488 | if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) | |
2489 | && ! uncritical) | |
2490 | abort (); | |
2491 | ||
f4ef873c RK |
2492 | if (!flag_force_addr |
2493 | && memory_address_p (mode, plus_constant (addr, offset))) | |
6f086dfc | 2494 | /* Shortcut if no insns need be emitted. */ |
f4ef873c RK |
2495 | return adjust_address (SUBREG_REG (x), mode, offset); |
2496 | ||
6f086dfc | 2497 | start_sequence (); |
f4ef873c | 2498 | result = adjust_address (SUBREG_REG (x), mode, offset); |
6f086dfc RS |
2499 | emit_insn_before (gen_sequence (), insn); |
2500 | end_sequence (); | |
2501 | return result; | |
2502 | } | |
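/* Illustrative sketch (hypothetical, not GCC API): the transformation
   above is the RTL analogue of re-pointing into a buffer.  A narrower
   view at byte offset OFF of a wider object is just the base address
   plus OFF; no data moves, only the address changes.  */
#if 0 /* example only */
#include <stdio.h>
#include <string.h>

int
main (void)
{
  unsigned char wide[8];       /* the (MEM:m2 addr) analogue */
  unsigned short narrow_view;  /* the (MEM:m1 newaddr) analogue */
  size_t off = 2;              /* the SUBREG_BYTE analogue */

  memset (wide, 0xab, sizeof wide);
  memcpy (&narrow_view, wide + off, sizeof narrow_view);
  printf ("view at +%zu: 0x%04x\n", off, (unsigned) narrow_view);
  return 0;
}
#endif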
2503 | ||
2504 | /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X. | |
2505 | Replace subexpressions of X in place. | |
2506 | If X itself is a (SUBREG (MEM ...) ...), return the replacement expression. | |
2507 | Otherwise return X, with its contents possibly altered. | |
2508 | ||
718fe406 | 2509 | If any insns must be emitted to compute the new addresses, put them before INSN. |
ab6155b7 RK |
2510 | |
2511 | UNCRITICAL is as in fixup_memory_subreg. */ | |
6f086dfc RS |
2512 | |
2513 | static rtx | |
ab6155b7 | 2514 | walk_fixup_memory_subreg (x, insn, uncritical) |
b3694847 | 2515 | rtx x; |
6f086dfc | 2516 | rtx insn; |
ab6155b7 | 2517 | int uncritical; |
6f086dfc | 2518 | { |
b3694847 SS |
2519 | enum rtx_code code; |
2520 | const char *fmt; | |
2521 | int i; | |
6f086dfc RS |
2522 | |
2523 | if (x == 0) | |
2524 | return 0; | |
2525 | ||
2526 | code = GET_CODE (x); | |
2527 | ||
2528 | if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM) | |
ab6155b7 | 2529 | return fixup_memory_subreg (x, insn, uncritical); |
6f086dfc RS |
2530 | |
2531 | /* Nothing special about this RTX; fix its operands. */ | |
2532 | ||
2533 | fmt = GET_RTX_FORMAT (code); | |
2534 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
2535 | { | |
2536 | if (fmt[i] == 'e') | |
ab6155b7 | 2537 | XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical); |
d4757e6a | 2538 | else if (fmt[i] == 'E') |
6f086dfc | 2539 | { |
b3694847 | 2540 | int j; |
6f086dfc RS |
2541 | for (j = 0; j < XVECLEN (x, i); j++) |
2542 | XVECEXP (x, i, j) | |
ab6155b7 | 2543 | = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical); |
6f086dfc RS |
2544 | } |
2545 | } | |
2546 | return x; | |
2547 | } | |
2548 | \f | |
6f086dfc RS |
2549 | /* For each memory ref within X, if it refers to a stack slot |
2550 | with an out of range displacement, put the address in a temp register | |
2551 | (emitting new insns before INSN to load these registers) | |
2552 | and alter the memory ref to use that register. | |
2553 | Replace each such MEM rtx with a copy, to avoid clobberage. */ | |
2554 | ||
2555 | static rtx | |
2556 | fixup_stack_1 (x, insn) | |
2557 | rtx x; | |
2558 | rtx insn; | |
2559 | { | |
b3694847 SS |
2560 | int i; |
2561 | RTX_CODE code = GET_CODE (x); | |
2562 | const char *fmt; | |
6f086dfc RS |
2563 | |
2564 | if (code == MEM) | |
2565 | { | |
b3694847 | 2566 | rtx ad = XEXP (x, 0); |
6f086dfc RS |
2567 | /* If we have address of a stack slot but it's not valid |
2568 | (displacement is too large), compute the sum in a register. */ | |
2569 | if (GET_CODE (ad) == PLUS | |
2570 | && GET_CODE (XEXP (ad, 0)) == REG | |
40d05551 RK |
2571 | && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER |
2572 | && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER) | |
e9a25f70 JL |
2573 | || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM |
2574 | #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM | |
2575 | || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM | |
2576 | #endif | |
2577 | || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM | |
956d6950 | 2578 | || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM |
40d05551 | 2579 | || XEXP (ad, 0) == current_function_internal_arg_pointer) |
6f086dfc RS |
2580 | && GET_CODE (XEXP (ad, 1)) == CONST_INT) |
2581 | { | |
2582 | rtx temp, seq; | |
2583 | if (memory_address_p (GET_MODE (x), ad)) | |
2584 | return x; | |
2585 | ||
2586 | start_sequence (); | |
2587 | temp = copy_to_reg (ad); | |
2588 | seq = gen_sequence (); | |
2589 | end_sequence (); | |
2590 | emit_insn_before (seq, insn); | |
792760b9 | 2591 | return replace_equiv_address (x, temp); |
6f086dfc RS |
2592 | } |
2593 | return x; | |
2594 | } | |
2595 | ||
2596 | fmt = GET_RTX_FORMAT (code); | |
2597 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
2598 | { | |
2599 | if (fmt[i] == 'e') | |
2600 | XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn); | |
d4757e6a | 2601 | else if (fmt[i] == 'E') |
6f086dfc | 2602 | { |
b3694847 | 2603 | int j; |
6f086dfc RS |
2604 | for (j = 0; j < XVECLEN (x, i); j++) |
2605 | XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn); | |
2606 | } | |
2607 | } | |
2608 | return x; | |
2609 | } | |
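/* Illustrative sketch (hypothetical structures): the recursion pattern
   shared by fixup_var_refs_1, walk_fixup_memory_subreg and fixup_stack_1.
   Each node kind carries a format string; 'e' marks a single child and
   'E' a vector of children, so one generic walker can visit any node
   without knowing the node kinds in advance.  */
#if 0 /* example only */
#include <stdio.h>
#include <string.h>

struct node
{
  const char *fmt;       /* e.g. "E": one vector operand */
  struct node *child;    /* operand for 'e' */
  struct node **vec;     /* operands for 'E' */
  int veclen;
  int id;
};

static void
walk (struct node *x)
{
  int i, j, n;

  if (x == 0)
    return;
  printf ("visit %d\n", x->id);
  n = (int) strlen (x->fmt);
  for (i = n - 1; i >= 0; i--)   /* same descending order as above */
    {
      if (x->fmt[i] == 'e')
        walk (x->child);
      else if (x->fmt[i] == 'E')
        for (j = 0; j < x->veclen; j++)
          walk (x->vec[j]);
    }
}

int
main (void)
{
  struct node leaf1 = { "", 0, 0, 0, 2 };
  struct node leaf2 = { "", 0, 0, 0, 3 };
  struct node *vec[2] = { &leaf1, &leaf2 };
  struct node root = { "E", 0, vec, 2, 1 };

  walk (&root);   /* prints visit 1, visit 2, visit 3 */
  return 0;
}
#endif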
2610 | \f | |
2611 | /* Optimization: a bit-field instruction whose field | |
2612 | happens to be a byte or halfword in memory | |
2613 | can be changed to a move instruction. | |
2614 | ||
2615 | We call here when INSN is an insn that examines or stores into a bit-field. | |
2616 | BODY is the SET-rtx to be altered. | |
2617 | ||
2618 | EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0. | |
2619 | (Currently this is called only from function.c, and EQUIV_MEM | |
2620 | is always 0.) */ | |
2621 | ||
2622 | static void | |
2623 | optimize_bit_field (body, insn, equiv_mem) | |
2624 | rtx body; | |
2625 | rtx insn; | |
2626 | rtx *equiv_mem; | |
2627 | { | |
b3694847 | 2628 | rtx bitfield; |
6f086dfc RS |
2629 | int destflag; |
2630 | rtx seq = 0; | |
2631 | enum machine_mode mode; | |
2632 | ||
2633 | if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT | |
2634 | || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT) | |
2635 | bitfield = SET_DEST (body), destflag = 1; | |
2636 | else | |
2637 | bitfield = SET_SRC (body), destflag = 0; | |
2638 | ||
2639 | /* First check that the field being stored has constant size and position | |
2640 | and is in fact a byte or halfword suitably aligned. */ | |
2641 | ||
2642 | if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT | |
2643 | && GET_CODE (XEXP (bitfield, 2)) == CONST_INT | |
2644 | && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1)) | |
2645 | != BLKmode) | |
2646 | && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0) | |
2647 | { | |
b3694847 | 2648 | rtx memref = 0; |
6f086dfc RS |
2649 | |
2650 | /* Now check that the containing word is memory, not a register, | |
2651 | and that it is safe to change the machine mode. */ | |
2652 | ||
2653 | if (GET_CODE (XEXP (bitfield, 0)) == MEM) | |
2654 | memref = XEXP (bitfield, 0); | |
2655 | else if (GET_CODE (XEXP (bitfield, 0)) == REG | |
2656 | && equiv_mem != 0) | |
2657 | memref = equiv_mem[REGNO (XEXP (bitfield, 0))]; | |
2658 | else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG | |
2659 | && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM) | |
2660 | memref = SUBREG_REG (XEXP (bitfield, 0)); | |
2661 | else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG | |
2662 | && equiv_mem != 0 | |
2663 | && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG) | |
2664 | memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))]; | |
2665 | ||
2666 | if (memref | |
2667 | && ! mode_dependent_address_p (XEXP (memref, 0)) | |
2668 | && ! MEM_VOLATILE_P (memref)) | |
2669 | { | |
2670 | /* Now adjust the address, first for any subreg'ing | |
2671 | that we are now getting rid of, | |
2672 | and then for which byte of the word is wanted. */ | |
2673 | ||
e5e809f4 | 2674 | HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2)); |
b88a3142 RK |
2675 | rtx insns; |
2676 | ||
6f086dfc | 2677 | /* Adjust OFFSET to count bits from low-address byte. */ |
f76b9db2 ILT |
2678 | if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN) |
2679 | offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0))) | |
2680 | - offset - INTVAL (XEXP (bitfield, 1))); | |
2681 | ||
6f086dfc RS |
2682 | /* Adjust OFFSET to count bytes from low-address byte. */ |
2683 | offset /= BITS_PER_UNIT; | |
2684 | if (GET_CODE (XEXP (bitfield, 0)) == SUBREG) | |
2685 | { | |
ddef6bc7 JJ |
2686 | offset += (SUBREG_BYTE (XEXP (bitfield, 0)) |
2687 | / UNITS_PER_WORD) * UNITS_PER_WORD; | |
f76b9db2 ILT |
2688 | if (BYTES_BIG_ENDIAN) |
2689 | offset -= (MIN (UNITS_PER_WORD, | |
2690 | GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0)))) | |
2691 | - MIN (UNITS_PER_WORD, | |
2692 | GET_MODE_SIZE (GET_MODE (memref)))); | |
6f086dfc RS |
2693 | } |
2694 | ||
b88a3142 | 2695 | start_sequence (); |
f4ef873c | 2696 | memref = adjust_address (memref, mode, offset); |
b88a3142 RK |
2697 | insns = get_insns (); |
2698 | end_sequence (); | |
2699 | emit_insns_before (insns, insn); | |
6f086dfc RS |
2700 | |
2701 | /* Store this memory reference where | |
2702 | we found the bit field reference. */ | |
2703 | ||
2704 | if (destflag) | |
2705 | { | |
2706 | validate_change (insn, &SET_DEST (body), memref, 1); | |
2707 | if (! CONSTANT_ADDRESS_P (SET_SRC (body))) | |
2708 | { | |
2709 | rtx src = SET_SRC (body); | |
2710 | while (GET_CODE (src) == SUBREG | |
ddef6bc7 | 2711 | && SUBREG_BYTE (src) == 0) |
6f086dfc RS |
2712 | src = SUBREG_REG (src); |
2713 | if (GET_MODE (src) != GET_MODE (memref)) | |
2714 | src = gen_lowpart (GET_MODE (memref), SET_SRC (body)); | |
2715 | validate_change (insn, &SET_SRC (body), src, 1); | |
2716 | } | |
2717 | else if (GET_MODE (SET_SRC (body)) != VOIDmode | |
2718 | && GET_MODE (SET_SRC (body)) != GET_MODE (memref)) | |
2719 | /* This shouldn't happen because anything that didn't have | |
2720 | one of these modes should have got converted explicitly | |
2721 | and then referenced through a subreg. | |
2722 | This is so because the original bit-field was | |
2723 | handled by agg_mode and so its tree structure had | |
2724 | the same mode that memref now has. */ | |
2725 | abort (); | |
2726 | } | |
2727 | else | |
2728 | { | |
2729 | rtx dest = SET_DEST (body); | |
2730 | ||
2731 | while (GET_CODE (dest) == SUBREG | |
ddef6bc7 | 2732 | && SUBREG_BYTE (dest) == 0 |
4013a709 | 2733 | && (GET_MODE_CLASS (GET_MODE (dest)) |
ab87f8c8 JL |
2734 | == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))) |
2735 | && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) | |
2736 | <= UNITS_PER_WORD)) | |
6f086dfc RS |
2737 | dest = SUBREG_REG (dest); |
2738 | ||
2739 | validate_change (insn, &SET_DEST (body), dest, 1); | |
2740 | ||
2741 | if (GET_MODE (dest) == GET_MODE (memref)) | |
2742 | validate_change (insn, &SET_SRC (body), memref, 1); | |
2743 | else | |
2744 | { | |
2745 | /* Convert the mem ref to the destination mode. */ | |
2746 | rtx newreg = gen_reg_rtx (GET_MODE (dest)); | |
2747 | ||
2748 | start_sequence (); | |
2749 | convert_move (newreg, memref, | |
2750 | GET_CODE (SET_SRC (body)) == ZERO_EXTRACT); | |
2751 | seq = get_insns (); | |
2752 | end_sequence (); | |
2753 | ||
2754 | validate_change (insn, &SET_SRC (body), newreg, 1); | |
2755 | } | |
2756 | } | |
2757 | ||
2758 | /* See if we can convert this extraction or insertion into | |
2759 | a simple move insn. We might not be able to do so if this | |
2760 | was, for example, part of a PARALLEL. | |
2761 | ||
2762 | If we succeed, write out any needed conversions. If we fail, | |
2763 | it is hard to guess why we failed, so don't do anything | |
2764 | special; just let the optimization be suppressed. */ | |
2765 | ||
2766 | if (apply_change_group () && seq) | |
2767 | emit_insns_before (seq, insn); | |
2768 | } | |
2769 | } | |
2770 | } | |
2771 | \f | |
2772 | /* These routines are responsible for converting virtual register references | |
2773 | to the actual hard register references once RTL generation is complete. | |
2774 | ||
2775 | The following five variables are used for communication between the | |
2776 | routines. They contain the offsets of the virtual registers from their | |
2777 | respective hard registers. */ | |
2778 | ||
2779 | static int in_arg_offset; | |
2780 | static int var_offset; | |
2781 | static int dynamic_offset; | |
2782 | static int out_arg_offset; | |
71038426 | 2783 | static int cfa_offset; |
6f086dfc RS |
2784 | |
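/* For illustration (offsets are target-dependent): once var_offset is
   known, a frame reference such as

     (plus:SI (reg:SI virtual-stack-vars) (const_int 4))

   is later rewritten by instantiate_virtual_regs_1 into

     (plus:SI (reg:SI frame-pointer) (const_int var_offset + 4))

   and likewise for the other virtual registers and their offsets.  */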
2785 | /* In most machines, the stack pointer register is equivalent to the bottom | |
2786 | of the stack. */ | |
2787 | ||
2788 | #ifndef STACK_POINTER_OFFSET | |
2789 | #define STACK_POINTER_OFFSET 0 | |
2790 | #endif | |
2791 | ||
2792 | /* If not defined, pick an appropriate default for the offset of dynamically | |
2793 | allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS, | |
2794 | REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */ | |
2795 | ||
2796 | #ifndef STACK_DYNAMIC_OFFSET | |
2797 | ||
6f086dfc RS |
2798 | /* The bottom of the stack points to the actual arguments. If |
2799 | REG_PARM_STACK_SPACE is defined, this includes the space for the register | |
2800 | parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined, | |
718fe406 | 2801 | stack space for register parameters is not pushed by the caller, but |
6f086dfc RS |
2802 | rather is part of the fixed stack areas and hence not included in | |
2803 | `current_function_outgoing_args_size'. Nevertheless, we must allow | |
2804 | for it when allocating stack dynamic objects. */ | |
2805 | ||
2806 | #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE) | |
2807 | #define STACK_DYNAMIC_OFFSET(FNDECL) \ | |
f73ad30e JH |
2808 | ((ACCUMULATE_OUTGOING_ARGS \ |
2809 | ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\ | |
2810 | + (STACK_POINTER_OFFSET)) \ | |
6f086dfc RS |
2811 | |
2812 | #else | |
2813 | #define STACK_DYNAMIC_OFFSET(FNDECL) \ | |
f73ad30e JH |
2814 | ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \ |
2815 | + (STACK_POINTER_OFFSET)) | |
6f086dfc RS |
2816 | #endif |
2817 | #endif | |
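/* For example (with made-up numbers): if ACCUMULATE_OUTGOING_ARGS is
   nonzero, REG_PARM_STACK_SPACE is 16, current_function_outgoing_args_size
   is 32 and STACK_POINTER_OFFSET is 0, then dynamic allocations begin
   16 + 32 = 48 bytes above the stack pointer.  */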
2818 | ||
2c849145 | 2819 | /* On most machines, the CFA coincides with the first incoming parm. */ |
71038426 RH |
2820 | |
2821 | #ifndef ARG_POINTER_CFA_OFFSET | |
2c849145 | 2822 | #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL) |
71038426 RH |
2823 | #endif |
2824 | ||
e9a25f70 JL |
2825 | /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had |
2826 | its address taken. DECL is the decl for the object stored in the | |
2827 | register, for later use if we do need to force REG into the stack. | |
2828 | REG is overwritten by the MEM like in put_reg_into_stack. */ | |
2829 | ||
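/* For illustration (register numbers are hypothetical): if REG was
   (reg:SI 57), after this call the very same rtx object reads roughly

     (mem:SI (addressof:Pmode (reg:SI 58) 57 <decl>))

   where 58 is a fresh pseudo; the original register number and DECL are
   kept in the ADDRESSOF so put_addressof_into_stack can later pick a
   stack slot for it.  */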
2830 | rtx | |
2831 | gen_mem_addressof (reg, decl) | |
2832 | rtx reg; | |
2833 | tree decl; | |
2834 | { | |
8f985ec4 ZW |
2835 | rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), |
2836 | REGNO (reg), decl); | |
14a774a9 | 2837 | |
5755cd38 | 2838 | /* Calculate this before we start messing with decl's RTL. */ |
c49f511c | 2839 | HOST_WIDE_INT set = decl ? get_alias_set (decl) : 0; |
5755cd38 | 2840 | |
95ca22f4 | 2841 | /* If the original REG was a user-variable, then so is the REG whose |
14a774a9 | 2842 | address is being taken. Likewise for unchanging. */ |
95ca22f4 | 2843 | REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg); |
14a774a9 | 2844 | RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg); |
e9a25f70 | 2845 | |
e9a25f70 | 2846 | PUT_CODE (reg, MEM); |
173b24b9 | 2847 | MEM_ATTRS (reg) = 0; |
ef178af3 | 2848 | XEXP (reg, 0) = r; |
173b24b9 | 2849 | |
8b4944fb RH |
2850 | if (decl) |
2851 | { | |
2852 | tree type = TREE_TYPE (decl); | |
b927dc22 RK |
2853 | enum machine_mode decl_mode |
2854 | = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl)) | |
2855 | : DECL_MODE (decl)); | |
173b24b9 | 2856 | rtx decl_rtl = decl ? DECL_RTL_IF_SET (decl) : 0; |
8b4944fb | 2857 | |
b927dc22 | 2858 | PUT_MODE (reg, decl_mode); |
173b24b9 RK |
2859 | |
2860 | /* Clear DECL_RTL momentarily so functions below will work | |
2861 | properly, then set it again. */ | |
2862 | if (decl_rtl == reg) | |
2863 | SET_DECL_RTL (decl, 0); | |
2864 | ||
2865 | set_mem_attributes (reg, decl, 1); | |
5755cd38 | 2866 | set_mem_alias_set (reg, set); |
e9a25f70 | 2867 | |
173b24b9 RK |
2868 | if (decl_rtl == reg) |
2869 | SET_DECL_RTL (decl, reg); | |
2870 | ||
8b4944fb RH |
2871 | if (TREE_USED (decl) || DECL_INITIAL (decl) != 0) |
2872 | fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0); | |
2873 | } | |
2874 | else | |
173b24b9 | 2875 | fixup_var_refs (reg, GET_MODE (reg), 0, 0); |
e5e809f4 | 2876 | |
e9a25f70 JL |
2877 | return reg; |
2878 | } | |
2879 | ||
2880 | /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */ | |
2881 | ||
2882 | void | |
2883 | flush_addressof (decl) | |
2884 | tree decl; | |
2885 | { | |
2886 | if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL) | |
2887 | && DECL_RTL (decl) != 0 | |
2888 | && GET_CODE (DECL_RTL (decl)) == MEM | |
2889 | && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF | |
2890 | && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG) | |
fe9b4957 | 2891 | put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0); |
e9a25f70 JL |
2892 | } |
2893 | ||
2894 | /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */ | |
2895 | ||
2896 | static void | |
fe9b4957 | 2897 | put_addressof_into_stack (r, ht) |
e9a25f70 | 2898 | rtx r; |
fe9b4957 | 2899 | struct hash_table *ht; |
e9a25f70 | 2900 | { |
8b4944fb RH |
2901 | tree decl, type; |
2902 | int volatile_p, used_p; | |
2903 | ||
e9a25f70 JL |
2904 | rtx reg = XEXP (r, 0); |
2905 | ||
2906 | if (GET_CODE (reg) != REG) | |
2907 | abort (); | |
2908 | ||
8b4944fb RH |
2909 | decl = ADDRESSOF_DECL (r); |
2910 | if (decl) | |
2911 | { | |
2912 | type = TREE_TYPE (decl); | |
2913 | volatile_p = (TREE_CODE (decl) != SAVE_EXPR | |
2914 | && TREE_THIS_VOLATILE (decl)); | |
2915 | used_p = (TREE_USED (decl) | |
2916 | || (TREE_CODE (decl) != SAVE_EXPR | |
2917 | && DECL_INITIAL (decl) != 0)); | |
2918 | } | |
2919 | else | |
2920 | { | |
2921 | type = NULL_TREE; | |
2922 | volatile_p = 0; | |
2923 | used_p = 1; | |
2924 | } | |
2925 | ||
2926 | put_reg_into_stack (0, reg, type, GET_MODE (reg), GET_MODE (reg), | |
2927 | volatile_p, ADDRESSOF_REGNO (r), used_p, ht); | |
e9a25f70 JL |
2928 | } |
2929 | ||
b5bd3b3c AS |
2930 | /* List of replacements made below in purge_addressof_1 when creating |
2931 | bitfield insertions. */ | |
8b04083b VM |
2932 | static rtx purge_bitfield_addressof_replacements; |
2933 | ||
2934 | /* List of replacements made below in purge_addressof_1 for patterns | |
2935 | (MEM (ADDRESSOF (REG ...))). The key of the list entry is the | |
2936 | corresponding (ADDRESSOF (REG ...)) and the value is a substitution | |
2937 | for the whole pattern. The PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS list | |
2938 | is not enough in complex cases, e.g. when some field values can be | |
718fe406 | 2939 | extracted by using a MEM with a narrower mode. */ | |
b5bd3b3c AS |
2940 | static rtx purge_addressof_replacements; |
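/* Both lists are chains of EXPR_LIST nodes of the shape

     (expr_list KEY1 (expr_list VALUE1 (expr_list KEY2 ...)))

   which is why the walks below advance with XEXP (XEXP (tem, 1), 1)
   and read the value as XEXP (XEXP (tem, 1), 0).  */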
2941 | ||
e9a25f70 JL |
2942 | /* Helper function for purge_addressof. See if the rtx expression at *LOC |
2943 | in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into | |
8c36698e NC |
2944 | the stack. If the function returns FALSE then the replacement could not |
2945 | be made. */ | |
e9a25f70 | 2946 | |
d6edb99e | 2947 | static bool |
fe9b4957 | 2948 | purge_addressof_1 (loc, insn, force, store, ht) |
e9a25f70 JL |
2949 | rtx *loc; |
2950 | rtx insn; | |
f7b6d104 | 2951 | int force, store; |
fe9b4957 | 2952 | struct hash_table *ht; |
e9a25f70 JL |
2953 | { |
2954 | rtx x; | |
2955 | RTX_CODE code; | |
2956 | int i, j; | |
6f7d635c | 2957 | const char *fmt; |
d6edb99e | 2958 | bool result = true; |
e9a25f70 JL |
2959 | |
2960 | /* Re-start here to avoid recursion in common cases. */ | |
2961 | restart: | |
2962 | ||
2963 | x = *loc; | |
2964 | if (x == 0) | |
8c36698e | 2965 | return true; |
e9a25f70 JL |
2966 | |
2967 | code = GET_CODE (x); | |
2968 | ||
c5c76735 JL |
2969 | /* If we don't return in any of the cases below, we will recurse inside |
2970 | the RTX, which will normally result in any ADDRESSOF being forced into | |
2971 | memory. */ | |
2972 | if (code == SET) | |
2973 | { | |
8c36698e NC |
2974 | result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht); |
2975 | result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht); | |
2976 | return result; | |
c5c76735 | 2977 | } |
cc2e8b2d | 2978 | else if (code == ADDRESSOF) |
e9a25f70 | 2979 | { |
cc2e8b2d ZW |
2980 | rtx sub, insns; |
2981 | ||
2982 | if (GET_CODE (XEXP (x, 0)) != MEM) | |
2983 | { | |
2984 | put_addressof_into_stack (x, ht); | |
2985 | return true; | |
2986 | } | |
2987 | ||
956d6950 JL |
2988 | /* We must create a copy of the rtx because it was created by |
2989 | overwriting a REG rtx which is always shared. */ | |
cc2e8b2d | 2990 | sub = copy_rtx (XEXP (XEXP (x, 0), 0)); |
ab87f8c8 JL |
2991 | if (validate_change (insn, loc, sub, 0) |
2992 | || validate_replace_rtx (x, sub, insn)) | |
8c36698e | 2993 | return true; |
718fe406 | 2994 | |
e9a25f70 | 2995 | start_sequence (); |
ab87f8c8 JL |
2996 | sub = force_operand (sub, NULL_RTX); |
2997 | if (! validate_change (insn, loc, sub, 0) | |
2998 | && ! validate_replace_rtx (x, sub, insn)) | |
e9a25f70 JL |
2999 | abort (); |
3000 | ||
f7b6d104 | 3001 | insns = gen_sequence (); |
e9a25f70 | 3002 | end_sequence (); |
18e765cb | 3003 | emit_insn_before (insns, insn); |
8c36698e | 3004 | return true; |
e9a25f70 | 3005 | } |
c5c76735 | 3006 | |
e9a25f70 JL |
3007 | else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force) |
3008 | { | |
3009 | rtx sub = XEXP (XEXP (x, 0), 0); | |
e5e809f4 | 3010 | |
6d8ccdbb | 3011 | if (GET_CODE (sub) == MEM) |
f1ec5147 | 3012 | sub = adjust_address_nv (sub, GET_MODE (x), 0); |
c5c76735 JL |
3013 | else if (GET_CODE (sub) == REG |
3014 | && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)) | |
3015 | ; | |
e5e809f4 | 3016 | else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub)) |
e9a25f70 | 3017 | { |
f7b6d104 RH |
3018 | int size_x, size_sub; |
3019 | ||
b5bd3b3c AS |
3020 | if (!insn) |
3021 | { | |
3022 | /* When processing REG_NOTES look at the list of | |
3023 | replacements done on the insn to find the register that X | |
3024 | was replaced by. */ | |
3025 | rtx tem; | |
3026 | ||
8b04083b VM |
3027 | for (tem = purge_bitfield_addressof_replacements; |
3028 | tem != NULL_RTX; | |
b5bd3b3c | 3029 | tem = XEXP (XEXP (tem, 1), 1)) |
8b04083b VM |
3030 | if (rtx_equal_p (x, XEXP (tem, 0))) |
3031 | { | |
3032 | *loc = XEXP (XEXP (tem, 1), 0); | |
8c36698e | 3033 | return true; |
8b04083b | 3034 | } |
fbdfe39c | 3035 | |
718fe406 | 3036 | /* See comment for purge_addressof_replacements. */ |
8b04083b VM |
3037 | for (tem = purge_addressof_replacements; |
3038 | tem != NULL_RTX; | |
3039 | tem = XEXP (XEXP (tem, 1), 1)) | |
3040 | if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0))) | |
3041 | { | |
3042 | rtx z = XEXP (XEXP (tem, 1), 0); | |
fbdfe39c | 3043 | |
8b04083b VM |
3044 | if (GET_MODE (x) == GET_MODE (z) |
3045 | || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG | |
3046 | && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG)) | |
3047 | abort (); | |
3048 | ||
3049 | /* It can happen that the note may speak of things | |
3050 | in a wider (or just different) mode than the | |
3051 | code did. This is especially true of | |
718fe406 | 3052 | REG_RETVAL. */ |
8b04083b | 3053 | |
ddef6bc7 | 3054 | if (GET_CODE (z) == SUBREG && SUBREG_BYTE (z) == 0) |
8b04083b | 3055 | z = SUBREG_REG (z); |
718fe406 | 3056 | |
8b04083b VM |
3057 | if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD |
3058 | && (GET_MODE_SIZE (GET_MODE (x)) | |
3059 | > GET_MODE_SIZE (GET_MODE (z)))) | |
3060 | { | |
3061 | /* This can occur as a result of invalid | |
718fe406 | 3062 | pointer casts, e.g. float f; ... |
8b04083b VM |
3063 | *(long long int *)&f. |
3064 | ??? We could emit a warning here, but | |
3065 | without a line number that wouldn't be | |
3066 | very helpful. */ | |
3067 | z = gen_rtx_SUBREG (GET_MODE (x), z, 0); | |
3068 | } | |
3069 | else | |
3070 | z = gen_lowpart (GET_MODE (x), z); | |
3071 | ||
3072 | *loc = z; | |
aa608fe6 | 3073 | return true; |
8b04083b | 3074 | } |
b5bd3b3c | 3075 | |
8c36698e NC |
3076 | /* Sometimes we may not be able to find the replacement. For |
3077 | example when the original insn was a MEM in a wider mode, | |
3078 | and the note is part of a sign extension of a narrowed | |
3079 | version of that MEM. GCC testcase compile/990829-1.c can | |
3080 | generate an example of this situation. Rather than complain | |
3081 | we return false, which will prompt our caller to remove the | |
3082 | offending note. */ | |
3083 | return false; | |
b5bd3b3c AS |
3084 | } |
3085 | ||
f7b6d104 RH |
3086 | size_x = GET_MODE_BITSIZE (GET_MODE (x)); |
3087 | size_sub = GET_MODE_BITSIZE (GET_MODE (sub)); | |
3088 | ||
3089 | /* Don't even consider working with paradoxical subregs, | |
3090 | or the moral equivalent seen here. */ | |
470032d7 | 3091 | if (size_x <= size_sub |
d006aa54 | 3092 | && int_mode_for_mode (GET_MODE (sub)) != BLKmode) |
e9a25f70 | 3093 | { |
f7b6d104 RH |
3094 | /* Do a bitfield insertion to mirror what would happen |
3095 | in memory. */ | |
3096 | ||
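/* Roughly: if SUB is (reg:SI R) standing in for a spilled slot and X
   is a narrower MEM, a store of VAL through X becomes a
   store_bit_field of a SIZE_X-bit field at bit 0 of R, and a read
   becomes the matching extract_bit_field -- the effect the narrow
   memory access would have had on the underlying slot.  */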
f7b6d104 RH |
3097 | rtx val, seq; |
3098 | ||
f7b6d104 RH |
3099 | if (store) |
3100 | { | |
fe9b4957 | 3101 | rtx p = PREV_INSN (insn); |
de0dd934 | 3102 | |
f7b6d104 RH |
3103 | start_sequence (); |
3104 | val = gen_reg_rtx (GET_MODE (x)); | |
3105 | if (! validate_change (insn, loc, val, 0)) | |
b5bd3b3c AS |
3106 | { |
3107 | /* Discard the current sequence and put the | |
3108 | ADDRESSOF on stack. */ | |
3109 | end_sequence (); | |
3110 | goto give_up; | |
3111 | } | |
f7b6d104 RH |
3112 | seq = gen_sequence (); |
3113 | end_sequence (); | |
3114 | emit_insn_before (seq, insn); | |
718fe406 | 3115 | compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (), |
fe9b4957 | 3116 | insn, ht); |
718fe406 | 3117 | |
f7b6d104 | 3118 | start_sequence (); |
47401c4d | 3119 | store_bit_field (sub, size_x, 0, GET_MODE (x), |
f7b6d104 | 3120 | val, GET_MODE_SIZE (GET_MODE (sub)), |
19caa751 | 3121 | GET_MODE_ALIGNMENT (GET_MODE (sub))); |
f7b6d104 | 3122 | |
de0dd934 R |
3123 | /* Make sure to unshare any shared rtl that store_bit_field |
3124 | might have created. */ | |
d1b81779 | 3125 | unshare_all_rtl_again (get_insns ()); |
de0dd934 | 3126 | |
f7b6d104 RH |
3127 | seq = gen_sequence (); |
3128 | end_sequence (); | |
fe9b4957 MM |
3129 | p = emit_insn_after (seq, insn); |
3130 | if (NEXT_INSN (insn)) | |
718fe406 | 3131 | compute_insns_for_mem (NEXT_INSN (insn), |
fe9b4957 MM |
3132 | p ? NEXT_INSN (p) : NULL_RTX, |
3133 | ht); | |
f7b6d104 RH |
3134 | } |
3135 | else | |
3136 | { | |
fe9b4957 MM |
3137 | rtx p = PREV_INSN (insn); |
3138 | ||
f7b6d104 | 3139 | start_sequence (); |
47401c4d | 3140 | val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX, |
f7b6d104 RH |
3141 | GET_MODE (x), GET_MODE (x), |
3142 | GET_MODE_SIZE (GET_MODE (sub)), | |
3143 | GET_MODE_SIZE (GET_MODE (sub))); | |
3144 | ||
f7b6d104 | 3145 | if (! validate_change (insn, loc, val, 0)) |
b5bd3b3c AS |
3146 | { |
3147 | /* Discard the current sequence and put the | |
3148 | ADDRESSOF on stack. */ | |
3149 | end_sequence (); | |
3150 | goto give_up; | |
3151 | } | |
f7b6d104 RH |
3152 | |
3153 | seq = gen_sequence (); | |
3154 | end_sequence (); | |
3155 | emit_insn_before (seq, insn); | |
fe9b4957 MM |
3156 | compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (), |
3157 | insn, ht); | |
f7b6d104 RH |
3158 | } |
3159 | ||
b5bd3b3c AS |
3160 | /* Remember the replacement so that the same one can be done |
3161 | on the REG_NOTES. */ | |
8b04083b | 3162 | purge_bitfield_addressof_replacements |
b5bd3b3c | 3163 | = gen_rtx_EXPR_LIST (VOIDmode, x, |
8b04083b VM |
3164 | gen_rtx_EXPR_LIST |
3165 | (VOIDmode, val, | |
3166 | purge_bitfield_addressof_replacements)); | |
b5bd3b3c | 3167 | |
f7b6d104 | 3168 | /* We replaced with a reg -- all done. */ |
8c36698e | 3169 | return true; |
e9a25f70 JL |
3170 | } |
3171 | } | |
c5c76735 | 3172 | |
e9a25f70 | 3173 | else if (validate_change (insn, loc, sub, 0)) |
fbdfe39c RH |
3174 | { |
3175 | /* Remember the replacement so that the same one can be done | |
3176 | on the REG_NOTES. */ | |
8b04083b VM |
3177 | if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG) |
3178 | { | |
3179 | rtx tem; | |
3180 | ||
3181 | for (tem = purge_addressof_replacements; | |
3182 | tem != NULL_RTX; | |
3183 | tem = XEXP (XEXP (tem, 1), 1)) | |
3184 | if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0))) | |
3185 | { | |
3186 | XEXP (XEXP (tem, 1), 0) = sub; | |
8c36698e | 3187 | return true; |
8b04083b VM |
3188 | } |
3189 | purge_addressof_replacements | |
3190 | = gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0), | |
3191 | gen_rtx_EXPR_LIST (VOIDmode, sub, | |
3192 | purge_addressof_replacements)); | |
8c36698e | 3193 | return true; |
8b04083b | 3194 | } |
fbdfe39c RH |
3195 | goto restart; |
3196 | } | |
f7b6d104 | 3197 | } |
e9a25f70 | 3198 | |
cc2e8b2d | 3199 | give_up: |
718fe406 | 3200 | /* Scan all subexpressions. */ |
e9a25f70 JL |
3201 | fmt = GET_RTX_FORMAT (code); |
3202 | for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) | |
3203 | { | |
3204 | if (*fmt == 'e') | |
8c36698e | 3205 | result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht); |
e9a25f70 JL |
3206 | else if (*fmt == 'E') |
3207 | for (j = 0; j < XVECLEN (x, i); j++) | |
8c36698e | 3208 | result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht); |
fe9b4957 | 3209 | } |
8c36698e NC |
3210 | |
3211 | return result; | |
fe9b4957 MM |
3212 | } |
3213 | ||
3214 | /* Return a new hash table entry in HT. */ | |
3215 | ||
3216 | static struct hash_entry * | |
3217 | insns_for_mem_newfunc (he, ht, k) | |
3218 | struct hash_entry *he; | |
3219 | struct hash_table *ht; | |
3220 | hash_table_key k ATTRIBUTE_UNUSED; | |
3221 | { | |
3222 | struct insns_for_mem_entry *ifmhe; | |
3223 | if (he) | |
3224 | return he; | |
3225 | ||
3226 | ifmhe = ((struct insns_for_mem_entry *) | |
3227 | hash_allocate (ht, sizeof (struct insns_for_mem_entry))); | |
3228 | ifmhe->insns = NULL_RTX; | |
3229 | ||
3230 | return &ifmhe->he; | |
3231 | } | |
3232 | ||
3233 | /* Return a hash value for K, a REG. */ | |
3234 | ||
3235 | static unsigned long | |
3236 | insns_for_mem_hash (k) | |
3237 | hash_table_key k; | |
3238 | { | |
3239 | /* K is really a RTX. Just use the address as the hash value. */ | |
3240 | return (unsigned long) k; | |
3241 | } | |
3242 | ||
3243 | /* Return non-zero if K1 and K2 (two REGs) are the same. */ | |
3244 | ||
d6edb99e | 3245 | static bool |
fe9b4957 MM |
3246 | insns_for_mem_comp (k1, k2) |
3247 | hash_table_key k1; | |
3248 | hash_table_key k2; | |
3249 | { | |
3250 | return k1 == k2; | |
3251 | } | |
3252 | ||
7a80cf9a RK |
3253 | struct insns_for_mem_walk_info |
3254 | { | |
fe9b4957 MM |
3255 | /* The hash table that we are using to record which INSNs use which |
3256 | MEMs. */ | |
3257 | struct hash_table *ht; | |
3258 | ||
3259 | /* The INSN we are currently processing. */ | |
3260 | rtx insn; | |
3261 | ||
3262 | /* Zero if we are walking to find ADDRESSOFs, one if we are walking | |
3263 | to find the insns that use the REGs in the ADDRESSOFs. */ | |
3264 | int pass; | |
3265 | }; | |
3266 | ||
3267 | /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG | |
3268 | that might be used in an ADDRESSOF expression, record this INSN in | |
3269 | the hash table given by DATA (which is really a pointer to an | |
3270 | insns_for_mem_walk_info structure). */ | |
3271 | ||
3272 | static int | |
3273 | insns_for_mem_walk (r, data) | |
3274 | rtx *r; | |
3275 | void *data; | |
3276 | { | |
718fe406 | 3277 | struct insns_for_mem_walk_info *ifmwi |
fe9b4957 MM |
3278 | = (struct insns_for_mem_walk_info *) data; |
3279 | ||
3280 | if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF | |
3281 | && GET_CODE (XEXP (*r, 0)) == REG) | |
3282 | hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0); | |
3283 | else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG) | |
3284 | { | |
3285 | /* Look up this REG in the hash table (it was entered in pass 0). */ | |
718fe406 | 3286 | struct insns_for_mem_entry *ifme |
fe9b4957 MM |
3287 | = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht, |
3288 | *r, | |
3289 | /*create=*/0, | |
3290 | /*copy=*/0); | |
3291 | ||
3292 | /* If we have not already recorded this INSN, do so now. Since | |
3293 | we process the INSNs in order, we know that if we have | |
3294 | recorded it, it must be at the front of the list. */ | |
3295 | if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn)) | |
1f8f4a0b MM |
3296 | ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn, |
3297 | ifme->insns); | |
e9a25f70 | 3298 | } |
fe9b4957 MM |
3299 | |
3300 | return 0; | |
3301 | } | |
3302 | ||
3303 | /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use | |
3304 | which REGs in HT. */ | |
3305 | ||
3306 | static void | |
3307 | compute_insns_for_mem (insns, last_insn, ht) | |
3308 | rtx insns; | |
3309 | rtx last_insn; | |
3310 | struct hash_table *ht; | |
3311 | { | |
3312 | rtx insn; | |
3313 | struct insns_for_mem_walk_info ifmwi; | |
3314 | ifmwi.ht = ht; | |
3315 | ||
3316 | for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass) | |
3317 | for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn)) | |
2c3c49de | 3318 | if (INSN_P (insn)) |
fe9b4957 MM |
3319 | { |
3320 | ifmwi.insn = insn; | |
3321 | for_each_rtx (&insn, insns_for_mem_walk, &ifmwi); | |
3322 | } | |
e9a25f70 JL |
3323 | } |
3324 | ||
8c36698e NC |
3325 | /* Helper function for purge_addressof called through for_each_rtx. |
3326 | Returns true iff the rtl is an ADDRESSOF. */ | |
b987f237 | 3327 | |
8c36698e NC |
3328 | static int |
3329 | is_addressof (rtl, data) | |
718fe406 KH |
3330 | rtx *rtl; |
3331 | void *data ATTRIBUTE_UNUSED; | |
8c36698e | 3332 | { |
718fe406 | 3333 | return GET_CODE (*rtl) == ADDRESSOF; |
8c36698e NC |
3334 | } |
3335 | ||
e9a25f70 JL |
3336 | /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining |
3337 | (MEM (ADDRESSOF)) patterns, and force any needed registers into the | |
3338 | stack. */ | |
3339 | ||
3340 | void | |
3341 | purge_addressof (insns) | |
3342 | rtx insns; | |
3343 | { | |
3344 | rtx insn; | |
fe9b4957 | 3345 | struct hash_table ht; |
718fe406 | 3346 | |
fe9b4957 MM |
3347 | /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That |
3348 | requires a fixup pass over the instruction stream to correct | |
3349 | INSNs that depended on the REG being a REG, and not a MEM. But | |
bedda2da | 3350 | these fixup passes are slow. Furthermore, most MEMs are not |
fe9b4957 MM |
3351 | mentioned in very many instructions. So, we speed up the process |
3352 | by pre-calculating which REGs occur in which INSNs; that allows | |
3353 | us to perform the fixup passes much more quickly. */ | |
718fe406 | 3354 | hash_table_init (&ht, |
fe9b4957 MM |
3355 | insns_for_mem_newfunc, |
3356 | insns_for_mem_hash, | |
3357 | insns_for_mem_comp); | |
3358 | compute_insns_for_mem (insns, NULL_RTX, &ht); | |
3359 | ||
e9a25f70 JL |
3360 | for (insn = insns; insn; insn = NEXT_INSN (insn)) |
3361 | if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN | |
3362 | || GET_CODE (insn) == CALL_INSN) | |
3363 | { | |
8c36698e NC |
3364 | if (! purge_addressof_1 (&PATTERN (insn), insn, |
3365 | asm_noperands (PATTERN (insn)) > 0, 0, &ht)) | |
3366 | /* If we could not replace the ADDRESSOFs in the insn, | |
3367 | something is wrong. */ | |
3368 | abort (); | |
718fe406 | 3369 | |
8c36698e NC |
3370 | if (! purge_addressof_1 (®_NOTES (insn), NULL_RTX, 0, 0, &ht)) |
3371 | { | |
3372 | /* If we could not replace the ADDRESSOFs in the insn's notes, | |
3373 | we can just remove the offending notes instead. */ | |
3374 | rtx note; | |
3375 | ||
3376 | for (note = REG_NOTES (insn); note; note = XEXP (note, 1)) | |
3377 | { | |
3378 | /* If we find a REG_RETVAL note then the insn is a libcall. | |
3379 | Such insns must have REG_EQUAL notes as well, in order | |
3380 | for later passes of the compiler to work. So it is not | |
3381 | safe to delete the notes here, and instead we abort. */ | |
3382 | if (REG_NOTE_KIND (note) == REG_RETVAL) | |
3383 | abort (); | |
718fe406 | 3384 | if (for_each_rtx (¬e, is_addressof, NULL)) |
8c36698e NC |
3385 | remove_note (insn, note); |
3386 | } | |
3387 | } | |
e9a25f70 | 3388 | } |
fe9b4957 MM |
3389 | |
3390 | /* Clean up. */ | |
3391 | hash_table_free (&ht); | |
8b04083b | 3392 | purge_bitfield_addressof_replacements = 0; |
da9b1f9c | 3393 | purge_addressof_replacements = 0; |
4fa48eae JL |
3394 | |
3395 | /* REGs are shared. purge_addressof will destructively replace a REG | |
3396 | with a MEM, which creates shared MEMs. | |
3397 | ||
3398 | Unfortunately, the children of put_reg_into_stack assume that MEMs | |
3399 | referring to the same stack slot are shared (fixup_var_refs and | |
3400 | the associated hash table code). | |
3401 | ||
3402 | So, we have to do another unsharing pass after we have flushed any | |
3403 | REGs that had their address taken into the stack. | |
3404 | ||
3405 | It may be worth tracking whether or not we converted any REGs into | |
3406 | MEMs to avoid this overhead when it is not needed. */ | |
3407 | unshare_all_rtl_again (get_insns ()); | |
e9a25f70 JL |
3408 | } |
3409 | \f | |
659e47fb AH |
3410 | /* Convert a SET of a hard subreg to a set of the appropriate hard | |
3411 | register. A subroutine of purge_hard_subreg_sets. */ | |
3412 | ||
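/* For example (hypothetical, on a 32-bit target where DImode occupies
   two word registers and the subreg addresses the second one):

     (set (subreg:SI (reg:DI 0) 4) (reg:SI 60))

   becomes a set of the hard register actually referenced,

     (set (reg:SI 1) (reg:SI 60))

   with subreg_regno_offset supplying the register-number offset.  */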
3413 | static void | |
3414 | purge_single_hard_subreg_set (pattern) | |
3415 | rtx pattern; | |
3416 | { | |
3417 | rtx reg = SET_DEST (pattern); | |
3418 | enum machine_mode mode = GET_MODE (SET_DEST (pattern)); | |
ddef6bc7 JJ |
3419 | int offset = 0; |
3420 | ||
3421 | if (GET_CODE (reg) == SUBREG && GET_CODE (SUBREG_REG (reg)) == REG | |
3422 | && REGNO (SUBREG_REG (reg)) < FIRST_PSEUDO_REGISTER) | |
659e47fb | 3423 | { |
ddef6bc7 JJ |
3424 | offset = subreg_regno_offset (REGNO (SUBREG_REG (reg)), |
3425 | GET_MODE (SUBREG_REG (reg)), | |
3426 | SUBREG_BYTE (reg), | |
3427 | GET_MODE (reg)); | |
659e47fb AH |
3428 | reg = SUBREG_REG (reg); |
3429 | } | |
ddef6bc7 JJ |
3430 | |
3431 | ||
55107ee3 | 3432 | if (GET_CODE (reg) == REG && REGNO (reg) < FIRST_PSEUDO_REGISTER) |
659e47fb | 3433 | { |
ddef6bc7 | 3434 | reg = gen_rtx_REG (mode, REGNO (reg) + offset); |
659e47fb AH |
3435 | SET_DEST (pattern) = reg; |
3436 | } | |
3437 | } | |
3438 | ||
3439 | /* Eliminate all occurrences of SETs of hard subregs from INSNS. The | |
3440 | only such SETs that we expect to see are those left in because | |
3441 | integrate can't handle sets of parts of a return value register. | |
3442 | ||
3443 | We don't use alter_subreg because we only want to eliminate subregs | |
3444 | of hard registers. */ | |
3445 | ||
3446 | void | |
3447 | purge_hard_subreg_sets (insn) | |
3448 | rtx insn; | |
3449 | { | |
3450 | for (; insn; insn = NEXT_INSN (insn)) | |
3451 | { | |
3452 | if (INSN_P (insn)) | |
3453 | { | |
3454 | rtx pattern = PATTERN (insn); | |
3455 | switch (GET_CODE (pattern)) | |
3456 | { | |
3457 | case SET: | |
3458 | if (GET_CODE (SET_DEST (pattern)) == SUBREG) | |
3459 | purge_single_hard_subreg_set (pattern); | |
3460 | break; | |
3461 | case PARALLEL: | |
3462 | { | |
3463 | int j; | |
3464 | for (j = XVECLEN (pattern, 0) - 1; j >= 0; j--) | |
3465 | { | |
3466 | rtx inner_pattern = XVECEXP (pattern, 0, j); | |
3467 | if (GET_CODE (inner_pattern) == SET | |
3468 | && GET_CODE (SET_DEST (inner_pattern)) == SUBREG) | |
3469 | purge_single_hard_subreg_set (inner_pattern); | |
3470 | } | |
3471 | } | |
3472 | break; | |
3473 | default: | |
3474 | break; | |
3475 | } | |
3476 | } | |
3477 | } | |
3478 | } | |
3479 | \f | |
6f086dfc RS |
3480 | /* Pass through the INSNS of function FNDECL and convert virtual register |
3481 | references to hard register references. */ | |
3482 | ||
3483 | void | |
3484 | instantiate_virtual_regs (fndecl, insns) | |
3485 | tree fndecl; | |
3486 | rtx insns; | |
3487 | { | |
3488 | rtx insn; | |
770ae6cc | 3489 | unsigned int i; |
6f086dfc RS |
3490 | |
3491 | /* Compute the offsets to use for this function. */ | |
3492 | in_arg_offset = FIRST_PARM_OFFSET (fndecl); | |
3493 | var_offset = STARTING_FRAME_OFFSET; | |
3494 | dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl); | |
3495 | out_arg_offset = STACK_POINTER_OFFSET; | |
2c849145 | 3496 | cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl); |
6f086dfc RS |
3497 | |
3498 | /* Scan all variables and parameters of this function. For each that is | |
3499 | in memory, instantiate all virtual registers if the result is a valid | |
3500 | address. If not, we do it later. That will handle most uses of virtual | |
3501 | regs on many machines. */ | |
3502 | instantiate_decls (fndecl, 1); | |
3503 | ||
3504 | /* Initialize recognition, indicating that volatile is OK. */ | |
3505 | init_recog (); | |
3506 | ||
3507 | /* Scan through all the insns, instantiating every virtual register still | |
3508 | present. */ | |
3509 | for (insn = insns; insn; insn = NEXT_INSN (insn)) | |
3510 | if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN | |
3511 | || GET_CODE (insn) == CALL_INSN) | |
3512 | { | |
3513 | instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1); | |
5f4f0e22 | 3514 | instantiate_virtual_regs_1 (®_NOTES (insn), NULL_RTX, 0); |
87c61e2d JL |
3515 | /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */ |
3516 | if (GET_CODE (insn) == CALL_INSN) | |
3517 | instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn), | |
3518 | NULL_RTX, 0); | |
6f086dfc RS |
3519 | } |
3520 | ||
e9a25f70 JL |
3521 | /* Instantiate the stack slots for the parm registers, for later use in |
3522 | addressof elimination. */ | |
3523 | for (i = 0; i < max_parm_reg; ++i) | |
3524 | if (parm_reg_stack_loc[i]) | |
3525 | instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0); | |
3526 | ||
6f086dfc RS |
3527 | /* Now instantiate the remaining register equivalences for debugging info. |
3528 | These will not be valid addresses. */ | |
3529 | instantiate_decls (fndecl, 0); | |
3530 | ||
3531 | /* Indicate that, from now on, assign_stack_local should use | |
3532 | frame_pointer_rtx. */ | |
3533 | virtuals_instantiated = 1; | |
3534 | } | |
3535 | ||
3536 | /* Scan all decls in FNDECL (both variables and parameters) and instantiate | |
3537 | all virtual registers in their DECL_RTL's. | |
3538 | ||
3539 | If VALID_ONLY, do this only if the resulting address is still valid. | |
3540 | Otherwise, always do it. */ | |
3541 | ||
3542 | static void | |
3543 | instantiate_decls (fndecl, valid_only) | |
3544 | tree fndecl; | |
3545 | int valid_only; | |
3546 | { | |
3547 | tree decl; | |
3548 | ||
6f086dfc RS |
3549 | /* Process all parameters of the function. */ |
3550 | for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl)) | |
3551 | { | |
e5e809f4 | 3552 | HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl)); |
ae0ed63a | 3553 | HOST_WIDE_INT size_rtl; |
e5e809f4 | 3554 | |
718fe406 | 3555 | instantiate_decl (DECL_RTL (decl), size, valid_only); |
ce717ce4 JW |
3556 | |
3557 | /* If the parameter was promoted, then the incoming RTL mode may be | |
3558 | larger than the declared type size. We must use the larger of | |
3559 | the two sizes. */ | |
ae0ed63a JM |
3560 | size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))); |
3561 | size = MAX (size_rtl, size); | |
ce717ce4 | 3562 | instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only); |
6f086dfc RS |
3563 | } |
3564 | ||
0f41302f | 3565 | /* Now process all variables defined in the function or its subblocks. */ |
6f086dfc | 3566 | instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only); |
6f086dfc RS |
3567 | } |
3568 | ||
3569 | /* Subroutine of instantiate_decls: Process all decls in the given | |
3570 | BLOCK node and all its subblocks. */ | |
3571 | ||
3572 | static void | |
3573 | instantiate_decls_1 (let, valid_only) | |
3574 | tree let; | |
3575 | int valid_only; | |
3576 | { | |
3577 | tree t; | |
3578 | ||
3579 | for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t)) | |
19e7881c MM |
3580 | if (DECL_RTL_SET_P (t)) |
3581 | instantiate_decl (DECL_RTL (t), | |
3582 | int_size_in_bytes (TREE_TYPE (t)), | |
3583 | valid_only); | |
6f086dfc RS |
3584 | |
3585 | /* Process all subblocks. */ | |
3586 | for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t)) | |
3587 | instantiate_decls_1 (t, valid_only); | |
3588 | } | |
5a73491b | 3589 | |
8008b228 | 3590 | /* Subroutine of the preceding procedures: Given RTL representing a |
5a73491b RK |
3591 | decl and the size of the object, do any instantiation required. |
3592 | ||
3593 | If VALID_ONLY is non-zero, it means that the RTL should only be | |
3594 | changed if the new address is valid. */ | |
3595 | ||
3596 | static void | |
3597 | instantiate_decl (x, size, valid_only) | |
3598 | rtx x; | |
770ae6cc | 3599 | HOST_WIDE_INT size; |
5a73491b RK |
3600 | int valid_only; |
3601 | { | |
3602 | enum machine_mode mode; | |
3603 | rtx addr; | |
3604 | ||
3605 | /* If this is not a MEM, no need to do anything. Similarly if the | |
3606 | address is a constant or a register that is not a virtual register. */ | |
3607 | ||
3608 | if (x == 0 || GET_CODE (x) != MEM) | |
3609 | return; | |
3610 | ||
3611 | addr = XEXP (x, 0); | |
3612 | if (CONSTANT_P (addr) | |
956d6950 | 3613 | || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG) |
5a73491b RK |
3614 | || (GET_CODE (addr) == REG |
3615 | && (REGNO (addr) < FIRST_VIRTUAL_REGISTER | |
3616 | || REGNO (addr) > LAST_VIRTUAL_REGISTER))) | |
3617 | return; | |
3618 | ||
3619 | /* If we should only do this if the address is valid, copy the address. | |
3620 | We need to do this so we can undo any changes that might make the | |
3621 | address invalid. This copy is unfortunate, but probably can't be | |
3622 | avoided. */ | |
3623 | ||
3624 | if (valid_only) | |
3625 | addr = copy_rtx (addr); | |
3626 | ||
3627 | instantiate_virtual_regs_1 (&addr, NULL_RTX, 0); | |
3628 | ||
770ae6cc | 3629 | if (valid_only && size >= 0) |
87ce34d6 | 3630 | { |
770ae6cc RK |
3631 | unsigned HOST_WIDE_INT decl_size = size; |
3632 | ||
87ce34d6 JW |
3633 | /* Now verify that the resulting address is valid for every integer or |
3634 | floating-point mode up to and including SIZE bytes long. We do this | |
3635 | since the object might be accessed in any mode and frame addresses | |
3636 | are shared. */ | |
3637 | ||
3638 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); | |
770ae6cc | 3639 | mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size; |
87ce34d6 JW |
3640 | mode = GET_MODE_WIDER_MODE (mode)) |
3641 | if (! memory_address_p (mode, addr)) | |
3642 | return; | |
5a73491b | 3643 | |
87ce34d6 | 3644 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); |
770ae6cc | 3645 | mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size; |
87ce34d6 JW |
3646 | mode = GET_MODE_WIDER_MODE (mode)) |
3647 | if (! memory_address_p (mode, addr)) | |
3648 | return; | |
3649 | } | |
5a73491b | 3650 | |
87ce34d6 JW |
3651 | /* Put back the address now that we have updated it and we either know |
3652 | it is valid or we don't care whether it is valid. */ | |
5a73491b RK |
3653 | |
3654 | XEXP (x, 0) = addr; | |
3655 | } | |
6f086dfc | 3656 | \f |
d1405722 RK |
3657 | /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX |
3658 | is a virtual register, return the equivalent hard register and set the | |
3659 | offset indirectly through the pointer. Otherwise, return 0. */ | |
3660 | ||
3661 | static rtx | |
3662 | instantiate_new_reg (x, poffset) | |
3663 | rtx x; | |
3664 | HOST_WIDE_INT *poffset; | |
3665 | { | |
3666 | rtx new; | |
3667 | HOST_WIDE_INT offset; | |
3668 | ||
3669 | if (x == virtual_incoming_args_rtx) | |
3670 | new = arg_pointer_rtx, offset = in_arg_offset; | |
3671 | else if (x == virtual_stack_vars_rtx) | |
3672 | new = frame_pointer_rtx, offset = var_offset; | |
3673 | else if (x == virtual_stack_dynamic_rtx) | |
3674 | new = stack_pointer_rtx, offset = dynamic_offset; | |
3675 | else if (x == virtual_outgoing_args_rtx) | |
3676 | new = stack_pointer_rtx, offset = out_arg_offset; | |
3677 | else if (x == virtual_cfa_rtx) | |
3678 | new = arg_pointer_rtx, offset = cfa_offset; | |
3679 | else | |
3680 | return 0; | |
3681 | ||
3682 | *poffset = offset; | |
3683 | return new; | |
3684 | } | |
3685 | \f | |
6f086dfc RS |
3686 | /* Given a pointer to a piece of rtx and an optional pointer to the |
3687 | containing object, instantiate any virtual registers present in it. | |
3688 | ||
3689 | If EXTRA_INSNS, we always do the replacement and generate | |
3690 | any extra insns before OBJECT. If it is zero, we do nothing if replacement | |
3691 | is not valid. | |
3692 | ||
3693 | Return 1 if we either had nothing to do or if we were able to do the | |
718fe406 | 3694 | needed replacement. Return 0 otherwise; we only return zero if |
6f086dfc RS |
3695 | EXTRA_INSNS is zero. |
3696 | ||
3697 | We first try some simple transformations to avoid the creation of extra | |
3698 | pseudos. */ | |
3699 | ||
3700 | static int | |
3701 | instantiate_virtual_regs_1 (loc, object, extra_insns) | |
3702 | rtx *loc; | |
3703 | rtx object; | |
3704 | int extra_insns; | |
3705 | { | |
3706 | rtx x; | |
3707 | RTX_CODE code; | |
3708 | rtx new = 0; | |
07444f1d | 3709 | HOST_WIDE_INT offset = 0; |
6f086dfc RS |
3710 | rtx temp; |
3711 | rtx seq; | |
3712 | int i, j; | |
6f7d635c | 3713 | const char *fmt; |
6f086dfc RS |
3714 | |
3715 | /* Re-start here to avoid recursion in common cases. */ | |
3716 | restart: | |
3717 | ||
3718 | x = *loc; | |
3719 | if (x == 0) | |
3720 | return 1; | |
3721 | ||
3722 | code = GET_CODE (x); | |
3723 | ||
3724 | /* Check for some special cases. */ | |
3725 | switch (code) | |
3726 | { | |
3727 | case CONST_INT: | |
3728 | case CONST_DOUBLE: | |
3729 | case CONST: | |
3730 | case SYMBOL_REF: | |
3731 | case CODE_LABEL: | |
3732 | case PC: | |
3733 | case CC0: | |
3734 | case ASM_INPUT: | |
3735 | case ADDR_VEC: | |
3736 | case ADDR_DIFF_VEC: | |
3737 | case RETURN: | |
3738 | return 1; | |
3739 | ||
3740 | case SET: | |
3741 | /* We are allowed to set the virtual registers. This means that | |
38e01259 | 3742 | the actual register should receive the source minus the |
6f086dfc RS |
3743 | appropriate offset. This is used, for example, in the handling |
3744 | of non-local gotos. */ | |
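/* For illustration: a store such as

     (set (reg virtual-stack-dynamic) (reg R))

   becomes

     (set (reg stack-pointer) (plus (reg R) (const_int -dynamic_offset)))

   so that later uses of virtual-stack-dynamic, which add dynamic_offset
   back in, see the intended value.  */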
d1405722 | 3745 | if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0) |
6f086dfc | 3746 | { |
14a774a9 RK |
3747 | rtx src = SET_SRC (x); |
3748 | ||
d1405722 RK |
3749 | /* We are setting the register, not using it, so the relevant |
3750 | offset is the negative of the offset to use were we using | |
3751 | the register. */ | |
3752 | offset = - offset; | |
14a774a9 RK |
3753 | instantiate_virtual_regs_1 (&src, NULL_RTX, 0); |
3754 | ||
6f086dfc RS |
3755 | /* The only valid sources here are PLUS or REG. Just do |
3756 | the simplest possible thing to handle them. */ | |
14a774a9 | 3757 | if (GET_CODE (src) != REG && GET_CODE (src) != PLUS) |
6f086dfc RS |
3758 | abort (); |
3759 | ||
3760 | start_sequence (); | |
14a774a9 RK |
3761 | if (GET_CODE (src) != REG) |
3762 | temp = force_operand (src, NULL_RTX); | |
6f086dfc | 3763 | else |
14a774a9 | 3764 | temp = src; |
5f4f0e22 | 3765 | temp = force_operand (plus_constant (temp, offset), NULL_RTX); |
6f086dfc RS |
3766 | seq = get_insns (); |
3767 | end_sequence (); | |
3768 | ||
3769 | emit_insns_before (seq, object); | |
3770 | SET_DEST (x) = new; | |
3771 | ||
e9a25f70 | 3772 | if (! validate_change (object, &SET_SRC (x), temp, 0) |
6f086dfc RS |
3773 | || ! extra_insns) |
3774 | abort (); | |
3775 | ||
3776 | return 1; | |
3777 | } | |
3778 | ||
3779 | instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns); | |
3780 | loc = &SET_SRC (x); | |
3781 | goto restart; | |
3782 | ||
3783 | case PLUS: | |
3784 | /* Handle special case of virtual register plus constant. */ | |
3785 | if (CONSTANT_P (XEXP (x, 1))) | |
3786 | { | |
b1f82ccf | 3787 | rtx old, new_offset; |
6f086dfc RS |
3788 | |
3789 | /* Check for (plus (plus VIRT foo) (const_int)) first. */ | |
3790 | if (GET_CODE (XEXP (x, 0)) == PLUS) | |
3791 | { | |
d1405722 RK |
3792 | if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset))) |
3793 | { | |
3794 | instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object, | |
3795 | extra_insns); | |
3796 | new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1)); | |
3797 | } | |
6f086dfc RS |
3798 | else |
3799 | { | |
3800 | loc = &XEXP (x, 0); | |
3801 | goto restart; | |
3802 | } | |
6f086dfc RS |
3803 | } |
3804 | ||
d1405722 RK |
3805 | #ifdef POINTERS_EXTEND_UNSIGNED |
3806 | /* If we have (plus (subreg (virtual-reg)) (const_int)), we know | |
3807 | we can commute the PLUS and SUBREG because pointers into the | |
3808 | frame are well-behaved. */ | |
3809 | else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode | |
3810 | && GET_CODE (XEXP (x, 1)) == CONST_INT | |
3811 | && 0 != (new | |
3812 | = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)), | |
3813 | &offset)) | |
3814 | && validate_change (object, loc, | |
3815 | plus_constant (gen_lowpart (ptr_mode, | |
3816 | new), | |
3817 | offset | |
3818 | + INTVAL (XEXP (x, 1))), | |
3819 | 0)) | |
3820 | return 1; | |
3821 | #endif | |
3822 | else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0) | |
6f086dfc RS |
3823 | { |
3824 | /* We know the second operand is a constant. Unless the | |
3825 | first operand is a REG (which has already been checked), | |
3826 | it needs to be checked. */ | |
3827 | if (GET_CODE (XEXP (x, 0)) != REG) | |
3828 | { | |
3829 | loc = &XEXP (x, 0); | |
3830 | goto restart; | |
3831 | } | |
3832 | return 1; | |
3833 | } | |
3834 | ||
b1f82ccf | 3835 | new_offset = plus_constant (XEXP (x, 1), offset); |
6f086dfc | 3836 | |
b1f82ccf DE |
3837 | /* If the new constant is zero, try to replace the sum with just |
3838 | the register. */ | |
3839 | if (new_offset == const0_rtx | |
3840 | && validate_change (object, loc, new, 0)) | |
6f086dfc RS |
3841 | return 1; |
3842 | ||
b1f82ccf DE |
3843 | /* Next try to replace the register and new offset. |
3844 | There are two changes to validate here and we can't assume that | |
3845 | when the old offset equals the new one just changing the register | |
3846 | will yield a valid insn. In the interests of a little efficiency, | |
3847 | however, we only call validate change once (we don't queue up the | |
0f41302f | 3848 | changes and then call apply_change_group). */ |
b1f82ccf DE |
3849 | |
3850 | old = XEXP (x, 0); | |
3851 | if (offset == 0 | |
3852 | ? ! validate_change (object, &XEXP (x, 0), new, 0) | |
3853 | : (XEXP (x, 0) = new, | |
3854 | ! validate_change (object, &XEXP (x, 1), new_offset, 0))) | |
6f086dfc RS |
3855 | { |
3856 | if (! extra_insns) | |
3857 | { | |
3858 | XEXP (x, 0) = old; | |
3859 | return 0; | |
3860 | } | |
3861 | ||
3862 | /* Otherwise copy the new constant into a register and replace | |
3863 | constant with that register. */ | |
3864 | temp = gen_reg_rtx (Pmode); | |
b1f82ccf | 3865 | XEXP (x, 0) = new; |
6f086dfc | 3866 | if (validate_change (object, &XEXP (x, 1), temp, 0)) |
b1f82ccf | 3867 | emit_insn_before (gen_move_insn (temp, new_offset), object); |
6f086dfc RS |
3868 | else |
3869 | { | |
3870 | /* If that didn't work, replace this expression with a | |
3871 | register containing the sum. */ | |
3872 | ||
6f086dfc | 3873 | XEXP (x, 0) = old; |
38a448ca | 3874 | new = gen_rtx_PLUS (Pmode, new, new_offset); |
6f086dfc RS |
3875 | |
3876 | start_sequence (); | |
5f4f0e22 | 3877 | temp = force_operand (new, NULL_RTX); |
6f086dfc RS |
3878 | seq = get_insns (); |
3879 | end_sequence (); | |
3880 | ||
3881 | emit_insns_before (seq, object); | |
3882 | if (! validate_change (object, loc, temp, 0) | |
3883 | && ! validate_replace_rtx (x, temp, object)) | |
3884 | abort (); | |
3885 | } | |
3886 | } | |
3887 | ||
3888 | return 1; | |
3889 | } | |
3890 | ||
3891 | /* Fall through to generic two-operand expression case. */ | |
3892 | case EXPR_LIST: | |
3893 | case CALL: | |
3894 | case COMPARE: | |
3895 | case MINUS: | |
3896 | case MULT: | |
3897 | case DIV: case UDIV: | |
3898 | case MOD: case UMOD: | |
3899 | case AND: case IOR: case XOR: | |
45620ed4 RK |
3900 | case ROTATERT: case ROTATE: |
3901 | case ASHIFTRT: case LSHIFTRT: case ASHIFT: | |
6f086dfc RS |
3902 | case NE: case EQ: |
3903 | case GE: case GT: case GEU: case GTU: | |
3904 | case LE: case LT: case LEU: case LTU: | |
3905 | if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1))) | |
3906 | instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns); | |
3907 | loc = &XEXP (x, 0); | |
3908 | goto restart; | |
3909 | ||
3910 | case MEM: | |
3911 | /* Most cases of MEM that convert to valid addresses have already been | |
4fd796bb | 3912 | handled by our scan of decls. The only special handling we |
6f086dfc | 3913 | need here is to make a copy of the rtx to ensure it isn't being |
718fe406 | 3914 | shared if we have to change it to a pseudo. |
6f086dfc RS |
3915 | |
3916 | If the rtx is a simple reference to an address via a virtual register, | |
3917 | it can potentially be shared. In such cases, first try to make it | |
3918 | a valid address, which can also be shared. Otherwise, copy it and | |
718fe406 | 3919 | proceed normally. |
6f086dfc RS |
3920 | |
3921 | First check for common cases that need no processing. These are | |
3922 | usually due to instantiation already being done on a previous instance | |
3923 | of a shared rtx. */ | |
3924 | ||
3925 | temp = XEXP (x, 0); | |
3926 | if (CONSTANT_ADDRESS_P (temp) | |
3927 | #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM | |
3928 | || temp == arg_pointer_rtx | |
b37f453b DE |
3929 | #endif |
3930 | #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM | |
3931 | || temp == hard_frame_pointer_rtx | |
6f086dfc RS |
3932 | #endif |
3933 | || temp == frame_pointer_rtx) | |
3934 | return 1; | |
3935 | ||
3936 | if (GET_CODE (temp) == PLUS | |
3937 | && CONSTANT_ADDRESS_P (XEXP (temp, 1)) | |
3938 | && (XEXP (temp, 0) == frame_pointer_rtx | |
b37f453b DE |
3939 | #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM |
3940 | || XEXP (temp, 0) == hard_frame_pointer_rtx | |
3941 | #endif | |
6f086dfc RS |
3942 | #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM |
3943 | || XEXP (temp, 0) == arg_pointer_rtx | |
3944 | #endif | |
3945 | )) | |
3946 | return 1; | |
3947 | ||
3948 | if (temp == virtual_stack_vars_rtx | |
3949 | || temp == virtual_incoming_args_rtx | |
3950 | || (GET_CODE (temp) == PLUS | |
3951 | && CONSTANT_ADDRESS_P (XEXP (temp, 1)) | |
3952 | && (XEXP (temp, 0) == virtual_stack_vars_rtx | |
3953 | || XEXP (temp, 0) == virtual_incoming_args_rtx))) | |
3954 | { | |
3955 | /* This MEM may be shared. If the substitution can be done without | |
3956 | the need to generate new pseudos, we want to do it in place | |
3957 | so all copies of the shared rtx benefit. The call below will | |
3958 | only make substitutions if the resulting address is still | |
3959 | valid. | |
3960 | ||
3961 | Note that we cannot pass X as the object in the recursive call | |
3962 | since the insn being processed may not allow all valid | |
6461be14 RS |
3963 | addresses. However, if we were not passed an object, we can | |
3964 | only modify X without copying it if X will have a valid | |
3965 | address. | |
6f086dfc | 3966 | |
6461be14 RS |
3967 | ??? Also note that this can still lose if OBJECT is an insn that |
3968 | has fewer restrictions on an address than some other insn. | |
3969 | In that case, we will modify the shared address. This case | |
4fd796bb RK |
3970 | doesn't seem very likely, though. One case where this could |
3971 | happen is in the case of a USE or CLOBBER reference, but we | |
3972 | take care of that below. */ | |
6461be14 RS |
3973 | |
3974 | if (instantiate_virtual_regs_1 (&XEXP (x, 0), | |
3975 | object ? object : x, 0)) | |
6f086dfc RS |
3976 | return 1; |
3977 | ||
3978 | /* Otherwise make a copy and process that copy. We copy the entire | |
3979 | RTL expression since it might be a PLUS which could also be | |
3980 | shared. */ | |
3981 | *loc = x = copy_rtx (x); | |
3982 | } | |
3983 | ||
3984 | /* Fall through to generic unary operation case. */ | |
6f086dfc RS |
3985 | case SUBREG: |
3986 | case STRICT_LOW_PART: | |
3987 | case NEG: case NOT: | |
3988 | case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC: | |
3989 | case SIGN_EXTEND: case ZERO_EXTEND: | |
3990 | case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE: | |
3991 | case FLOAT: case FIX: | |
3992 | case UNSIGNED_FIX: case UNSIGNED_FLOAT: | |
3993 | case ABS: | |
3994 | case SQRT: | |
3995 | case FFS: | |
3996 | /* These cases either have just one operand or we know that we need not | |
3997 | check the rest of the operands. */ | |
3998 | loc = &XEXP (x, 0); | |
3999 | goto restart; | |
4000 | ||
4fd796bb RK |
4001 | case USE: |
4002 | case CLOBBER: | |
4003 | /* If the operand is a MEM, see if the change is a valid MEM. If not, | |
4004 | go ahead and make the invalid one, but do it to a copy. For a REG, | |
718fe406 | 4005 | just make the recursive call, since there's no chance of a problem. */ |
4fd796bb RK |
4006 | |
4007 | if ((GET_CODE (XEXP (x, 0)) == MEM | |
4008 | && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0), | |
4009 | 0)) | |
4010 | || (GET_CODE (XEXP (x, 0)) == REG | |
7694ce35 | 4011 | && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0))) |
4fd796bb RK |
4012 | return 1; |
4013 | ||
4014 | XEXP (x, 0) = copy_rtx (XEXP (x, 0)); | |
4015 | loc = &XEXP (x, 0); | |
4016 | goto restart; | |
4017 | ||
6f086dfc RS |
4018 | case REG: |
4019 | /* Try to replace with a PLUS. If that doesn't work, compute the sum | |
4020 | in front of this insn and substitute the temporary. */ | |
d1405722 | 4021 | if ((new = instantiate_new_reg (x, &offset)) != 0) |
6f086dfc RS |
4022 | { |
4023 | temp = plus_constant (new, offset); | |
4024 | if (!validate_change (object, loc, temp, 0)) | |
4025 | { | |
4026 | if (! extra_insns) | |
4027 | return 0; | |
4028 | ||
4029 | start_sequence (); | |
5f4f0e22 | 4030 | temp = force_operand (temp, NULL_RTX); |
6f086dfc RS |
4031 | seq = get_insns (); |
4032 | end_sequence (); | |
4033 | ||
4034 | emit_insns_before (seq, object); | |
4035 | if (! validate_change (object, loc, temp, 0) | |
4036 | && ! validate_replace_rtx (x, temp, object)) | |
4037 | abort (); | |
4038 | } | |
4039 | } | |
4040 | ||
4041 | return 1; | |
e9a25f70 JL |
4042 | |
4043 | case ADDRESSOF: | |
4044 | if (GET_CODE (XEXP (x, 0)) == REG) | |
4045 | return 1; | |
4046 | ||
4047 | else if (GET_CODE (XEXP (x, 0)) == MEM) | |
4048 | { | |
4049 | /* If we have a (addressof (mem ..)), do any instantiation inside | |
4050 | since we know we'll be making the inside valid when we finally | |
4051 | remove the ADDRESSOF. */ | |
4052 | instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0); | |
4053 | return 1; | |
4054 | } | |
4055 | break; | |
718fe406 | 4056 | |
e9a25f70 JL |
4057 | default: |
4058 | break; | |
6f086dfc RS |
4059 | } |
4060 | ||
4061 | /* Scan all subexpressions. */ | |
4062 | fmt = GET_RTX_FORMAT (code); | |
4063 | for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) | |
4064 | if (*fmt == 'e') | |
4065 | { | |
4066 | if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns)) | |
4067 | return 0; | |
4068 | } | |
4069 | else if (*fmt == 'E') | |
4070 | for (j = 0; j < XVECLEN (x, i); j++) | |
4071 | if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object, | |
4072 | extra_insns)) | |
4073 | return 0; | |
4074 | ||
4075 | return 1; | |
4076 | } | |
4077 | \f | |
4078 | /* Optimization: assuming this function does not receive nonlocal gotos, | |
4079 | delete the handlers for such, as well as the insns to establish | |
4080 | and disestablish them. */ | |
4081 | ||
4082 | static void | |
4083 | delete_handlers () | |
4084 | { | |
4085 | rtx insn; | |
4086 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | |
4087 | { | |
4088 | /* Delete the handler by turning off the flag that would | |
4089 | prevent jump_optimize from deleting it. | |
4090 | Also permit deletion of the nonlocal labels themselves | |
4091 | if nothing local refers to them. */ | |
4092 | if (GET_CODE (insn) == CODE_LABEL) | |
71cd4a8d JW |
4093 | { |
4094 | tree t, last_t; | |
4095 | ||
4096 | LABEL_PRESERVE_P (insn) = 0; | |
4097 | ||
4098 | /* Remove it from the nonlocal_label list, to avoid confusing | |
4099 | flow. */ | |
4100 | for (t = nonlocal_labels, last_t = 0; t; | |
4101 | last_t = t, t = TREE_CHAIN (t)) | |
4102 | if (DECL_RTL (TREE_VALUE (t)) == insn) | |
4103 | break; | |
4104 | if (t) | |
4105 | { | |
4106 | if (! last_t) | |
4107 | nonlocal_labels = TREE_CHAIN (nonlocal_labels); | |
4108 | else | |
4109 | TREE_CHAIN (last_t) = TREE_CHAIN (t); | |
4110 | } | |
4111 | } | |
ba716ac9 BS |
4112 | if (GET_CODE (insn) == INSN) |
4113 | { | |
4114 | int can_delete = 0; | |
4115 | rtx t; | |
4116 | for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1)) | |
4117 | if (reg_mentioned_p (t, PATTERN (insn))) | |
4118 | { | |
4119 | can_delete = 1; | |
4120 | break; | |
4121 | } | |
4122 | if (can_delete | |
59257ff7 RK |
4123 | || (nonlocal_goto_stack_level != 0 |
4124 | && reg_mentioned_p (nonlocal_goto_stack_level, | |
ba716ac9 | 4125 | PATTERN (insn)))) |
53c17031 | 4126 | delete_related_insns (insn); |
ba716ac9 | 4127 | } |
6f086dfc RS |
4128 | } |
4129 | } | |
6f086dfc | 4130 | \f |
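
/* A stand-alone sketch (not GCC code) of the predecessor-tracking
   deletion idiom used for nonlocal_labels above: walk with T and
   LAST_T, then either advance the head or splice around the match.
   The `node' type is a made-up stand-in for a TREE_CHAIN list.

#include <stdio.h>

struct node { int key; struct node *chain; };

static struct node *
remove_key (struct node *head, int key)
{
  struct node *t, *last_t = 0;

  for (t = head; t; last_t = t, t = t->chain)
    if (t->key == key)
      break;

  if (t)
    {
      if (! last_t)
        head = head->chain;         // the match was the list head
      else
        last_t->chain = t->chain;   // splice the match out
    }

  return head;
}

int
main ()
{
  struct node c = { 3, 0 }, b = { 2, &c }, a = { 1, &b };
  struct node *head = remove_key (&a, 2);

  for (; head; head = head->chain)
    printf ("%d ", head->key);      // prints "1 3"
  printf ("\n");
  return 0;
}
*/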
6f086dfc RS |
4131 | int |
4132 | max_parm_reg_num () | |
4133 | { | |
4134 | return max_parm_reg; | |
4135 | } | |
4136 | ||
4137 | /* Return the first insn following those generated by `assign_parms'. */ | |
4138 | ||
4139 | rtx | |
4140 | get_first_nonparm_insn () | |
4141 | { | |
4142 | if (last_parm_insn) | |
4143 | return NEXT_INSN (last_parm_insn); | |
4144 | return get_insns (); | |
4145 | } | |
4146 | ||
5378192b RS |
4147 | /* Return the first NOTE_INSN_BLOCK_BEG note in the function. |
4148 | Crash if there is none. */ | |
4149 | ||
4150 | rtx | |
4151 | get_first_block_beg () | |
4152 | { | |
b3694847 SS |
4153 | rtx searcher; |
4154 | rtx insn = get_first_nonparm_insn (); | |
5378192b RS |
4155 | |
4156 | for (searcher = insn; searcher; searcher = NEXT_INSN (searcher)) | |
4157 | if (GET_CODE (searcher) == NOTE | |
4158 | && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG) | |
4159 | return searcher; | |
4160 | ||
4161 | abort (); /* Invalid call to this function. (See comments above.) */ | |
4162 | return NULL_RTX; | |
4163 | } | |
4164 | ||
d181c154 RS |
4165 | /* Return 1 if EXP is an aggregate type (or a value with aggregate type). |
4166 | This means a type for which function calls must pass an address to the | |
4167 | function or get an address back from the function. | |
4168 | EXP may be a type node or an expression (whose type is tested). */ | |
6f086dfc RS |
4169 | |
4170 | int | |
4171 | aggregate_value_p (exp) | |
4172 | tree exp; | |
4173 | { | |
9d790a4f RS |
4174 | int i, regno, nregs; |
4175 | rtx reg; | |
2f939d94 TP |
4176 | |
4177 | tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp); | |
d181c154 | 4178 | |
d7bf8ada MM |
4179 | if (TREE_CODE (type) == VOID_TYPE) |
4180 | return 0; | |
d181c154 | 4181 | if (RETURN_IN_MEMORY (type)) |
6f086dfc | 4182 | return 1; |
956d6950 | 4183 | /* Types that are TREE_ADDRESSABLE must be constructed in memory, |
49a2e5b2 DE |
4184 | and thus can't be returned in registers. */ |
4185 | if (TREE_ADDRESSABLE (type)) | |
4186 | return 1; | |
05e3bdb9 | 4187 | if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type)) |
6f086dfc | 4188 | return 1; |
9d790a4f RS |
4189 | /* Make sure we have suitable call-clobbered regs to return |
4190 | the value in; if not, we must return it in memory. */ | |
4dc07bd7 | 4191 | reg = hard_function_value (type, 0, 0); |
e71f7aa5 JW |
4192 | |
4193 | /* If we have something other than a REG (e.g. a PARALLEL), then assume | |
4194 | it is OK. */ | |
4195 | if (GET_CODE (reg) != REG) | |
4196 | return 0; | |
4197 | ||
9d790a4f | 4198 | regno = REGNO (reg); |
d181c154 | 4199 | nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type)); |
9d790a4f RS |
4200 | for (i = 0; i < nregs; i++) |
4201 | if (! call_used_regs[regno + i]) | |
4202 | return 1; | |
6f086dfc RS |
4203 | return 0; |
4204 | } | |
4205 | \f | |
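
/* A stand-alone sketch (not GCC code) of the final test in
   aggregate_value_p above: a value may come back in registers only if
   every hard register it would occupy is call-clobbered.  The register
   count and call_used table below are invented for illustration.

#include <stdio.h>

#define NREGS 8
static const int call_used[NREGS] = { 1, 1, 1, 0, 0, 0, 1, 1 };

static int
must_return_in_memory (int regno, int nregs)
{
  int i;

  for (i = 0; i < nregs; i++)
    if (! call_used[regno + i])
      return 1;                 // hits a call-saved reg: use memory

  return 0;
}

int
main ()
{
  // A two-register value starting at reg 2 reaches reg 3, which is
  // call-saved, so it must be returned in memory.
  printf ("%d %d\n",
          must_return_in_memory (2, 2),    // 1
          must_return_in_memory (0, 2));   // 0
  return 0;
}
*/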
4206 | /* Assign RTL expressions to the function's parameters. | |
4207 | This may involve copying them into registers and using | |
0d1416c6 | 4208 | those registers as the RTL for them. */ |
6f086dfc RS |
4209 | |
4210 | void | |
0d1416c6 | 4211 | assign_parms (fndecl) |
6f086dfc | 4212 | tree fndecl; |
6f086dfc | 4213 | { |
b3694847 SS |
4214 | tree parm; |
4215 | rtx entry_parm = 0; | |
4216 | rtx stack_parm = 0; | |
6f086dfc | 4217 | CUMULATIVE_ARGS args_so_far; |
621061f4 RK |
4218 | enum machine_mode promoted_mode, passed_mode; |
4219 | enum machine_mode nominal_mode, promoted_nominal_mode; | |
00d8a4c1 | 4220 | int unsignedp; |
6f086dfc RS |
4221 | /* Total space needed so far for args on the stack, |
4222 | given as a constant and a tree-expression. */ | |
4223 | struct args_size stack_args_size; | |
4224 | tree fntype = TREE_TYPE (fndecl); | |
4225 | tree fnargs = DECL_ARGUMENTS (fndecl); | |
4226 | /* This is used for the arg pointer when referring to stack args. */ | |
4227 | rtx internal_arg_pointer; | |
718fe406 | 4228 | /* This is a dummy PARM_DECL that we use for the function result if
6f086dfc RS |
4229 | the function returns a structure. */ |
4230 | tree function_result_decl = 0; | |
54ea1de9 | 4231 | #ifdef SETUP_INCOMING_VARARGS |
6f086dfc | 4232 | int varargs_setup = 0; |
54ea1de9 | 4233 | #endif |
3412b298 | 4234 | rtx conversion_insns = 0; |
4fc026cd | 4235 | struct args_size alignment_pad; |
6f086dfc RS |
4236 | |
4237 | /* Nonzero if the last arg is named `__builtin_va_alist', | |
4238 | which is used on some machines for old-fashioned non-ANSI varargs.h; | |
4239 | this should be stuck onto the stack as if it had arrived there. */ | |
3b69d50e RK |
4240 | int hide_last_arg |
4241 | = (current_function_varargs | |
4242 | && fnargs | |
6f086dfc RS |
4243 | && (parm = tree_last (fnargs)) != 0 |
4244 | && DECL_NAME (parm) | |
4245 | && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)), | |
4246 | "__builtin_va_alist"))); | |
4247 | ||
4248 | /* Nonzero if function takes extra anonymous args. | |
4249 | This means the last named arg must be on the stack | |
0f41302f | 4250 | right before the anonymous ones. */ |
6f086dfc RS |
4251 | int stdarg |
4252 | = (TYPE_ARG_TYPES (fntype) != 0 | |
4253 | && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype))) | |
4254 | != void_type_node)); | |
4255 | ||
ebb904cb RK |
4256 | current_function_stdarg = stdarg; |
4257 | ||
6f086dfc RS |
4258 | /* If the reg that the virtual arg pointer will be translated into is |
4259 | not a fixed reg or is the stack pointer, make a copy of the virtual | |
4260 | arg pointer, and address parms via the copy. The frame pointer is | |
4261 | considered fixed even though it is not marked as such. | |
4262 | ||
4263 | The second time through, simply use ap to avoid generating rtx. */ | |
4264 | ||
4265 | if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM | |
4266 | || ! (fixed_regs[ARG_POINTER_REGNUM] | |
0d1416c6 | 4267 | || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))) |
6f086dfc RS |
4268 | internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx); |
4269 | else | |
4270 | internal_arg_pointer = virtual_incoming_args_rtx; | |
4271 | current_function_internal_arg_pointer = internal_arg_pointer; | |
4272 | ||
4273 | stack_args_size.constant = 0; | |
4274 | stack_args_size.var = 0; | |
4275 | ||
4276 | /* If struct value address is treated as the first argument, make it so. */ | |
4277 | if (aggregate_value_p (DECL_RESULT (fndecl)) | |
4278 | && ! current_function_returns_pcc_struct | |
4279 | && struct_value_incoming_rtx == 0) | |
4280 | { | |
f9f29478 | 4281 | tree type = build_pointer_type (TREE_TYPE (fntype)); |
6f086dfc | 4282 | |
5f4f0e22 | 4283 | function_result_decl = build_decl (PARM_DECL, NULL_TREE, type); |
6f086dfc RS |
4284 | |
4285 | DECL_ARG_TYPE (function_result_decl) = type; | |
4286 | TREE_CHAIN (function_result_decl) = fnargs; | |
4287 | fnargs = function_result_decl; | |
4288 | } | |
718fe406 | 4289 | |
e9a25f70 | 4290 | max_parm_reg = LAST_VIRTUAL_REGISTER + 1; |
e2ecd91c | 4291 | parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx)); |
6f086dfc RS |
4292 | |
4293 | #ifdef INIT_CUMULATIVE_INCOMING_ARGS | |
ea0d4c4b | 4294 | INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX); |
6f086dfc | 4295 | #else |
2c7ee1a6 | 4296 | INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0); |
6f086dfc RS |
4297 | #endif |
4298 | ||
4299 | /* We haven't yet found an argument that we must push and pretend the | |
4300 | caller did. */ | |
4301 | current_function_pretend_args_size = 0; | |
4302 | ||
4303 | for (parm = fnargs; parm; parm = TREE_CHAIN (parm)) | |
4304 | { | |
6f086dfc RS |
4305 | struct args_size stack_offset; |
4306 | struct args_size arg_size; | |
4307 | int passed_pointer = 0; | |
621061f4 | 4308 | int did_conversion = 0; |
6f086dfc | 4309 | tree passed_type = DECL_ARG_TYPE (parm); |
621061f4 | 4310 | tree nominal_type = TREE_TYPE (parm); |
9ab70a9b | 4311 | int pretend_named; |
6f086dfc RS |
4312 | |
4313 | /* Set LAST_NAMED if this is last named arg before some | |
bf9c83fe | 4314 | anonymous args. */ |
6f086dfc RS |
4315 | int last_named = ((TREE_CHAIN (parm) == 0 |
4316 | || DECL_NAME (TREE_CHAIN (parm)) == 0) | |
3b69d50e | 4317 | && (stdarg || current_function_varargs)); |
bf9c83fe JW |
4318 | /* Set NAMED_ARG if this arg should be treated as a named arg. For |
4319 | most machines, if this is a varargs/stdarg function, then we treat | |
4320 | the last named arg as if it were anonymous too. */ | |
e5e809f4 | 4321 | int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named; |
6f086dfc RS |
4322 | |
4323 | if (TREE_TYPE (parm) == error_mark_node | |
4324 | /* This can happen after weird syntax errors | |
4325 | or if an enum type is defined among the parms. */ | |
4326 | || TREE_CODE (parm) != PARM_DECL | |
4327 | || passed_type == NULL) | |
4328 | { | |
19e7881c MM |
4329 | SET_DECL_RTL (parm, gen_rtx_MEM (BLKmode, const0_rtx)); |
4330 | DECL_INCOMING_RTL (parm) = DECL_RTL (parm); | |
6f086dfc RS |
4331 | TREE_USED (parm) = 1; |
4332 | continue; | |
4333 | } | |
4334 | ||
4335 | /* For a varargs.h function, save info about regs and stack space
4336 | used by the individual args, not including the va_alist arg. */ | |
3b69d50e | 4337 | if (hide_last_arg && last_named) |
6f086dfc RS |
4338 | current_function_args_info = args_so_far; |
4339 | ||
4340 | /* Find mode of arg as it is passed, and mode of arg | |
4341 | as it should be during execution of this function. */ | |
4342 | passed_mode = TYPE_MODE (passed_type); | |
621061f4 | 4343 | nominal_mode = TYPE_MODE (nominal_type); |
6f086dfc | 4344 | |
16bae307 RS |
4345 | /* If the parm's mode is VOID, its value doesn't matter, |
4346 | so avoid the usual things like emit_move_insn that could crash. */
4347 | if (nominal_mode == VOIDmode) | |
4348 | { | |
19e7881c MM |
4349 | SET_DECL_RTL (parm, const0_rtx); |
4350 | DECL_INCOMING_RTL (parm) = DECL_RTL (parm); | |
16bae307 RS |
4351 | continue; |
4352 | } | |
4353 | ||
3f46679a RK |
4354 | /* If the parm is to be passed as a transparent union, use the |
4355 | type of the first field for the tests below. We have already | |
4356 | verified that the modes are the same. */ | |
4357 | if (DECL_TRANSPARENT_UNION (parm) | |
2bf105ab RK |
4358 | || (TREE_CODE (passed_type) == UNION_TYPE |
4359 | && TYPE_TRANSPARENT_UNION (passed_type))) | |
3f46679a RK |
4360 | passed_type = TREE_TYPE (TYPE_FIELDS (passed_type)); |
4361 | ||
a14ae508 RK |
4362 | /* See if this arg was passed by invisible reference. It is if |
4363 | it is an object whose size depends on the contents of the | |
4364 | object itself or if the machine requires these objects be passed | |
4365 | that way. */ | |
4366 | ||
4367 | if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST | |
4368 | && contains_placeholder_p (TYPE_SIZE (passed_type))) | |
657bb6dc | 4369 | || TREE_ADDRESSABLE (passed_type) |
6f086dfc | 4370 | #ifdef FUNCTION_ARG_PASS_BY_REFERENCE |
a14ae508 | 4371 | || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode, |
bf9c83fe | 4372 | passed_type, named_arg) |
a14ae508 RK |
4373 | #endif |
4374 | ) | |
6f086dfc | 4375 | { |
621061f4 | 4376 | passed_type = nominal_type = build_pointer_type (passed_type); |
6f086dfc RS |
4377 | passed_pointer = 1; |
4378 | passed_mode = nominal_mode = Pmode; | |
4379 | } | |
6f086dfc | 4380 | |
a53e14c0 RK |
4381 | promoted_mode = passed_mode; |
4382 | ||
4383 | #ifdef PROMOTE_FUNCTION_ARGS | |
4384 | /* Compute the mode in which the arg is actually extended to. */ | |
7940255d | 4385 | unsignedp = TREE_UNSIGNED (passed_type); |
a5a52dbc | 4386 | promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1); |
a53e14c0 RK |
4387 | #endif |
4388 | ||
6f086dfc RS |
4389 | /* Let machine desc say which reg (if any) the parm arrives in. |
4390 | 0 means it arrives on the stack. */ | |
4391 | #ifdef FUNCTION_INCOMING_ARG | |
a53e14c0 | 4392 | entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode, |
bf9c83fe | 4393 | passed_type, named_arg); |
6f086dfc | 4394 | #else |
a53e14c0 | 4395 | entry_parm = FUNCTION_ARG (args_so_far, promoted_mode, |
bf9c83fe | 4396 | passed_type, named_arg); |
6f086dfc RS |
4397 | #endif |
4398 | ||
621061f4 RK |
4399 | if (entry_parm == 0) |
4400 | promoted_mode = passed_mode; | |
a53e14c0 | 4401 | |
6f086dfc RS |
4402 | #ifdef SETUP_INCOMING_VARARGS |
4403 | /* If this is the last named parameter, do any required setup for | |
4404 | varargs or stdargs. We need to know about the case of this being an | |
4405 | addressable type, in which case we skip the registers it | |
4406 | would have arrived in. | |
4407 | ||
4408 | For stdargs, LAST_NAMED will be set for two parameters, the one that | |
4409 | is actually the last named, and the dummy parameter. We only | |
4410 | want to do this action once. | |
4411 | ||
4412 | Also, indicate when RTL generation is to be suppressed. */ | |
4413 | if (last_named && !varargs_setup) | |
4414 | { | |
621061f4 | 4415 | SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type, |
0d1416c6 | 4416 | current_function_pretend_args_size, 0); |
6f086dfc RS |
4417 | varargs_setup = 1; |
4418 | } | |
4419 | #endif | |
4420 | ||
4421 | /* Determine parm's home in the stack, | |
4422 | in case it arrives in the stack or we should pretend it did. | |
4423 | ||
4424 | Compute the stack position and rtx where the argument arrives | |
4425 | and its size. | |
4426 | ||
4427 | There is one complexity here: If this was a parameter that would | |
4428 | have been passed in registers, but wasn't only because it is | |
4429 | __builtin_va_alist, we want locate_and_pad_parm to treat it as if | |
4430 | it came in a register so that REG_PARM_STACK_SPACE isn't skipped. | |
4431 | In this case, we call FUNCTION_ARG with NAMED set to 1 instead of | |
4432 | 0 as it was the previous time. */ | |
4433 | ||
9ab70a9b | 4434 | pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED; |
0f11903b | 4435 | locate_and_pad_parm (promoted_mode, passed_type, |
6f086dfc RS |
4436 | #ifdef STACK_PARMS_IN_REG_PARM_AREA |
4437 | 1, | |
4438 | #else | |
4439 | #ifdef FUNCTION_INCOMING_ARG | |
621061f4 | 4440 | FUNCTION_INCOMING_ARG (args_so_far, promoted_mode, |
6f086dfc | 4441 | passed_type, |
9ab70a9b | 4442 | pretend_named) != 0, |
6f086dfc | 4443 | #else |
621061f4 | 4444 | FUNCTION_ARG (args_so_far, promoted_mode, |
6f086dfc | 4445 | passed_type, |
9ab70a9b | 4446 | pretend_named) != 0, |
6f086dfc RS |
4447 | #endif |
4448 | #endif | |
4fc026cd | 4449 | fndecl, &stack_args_size, &stack_offset, &arg_size, |
718fe406 | 4450 | &alignment_pad); |
6f086dfc | 4451 | |
0d1416c6 BS |
4452 | { |
4453 | rtx offset_rtx = ARGS_SIZE_RTX (stack_offset); | |
4454 | ||
4455 | if (offset_rtx == const0_rtx) | |
4456 | stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer); | |
4457 | else | |
4458 | stack_parm = gen_rtx_MEM (promoted_mode, | |
4459 | gen_rtx_PLUS (Pmode, | |
4460 | internal_arg_pointer, | |
4461 | offset_rtx)); | |
4462 | ||
3bdf5ad1 | 4463 | set_mem_attributes (stack_parm, parm, 1); |
0d1416c6 | 4464 | } |
6f086dfc RS |
4465 | |
4466 | /* If this parameter was passed both in registers and in the stack, | |
4467 | use the copy on the stack. */ | |
621061f4 | 4468 | if (MUST_PASS_IN_STACK (promoted_mode, passed_type)) |
6f086dfc RS |
4469 | entry_parm = 0; |
4470 | ||
461beb10 | 4471 | #ifdef FUNCTION_ARG_PARTIAL_NREGS |
6f086dfc RS |
4472 | /* If this parm was passed part in regs and part in memory, |
4473 | pretend it arrived entirely in memory | |
4474 | by pushing the register-part onto the stack. | |
4475 | ||
4476 | In the special case of a DImode or DFmode that is split, | |
4477 | we could put it together in a pseudoreg directly, | |
4478 | but for now that's not worth bothering with. */ | |
4479 | ||
4480 | if (entry_parm) | |
4481 | { | |
621061f4 | 4482 | int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode, |
bf9c83fe | 4483 | passed_type, named_arg); |
6f086dfc RS |
4484 | |
4485 | if (nregs > 0) | |
4486 | { | |
4487 | current_function_pretend_args_size | |
4488 | = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1) | |
4489 | / (PARM_BOUNDARY / BITS_PER_UNIT) | |
4490 | * (PARM_BOUNDARY / BITS_PER_UNIT)); | |
4491 | ||
0d1416c6 BS |
4492 | /* Handle calls that pass values in multiple non-contiguous |
4493 | locations. The Irix 6 ABI has examples of this. */ | |
4494 | if (GET_CODE (entry_parm) == PARALLEL) | |
4495 | emit_group_store (validize_mem (stack_parm), entry_parm, | |
4496 | int_size_in_bytes (TREE_TYPE (parm)), | |
19caa751 | 4497 | TYPE_ALIGN (TREE_TYPE (parm))); |
718fe406 | 4498 | |
0d1416c6 BS |
4499 | else |
4500 | move_block_from_reg (REGNO (entry_parm), | |
4501 | validize_mem (stack_parm), nregs, | |
4502 | int_size_in_bytes (TREE_TYPE (parm))); | |
4503 | ||
6f086dfc RS |
4504 | entry_parm = stack_parm; |
4505 | } | |
4506 | } | |
461beb10 | 4507 | #endif |
6f086dfc RS |
4508 | |
4509 | /* If we didn't decide this parm came in a register, | |
4510 | by default it came on the stack. */ | |
4511 | if (entry_parm == 0) | |
4512 | entry_parm = stack_parm; | |
4513 | ||
4514 | /* Record permanently how this parm was passed. */ | |
0d1416c6 | 4515 | DECL_INCOMING_RTL (parm) = entry_parm; |
6f086dfc RS |
4516 | |
4517 | /* If there is actually space on the stack for this parm, | |
4518 | count it in stack_args_size; otherwise set stack_parm to 0 | |
4519 | to indicate there is no preallocated stack slot for the parm. */ | |
4520 | ||
4521 | if (entry_parm == stack_parm | |
ab87f8c8 JL |
4522 | || (GET_CODE (entry_parm) == PARALLEL |
4523 | && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX) | |
d9ca49d5 | 4524 | #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE) |
6f086dfc | 4525 | /* On some machines, even if a parm value arrives in a register |
d9ca49d5 JW |
4526 | there is still an (uninitialized) stack slot allocated for it. |
4527 | ||
4528 | ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell | |
4529 | whether this parameter already has a stack slot allocated, | |
4530 | because an arg block exists only if current_function_args_size | |
abc95ed3 | 4531 | is larger than some threshold, and we haven't calculated that |
d9ca49d5 JW |
4532 | yet. So, for now, we just assume that stack slots never exist |
4533 | in this case. */ | |
6f086dfc RS |
4534 | || REG_PARM_STACK_SPACE (fndecl) > 0 |
4535 | #endif | |
4536 | ) | |
4537 | { | |
4538 | stack_args_size.constant += arg_size.constant; | |
4539 | if (arg_size.var) | |
4540 | ADD_PARM_SIZE (stack_args_size, arg_size.var); | |
4541 | } | |
4542 | else | |
4543 | /* No stack slot was pushed for this parm. */ | |
4544 | stack_parm = 0; | |
4545 | ||
4546 | /* Update info on where next arg arrives in registers. */ | |
4547 | ||
621061f4 | 4548 | FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode, |
bf9c83fe | 4549 | passed_type, named_arg); |
6f086dfc | 4550 | |
e16c591a RS |
4551 | /* If we can't trust the parm stack slot to be aligned enough |
4552 | for its ultimate type, don't use that slot after entry. | |
4553 | We'll make another stack slot, if we need one. */ | |
4554 | { | |
c8d8ed65 | 4555 | unsigned int thisparm_boundary |
621061f4 | 4556 | = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type); |
e16c591a RS |
4557 | |
4558 | if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary) | |
4559 | stack_parm = 0; | |
4560 | } | |
4561 | ||
cb61f66f RS |
4562 | /* If parm was passed in memory, and we need to convert it on entry, |
4563 | don't store it back in that same slot. */ | |
4564 | if (entry_parm != 0 | |
4565 | && nominal_mode != BLKmode && nominal_mode != passed_mode) | |
4566 | stack_parm = 0; | |
4567 | ||
e68a6ce1 AO |
4568 | /* When an argument is passed in multiple locations, we can't |
4569 | make use of this information, but we can save some copying if | |
4570 | the whole argument is passed in a single register. */ | |
4571 | if (GET_CODE (entry_parm) == PARALLEL | |
4572 | && nominal_mode != BLKmode && passed_mode != BLKmode) | |
4573 | { | |
4574 | int i, len = XVECLEN (entry_parm, 0); | |
4575 | ||
4576 | for (i = 0; i < len; i++) | |
4577 | if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX | |
4578 | && GET_CODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) == REG | |
4579 | && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) | |
4580 | == passed_mode) | |
b845f897 | 4581 | && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0) |
e68a6ce1 AO |
4582 | { |
4583 | entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0); | |
fd81f5f6 | 4584 | DECL_INCOMING_RTL (parm) = entry_parm; |
e68a6ce1 AO |
4585 | break; |
4586 | } | |
4587 | } | |
4588 | ||
6f086dfc RS |
4589 | /* ENTRY_PARM is an RTX for the parameter as it arrives, |
4590 | in the mode in which it arrives. | |
4591 | STACK_PARM is an RTX for a stack slot where the parameter can live | |
4592 | during the function (in case we want to put it there). | |
4593 | STACK_PARM is 0 if no stack slot was pushed for it. | |
4594 | ||
4595 | Now output code if necessary to convert ENTRY_PARM to | |
4596 | the type in which this function declares it, | |
4597 | and store that result in an appropriate place, | |
4598 | which may be a pseudo reg, may be STACK_PARM, | |
4599 | or may be a local stack slot if STACK_PARM is 0. | |
4600 | ||
4601 | Set DECL_RTL to that place. */ | |
4602 | ||
5c4cdc9f | 4603 | if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL) |
6f086dfc | 4604 | { |
5c4cdc9f JW |
4605 | /* If a BLKmode arrives in registers, copy it to a stack slot. |
4606 | Handle calls that pass values in multiple non-contiguous | |
4607 | locations. The Irix 6 ABI has examples of this. */ | |
4608 | if (GET_CODE (entry_parm) == REG | |
4609 | || GET_CODE (entry_parm) == PARALLEL) | |
6f086dfc | 4610 | { |
621061f4 RK |
4611 | int size_stored |
4612 | = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)), | |
4613 | UNITS_PER_WORD); | |
6f086dfc RS |
4614 | |
4615 | /* Note that we will be storing an integral number of words. | |
4616 | So we have to be careful to ensure that we allocate an | |
4617 | integral number of words. We do this below in the | |
4618 | call to assign_stack_local if space was not allocated in the argument
4619 | list. If it was, this will not work if PARM_BOUNDARY is not | |
4620 | a multiple of BITS_PER_WORD. It isn't clear how to fix this | |
4621 | if it becomes a problem. */ | |
4622 | ||
4623 | if (stack_parm == 0) | |
7e41ffa2 RS |
4624 | { |
4625 | stack_parm | |
621061f4 RK |
4626 | = assign_stack_local (GET_MODE (entry_parm), |
4627 | size_stored, 0); | |
3bdf5ad1 | 4628 | set_mem_attributes (stack_parm, parm, 1); |
7e41ffa2 RS |
4629 | } |
4630 | ||
6f086dfc RS |
4631 | else if (PARM_BOUNDARY % BITS_PER_WORD != 0) |
4632 | abort (); | |
4633 | ||
5c4cdc9f JW |
4634 | /* Handle calls that pass values in multiple non-contiguous |
4635 | locations. The Irix 6 ABI has examples of this. */ | |
4636 | if (GET_CODE (entry_parm) == PARALLEL) | |
aac5cc16 RH |
4637 | emit_group_store (validize_mem (stack_parm), entry_parm, |
4638 | int_size_in_bytes (TREE_TYPE (parm)), | |
19caa751 | 4639 | TYPE_ALIGN (TREE_TYPE (parm))); |
5c4cdc9f JW |
4640 | else |
4641 | move_block_from_reg (REGNO (entry_parm), | |
4642 | validize_mem (stack_parm), | |
4643 | size_stored / UNITS_PER_WORD, | |
4644 | int_size_in_bytes (TREE_TYPE (parm))); | |
6f086dfc | 4645 | } |
19e7881c | 4646 | SET_DECL_RTL (parm, stack_parm); |
6f086dfc | 4647 | } |
d29c259b RH |
4648 | else if (! ((! optimize |
4649 | && ! DECL_REGISTER (parm) | |
a82ad570 | 4650 | && ! DECL_INLINE (fndecl)) |
6f086dfc RS |
4651 | || TREE_SIDE_EFFECTS (parm) |
4652 | /* If -ffloat-store specified, don't put explicit | |
4653 | float variables into registers. */ | |
4654 | || (flag_float_store | |
4655 | && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)) | |
4656 | /* Always assign pseudo to structure return or item passed | |
4657 | by invisible reference. */ | |
4658 | || passed_pointer || parm == function_result_decl) | |
4659 | { | |
00d8a4c1 RK |
4660 | /* Store the parm in a pseudoregister during the function, but we |
4661 | may need to do it in a wider mode. */ | |
4662 | ||
b3694847 | 4663 | rtx parmreg; |
770ae6cc | 4664 | unsigned int regno, regnoi = 0, regnor = 0; |
00d8a4c1 RK |
4665 | |
4666 | unsignedp = TREE_UNSIGNED (TREE_TYPE (parm)); | |
cd5b3469 | 4667 | |
621061f4 RK |
4668 | promoted_nominal_mode |
4669 | = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0); | |
6f086dfc | 4670 | |
621061f4 | 4671 | parmreg = gen_reg_rtx (promoted_nominal_mode); |
ddb7361a | 4672 | mark_user_reg (parmreg); |
6f086dfc RS |
4673 | |
4674 | /* If this was an item that we received a pointer to, set DECL_RTL | |
4675 | appropriately. */ | |
4676 | if (passed_pointer) | |
4677 | { | |
abde42f7 JH |
4678 | rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), |
4679 | parmreg); | |
4680 | set_mem_attributes (x, parm, 1); | |
4681 | SET_DECL_RTL (parm, x); | |
6f086dfc RS |
4682 | } |
4683 | else | |
d64236b4 | 4684 | { |
19e7881c | 4685 | SET_DECL_RTL (parm, parmreg); |
d64236b4 RK |
4686 | maybe_set_unchanging (DECL_RTL (parm), parm); |
4687 | } | |
4688 | ||
6f086dfc | 4689 | /* Copy the value into the register. */ |
621061f4 RK |
4690 | if (nominal_mode != passed_mode |
4691 | || promoted_nominal_mode != promoted_mode) | |
86f8eff3 | 4692 | { |
efd8cba0 | 4693 | int save_tree_used; |
621061f4 | 4694 | /* ENTRY_PARM has been converted to PROMOTED_MODE, its |
718fe406 | 4695 | mode, by the caller. We now have to convert it to |
621061f4 | 4696 | NOMINAL_MODE, if different. However, PARMREG may be in |
956d6950 | 4697 | a different mode than NOMINAL_MODE if it is being stored |
621061f4 RK |
4698 | promoted. |
4699 | ||
4700 | If ENTRY_PARM is a hard register, it might be in a register | |
86f8eff3 RK |
4701 | not valid for operating in its mode (e.g., an odd-numbered |
4702 | register for a DFmode). In that case, moves are the only | |
4703 | thing valid, so we can't do a convert from there. This | |
4704 | occurs when the calling sequence allows such misaligned
3412b298 JW |
4705 | usage.
4706 | ||
4707 | In addition, the conversion may involve a call, which could | |
4708 | clobber parameters which haven't been copied to pseudo | |
4709 | registers yet. Therefore, we must first copy the parm to | |
4710 | a pseudo reg here, and save the conversion until after all | |
4711 | parameters have been moved. */ | |
4712 | ||
4713 | rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm)); | |
4714 | ||
4715 | emit_move_insn (tempreg, validize_mem (entry_parm)); | |
4716 | ||
4717 | push_to_sequence (conversion_insns); | |
ad241351 RK |
4718 | tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp); |
4719 | ||
ddef6bc7 JJ |
4720 | if (GET_CODE (tempreg) == SUBREG |
4721 | && GET_MODE (tempreg) == nominal_mode | |
4722 | && GET_CODE (SUBREG_REG (tempreg)) == REG | |
4723 | && nominal_mode == passed_mode | |
4724 | && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (entry_parm) | |
4725 | && GET_MODE_SIZE (GET_MODE (tempreg)) | |
4726 | < GET_MODE_SIZE (GET_MODE (entry_parm))) | |
4727 | { | |
4728 | /* The argument is already sign/zero extended, so note it | |
4729 | into the subreg. */ | |
4730 | SUBREG_PROMOTED_VAR_P (tempreg) = 1; | |
4731 | SUBREG_PROMOTED_UNSIGNED_P (tempreg) = unsignedp; | |
4732 | } | |
4733 | ||
efd8cba0 DB |
4734 | /* TREE_USED gets set erroneously during expand_assignment. */ |
4735 | save_tree_used = TREE_USED (parm); | |
621061f4 RK |
4736 | expand_assignment (parm, |
4737 | make_tree (nominal_type, tempreg), 0, 0); | |
efd8cba0 | 4738 | TREE_USED (parm) = save_tree_used; |
3412b298 | 4739 | conversion_insns = get_insns (); |
621061f4 | 4740 | did_conversion = 1; |
3412b298 | 4741 | end_sequence (); |
86f8eff3 | 4742 | } |
6f086dfc RS |
4743 | else |
4744 | emit_move_insn (parmreg, validize_mem (entry_parm)); | |
4745 | ||
74bd77a8 RS |
4746 | /* If we were passed a pointer but the actual value |
4747 | can safely live in a register, put it in one. */ | |
16bae307 | 4748 | if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode |
d29c259b RH |
4749 | && ! ((! optimize |
4750 | && ! DECL_REGISTER (parm) | |
74bd77a8 | 4751 | && ! DECL_INLINE (fndecl)) |
74bd77a8 RS |
4752 | || TREE_SIDE_EFFECTS (parm) |
4753 | /* If -ffloat-store specified, don't put explicit | |
4754 | float variables into registers. */ | |
4755 | || (flag_float_store | |
4756 | && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))) | |
4757 | { | |
2654605a JW |
4758 | /* We can't use nominal_mode, because it will have been set to |
4759 | Pmode above. We must use the actual mode of the parm. */ | |
4760 | parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm))); | |
ddb7361a | 4761 | mark_user_reg (parmreg); |
f523247a AO |
4762 | if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm))) |
4763 | { | |
4764 | rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm))); | |
19e7881c | 4765 | int unsigned_p = TREE_UNSIGNED (TREE_TYPE (parm)); |
f523247a AO |
4766 | push_to_sequence (conversion_insns); |
4767 | emit_move_insn (tempreg, DECL_RTL (parm)); | |
19e7881c MM |
4768 | SET_DECL_RTL (parm, |
4769 | convert_to_mode (GET_MODE (parmreg), | |
4770 | tempreg, | |
4771 | unsigned_p)); | |
f523247a AO |
4772 | emit_move_insn (parmreg, DECL_RTL (parm)); |
4773 | conversion_insns = get_insns ();
4774 | did_conversion = 1; | |
4775 | end_sequence (); | |
4776 | } | |
4777 | else | |
4778 | emit_move_insn (parmreg, DECL_RTL (parm)); | |
19e7881c | 4779 | SET_DECL_RTL (parm, parmreg); |
c110c53d RS |
4780 | /* STACK_PARM is the pointer, not the parm, and PARMREG is |
4781 | now the parm. */ | |
4782 | stack_parm = 0; | |
74bd77a8 | 4783 | } |
137a2a7b DE |
4784 | #ifdef FUNCTION_ARG_CALLEE_COPIES |
4785 | /* If we are passed an arg by reference and it is our responsibility | |
4786 | to make a copy, do it now. | |
4787 | PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4788 | original argument, so we must recreate them in the call to | |
4789 | FUNCTION_ARG_CALLEE_COPIES. */ | |
4790 | /* ??? Later add code to handle the case that if the argument isn't | |
4791 | modified, don't do the copy. */ | |
4792 | ||
4793 | else if (passed_pointer | |
4794 | && FUNCTION_ARG_CALLEE_COPIES (args_so_far, | |
4795 | TYPE_MODE (DECL_ARG_TYPE (parm)), | |
4796 | DECL_ARG_TYPE (parm), | |
bf9c83fe | 4797 | named_arg) |
926b1b99 | 4798 | && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm))) |
137a2a7b DE |
4799 | { |
4800 | rtx copy; | |
4801 | tree type = DECL_ARG_TYPE (parm); | |
4802 | ||
4803 | /* This sequence may involve a library call perhaps clobbering | |
4804 | registers that haven't been copied to pseudos yet. */ | |
4805 | ||
4806 | push_to_sequence (conversion_insns); | |
4807 | ||
d0f062fb | 4808 | if (!COMPLETE_TYPE_P (type) |
137a2a7b | 4809 | || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST) |
1fd3ef7f | 4810 | /* This is a variable sized object. */ |
38a448ca RH |
4811 | copy = gen_rtx_MEM (BLKmode, |
4812 | allocate_dynamic_stack_space | |
4813 | (expr_size (parm), NULL_RTX, | |
4814 | TYPE_ALIGN (type))); | |
137a2a7b | 4815 | else |
1fd3ef7f RK |
4816 | copy = assign_stack_temp (TYPE_MODE (type), |
4817 | int_size_in_bytes (type), 1); | |
a696c1d6 | 4818 | set_mem_attributes (copy, parm, 1); |
137a2a7b DE |
4819 | |
4820 | store_expr (parm, copy, 0); | |
4821 | emit_move_insn (parmreg, XEXP (copy, 0)); | |
7d384cc0 | 4822 | if (current_function_check_memory_usage) |
ebb1b59a BS |
4823 | emit_library_call (chkr_set_right_libfunc, |
4824 | LCT_CONST_MAKE_BLOCK, VOIDmode, 3, | |
6a9c4aed | 4825 | XEXP (copy, 0), Pmode, |
86fa911a RK |
4826 | GEN_INT (int_size_in_bytes (type)), |
4827 | TYPE_MODE (sizetype), | |
956d6950 JL |
4828 | GEN_INT (MEMORY_USE_RW), |
4829 | TYPE_MODE (integer_type_node)); | |
137a2a7b | 4830 | conversion_insns = get_insns (); |
621061f4 | 4831 | did_conversion = 1; |
137a2a7b DE |
4832 | end_sequence (); |
4833 | } | |
4834 | #endif /* FUNCTION_ARG_CALLEE_COPIES */ | |
74bd77a8 | 4835 | |
6f086dfc | 4836 | /* In any case, record the parm's desired stack location |
718fe406 | 4837 | in case we later discover it must live in the stack. |
14aceb29 RS |
4838 | |
4839 | If it is a COMPLEX value, store the stack location for both | |
4840 | halves. */ | |
4841 | ||
4842 | if (GET_CODE (parmreg) == CONCAT) | |
4843 | regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1))); | |
4844 | else | |
4845 | regno = REGNO (parmreg); | |
4846 | ||
e9a25f70 | 4847 | if (regno >= max_parm_reg) |
6f086dfc RS |
4848 | { |
4849 | rtx *new; | |
e9a25f70 | 4850 | int old_max_parm_reg = max_parm_reg; |
14aceb29 | 4851 | |
e9a25f70 JL |
4852 | /* It's slow to expand this one register at a time, |
4853 | but it's also rare and we need max_parm_reg to be | |
4854 | precisely correct. */ | |
4855 | max_parm_reg = regno + 1; | |
e2ecd91c BS |
4856 | new = (rtx *) xrealloc (parm_reg_stack_loc, |
4857 | max_parm_reg * sizeof (rtx)); | |
961192e1 | 4858 | memset ((char *) (new + old_max_parm_reg), 0, |
e9a25f70 | 4859 | (max_parm_reg - old_max_parm_reg) * sizeof (rtx)); |
6f086dfc RS |
4860 | parm_reg_stack_loc = new; |
4861 | } | |
14aceb29 RS |
4862 | |
4863 | if (GET_CODE (parmreg) == CONCAT) | |
4864 | { | |
4865 | enum machine_mode submode = GET_MODE (XEXP (parmreg, 0)); | |
4866 | ||
a03caf76 RK |
4867 | regnor = REGNO (gen_realpart (submode, parmreg)); |
4868 | regnoi = REGNO (gen_imagpart (submode, parmreg)); | |
4869 | ||
7b1a0c14 RS |
4870 | if (stack_parm != 0) |
4871 | { | |
a03caf76 | 4872 | parm_reg_stack_loc[regnor] |
3d329b07 | 4873 | = gen_realpart (submode, stack_parm); |
a03caf76 | 4874 | parm_reg_stack_loc[regnoi] |
3d329b07 | 4875 | = gen_imagpart (submode, stack_parm); |
7b1a0c14 RS |
4876 | } |
4877 | else | |
4878 | { | |
a03caf76 RK |
4879 | parm_reg_stack_loc[regnor] = 0; |
4880 | parm_reg_stack_loc[regnoi] = 0; | |
7b1a0c14 | 4881 | } |
14aceb29 RS |
4882 | } |
4883 | else | |
4884 | parm_reg_stack_loc[REGNO (parmreg)] = stack_parm; | |
6f086dfc RS |
4885 | |
4886 | /* Mark the register as eliminable if we did no conversion | |
4887 | and it was copied from memory at a fixed offset, | |
4888 | and the arg pointer was not copied to a pseudo-reg. | |
4889 | If the arg pointer is a pseudo reg or the offset formed | |
4890 | an invalid address, such memory-equivalences | |
4891 | as we make here would screw up life analysis for it. */ | |
4892 | if (nominal_mode == passed_mode | |
621061f4 | 4893 | && ! did_conversion |
38b610ed ILT |
4894 | && stack_parm != 0 |
4895 | && GET_CODE (stack_parm) == MEM | |
6f086dfc RS |
4896 | && stack_offset.var == 0 |
4897 | && reg_mentioned_p (virtual_incoming_args_rtx, | |
38b610ed | 4898 | XEXP (stack_parm, 0))) |
a03caf76 RK |
4899 | { |
4900 | rtx linsn = get_last_insn (); | |
69685820 | 4901 | rtx sinsn, set; |
a03caf76 RK |
4902 | |
4903 | /* Mark complex types separately. */ | |
4904 | if (GET_CODE (parmreg) == CONCAT) | |
69685820 RK |
4905 | /* Scan backwards for the set of the real and |
4906 | imaginary parts. */ | |
4907 | for (sinsn = linsn; sinsn != 0; | |
4908 | sinsn = prev_nonnote_insn (sinsn)) | |
4909 | { | |
4910 | set = single_set (sinsn); | |
4911 | if (set != 0 | |
4912 | && SET_DEST (set) == regno_reg_rtx [regnoi]) | |
4913 | REG_NOTES (sinsn) | |
38a448ca RH |
4914 | = gen_rtx_EXPR_LIST (REG_EQUIV, |
4915 | parm_reg_stack_loc[regnoi], | |
4916 | REG_NOTES (sinsn)); | |
69685820 RK |
4917 | else if (set != 0 |
4918 | && SET_DEST (set) == regno_reg_rtx [regnor]) | |
4919 | REG_NOTES (sinsn) | |
38a448ca RH |
4920 | = gen_rtx_EXPR_LIST (REG_EQUIV, |
4921 | parm_reg_stack_loc[regnor], | |
4922 | REG_NOTES (sinsn)); | |
69685820 RK |
4923 | } |
4924 | else if ((set = single_set (linsn)) != 0 | |
4925 | && SET_DEST (set) == parmreg) | |
718fe406 | 4926 | REG_NOTES (linsn) |
38a448ca RH |
4927 | = gen_rtx_EXPR_LIST (REG_EQUIV, |
4928 | stack_parm, REG_NOTES (linsn)); | |
a03caf76 | 4929 | } |
6f086dfc RS |
4930 | |
4931 | /* For pointer data type, suggest pointer register. */ | |
e5e809f4 | 4932 | if (POINTER_TYPE_P (TREE_TYPE (parm))) |
6c6166bd | 4933 | mark_reg_pointer (parmreg, |
bdb429a5 RK |
4934 | TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))); |
4935 | ||
d96a6d1a JM |
4936 | /* If something wants our address, try to use ADDRESSOF. */ |
4937 | if (TREE_ADDRESSABLE (parm)) | |
db5fbfb4 MM |
4938 | { |
4939 | /* If we end up putting something into the stack, | |
4940 | fixup_var_refs_insns will need to make a pass over | |
4941 | all the instructions. It looks through the pending
4942 | sequences -- but it can't see the ones in the | |
4943 | CONVERSION_INSNS, if they're not on the sequence | |
4944 | stack. So, we go back to that sequence, just so that | |
4945 | the fixups will happen. */ | |
4946 | push_to_sequence (conversion_insns); | |
4947 | put_var_into_stack (parm); | |
4948 | conversion_insns = get_insns (); | |
4949 | end_sequence (); | |
4950 | } | |
6f086dfc RS |
4951 | } |
4952 | else | |
4953 | { | |
4954 | /* Value must be stored in the stack slot STACK_PARM | |
4955 | during function execution. */ | |
4956 | ||
621061f4 | 4957 | if (promoted_mode != nominal_mode) |
86f8eff3 | 4958 | { |
6d2f8887 | 4959 | /* Conversion is required. */ |
3412b298 JW |
4960 | rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm)); |
4961 | ||
4962 | emit_move_insn (tempreg, validize_mem (entry_parm)); | |
86f8eff3 | 4963 | |
3412b298 JW |
4964 | push_to_sequence (conversion_insns); |
4965 | entry_parm = convert_to_mode (nominal_mode, tempreg, | |
a53e14c0 | 4966 | TREE_UNSIGNED (TREE_TYPE (parm))); |
de957303 | 4967 | if (stack_parm) |
f4ef873c RK |
4968 | /* ??? This may need a big-endian conversion on sparc64. */ |
4969 | stack_parm = adjust_address (stack_parm, nominal_mode, 0); | |
4970 | ||
3412b298 | 4971 | conversion_insns = get_insns (); |
621061f4 | 4972 | did_conversion = 1; |
3412b298 | 4973 | end_sequence (); |
86f8eff3 | 4974 | } |
6f086dfc RS |
4975 | |
4976 | if (entry_parm != stack_parm) | |
4977 | { | |
4978 | if (stack_parm == 0) | |
7e41ffa2 RS |
4979 | { |
4980 | stack_parm | |
4981 | = assign_stack_local (GET_MODE (entry_parm), | |
4982 | GET_MODE_SIZE (GET_MODE (entry_parm)), 0); | |
3bdf5ad1 | 4983 | set_mem_attributes (stack_parm, parm, 1); |
7e41ffa2 RS |
4984 | } |
4985 | ||
621061f4 | 4986 | if (promoted_mode != nominal_mode) |
3412b298 JW |
4987 | { |
4988 | push_to_sequence (conversion_insns); | |
4989 | emit_move_insn (validize_mem (stack_parm), | |
4990 | validize_mem (entry_parm)); | |
4991 | conversion_insns = get_insns (); | |
4992 | end_sequence (); | |
4993 | } | |
4994 | else | |
4995 | emit_move_insn (validize_mem (stack_parm), | |
4996 | validize_mem (entry_parm)); | |
6f086dfc | 4997 | } |
7d384cc0 | 4998 | if (current_function_check_memory_usage) |
86fa911a RK |
4999 | { |
5000 | push_to_sequence (conversion_insns); | |
ebb1b59a BS |
5001 | emit_library_call (chkr_set_right_libfunc, LCT_CONST_MAKE_BLOCK, |
5002 | VOIDmode, 3, XEXP (stack_parm, 0), Pmode, | |
718fe406 | 5003 | GEN_INT (GET_MODE_SIZE (GET_MODE |
86fa911a RK |
5004 | (entry_parm))), |
5005 | TYPE_MODE (sizetype), | |
956d6950 JL |
5006 | GEN_INT (MEMORY_USE_RW), |
5007 | TYPE_MODE (integer_type_node)); | |
6f086dfc | 5008 | |
86fa911a RK |
5009 | conversion_insns = get_insns (); |
5010 | end_sequence (); | |
5011 | } | |
19e7881c | 5012 | SET_DECL_RTL (parm, stack_parm); |
6f086dfc | 5013 | } |
718fe406 | 5014 | |
6f086dfc RS |
5015 | /* If this "parameter" was the place where we are receiving the |
5016 | function's incoming structure pointer, set up the result. */ | |
5017 | if (parm == function_result_decl) | |
ccdecf58 RK |
5018 | { |
5019 | tree result = DECL_RESULT (fndecl); | |
abde42f7 | 5020 | rtx x = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm)); |
ccdecf58 | 5021 | |
abde42f7 JH |
5022 | set_mem_attributes (x, result, 1); |
5023 | SET_DECL_RTL (result, x); | |
ccdecf58 | 5024 | } |
6f086dfc RS |
5025 | } |
5026 | ||
3412b298 JW |
5027 | /* Output all parameter conversion instructions (possibly including calls) |
5028 | now that all parameters have been copied out of hard registers. */ | |
5029 | emit_insns (conversion_insns); | |
5030 | ||
6f086dfc RS |
5031 | last_parm_insn = get_last_insn (); |
5032 | ||
5033 | current_function_args_size = stack_args_size.constant; | |
5034 | ||
5035 | /* Adjust function incoming argument size for alignment and | |
5036 | minimum length. */ | |
5037 | ||
5038 | #ifdef REG_PARM_STACK_SPACE | |
6f90e075 | 5039 | #ifndef MAYBE_REG_PARM_STACK_SPACE |
6f086dfc RS |
5040 | current_function_args_size = MAX (current_function_args_size, |
5041 | REG_PARM_STACK_SPACE (fndecl)); | |
5042 | #endif | |
6f90e075 | 5043 | #endif |
6f086dfc | 5044 | |
4433e339 RH |
5045 | #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT) |
5046 | ||
5047 | current_function_args_size | |
5048 | = ((current_function_args_size + STACK_BYTES - 1) | |
5049 | / STACK_BYTES) * STACK_BYTES; | |
4433e339 | 5050 | |
6f086dfc RS |
5051 | #ifdef ARGS_GROW_DOWNWARD |
5052 | current_function_arg_offset_rtx | |
5f4f0e22 | 5053 | = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant) |
718fe406 KH |
5054 | : expand_expr (size_diffop (stack_args_size.var, |
5055 | size_int (-stack_args_size.constant)), | |
86fa911a | 5056 | NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD)); |
6f086dfc RS |
5057 | #else |
5058 | current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size); | |
5059 | #endif | |
5060 | ||
5061 | /* See how many bytes, if any, of its args a function should try to pop | |
5062 | on return. */ | |
5063 | ||
64e6d9cc | 5064 | current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl), |
6f086dfc RS |
5065 | current_function_args_size); |
5066 | ||
3b69d50e RK |
5067 | /* For a stdarg.h function, save info about
5068 | regs and stack space used by the named args. */ | |
6f086dfc | 5069 | |
3b69d50e | 5070 | if (!hide_last_arg) |
6f086dfc RS |
5071 | current_function_args_info = args_so_far; |
5072 | ||
5073 | /* Set the rtx used for the function return value. Put this in its | |
5074 | own variable so any optimizers that need this information don't have | |
5075 | to include tree.h. Do this here so it gets done when an inlined | |
5076 | function gets output. */ | |
5077 | ||
19e7881c MM |
5078 | current_function_return_rtx |
5079 | = (DECL_RTL_SET_P (DECL_RESULT (fndecl)) | |
5080 | ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX); | |
6f086dfc RS |
5081 | } |
5082 | \f | |
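
/* Much of assign_parms above turns on the difference between a
   parameter's nominal mode and the wider mode it is promoted to in
   registers (PROMOTE_FUNCTION_ARGS / promote_mode).  A stand-alone
   sketch of the shape of that decision follows; the mode values and
   the widening rule are invented for illustration, not GCC's.

#include <stdio.h>

enum toy_mode { QImode = 1, HImode = 2, SImode = 4 };   // size in bytes

static enum toy_mode
toy_promote_mode (enum toy_mode mode, int *punsignedp, int type_unsigned)
{
  *punsignedp = type_unsigned;           // zero vs. sign extension
  return mode < SImode ? SImode : mode;  // widen sub-word modes
}

int
main ()
{
  int unsignedp;
  enum toy_mode promoted = toy_promote_mode (QImode, &unsignedp, 1);

  // Prints "promoted to 4-byte mode, zero extended: 1".
  printf ("promoted to %d-byte mode, zero extended: %d\n",
          (int) promoted, unsignedp);
  return 0;
}
*/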
75dc3319 RK |
5083 | /* Indicate whether REGNO is an incoming argument to the current function |
5084 | that was promoted to a wider mode. If so, return the RTX for the | |
5085 | register (to get its mode). PMODE and PUNSIGNEDP are set to the mode | |
5086 | that REGNO is promoted from and whether the promotion was signed or | |
5087 | unsigned. */ | |
5088 | ||
5089 | #ifdef PROMOTE_FUNCTION_ARGS | |
5090 | ||
5091 | rtx | |
5092 | promoted_input_arg (regno, pmode, punsignedp) | |
770ae6cc | 5093 | unsigned int regno; |
75dc3319 RK |
5094 | enum machine_mode *pmode; |
5095 | int *punsignedp; | |
5096 | { | |
5097 | tree arg; | |
5098 | ||
5099 | for (arg = DECL_ARGUMENTS (current_function_decl); arg; | |
5100 | arg = TREE_CHAIN (arg)) | |
5101 | if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG | |
621061f4 RK |
5102 | && REGNO (DECL_INCOMING_RTL (arg)) == regno |
5103 | && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg))) | |
75dc3319 RK |
5104 | { |
5105 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg)); | |
5106 | int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg)); | |
5107 | ||
a5a52dbc | 5108 | mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1); |
75dc3319 RK |
5109 | if (mode == GET_MODE (DECL_INCOMING_RTL (arg)) |
5110 | && mode != DECL_MODE (arg)) | |
5111 | { | |
5112 | *pmode = DECL_MODE (arg); | |
5113 | *punsignedp = unsignedp; | |
5114 | return DECL_INCOMING_RTL (arg); | |
5115 | } | |
5116 | } | |
5117 | ||
5118 | return 0; | |
5119 | } | |
5120 | ||
5121 | #endif | |
5122 | \f | |
6f086dfc RS |
5123 | /* Compute the size and offset from the start of the stacked arguments for a |
5124 | parm passed in mode PASSED_MODE and with type TYPE. | |
5125 | ||
5126 | INITIAL_OFFSET_PTR points to the current offset into the stacked | |
5127 | arguments. | |
5128 | ||
5129 | The starting offset and size for this parm are returned in *OFFSET_PTR | |
5130 | and *ARG_SIZE_PTR, respectively. | |
5131 | ||
5132 | IN_REGS is non-zero if the argument will be passed in registers. It will | |
5133 | never be set if REG_PARM_STACK_SPACE is not defined. | |
5134 | ||
5135 | FNDECL is the function in which the argument was defined. | |
5136 | ||
5137 | There are two types of rounding that are done. The first, controlled by | |
5138 | FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument | |
5139 | list to be aligned to the specific boundary (in bits). This rounding | |
5140 | affects the initial and starting offsets, but not the argument size. | |
5141 | ||
5142 | The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY, | |
5143 | optionally rounds the size of the parm to PARM_BOUNDARY. The | |
5144 | initial offset is not affected by this rounding, while the size always | |
5145 | is and the starting offset may be. */ | |
5146 | ||
718fe406 | 5147 | /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
6f086dfc RS |
5148 | initial_offset_ptr is positive because locate_and_pad_parm's |
5149 | callers pass in the total size of args so far as | |
5150 | initial_offset_ptr. arg_size_ptr is always positive. */
5151 | ||
6f086dfc RS |
5152 | void |
5153 | locate_and_pad_parm (passed_mode, type, in_regs, fndecl, | |
4fc026cd | 5154 | initial_offset_ptr, offset_ptr, arg_size_ptr, |
718fe406 | 5155 | alignment_pad) |
6f086dfc RS |
5156 | enum machine_mode passed_mode; |
5157 | tree type; | |
57bed152 | 5158 | int in_regs ATTRIBUTE_UNUSED; |
91813b28 | 5159 | tree fndecl ATTRIBUTE_UNUSED; |
6f086dfc RS |
5160 | struct args_size *initial_offset_ptr; |
5161 | struct args_size *offset_ptr; | |
5162 | struct args_size *arg_size_ptr; | |
4fc026cd CM |
5163 | struct args_size *alignment_pad; |
5164 | ||
6f086dfc RS |
5165 | { |
5166 | tree sizetree | |
5167 | = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode)); | |
5168 | enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type); | |
5169 | int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type); | |
6f086dfc RS |
5170 | |
5171 | #ifdef REG_PARM_STACK_SPACE | |
5172 | /* If we have found a stack parm before we reach the end of the | |
5173 | area reserved for registers, skip that area. */ | |
5174 | if (! in_regs) | |
5175 | { | |
29a82058 JL |
5176 | int reg_parm_stack_space = 0; |
5177 | ||
29008b51 JW |
5178 | #ifdef MAYBE_REG_PARM_STACK_SPACE |
5179 | reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE; | |
5180 | #else | |
6f086dfc | 5181 | reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl); |
29008b51 | 5182 | #endif |
6f086dfc RS |
5183 | if (reg_parm_stack_space > 0) |
5184 | { | |
5185 | if (initial_offset_ptr->var) | |
5186 | { | |
5187 | initial_offset_ptr->var | |
5188 | = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr), | |
fed3cef0 | 5189 | ssize_int (reg_parm_stack_space)); |
6f086dfc RS |
5190 | initial_offset_ptr->constant = 0; |
5191 | } | |
5192 | else if (initial_offset_ptr->constant < reg_parm_stack_space) | |
5193 | initial_offset_ptr->constant = reg_parm_stack_space; | |
5194 | } | |
5195 | } | |
5196 | #endif /* REG_PARM_STACK_SPACE */ | |
5197 | ||
5198 | arg_size_ptr->var = 0; | |
5199 | arg_size_ptr->constant = 0; | |
23ccfa6d RH |
5200 | alignment_pad->var = 0; |
5201 | alignment_pad->constant = 0; | |
6f086dfc RS |
5202 | |
5203 | #ifdef ARGS_GROW_DOWNWARD | |
5204 | if (initial_offset_ptr->var) | |
5205 | { | |
5206 | offset_ptr->constant = 0; | |
fed3cef0 | 5207 | offset_ptr->var = size_binop (MINUS_EXPR, ssize_int (0), |
6f086dfc RS |
5208 | initial_offset_ptr->var); |
5209 | } | |
5210 | else | |
5211 | { | |
718fe406 | 5212 | offset_ptr->constant = -initial_offset_ptr->constant; |
6f086dfc RS |
5213 | offset_ptr->var = 0; |
5214 | } | |
0b21dcf5 | 5215 | if (where_pad != none |
1468899d RK |
5216 | && (!host_integerp (sizetree, 1) |
5217 | || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY)) | |
6f086dfc RS |
5218 | sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT); |
5219 | SUB_PARM_SIZE (*offset_ptr, sizetree); | |
66bcbe19 | 5220 | if (where_pad != downward) |
4fc026cd | 5221 | pad_to_arg_alignment (offset_ptr, boundary, alignment_pad); |
6f086dfc | 5222 | if (initial_offset_ptr->var) |
fed3cef0 RK |
5223 | arg_size_ptr->var = size_binop (MINUS_EXPR, |
5224 | size_binop (MINUS_EXPR, | |
5225 | ssize_int (0), | |
5226 | initial_offset_ptr->var), | |
5227 | offset_ptr->var); | |
5228 | ||
6f086dfc | 5229 | else |
718fe406 KH |
5230 | arg_size_ptr->constant = (-initial_offset_ptr->constant |
5231 | - offset_ptr->constant); | |
fed3cef0 | 5232 | |
6f086dfc | 5233 | #else /* !ARGS_GROW_DOWNWARD */ |
832ea3b3 FS |
5234 | if (!in_regs |
5235 | #ifdef REG_PARM_STACK_SPACE | |
5236 | || REG_PARM_STACK_SPACE (fndecl) > 0 | |
5237 | #endif | |
5238 | ) | |
5239 | pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad); | |
6f086dfc | 5240 | *offset_ptr = *initial_offset_ptr; |
6f086dfc RS |
5241 | |
5242 | #ifdef PUSH_ROUNDING | |
5243 | if (passed_mode != BLKmode) | |
5244 | sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree))); | |
5245 | #endif | |
5246 | ||
d4b0a7a0 DE |
5247 | /* Pad_below needs the pre-rounded size to know how much to pad below |
5248 | so this must be done before rounding up. */ | |
ea5917da DE |
5249 | if (where_pad == downward |
5250 | /* However, BLKmode args passed in regs have their padding done elsewhere. | |
5251 | The stack slot must be able to hold the entire register. */ | |
5252 | && !(in_regs && passed_mode == BLKmode)) | |
dc56ceb7 | 5253 | pad_below (offset_ptr, passed_mode, sizetree); |
d4b0a7a0 | 5254 | |
6f086dfc | 5255 | if (where_pad != none |
1468899d RK |
5256 | && (!host_integerp (sizetree, 1) |
5257 | || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY)) | |
6f086dfc RS |
5258 | sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT); |
5259 | ||
5260 | ADD_PARM_SIZE (*arg_size_ptr, sizetree); | |
5261 | #endif /* ARGS_GROW_DOWNWARD */ | |
5262 | } | |
5263 | ||
e16c591a RS |
5264 | /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY. |
5265 | BOUNDARY is measured in bits, but must be a multiple of a storage unit. */ | |
5266 | ||
6f086dfc | 5267 | static void |
4fc026cd | 5268 | pad_to_arg_alignment (offset_ptr, boundary, alignment_pad) |
6f086dfc RS |
5269 | struct args_size *offset_ptr; |
5270 | int boundary; | |
4fc026cd | 5271 | struct args_size *alignment_pad; |
6f086dfc | 5272 | { |
a544cfd2 KG |
5273 | tree save_var = NULL_TREE; |
5274 | HOST_WIDE_INT save_constant = 0; | |
4fc026cd | 5275 | |
6f086dfc | 5276 | int boundary_in_bytes = boundary / BITS_PER_UNIT; |
718fe406 | 5277 | |
9399d5c6 | 5278 | if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY) |
4fc026cd CM |
5279 | { |
5280 | save_var = offset_ptr->var; | |
5281 | save_constant = offset_ptr->constant; | |
5282 | } | |
5283 | ||
5284 | alignment_pad->var = NULL_TREE; | |
5285 | alignment_pad->constant = 0; | |
4fc026cd | 5286 | |
6f086dfc RS |
5287 | if (boundary > BITS_PER_UNIT) |
5288 | { | |
5289 | if (offset_ptr->var) | |
5290 | { | |
718fe406 | 5291 | offset_ptr->var = |
6f086dfc | 5292 | #ifdef ARGS_GROW_DOWNWARD |
718fe406 | 5293 | round_down |
6f086dfc RS |
5294 | #else |
5295 | round_up | |
5296 | #endif | |
5297 | (ARGS_SIZE_TREE (*offset_ptr), | |
5298 | boundary / BITS_PER_UNIT); | |
5299 | offset_ptr->constant = 0; /*?*/ | |
9399d5c6 | 5300 | if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY) |
fed3cef0 RK |
5301 | alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var, |
5302 | save_var); | |
6f086dfc RS |
5303 | } |
5304 | else | |
718fe406 | 5305 | { |
fbb57b2a | 5306 | offset_ptr->constant = |
6f086dfc | 5307 | #ifdef ARGS_GROW_DOWNWARD |
fbb57b2a | 5308 | FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes); |
6f086dfc | 5309 | #else |
fbb57b2a | 5310 | CEIL_ROUND (offset_ptr->constant, boundary_in_bytes); |
6f086dfc | 5311 | #endif |
718fe406 KH |
5312 | if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY) |
5313 | alignment_pad->constant = offset_ptr->constant - save_constant; | |
5314 | } | |
6f086dfc RS |
5315 | } |
5316 | } | |
5317 | ||
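
/* The constant case of pad_to_arg_alignment above rounds an offset up
   to the boundary when arguments grow upward, and down when
   ARGS_GROW_DOWNWARD (offsets there are negative).  A stand-alone
   sketch follows; the bit-mask macros assume a power-of-two boundary
   and two's complement, and are illustrative rather than copied from
   GCC's headers.

#include <stdio.h>

#define CEIL_ROUND(x, b)  (((x) + (b) - 1) & ~((b) - 1))
#define FLOOR_ROUND(x, b) ((x) & ~((b) - 1))

int
main ()
{
  // A 13-byte offset against an 8-byte boundary.
  printf ("up:   %d\n", CEIL_ROUND (13, 8));    // 16
  printf ("down: %d\n", FLOOR_ROUND (-13, 8));  // -16
  return 0;
}
*/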
51723711 | 5318 | #ifndef ARGS_GROW_DOWNWARD |
6f086dfc RS |
5319 | static void |
5320 | pad_below (offset_ptr, passed_mode, sizetree) | |
5321 | struct args_size *offset_ptr; | |
5322 | enum machine_mode passed_mode; | |
5323 | tree sizetree; | |
5324 | { | |
5325 | if (passed_mode != BLKmode) | |
5326 | { | |
5327 | if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY) | |
5328 | offset_ptr->constant | |
5329 | += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1) | |
5330 | / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT) | |
5331 | - GET_MODE_SIZE (passed_mode)); | |
5332 | } | |
5333 | else | |
5334 | { | |
5335 | if (TREE_CODE (sizetree) != INTEGER_CST | |
5336 | || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY) | |
5337 | { | |
5338 | /* Round the size up to multiple of PARM_BOUNDARY bits. */ | |
5339 | tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT); | |
5340 | /* Add it in. */ | |
5341 | ADD_PARM_SIZE (*offset_ptr, s2); | |
5342 | SUB_PARM_SIZE (*offset_ptr, sizetree); | |
5343 | } | |
5344 | } | |
5345 | } | |
51723711 | 5346 | #endif |
6f086dfc RS |
5347 | \f |
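
/* For a non-BLKmode value padded downward, pad_below above advances the
   offset by the gap between the mode's size rounded up to PARM_BOUNDARY
   and its true size.  A worked stand-alone example follows, using an
   invented 32-bit mode and a 64-bit PARM_BOUNDARY rather than any
   particular target's values.

#include <stdio.h>

int
main ()
{
  int parm_boundary = 64;             // bits
  int bits_per_unit = 8;
  int mode_bitsize = 32;              // e.g. a 4-byte argument
  int mode_size = mode_bitsize / bits_per_unit;
  int pad = 0;

  if (mode_bitsize % parm_boundary)
    pad = ((mode_bitsize + parm_boundary - 1)
           / parm_boundary * parm_boundary / bits_per_unit)
          - mode_size;

  printf ("pad below: %d bytes\n", pad);   // 8 - 4 = 4
  return 0;
}
*/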
5348 | /* Walk the tree of blocks describing the binding levels within a function | |
5349 | and warn about uninitialized variables. | |
5350 | This is done after calling flow_analysis and before global_alloc | |
5351 | clobbers the pseudo-regs to hard regs. */ | |
5352 | ||
5353 | void | |
5354 | uninitialized_vars_warning (block) | |
5355 | tree block; | |
5356 | { | |
b3694847 | 5357 | tree decl, sub; |
6f086dfc RS |
5358 | for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl)) |
5359 | { | |
8fbe1035 ML |
5360 | if (warn_uninitialized |
5361 | && TREE_CODE (decl) == VAR_DECL | |
6f086dfc RS |
5362 | /* These warnings are unreliable for aggregates
5363 | because assigning the fields one by one can fail to convince | |
5364 | flow.c that the entire aggregate was initialized. | |
5365 | Unions are troublesome because members may be shorter. */ | |
05e3bdb9 | 5366 | && ! AGGREGATE_TYPE_P (TREE_TYPE (decl)) |
6f086dfc RS |
5367 | && DECL_RTL (decl) != 0 |
5368 | && GET_CODE (DECL_RTL (decl)) == REG | |
6acdd0fd JL |
5369 | /* Global optimizations can make it difficult to determine if a |
5370 | particular variable has been initialized. However, a VAR_DECL | |
5371 | with a nonzero DECL_INITIAL had an initializer, so do not | |
5372 | claim it is potentially uninitialized. | |
5373 | ||
5374 | We do not care about the actual value in DECL_INITIAL, so we do | |
5375 | not worry that it may be a dangling pointer. */ | |
5376 | && DECL_INITIAL (decl) == NULL_TREE | |
6f086dfc RS |
5377 | && regno_uninitialized (REGNO (DECL_RTL (decl)))) |
5378 | warning_with_decl (decl, | |
3c8cd8bd | 5379 | "`%s' might be used uninitialized in this function"); |
8fbe1035 ML |
5380 | if (extra_warnings |
5381 | && TREE_CODE (decl) == VAR_DECL | |
6f086dfc RS |
5382 | && DECL_RTL (decl) != 0 |
5383 | && GET_CODE (DECL_RTL (decl)) == REG | |
5384 | && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl)))) | |
5385 | warning_with_decl (decl, | |
3c8cd8bd | 5386 | "variable `%s' might be clobbered by `longjmp' or `vfork'"); |
6f086dfc RS |
5387 | } |
5388 | for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub)) | |
5389 | uninitialized_vars_warning (sub); | |
5390 | } | |
5391 | ||
5392 | /* Do the appropriate part of uninitialized_vars_warning | |
5393 | but for arguments instead of local variables. */ | |
5394 | ||
5395 | void | |
0cd6ef35 | 5396 | setjmp_args_warning () |
6f086dfc | 5397 | { |
b3694847 | 5398 | tree decl; |
6f086dfc RS |
5399 | for (decl = DECL_ARGUMENTS (current_function_decl); |
5400 | decl; decl = TREE_CHAIN (decl)) | |
5401 | if (DECL_RTL (decl) != 0 | |
5402 | && GET_CODE (DECL_RTL (decl)) == REG | |
5403 | && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl)))) | |
718fe406 KH |
5404 | warning_with_decl (decl, |
5405 | "argument `%s' might be clobbered by `longjmp' or `vfork'"); | |
6f086dfc RS |
5406 | } |
5407 | ||
5408 | /* If this function calls setjmp, put all vars into the stack
5409 | unless they were declared `register'. */ | |
5410 | ||
5411 | void | |
5412 | setjmp_protect (block) | |
5413 | tree block; | |
5414 | { | |
b3694847 | 5415 | tree decl, sub; |
6f086dfc RS |
5416 | for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl)) |
5417 | if ((TREE_CODE (decl) == VAR_DECL | |
5418 | || TREE_CODE (decl) == PARM_DECL) | |
5419 | && DECL_RTL (decl) != 0 | |
e9a25f70 JL |
5420 | && (GET_CODE (DECL_RTL (decl)) == REG |
5421 | || (GET_CODE (DECL_RTL (decl)) == MEM | |
5422 | && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF)) | |
b335c2cc | 5423 | /* If this variable came from an inline function, it must be |
9ec36da5 | 5424 | that its life doesn't overlap the setjmp. If there was a |
b335c2cc TW |
5425 | setjmp in the function, it would already be in memory. We |
5426 | must exclude such variables because their DECL_RTL might be |
5427 | set to strange things such as virtual_stack_vars_rtx. */ | |
5428 | && ! DECL_FROM_INLINE (decl) | |
6f086dfc RS |
5429 | && ( |
5430 | #ifdef NON_SAVING_SETJMP | |
5431 | /* If longjmp doesn't restore the registers, | |
5432 | don't put anything in them. */ | |
5433 | NON_SAVING_SETJMP | |
5434 | || | |
5435 | #endif | |
a82ad570 | 5436 | ! DECL_REGISTER (decl))) |
6f086dfc RS |
5437 | put_var_into_stack (decl); |
5438 | for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub)) | |
5439 | setjmp_protect (sub); | |
5440 | } | |
5441 | \f | |
5442 | /* Like the previous function, but for args instead of local variables. */ | |
5443 | ||
5444 | void | |
5445 | setjmp_protect_args () | |
5446 | { | |
b3694847 | 5447 | tree decl; |
6f086dfc RS |
5448 | for (decl = DECL_ARGUMENTS (current_function_decl); |
5449 | decl; decl = TREE_CHAIN (decl)) | |
5450 | if ((TREE_CODE (decl) == VAR_DECL | |
5451 | || TREE_CODE (decl) == PARM_DECL) | |
5452 | && DECL_RTL (decl) != 0 | |
e9a25f70 JL |
5453 | && (GET_CODE (DECL_RTL (decl)) == REG |
5454 | || (GET_CODE (DECL_RTL (decl)) == MEM | |
5455 | && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF)) | |
6f086dfc RS |
5456 | && ( |
5457 | /* If longjmp doesn't restore the registers, | |
5458 | don't put anything in them. */ | |
5459 | #ifdef NON_SAVING_SETJMP | |
5460 | NON_SAVING_SETJMP | |
5461 | || | |
5462 | #endif | |
a82ad570 | 5463 | ! DECL_REGISTER (decl))) |
6f086dfc RS |
5464 | put_var_into_stack (decl); |
5465 | } | |
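/* Editorial illustration: the hazard the two routines above guard
   against.  C gives no guarantees about non-volatile automatics that
   changed between setjmp and longjmp while living in registers;
   forcing them into the stack frame is what makes their values
   survive.  A minimal sketch, example only.  */
#if 0
#include <setjmp.h>

static jmp_buf example_env;

int
example_setjmp_hazard (void)
{
  int n = 0;                   /* could otherwise live in a register */

  if (setjmp (example_env) == 0)
    {
      n = 1;                       /* modified after setjmp ...      */
      longjmp (example_env, 1);    /* ... so N is indeterminate when */
    }                              /* setjmp returns again, unless N */
  return n;                        /* was put into the stack frame.  */
}
#endif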
5466 | \f | |
5467 | /* Return the context-pointer register corresponding to DECL, | |
5468 | or 0 if it does not need one. */ | |
5469 | ||
5470 | rtx | |
5471 | lookup_static_chain (decl) | |
5472 | tree decl; | |
5473 | { | |
b001a02f PB |
5474 | tree context = decl_function_context (decl); |
5475 | tree link; | |
7ad8c4bf | 5476 | |
38ee6ed9 JM |
5477 | if (context == 0 |
5478 | || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl))) | |
7ad8c4bf | 5479 | return 0; |
38ee6ed9 | 5480 | |
6f086dfc RS |
5481 | /* We treat inline_function_decl as an alias for the current function |
5482 | because that is the inline function whose vars, types, etc. | |
5483 | are being merged into the current function. | |
5484 | See expand_inline_function. */ | |
5485 | if (context == current_function_decl || context == inline_function_decl) | |
5486 | return virtual_stack_vars_rtx; | |
5487 | ||
5488 | for (link = context_display; link; link = TREE_CHAIN (link)) | |
5489 | if (TREE_PURPOSE (link) == context) | |
5490 | return RTL_EXPR_RTL (TREE_VALUE (link)); | |
5491 | ||
5492 | abort (); | |
5493 | } | |
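/* Editorial illustration: the source-level situation that needs a
   static chain, written with the GNU C nested-function extension.
   INNER reaches OUTER's local X through the chain pointer that
   lookup_static_chain materializes.  Example only, not part of
   function.c.  */
#if 0
int
example_outer (int x)
{
  int inner (int y)            /* nested function */
  {
    return x + y;              /* X is reached via the static chain */
  }

  return inner (1);
}
#endif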
5494 | \f | |
5495 | /* Convert a stack slot address ADDR for variable VAR | |
5496 | (from a containing function) | |
5497 | into an address valid in this function (using a static chain). */ | |
5498 | ||
5499 | rtx | |
5500 | fix_lexical_addr (addr, var) | |
5501 | rtx addr; | |
5502 | tree var; | |
5503 | { | |
5504 | rtx basereg; | |
e5e809f4 | 5505 | HOST_WIDE_INT displacement; |
6f086dfc RS |
5506 | tree context = decl_function_context (var); |
5507 | struct function *fp; | |
5508 | rtx base = 0; | |
5509 | ||
5510 | /* If this is the present function, we need not do anything. */ | |
5511 | if (context == current_function_decl || context == inline_function_decl) | |
5512 | return addr; | |
5513 | ||
eb3ae3e1 | 5514 | fp = find_function_data (context); |
6f086dfc | 5515 | |
e9a25f70 JL |
5516 | if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM) |
5517 | addr = XEXP (XEXP (addr, 0), 0); | |
5518 | ||
6f086dfc RS |
5519 | /* Decode given address as base reg plus displacement. */ |
5520 | if (GET_CODE (addr) == REG) | |
5521 | basereg = addr, displacement = 0; | |
5522 | else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT) | |
5523 | basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1)); | |
5524 | else | |
5525 | abort (); | |
5526 | ||
5527 | /* We accept vars reached via the containing function's | |
5528 | incoming arg pointer and via its stack variables pointer. */ | |
5529 | if (basereg == fp->internal_arg_pointer) | |
5530 | { | |
5531 | /* If reached via arg pointer, get the arg pointer value | |
5532 | out of that function's stack frame. | |
5533 | ||
5534 | There are two cases: If a separate ap is needed, allocate a | |
5535 | slot in the outer function for it and dereference it that way. | |
5536 | This is correct even if the real ap is actually a pseudo. | |
5537 | Otherwise, just adjust the offset from the frame pointer to | |
5538 | compensate. */ | |
5539 | ||
5540 | #ifdef NEED_SEPARATE_AP | |
5541 | rtx addr; | |
5542 | ||
278ed218 RH |
5543 | addr = get_arg_pointer_save_area (fp); |
5544 | addr = fix_lexical_addr (XEXP (addr, 0), var); | |
6f086dfc RS |
5545 | addr = memory_address (Pmode, addr); |
5546 | ||
3bdf5ad1 | 5547 | base = gen_rtx_MEM (Pmode, addr); |
6a1d250e | 5548 | set_mem_alias_set (base, get_frame_alias_set ()); |
3bdf5ad1 | 5549 | base = copy_to_reg (base); |
6f086dfc RS |
5550 | #else |
5551 | displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET); | |
86f8eff3 | 5552 | base = lookup_static_chain (var); |
6f086dfc RS |
5553 | #endif |
5554 | } | |
5555 | ||
5556 | else if (basereg == virtual_stack_vars_rtx) | |
5557 | { | |
5558 | /* This is the same code as lookup_static_chain, duplicated here to | |
5559 | avoid an extra call to decl_function_context. */ | |
5560 | tree link; | |
5561 | ||
5562 | for (link = context_display; link; link = TREE_CHAIN (link)) | |
5563 | if (TREE_PURPOSE (link) == context) | |
5564 | { | |
5565 | base = RTL_EXPR_RTL (TREE_VALUE (link)); | |
5566 | break; | |
5567 | } | |
5568 | } | |
5569 | ||
5570 | if (base == 0) | |
5571 | abort (); | |
5572 | ||
5573 | /* Use same offset, relative to appropriate static chain or argument | |
5574 | pointer. */ | |
5575 | return plus_constant (base, displacement); | |
5576 | } | |
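/* Editorial illustration: the rebasing above is plain base-plus-
   displacement arithmetic.  An address BASE+DISP in the containing
   function's frame becomes NEW_BASE+DISP once NEW_BASE (the static
   chain value or the saved arg pointer) is known; DISP itself is
   unchanged, apart from the FIRST_PARM_OFFSET adjustment in the
   non-NEED_SEPARATE_AP case.  A sketch with hypothetical names,
   example only.  */
#if 0
static char *
example_rebase (char *new_base, long disp)
{
  return new_base + disp;      /* same displacement, new frame base */
}
#endif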
5577 | \f | |
5578 | /* Return the address of the trampoline for entering nested fn FUNCTION. | |
5579 | If necessary, allocate a trampoline (in the stack frame) | |
5580 | and emit rtl to initialize its contents (at entry to this function). */ | |
5581 | ||
5582 | rtx | |
5583 | trampoline_address (function) | |
5584 | tree function; | |
5585 | { | |
5586 | tree link; | |
5587 | tree rtlexp; | |
5588 | rtx tramp; | |
5589 | struct function *fp; | |
5590 | tree fn_context; | |
5591 | ||
5592 | /* Find an existing trampoline and return it. */ | |
5593 | for (link = trampoline_list; link; link = TREE_CHAIN (link)) | |
5594 | if (TREE_PURPOSE (link) == function) | |
e87ee2a9 | 5595 | return |
b33493e3 | 5596 | adjust_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0)); |
e87ee2a9 | 5597 | |
eb3ae3e1 | 5598 | for (fp = outer_function_chain; fp; fp = fp->outer) |
49ad7cfa | 5599 | for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link)) |
6f086dfc RS |
5600 | if (TREE_PURPOSE (link) == function) |
5601 | { | |
5602 | tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0), | |
5603 | function); | |
b33493e3 | 5604 | return adjust_trampoline_addr (tramp); |
6f086dfc RS |
5605 | } |
5606 | ||
5607 | /* None exists; we must make one. */ | |
5608 | ||
5609 | /* Find the `struct function' for the function containing FUNCTION. */ | |
5610 | fp = 0; | |
5611 | fn_context = decl_function_context (function); | |
4ac74fb8 RK |
5612 | if (fn_context != current_function_decl |
5613 | && fn_context != inline_function_decl) | |
eb3ae3e1 | 5614 | fp = find_function_data (fn_context); |
6f086dfc RS |
5615 | |
5616 | /* Allocate run-time space for this trampoline | |
5617 | (usually in the defining function's stack frame). */ | |
5618 | #ifdef ALLOCATE_TRAMPOLINE | |
5619 | tramp = ALLOCATE_TRAMPOLINE (fp); | |
5620 | #else | |
5621 | /* If rounding needed, allocate extra space | |
5622 | to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */ | |
5623 | #ifdef TRAMPOLINE_ALIGNMENT | |
b02ab63a RK |
5624 | #define TRAMPOLINE_REAL_SIZE \ |
5625 | (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1) | |
6f086dfc RS |
5626 | #else |
5627 | #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE) | |
5628 | #endif | |
e2ecd91c | 5629 | tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0, |
01d939e8 | 5630 | fp ? fp : cfun); |
6f086dfc RS |
5631 | #endif |
5632 | ||
5633 | /* Record the trampoline for reuse and note it for later initialization | |
5634 | by expand_function_end. */ | |
5635 | if (fp != 0) | |
5636 | { | |
6f086dfc RS |
5637 | rtlexp = make_node (RTL_EXPR); |
5638 | RTL_EXPR_RTL (rtlexp) = tramp; | |
49ad7cfa BS |
5639 | fp->x_trampoline_list = tree_cons (function, rtlexp, |
5640 | fp->x_trampoline_list); | |
6f086dfc RS |
5641 | } |
5642 | else | |
5643 | { | |
5644 | /* Make the RTL_EXPR node temporary, not momentary, so that the | |
5645 | trampoline_list doesn't become garbage. */ | |
6f086dfc | 5646 | rtlexp = make_node (RTL_EXPR); |
6f086dfc RS |
5647 | |
5648 | RTL_EXPR_RTL (rtlexp) = tramp; | |
5649 | trampoline_list = tree_cons (function, rtlexp, trampoline_list); | |
5650 | } | |
5651 | ||
5652 | tramp = fix_lexical_addr (XEXP (tramp, 0), function); | |
b33493e3 | 5653 | return adjust_trampoline_addr (tramp); |
6f086dfc RS |
5654 | } |
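/* Editorial illustration: what creates a trampoline in the first
   place.  Letting a nested function's address escape as an ordinary
   function pointer requires a small runtime stub that loads the
   static chain before jumping to the real code; trampoline_address
   returns the (rounded, adjusted) address of that stub.  GNU C
   sketch with hypothetical names, example only.  */
#if 0
static int
example_apply (int (*fn) (int), int arg)
{
  return fn (arg);             /* plain pointer, no chain parameter */
}

int
example_use_trampoline (int x)
{
  int add_x (int y) { return x + y; }   /* nested function */

  return example_apply (add_x, 2);      /* &add_x is a trampoline */
}
#endif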
5655 | ||
5656 | /* Given a trampoline address, | |
5657 | round it to a multiple of TRAMPOLINE_ALIGNMENT. */ |
5658 | ||
5659 | static rtx | |
5660 | round_trampoline_addr (tramp) | |
5661 | rtx tramp; | |
5662 | { | |
5663 | #ifdef TRAMPOLINE_ALIGNMENT | |
5664 | /* Round address up to desired boundary. */ | |
5665 | rtx temp = gen_reg_rtx (Pmode); | |
ef89d648 ZW |
5666 | rtx addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1); |
5667 | rtx mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT); | |
5668 | ||
5669 | temp = expand_simple_binop (Pmode, PLUS, tramp, addend, | |
5670 | temp, 0, OPTAB_LIB_WIDEN); | |
5671 | tramp = expand_simple_binop (Pmode, AND, temp, mask, | |
5672 | temp, 0, OPTAB_LIB_WIDEN); | |
6f086dfc RS |
5673 | #endif |
5674 | return tramp; | |
5675 | } | |
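/* Editorial illustration: the PLUS/AND pair above is the usual idiom
   for rounding up to a power-of-two boundary,
   (ADDR + ALIGN-1) & -ALIGN.  For ALIGN = 8 and ADDR = 0x1003:
   0x1003 + 7 = 0x100a, and 0x100a & ~7 = 0x1008.  A standalone
   sketch, example only.  */
#if 0
static unsigned long
example_round_up (unsigned long addr, unsigned long align)
{
  /* ALIGN must be a power of two; -ALIGN equals the mask ~(ALIGN-1).  */
  return (addr + align - 1) & -align;
}
#endif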
b33493e3 AO |
5676 | |
5677 | /* Given a trampoline address, round it then apply any | |
5678 | platform-specific adjustments so that the result can be used for a | |
30f7a378 | 5679 | function call . */ |
b33493e3 AO |
5680 | |
5681 | static rtx | |
5682 | adjust_trampoline_addr (tramp) | |
5683 | rtx tramp; | |
5684 | { | |
5685 | tramp = round_trampoline_addr (tramp); | |
5686 | #ifdef TRAMPOLINE_ADJUST_ADDRESS | |
5687 | TRAMPOLINE_ADJUST_ADDRESS (tramp); | |
5688 | #endif | |
5689 | return tramp; | |
5690 | } | |
6f086dfc | 5691 | \f |
b2a59b15 MS |
5692 | /* Walk all this function's BLOCK nodes, including those chained |
5693 | onto the first block, in depth-first order, and store in each |
5694 | NOTE for the beginning or end of a block the BLOCK node that |
5695 | the note opens or closes.  The blocks are found from |
5696 | DECL_INITIAL (current_function_decl) and the insns from |
5697 | get_insns (); this function takes no arguments. */ |
5698 | ||
1a4450c7 | 5699 | void |
116eebd6 | 5700 | identify_blocks () |
467456d0 | 5701 | { |
fc289cd1 | 5702 | int n_blocks; |
0a1c58a2 | 5703 | tree *block_vector, *last_block_vector; |
1a4450c7 | 5704 | tree *block_stack; |
116eebd6 | 5705 | tree block = DECL_INITIAL (current_function_decl); |
467456d0 | 5706 | |
b2a59b15 | 5707 | if (block == 0) |
1a4450c7 | 5708 | return; |
fc289cd1 | 5709 | |
1a4450c7 MM |
5710 | /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in |
5711 | depth-first order. */ | |
18c038b9 | 5712 | block_vector = get_block_vector (block, &n_blocks); |
4da896b2 | 5713 | block_stack = (tree *) xmalloc (n_blocks * sizeof (tree)); |
1a4450c7 | 5714 | |
718fe406 | 5715 | last_block_vector = identify_blocks_1 (get_insns (), |
116eebd6 | 5716 | block_vector + 1, |
718fe406 | 5717 | block_vector + n_blocks, |
116eebd6 | 5718 | block_stack); |
0a1c58a2 JL |
5719 | |
5720 | /* If we didn't use all of the subblocks, we've misplaced block notes. */ | |
a84efb51 JO |
5721 | /* ??? This appears to happen all the time. Latent bugs elsewhere? */ |
5722 | if (0 && last_block_vector != block_vector + n_blocks) | |
0a1c58a2 JL |
5723 | abort (); |
5724 | ||
5725 | free (block_vector); | |
5726 | free (block_stack); | |
5727 | } | |
5728 | ||
5729 | /* Subroutine of identify_blocks. Do the block substitution on the | |
5730 | insn chain beginning with INSNS. Recurse for CALL_PLACEHOLDER chains. | |
5731 | ||
5732 | BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair. | |
5733 | BLOCK_VECTOR is incremented for each block seen. */ | |
5734 | ||
5735 | static tree * | |
5736 | identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack) | |
5737 | rtx insns; | |
5738 | tree *block_vector; | |
5739 | tree *end_block_vector; | |
5740 | tree *orig_block_stack; | |
5741 | { | |
5742 | rtx insn; | |
5743 | tree *block_stack = orig_block_stack; | |
5744 | ||
467456d0 | 5745 | for (insn = insns; insn; insn = NEXT_INSN (insn)) |
0a1c58a2 JL |
5746 | { |
5747 | if (GET_CODE (insn) == NOTE) | |
5748 | { | |
5749 | if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG) | |
5750 | { | |
5751 | tree b; | |
1a4450c7 | 5752 | |
0a1c58a2 JL |
5753 | /* If there are more block notes than BLOCKs, something |
5754 | is badly wrong. */ | |
5755 | if (block_vector == end_block_vector) | |
5756 | abort (); | |
e6fd097e | 5757 | |
0a1c58a2 JL |
5758 | b = *block_vector++; |
5759 | NOTE_BLOCK (insn) = b; | |
5760 | *block_stack++ = b; | |
5761 | } | |
5762 | else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END) | |
5763 | { | |
5764 | /* If there are more NOTE_INSN_BLOCK_ENDs than | |
5765 | NOTE_INSN_BLOCK_BEGs, something is badly wrong. */ | |
5766 | if (block_stack == orig_block_stack) | |
5767 | abort (); | |
e6fd097e | 5768 | |
0a1c58a2 JL |
5769 | NOTE_BLOCK (insn) = *--block_stack; |
5770 | } | |
718fe406 | 5771 | } |
0a1c58a2 JL |
5772 | else if (GET_CODE (insn) == CALL_INSN |
5773 | && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER) | |
5774 | { | |
5775 | rtx cp = PATTERN (insn); | |
5776 | ||
718fe406 KH |
5777 | block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector, |
5778 | end_block_vector, block_stack); | |
0a1c58a2 JL |
5779 | if (XEXP (cp, 1)) |
5780 | block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector, | |
5781 | end_block_vector, block_stack); | |
5782 | if (XEXP (cp, 2)) | |
5783 | block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector, | |
5784 | end_block_vector, block_stack); | |
5785 | } | |
5786 | } | |
467456d0 | 5787 | |
0a1c58a2 JL |
5788 | /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs, |
5789 | something is badly wrong. */ | |
5790 | if (block_stack != orig_block_stack) | |
5791 | abort (); | |
5792 | ||
5793 | return block_vector; | |
467456d0 RS |
5794 | } |
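/* Editorial illustration: the BEG/END pairing above is ordinary
   stack-based bracket matching -- push on each BLOCK_BEG note, pop
   on each BLOCK_END note, and an underflow or leftover entries mean
   the notes are unbalanced.  The same shape over a string of
   parentheses; example only.  */
#if 0
static int
example_balanced (const char *s)
{
  int depth = 0;

  for (; *s; s++)
    if (*s == '(')
      depth++;                 /* NOTE_INSN_BLOCK_BEG: push */
    else if (*s == ')' && depth-- == 0)
      return 0;                /* END with an empty stack */
  return depth == 0;           /* leftover BEGs also fail  */
}
#endif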
5795 | ||
a20612aa RH |
5796 | /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END}, |
5797 | and create duplicate blocks. */ | |
5798 | /* ??? Need an option to either create block fragments or to create | |
5799 | abstract origin duplicates of a source block. It really depends | |
5800 | on what optimization has been performed. */ | |
467456d0 | 5801 | |
116eebd6 MM |
5802 | void |
5803 | reorder_blocks () | |
467456d0 | 5804 | { |
116eebd6 | 5805 | tree block = DECL_INITIAL (current_function_decl); |
18c038b9 | 5806 | varray_type block_stack; |
467456d0 | 5807 | |
1a4450c7 | 5808 | if (block == NULL_TREE) |
116eebd6 | 5809 | return; |
fc289cd1 | 5810 | |
18c038b9 MM |
5811 | VARRAY_TREE_INIT (block_stack, 10, "block_stack"); |
5812 | ||
a20612aa RH |
5813 | /* Reset the TREE_ASM_WRITTEN bit for all blocks. */ |
5814 | reorder_blocks_0 (block); | |
5815 | ||
116eebd6 MM |
5816 | /* Prune the old trees away, so that they don't get in the way. */ |
5817 | BLOCK_SUBBLOCKS (block) = NULL_TREE; | |
5818 | BLOCK_CHAIN (block) = NULL_TREE; | |
fc289cd1 | 5819 | |
a20612aa | 5820 | /* Recreate the block tree from the note nesting. */ |
116eebd6 | 5821 | reorder_blocks_1 (get_insns (), block, &block_stack); |
718fe406 | 5822 | BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block)); |
18c038b9 | 5823 | |
a20612aa RH |
5824 | /* Remove deleted blocks from the block fragment chains. */ |
5825 | reorder_fix_fragments (block); | |
5826 | ||
18c038b9 | 5827 | VARRAY_FREE (block_stack); |
467456d0 RS |
5828 | } |
5829 | ||
a20612aa | 5830 | /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */ |
0a1c58a2 | 5831 | |
cc1fe44f | 5832 | static void |
a20612aa RH |
5833 | reorder_blocks_0 (block) |
5834 | tree block; | |
cc1fe44f | 5835 | { |
a20612aa | 5836 | while (block) |
cc1fe44f | 5837 | { |
a20612aa RH |
5838 | TREE_ASM_WRITTEN (block) = 0; |
5839 | reorder_blocks_0 (BLOCK_SUBBLOCKS (block)); | |
5840 | block = BLOCK_CHAIN (block); | |
cc1fe44f DD |
5841 | } |
5842 | } | |
5843 | ||
0a1c58a2 JL |
5844 | static void |
5845 | reorder_blocks_1 (insns, current_block, p_block_stack) | |
5846 | rtx insns; | |
5847 | tree current_block; | |
5848 | varray_type *p_block_stack; | |
5849 | { | |
5850 | rtx insn; | |
5851 | ||
5852 | for (insn = insns; insn; insn = NEXT_INSN (insn)) | |
5853 | { | |
5854 | if (GET_CODE (insn) == NOTE) | |
5855 | { | |
5856 | if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG) | |
5857 | { | |
5858 | tree block = NOTE_BLOCK (insn); | |
a20612aa RH |
5859 | |
5860 | /* If we have seen this block before, that means it now | |
5861 | spans multiple address regions. Create a new fragment. */ | |
0a1c58a2 JL |
5862 | if (TREE_ASM_WRITTEN (block)) |
5863 | { | |
a20612aa RH |
5864 | tree new_block = copy_node (block); |
5865 | tree origin; | |
5866 | ||
5867 | origin = (BLOCK_FRAGMENT_ORIGIN (block) | |
5868 | ? BLOCK_FRAGMENT_ORIGIN (block) | |
5869 | : block); | |
5870 | BLOCK_FRAGMENT_ORIGIN (new_block) = origin; | |
5871 | BLOCK_FRAGMENT_CHAIN (new_block) | |
5872 | = BLOCK_FRAGMENT_CHAIN (origin); | |
5873 | BLOCK_FRAGMENT_CHAIN (origin) = new_block; | |
5874 | ||
5875 | NOTE_BLOCK (insn) = new_block; | |
5876 | block = new_block; | |
0a1c58a2 | 5877 | } |
a20612aa | 5878 | |
0a1c58a2 JL |
5879 | BLOCK_SUBBLOCKS (block) = 0; |
5880 | TREE_ASM_WRITTEN (block) = 1; | |
718fe406 | 5881 | BLOCK_SUPERCONTEXT (block) = current_block; |
0a1c58a2 JL |
5882 | BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block); |
5883 | BLOCK_SUBBLOCKS (current_block) = block; | |
5884 | current_block = block; | |
5885 | VARRAY_PUSH_TREE (*p_block_stack, block); | |
5886 | } | |
5887 | else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END) | |
5888 | { | |
5889 | NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack); | |
5890 | VARRAY_POP (*p_block_stack); | |
5891 | BLOCK_SUBBLOCKS (current_block) | |
5892 | = blocks_nreverse (BLOCK_SUBBLOCKS (current_block)); | |
5893 | current_block = BLOCK_SUPERCONTEXT (current_block); | |
5894 | } | |
5895 | } | |
5896 | else if (GET_CODE (insn) == CALL_INSN | |
5897 | && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER) | |
5898 | { | |
5899 | rtx cp = PATTERN (insn); | |
5900 | reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack); | |
5901 | if (XEXP (cp, 1)) | |
5902 | reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack); | |
5903 | if (XEXP (cp, 2)) | |
5904 | reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack); | |
5905 | } | |
5906 | } | |
5907 | } | |
5908 | ||
a20612aa RH |
5909 | /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer |
5910 | appears in the block tree, select one of the fragments to become | |
5911 | the new origin block. */ | |
5912 | ||
5913 | static void | |
5914 | reorder_fix_fragments (block) | |
5915 | tree block; | |
5916 | { | |
5917 | while (block) | |
5918 | { | |
5919 | tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block); | |
5920 | tree new_origin = NULL_TREE; | |
5921 | ||
5922 | if (dup_origin) | |
5923 | { | |
5924 | if (! TREE_ASM_WRITTEN (dup_origin)) | |
5925 | { | |
5926 | new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin); | |
5927 | ||
5928 | /* Find the first of the remaining fragments. There must | |
5929 | be at least one -- the current block. */ | |
5930 | while (! TREE_ASM_WRITTEN (new_origin)) | |
5931 | new_origin = BLOCK_FRAGMENT_CHAIN (new_origin); | |
5932 | BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE; | |
5933 | } | |
5934 | } | |
5935 | else if (! dup_origin) | |
5936 | new_origin = block; | |
5937 | ||
5938 | /* Re-root the rest of the fragments to the new origin. In the | |
5939 | case that DUP_ORIGIN was null, that means BLOCK was the origin | |
5940 | of a chain of fragments and we want to remove those fragments | |
5941 | that didn't make it to the output. */ | |
5942 | if (new_origin) | |
5943 | { | |
5944 | tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin); | |
5945 | tree chain = *pp; | |
5946 | ||
5947 | while (chain) | |
5948 | { | |
5949 | if (TREE_ASM_WRITTEN (chain)) | |
5950 | { | |
5951 | BLOCK_FRAGMENT_ORIGIN (chain) = new_origin; | |
5952 | *pp = chain; | |
5953 | pp = &BLOCK_FRAGMENT_CHAIN (chain); | |
5954 | } | |
5955 | chain = BLOCK_FRAGMENT_CHAIN (chain); | |
5956 | } | |
5957 | *pp = NULL_TREE; | |
5958 | } | |
5959 | ||
5960 | reorder_fix_fragments (BLOCK_SUBBLOCKS (block)); | |
5961 | block = BLOCK_CHAIN (block); | |
5962 | } | |
5963 | } | |
5964 | ||
467456d0 RS |
5965 | /* Reverse the order of elements in the chain T of blocks, |
5966 | and return the new head of the chain (old last element). */ | |
5967 | ||
5968 | static tree | |
5969 | blocks_nreverse (t) | |
5970 | tree t; | |
5971 | { | |
b3694847 | 5972 | tree prev = 0, decl, next; |
467456d0 RS |
5973 | for (decl = t; decl; decl = next) |
5974 | { | |
5975 | next = BLOCK_CHAIN (decl); | |
5976 | BLOCK_CHAIN (decl) = prev; | |
5977 | prev = decl; | |
5978 | } | |
5979 | return prev; | |
5980 | } | |
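/* Editorial illustration: the loop above is the classic three-pointer
   in-place reversal of a singly linked list, applied to BLOCK_CHAIN
   links.  The same shape on a generic node type; example only.  */
#if 0
struct example_node { struct example_node *next; };

static struct example_node *
example_nreverse (struct example_node *t)
{
  struct example_node *prev = 0, *next;

  for (; t; t = next)
    {
      next = t->next;          /* save the rest of the chain */
      t->next = prev;          /* point this node backwards  */
      prev = t;
    }
  return prev;                 /* old tail is the new head   */
}
#endif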
5981 | ||
18c038b9 MM |
5982 | /* Count the subblocks of the list starting with BLOCK. If VECTOR is |
5983 | non-NULL, list them all into VECTOR, in a depth-first preorder | |
5984 | traversal of the block tree. Also clear TREE_ASM_WRITTEN in all | |
b2a59b15 | 5985 | blocks. */ |
467456d0 RS |
5986 | |
5987 | static int | |
a84efb51 JO |
5988 | all_blocks (block, vector) |
5989 | tree block; | |
467456d0 RS |
5990 | tree *vector; |
5991 | { | |
b2a59b15 MS |
5992 | int n_blocks = 0; |
5993 | ||
a84efb51 JO |
5994 | while (block) |
5995 | { | |
5996 | TREE_ASM_WRITTEN (block) = 0; | |
b2a59b15 | 5997 | |
a84efb51 JO |
5998 | /* Record this block. */ |
5999 | if (vector) | |
6000 | vector[n_blocks] = block; | |
b2a59b15 | 6001 | |
a84efb51 | 6002 | ++n_blocks; |
718fe406 | 6003 | |
a84efb51 JO |
6004 | /* Record the subblocks, and their subblocks... */ |
6005 | n_blocks += all_blocks (BLOCK_SUBBLOCKS (block), | |
6006 | vector ? vector + n_blocks : 0); | |
6007 | block = BLOCK_CHAIN (block); | |
6008 | } | |
467456d0 RS |
6009 | |
6010 | return n_blocks; | |
6011 | } | |
18c038b9 MM |
6012 | |
6013 | /* Return a vector containing all the blocks rooted at BLOCK. The | |
6014 | number of elements in the vector is stored in N_BLOCKS_P. The | |
6015 | vector is dynamically allocated; it is the caller's responsibility | |
6016 | to call `free' on the pointer returned. */ | |
718fe406 | 6017 | |
18c038b9 MM |
6018 | static tree * |
6019 | get_block_vector (block, n_blocks_p) | |
6020 | tree block; | |
6021 | int *n_blocks_p; | |
6022 | { | |
6023 | tree *block_vector; | |
6024 | ||
6025 | *n_blocks_p = all_blocks (block, NULL); | |
6026 | block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree)); | |
6027 | all_blocks (block, block_vector); | |
6028 | ||
6029 | return block_vector; | |
6030 | } | |
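/* Editorial illustration: get_block_vector is the common two-pass
   idiom for flattening a tree of unknown size -- one traversal with
   a null buffer just to count, one allocation, then an identical
   traversal to fill.  The same shape on a generic child/sibling
   tree; example only.  */
#if 0
struct example_tree { struct example_tree *kid, *sib; };

static int
example_preorder (struct example_tree *t, struct example_tree **vec)
{
  int n = 0;

  for (; t; t = t->sib)
    {
      if (vec)
        vec[n] = t;            /* filling pass: record the node */
      ++n;
      n += example_preorder (t->kid, vec ? vec + n : 0);
    }
  return n;                    /* counting pass: just the total */
}
#endif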
6031 | ||
6032 | static int next_block_index = 2; | |
6033 | ||
6034 | /* Set BLOCK_NUMBER for all the blocks in FN. */ | |
6035 | ||
6036 | void | |
6037 | number_blocks (fn) | |
6038 | tree fn; | |
6039 | { | |
6040 | int i; | |
6041 | int n_blocks; | |
6042 | tree *block_vector; | |
6043 | ||
6044 | /* For SDB and XCOFF debugging output, we start numbering the blocks | |
6045 | from 1 within each function, rather than keeping a running | |
6046 | count. */ | |
6047 | #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO) | |
b0e3a658 RK |
6048 | if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG) |
6049 | next_block_index = 1; | |
18c038b9 MM |
6050 | #endif |
6051 | ||
6052 | block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks); | |
6053 | ||
6054 | /* The top-level BLOCK isn't numbered at all. */ | |
6055 | for (i = 1; i < n_blocks; ++i) | |
6056 | /* We number the blocks from two. */ | |
6057 | BLOCK_NUMBER (block_vector[i]) = next_block_index++; | |
6058 | ||
6059 | free (block_vector); | |
6060 | ||
6061 | return; | |
6062 | } | |
467456d0 | 6063 | \f |
b384405b | 6064 | /* Allocate a function structure and reset its contents to the defaults. */ |
7a80cf9a | 6065 | |
b384405b BS |
6066 | static void |
6067 | prepare_function_start () | |
6f086dfc | 6068 | { |
7a80cf9a | 6069 | cfun = (struct function *) ggc_alloc_cleared (sizeof (struct function)); |
e2ecd91c | 6070 | |
6f086dfc | 6071 | init_stmt_for_function (); |
fa51b01b | 6072 | init_eh_for_function (); |
6f086dfc RS |
6073 | |
6074 | cse_not_expected = ! optimize; | |
6075 | ||
6076 | /* Caller save not needed yet. */ | |
6077 | caller_save_needed = 0; | |
6078 | ||
6079 | /* No stack slots have been made yet. */ | |
6080 | stack_slot_list = 0; | |
6081 | ||
b384405b BS |
6082 | current_function_has_nonlocal_label = 0; |
6083 | current_function_has_nonlocal_goto = 0; | |
6084 | ||
6f086dfc | 6085 | /* There is no stack slot for handling nonlocal gotos. */ |
ba716ac9 | 6086 | nonlocal_goto_handler_slots = 0; |
6f086dfc RS |
6087 | nonlocal_goto_stack_level = 0; |
6088 | ||
6089 | /* No labels have been declared for nonlocal use. */ | |
6090 | nonlocal_labels = 0; | |
e881bb1b | 6091 | nonlocal_goto_handler_labels = 0; |
6f086dfc RS |
6092 | |
6093 | /* No function calls so far in this function. */ | |
6094 | function_call_count = 0; | |
6095 | ||
6096 | /* No parm regs have been allocated. | |
6097 | (This is important for output_inline_function.) */ | |
6098 | max_parm_reg = LAST_VIRTUAL_REGISTER + 1; | |
6099 | ||
6100 | /* Initialize the RTL mechanism. */ | |
6101 | init_emit (); | |
6102 | ||
6103 | /* Initialize the queue of pending postincrement and postdecrements, | |
6104 | and some other info in expr.c. */ | |
6105 | init_expr (); | |
718fe406 | 6106 | |
6f086dfc RS |
6107 | /* We haven't done register allocation yet. */ |
6108 | reg_renumber = 0; | |
6109 | ||
01d939e8 | 6110 | init_varasm_status (cfun); |
6f086dfc | 6111 | |
e2ecd91c | 6112 | /* Clear out data used for inlining. */ |
01d939e8 BS |
6113 | cfun->inlinable = 0; |
6114 | cfun->original_decl_initial = 0; | |
718fe406 | 6115 | cfun->original_arg_vector = 0; |
e2ecd91c | 6116 | |
c487e484 | 6117 | cfun->stack_alignment_needed = STACK_BOUNDARY; |
c2f8b491 | 6118 | cfun->preferred_stack_boundary = STACK_BOUNDARY; |
a0871656 | 6119 | |
6f086dfc RS |
6120 | /* Set if a call to setjmp is seen. */ |
6121 | current_function_calls_setjmp = 0; | |
6122 | ||
6123 | /* Set if a call to longjmp is seen. */ | |
6124 | current_function_calls_longjmp = 0; | |
6125 | ||
6126 | current_function_calls_alloca = 0; | |
6f086dfc | 6127 | current_function_contains_functions = 0; |
54ff41b7 | 6128 | current_function_is_leaf = 0; |
fb13d4d0 | 6129 | current_function_nothrow = 0; |
fdb8a883 | 6130 | current_function_sp_is_unchanging = 0; |
54ff41b7 | 6131 | current_function_uses_only_leaf_regs = 0; |
acd693d1 | 6132 | current_function_has_computed_jump = 0; |
173cd503 | 6133 | current_function_is_thunk = 0; |
6f086dfc RS |
6134 | |
6135 | current_function_returns_pcc_struct = 0; | |
6136 | current_function_returns_struct = 0; | |
6137 | current_function_epilogue_delay_list = 0; | |
6138 | current_function_uses_const_pool = 0; | |
6139 | current_function_uses_pic_offset_table = 0; | |
aeb302bb | 6140 | current_function_cannot_inline = 0; |
6f086dfc RS |
6141 | |
6142 | /* We have not yet needed to make a label to jump to for tail-recursion. */ | |
6143 | tail_recursion_label = 0; | |
6144 | ||
6145 | /* We haven't had a need to make a save area for ap yet. */ | |
6f086dfc RS |
6146 | arg_pointer_save_area = 0; |
6147 | ||
6148 | /* No stack slots allocated yet. */ | |
6149 | frame_offset = 0; | |
6150 | ||
6151 | /* No SAVE_EXPRs in this function yet. */ | |
6152 | save_expr_regs = 0; | |
6153 | ||
6154 | /* No RTL_EXPRs in this function yet. */ | |
6155 | rtl_expr_chain = 0; | |
6156 | ||
bc0ebdf9 RK |
6157 | /* Set up to allocate temporaries. */ |
6158 | init_temp_slots (); | |
6f086dfc | 6159 | |
b384405b BS |
6160 | /* Indicate that we need to distinguish between the return value of the |
6161 | present function and the return value of a function being called. */ | |
6162 | rtx_equal_function_value_matters = 1; | |
6163 | ||
6164 | /* Indicate that we have not instantiated virtual registers yet. */ | |
6165 | virtuals_instantiated = 0; | |
6166 | ||
1b3d8f8a GK |
6167 | /* Indicate that we want CONCATs now. */ |
6168 | generating_concat_p = 1; | |
6169 | ||
b384405b BS |
6170 | /* Indicate we have no need of a frame pointer yet. */ |
6171 | frame_pointer_needed = 0; | |
6172 | ||
6173 | /* By default assume not varargs or stdarg. */ | |
6174 | current_function_varargs = 0; | |
6175 | current_function_stdarg = 0; | |
6f086dfc | 6176 | |
d9a98e1a RK |
6177 | /* We haven't made any trampolines for this function yet. */ |
6178 | trampoline_list = 0; | |
6179 | ||
6f086dfc RS |
6180 | init_pending_stack_adjust (); |
6181 | inhibit_defer_pop = 0; | |
6182 | ||
6183 | current_function_outgoing_args_size = 0; | |
36edd3cc | 6184 | |
0a8a198c | 6185 | if (init_lang_status) |
01d939e8 | 6186 | (*init_lang_status) (cfun); |
36edd3cc | 6187 | if (init_machine_status) |
01d939e8 | 6188 | (*init_machine_status) (cfun); |
b384405b BS |
6189 | } |
6190 | ||
6191 | /* Initialize the rtl expansion mechanism so that we can do simple things | |
6192 | like generate sequences. This is used to provide a context during global | |
6193 | initialization of some passes. */ | |
6194 | void | |
6195 | init_dummy_function_start () | |
6196 | { | |
6197 | prepare_function_start (); | |
6198 | } | |
6199 | ||
6200 | /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node) | |
6201 | and initialize static variables for generating RTL for the statements | |
6202 | of the function. */ | |
6203 | ||
6204 | void | |
6205 | init_function_start (subr, filename, line) | |
6206 | tree subr; | |
36244024 | 6207 | const char *filename; |
b384405b BS |
6208 | int line; |
6209 | { | |
6210 | prepare_function_start (); | |
6211 | ||
b384405b | 6212 | current_function_name = (*decl_printable_name) (subr, 2); |
01d939e8 | 6213 | cfun->decl = subr; |
b384405b BS |
6214 | |
6215 | /* Nonzero if this is a nested function that uses a static chain. */ | |
6216 | ||
6217 | current_function_needs_context | |
6218 | = (decl_function_context (current_function_decl) != 0 | |
6219 | && ! DECL_NO_STATIC_CHAIN (current_function_decl)); | |
6220 | ||
6221 | /* Within the function body, compute a type's size as soon as it is laid out. */ |
6222 | immediate_size_expand++; | |
6f086dfc | 6223 | |
6f086dfc | 6224 | /* Prevent ever trying to delete the first instruction of a function. |
b274104c | 6225 | Also tell final how to output a linenum before the function prologue. |
718fe406 | 6226 | Note linenums could be missing, e.g. when compiling a Java .class file. */ |
b274104c PB |
6227 | if (line > 0) |
6228 | emit_line_note (filename, line); | |
6f086dfc RS |
6229 | |
6230 | /* Make sure first insn is a note even if we don't want linenums. | |
6231 | This makes sure the first insn will never be deleted. | |
6232 | Also, final expects a note to appear there. */ | |
6496a589 | 6233 | emit_note (NULL, NOTE_INSN_DELETED); |
6f086dfc RS |
6234 | |
6235 | /* Set flags used by final.c. */ | |
6236 | if (aggregate_value_p (DECL_RESULT (subr))) | |
6237 | { | |
6238 | #ifdef PCC_STATIC_STRUCT_RETURN | |
1b8297c1 | 6239 | current_function_returns_pcc_struct = 1; |
6f086dfc | 6240 | #endif |
1b8297c1 | 6241 | current_function_returns_struct = 1; |
6f086dfc RS |
6242 | } |
6243 | ||
6244 | /* Warn if this value is an aggregate type, | |
6245 | regardless of which calling convention we are using for it. */ | |
6246 | if (warn_aggregate_return | |
05e3bdb9 | 6247 | && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)))) |
6f086dfc RS |
6248 | warning ("function returns an aggregate"); |
6249 | ||
6250 | current_function_returns_pointer | |
8eda074c | 6251 | = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr))); |
49ad7cfa | 6252 | } |
5c7675e9 | 6253 | |
49ad7cfa BS |
6254 | /* Make sure all values used by the optimization passes have sane |
6255 | defaults. */ | |
6256 | void | |
6257 | init_function_for_compilation () | |
6258 | { | |
6259 | reg_renumber = 0; | |
0a1c58a2 | 6260 | |
5c7675e9 | 6261 | /* No prologue/epilogue insns yet. */ |
0a1c58a2 JL |
6262 | VARRAY_GROW (prologue, 0); |
6263 | VARRAY_GROW (epilogue, 0); | |
6264 | VARRAY_GROW (sibcall_epilogue, 0); | |
6f086dfc RS |
6265 | } |
6266 | ||
6267 | /* Indicate that the current function uses extra args | |
6268 | not explicitly mentioned in the argument list in any fashion. */ | |
6269 | ||
6270 | void | |
6271 | mark_varargs () | |
6272 | { | |
6273 | current_function_varargs = 1; | |
6274 | } | |
6275 | ||
6276 | /* Expand a call to __main at the beginning of a possible main function. */ | |
6277 | ||
e2fd1d94 JM |
6278 | #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main) |
6279 | #undef HAS_INIT_SECTION | |
6280 | #define HAS_INIT_SECTION | |
6281 | #endif | |
6282 | ||
6f086dfc RS |
6283 | void |
6284 | expand_main_function () | |
6285 | { | |
1d482056 RH |
6286 | #ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN |
6287 | if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN) | |
6288 | { | |
6289 | int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; | |
6290 | rtx tmp; | |
6291 | ||
ef89d648 | 6292 | /* Forcibly align the stack. */ |
1d482056 | 6293 | #ifdef STACK_GROWS_DOWNWARD |
ef89d648 ZW |
6294 | tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT(-align), |
6295 | stack_pointer_rtx, 1, OPTAB_WIDEN); | |
1d482056 | 6296 | #else |
ef89d648 ZW |
6297 | tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx, |
6298 | GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN); | |
6299 | tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align), | |
6300 | stack_pointer_rtx, 1, OPTAB_WIDEN); | |
1d482056 RH |
6301 | #endif |
6302 | if (tmp != stack_pointer_rtx) | |
6303 | emit_move_insn (stack_pointer_rtx, tmp); | |
6304 | ||
6305 | /* Enlist allocate_dynamic_stack_space to pick up the pieces. */ | |
6306 | tmp = force_reg (Pmode, const0_rtx); | |
6307 | allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT); | |
6308 | } | |
6309 | #endif | |
6310 | ||
6311 | #ifndef HAS_INIT_SECTION | |
b93a436e JL |
6312 | emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0, |
6313 | VOIDmode, 0); | |
1d482056 | 6314 | #endif |
6f086dfc RS |
6315 | } |
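/* Editorial illustration: the RTL emitted above performs the usual
   stack-alignment arithmetic -- SP &= -ALIGN when the stack grows
   downward, or SP = (SP + ALIGN-1) & -ALIGN when it grows upward --
   so that main's frame starts on PREFERRED_STACK_BOUNDARY.  The
   same computation on plain integers; example only.  */
#if 0
static unsigned long
example_align_sp (unsigned long sp, unsigned long align, int grows_down)
{
  if (grows_down)
    return sp & -align;                /* round down to the boundary */
  return (sp + align - 1) & -align;    /* round up to the boundary   */
}
#endif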
6316 | \f | |
c20bf1f3 JB |
6317 | extern struct obstack permanent_obstack; |
6318 | ||
1f731749 MM |
6319 | /* The PENDING_SIZES represent the sizes of variable-sized types. |
6320 | Create RTL for the various sizes now (using temporary variables), | |
6321 | so that we can refer to the sizes from the RTL we are generating | |
6322 | for the current function. The PENDING_SIZES are a TREE_LIST. The | |
6323 | TREE_VALUE of each node is a SAVE_EXPR. */ | |
6324 | ||
6325 | void | |
6326 | expand_pending_sizes (pending_sizes) | |
6327 | tree pending_sizes; | |
6328 | { | |
6329 | tree tem; | |
6330 | ||
6331 | /* Evaluate now the sizes of any types declared among the arguments. */ | |
6332 | for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem)) | |
6333 | { | |
6334 | expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, | |
6335 | EXPAND_MEMORY_USE_BAD); | |
6336 | /* Flush the queue in case this parameter declaration has | |
6337 | side-effects. */ | |
6338 | emit_queue (); | |
6339 | } | |
6340 | } | |
6341 | ||
6f086dfc RS |
6342 | /* Start the RTL for a new function, and set variables used for |
6343 | emitting RTL. | |
6344 | SUBR is the FUNCTION_DECL node. | |
6345 | PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with | |
6346 | the function's parameters, which must be run at any return statement. */ | |
6347 | ||
6348 | void | |
6349 | expand_function_start (subr, parms_have_cleanups) | |
6350 | tree subr; | |
6351 | int parms_have_cleanups; | |
6352 | { | |
6f086dfc | 6353 | tree tem; |
4e86caed | 6354 | rtx last_ptr = NULL_RTX; |
6f086dfc RS |
6355 | |
6356 | /* Make sure volatile mem refs aren't considered | |
6357 | valid operands of arithmetic insns. */ | |
6358 | init_recog_no_volatile (); | |
6359 | ||
7d384cc0 KR |
6360 | /* Set this before generating any memory accesses. */ |
6361 | current_function_check_memory_usage | |
6362 | = (flag_check_memory_usage | |
6363 | && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl)); | |
6364 | ||
07417085 KR |
6365 | current_function_instrument_entry_exit |
6366 | = (flag_instrument_function_entry_exit | |
6367 | && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr)); | |
6368 | ||
a157febd GK |
6369 | current_function_limit_stack |
6370 | = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr)); | |
6371 | ||
6f086dfc RS |
6372 | /* If function gets a static chain arg, store it in the stack frame. |
6373 | Do this first, so it gets the first stack slot offset. */ | |
6374 | if (current_function_needs_context) | |
3e2481e9 JW |
6375 | { |
6376 | last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0); | |
f0c51a1e | 6377 | |
f0c51a1e RK |
6378 | /* Delay copying static chain if it is not a register to avoid |
6379 | conflicts with regs used for parameters. */ | |
f95182a4 ILT |
6380 | if (! SMALL_REGISTER_CLASSES |
6381 | || GET_CODE (static_chain_incoming_rtx) == REG) | |
718fe406 | 6382 | emit_move_insn (last_ptr, static_chain_incoming_rtx); |
3e2481e9 | 6383 | } |
6f086dfc RS |
6384 | |
6385 | /* If the parameters of this function need cleaning up, get a label | |
6386 | for the beginning of the code which executes those cleanups. This must | |
6387 | be done before doing anything with return_label. */ | |
6388 | if (parms_have_cleanups) | |
6389 | cleanup_label = gen_label_rtx (); | |
6390 | else | |
6391 | cleanup_label = 0; | |
6392 | ||
52a11cbf RH |
6393 | /* Make the label for return statements to jump to. Do not special |
6394 | case machines with special return instructions -- they will be | |
6395 | handled later during jump, ifcvt, or epilogue creation. */ | |
6f086dfc | 6396 | return_label = gen_label_rtx (); |
6f086dfc RS |
6397 | |
6398 | /* Initialize rtx used to return the value. */ | |
6399 | /* Do this before assign_parms so that we copy the struct value address | |
6400 | before any library calls that assign parms might generate. */ | |
6401 | ||
6402 | /* Decide whether to return the value in memory or in a register. */ | |
6403 | if (aggregate_value_p (DECL_RESULT (subr))) | |
6404 | { | |
6405 | /* Returning something that won't go in a register. */ | |
b3694847 | 6406 | rtx value_address = 0; |
6f086dfc RS |
6407 | |
6408 | #ifdef PCC_STATIC_STRUCT_RETURN | |
6409 | if (current_function_returns_pcc_struct) | |
6410 | { | |
6411 | int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr))); | |
6412 | value_address = assemble_static_space (size); | |
6413 | } | |
6414 | else | |
6415 | #endif | |
6416 | { | |
6417 | /* Expect to be passed the address of a place to store the value. | |
6418 | If it is passed as an argument, assign_parms will take care of | |
6419 | it. */ | |
6420 | if (struct_value_incoming_rtx) | |
6421 | { | |
6422 | value_address = gen_reg_rtx (Pmode); | |
6423 | emit_move_insn (value_address, struct_value_incoming_rtx); | |
6424 | } | |
6425 | } | |
6426 | if (value_address) | |
ccdecf58 | 6427 | { |
abde42f7 JH |
6428 | rtx x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address); |
6429 | set_mem_attributes (x, DECL_RESULT (subr), 1); | |
6430 | SET_DECL_RTL (DECL_RESULT (subr), x); | |
ccdecf58 | 6431 | } |
6f086dfc RS |
6432 | } |
6433 | else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode) | |
6434 | /* If return mode is void, this decl rtl should not be used. */ | |
19e7881c | 6435 | SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX); |
d5bf1143 | 6436 | else |
a53e14c0 | 6437 | { |
d5bf1143 RH |
6438 | /* Compute the return values into a pseudo reg, which we will copy |
6439 | into the true return register after the cleanups are done. */ | |
db3c0315 MM |
6440 | |
6441 | /* In order to figure out what mode to use for the pseudo, we | |
6442 | figure out what the mode of the eventual return register will | |
6443 | actually be, and use that. */ | |
6444 | rtx hard_reg | |
6445 | = hard_function_value (TREE_TYPE (DECL_RESULT (subr)), | |
6446 | subr, 1); | |
6447 | ||
80a480ca RH |
6448 | /* Structures that are returned in registers are not aggregate_value_p, |
6449 | so we may see a PARALLEL. Don't play pseudo games with this. */ | |
6450 | if (! REG_P (hard_reg)) | |
6451 | SET_DECL_RTL (DECL_RESULT (subr), hard_reg); | |
6452 | else | |
6453 | { | |
6454 | /* Create the pseudo. */ | |
6455 | SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg))); | |
a53e14c0 | 6456 | |
80a480ca RH |
6457 | /* Needed because we may need to move this to memory |
6458 | in case it's a named return value whose address is taken. */ | |
6459 | DECL_REGISTER (DECL_RESULT (subr)) = 1; | |
6460 | } | |
a53e14c0 | 6461 | } |
6f086dfc RS |
6462 | |
6463 | /* Initialize rtx for parameters and local variables. | |
6464 | In some cases this requires emitting insns. */ | |
6465 | ||
0d1416c6 | 6466 | assign_parms (subr); |
6f086dfc | 6467 | |
f0c51a1e RK |
6468 | /* Copy the static chain now if it wasn't a register. The delay is to |
6469 | avoid conflicts with the parameter passing registers. */ | |
6470 | ||
f95182a4 | 6471 | if (SMALL_REGISTER_CLASSES && current_function_needs_context) |
f0c51a1e RK |
6472 | if (GET_CODE (static_chain_incoming_rtx) != REG) |
6473 | emit_move_insn (last_ptr, static_chain_incoming_rtx); | |
f0c51a1e | 6474 | |
6f086dfc RS |
6475 | /* The following was moved from init_function_start. |
6476 | The move is supposed to make sdb output more accurate. */ | |
6477 | /* Indicate the beginning of the function body, | |
6478 | as opposed to parm setup. */ | |
6496a589 | 6479 | emit_note (NULL, NOTE_INSN_FUNCTION_BEG); |
6f086dfc | 6480 | |
6f086dfc | 6481 | if (GET_CODE (get_last_insn ()) != NOTE) |
6496a589 | 6482 | emit_note (NULL, NOTE_INSN_DELETED); |
6f086dfc RS |
6483 | parm_birth_insn = get_last_insn (); |
6484 | ||
6d7306f7 JM |
6485 | context_display = 0; |
6486 | if (current_function_needs_context) | |
ac9e20f0 | 6487 | { |
6d7306f7 JM |
6488 | /* Fetch static chain values for containing functions. */ |
6489 | tem = decl_function_context (current_function_decl); | |
d29c259b RH |
6490 | /* Copy the static chain pointer into a pseudo. If we have |
6491 | small register classes, copy the value from memory if | |
6492 | static_chain_incoming_rtx is a REG. */ | |
6493 | if (tem) | |
6d7306f7 | 6494 | { |
6d7306f7 JM |
6495 | /* If the static chain originally came in a register, put it back |
6496 | there, then move it out in the next insn. The reason for | |
6497 | this peculiar code is to satisfy function integration. */ | |
f95182a4 ILT |
6498 | if (SMALL_REGISTER_CLASSES |
6499 | && GET_CODE (static_chain_incoming_rtx) == REG) | |
6d7306f7 | 6500 | emit_move_insn (static_chain_incoming_rtx, last_ptr); |
6d7306f7 JM |
6501 | last_ptr = copy_to_reg (static_chain_incoming_rtx); |
6502 | } | |
ac9e20f0 | 6503 | |
6d7306f7 JM |
6504 | while (tem) |
6505 | { | |
6506 | tree rtlexp = make_node (RTL_EXPR); | |
6f086dfc | 6507 | |
6d7306f7 JM |
6508 | RTL_EXPR_RTL (rtlexp) = last_ptr; |
6509 | context_display = tree_cons (tem, rtlexp, context_display); | |
6510 | tem = decl_function_context (tem); | |
6511 | if (tem == 0) | |
6512 | break; | |
6513 | /* Chain through stack frames, assuming pointer to next lexical frame |
6514 | is found at the place we always store it. */ | |
6f086dfc | 6515 | #ifdef FRAME_GROWS_DOWNWARD |
768f0669 JJ |
6516 | last_ptr = plus_constant (last_ptr, |
6517 | -(HOST_WIDE_INT) GET_MODE_SIZE (Pmode)); | |
6f086dfc | 6518 | #endif |
3bdf5ad1 | 6519 | last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr)); |
6a1d250e | 6520 | set_mem_alias_set (last_ptr, get_frame_alias_set ()); |
3bdf5ad1 | 6521 | last_ptr = copy_to_reg (last_ptr); |
6d7306f7 JM |
6522 | |
6523 | /* If we are not optimizing, ensure that we know that this | |
6524 | piece of context is live over the entire function. */ | |
6525 | if (! optimize) | |
38a448ca RH |
6526 | save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr, |
6527 | save_expr_regs); | |
6d7306f7 | 6528 | } |
6f086dfc RS |
6529 | } |
6530 | ||
07417085 KR |
6531 | if (current_function_instrument_entry_exit) |
6532 | { | |
6533 | rtx fun = DECL_RTL (current_function_decl); | |
6534 | if (GET_CODE (fun) == MEM) | |
6535 | fun = XEXP (fun, 0); | |
6536 | else | |
6537 | abort (); | |
6538 | emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2, | |
6539 | fun, Pmode, | |
6540 | expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS, | |
6541 | 0, | |
6542 | hard_frame_pointer_rtx), | |
6543 | Pmode); | |
6544 | } | |
6545 | ||
411707f4 CC |
6546 | #ifdef PROFILE_HOOK |
6547 | if (profile_flag) | |
6548 | PROFILE_HOOK (profile_label_no); | |
6549 | #endif | |
6550 | ||
6f086dfc RS |
6551 | /* After the display initializations is where the tail-recursion label |
6552 | should go, if we end up needing one. Ensure we have a NOTE here | |
6553 | since some things (like trampolines) get placed before this. */ | |
6496a589 | 6554 | tail_recursion_reentry = emit_note (NULL, NOTE_INSN_DELETED); |
6f086dfc RS |
6555 | |
6556 | /* Evaluate now the sizes of any types declared among the arguments. */ | |
1f731749 | 6557 | expand_pending_sizes (nreverse (get_pending_sizes ())); |
6f086dfc RS |
6558 | |
6559 | /* Make sure there is a line number after the function entry setup code. */ | |
6560 | force_next_line_note (); | |
6561 | } | |
6562 | \f | |
49ad7cfa BS |
6563 | /* Undo the effects of init_dummy_function_start. */ |
6564 | void | |
6565 | expand_dummy_function_end () | |
6566 | { | |
6567 | /* End any sequences that failed to be closed due to syntax errors. */ | |
6568 | while (in_sequence_p ()) | |
6569 | end_sequence (); | |
6570 | ||
6571 | /* Outside function body, can't compute type's actual size | |
6572 | until next function's body starts. */ | |
fa51b01b | 6573 | |
01d939e8 BS |
6574 | free_after_parsing (cfun); |
6575 | free_after_compilation (cfun); | |
01d939e8 | 6576 | cfun = 0; |
49ad7cfa BS |
6577 | } |
6578 | ||
c13fde05 RH |
6579 | /* Call DOIT for each hard register used as a return value from |
6580 | the current function. */ | |
bd695e1e RH |
6581 | |
6582 | void | |
c13fde05 RH |
6583 | diddle_return_value (doit, arg) |
6584 | void (*doit) PARAMS ((rtx, void *)); | |
6585 | void *arg; | |
bd695e1e | 6586 | { |
c13fde05 RH |
6587 | rtx outgoing = current_function_return_rtx; |
6588 | ||
6589 | if (! outgoing) | |
6590 | return; | |
bd695e1e | 6591 | |
c13fde05 RH |
6592 | if (GET_CODE (outgoing) == REG) |
6593 | (*doit) (outgoing, arg); | |
6594 | else if (GET_CODE (outgoing) == PARALLEL) | |
6595 | { | |
6596 | int i; | |
bd695e1e | 6597 | |
c13fde05 RH |
6598 | for (i = 0; i < XVECLEN (outgoing, 0); i++) |
6599 | { | |
6600 | rtx x = XEXP (XVECEXP (outgoing, 0, i), 0); | |
6601 | ||
6602 | if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER) | |
6603 | (*doit) (x, arg); | |
bd695e1e RH |
6604 | } |
6605 | } | |
6606 | } | |
6607 | ||
c13fde05 RH |
6608 | static void |
6609 | do_clobber_return_reg (reg, arg) | |
6610 | rtx reg; | |
6611 | void *arg ATTRIBUTE_UNUSED; | |
6612 | { | |
6613 | emit_insn (gen_rtx_CLOBBER (VOIDmode, reg)); | |
6614 | } | |
6615 | ||
6616 | void | |
6617 | clobber_return_register () | |
6618 | { | |
6619 | diddle_return_value (do_clobber_return_reg, NULL); | |
9c65bbf4 JH |
6620 | |
6621 | /* In case we do use pseudo to return value, clobber it too. */ | |
6622 | if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl))) | |
6623 | { | |
6624 | tree decl_result = DECL_RESULT (current_function_decl); | |
6625 | rtx decl_rtl = DECL_RTL (decl_result); | |
6626 | if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER) | |
6627 | { | |
6628 | do_clobber_return_reg (decl_rtl, NULL); | |
6629 | } | |
6630 | } | |
c13fde05 RH |
6631 | } |
6632 | ||
6633 | static void | |
6634 | do_use_return_reg (reg, arg) | |
6635 | rtx reg; | |
6636 | void *arg ATTRIBUTE_UNUSED; | |
6637 | { | |
6638 | emit_insn (gen_rtx_USE (VOIDmode, reg)); | |
6639 | } | |
6640 | ||
6641 | void | |
6642 | use_return_register () | |
6643 | { | |
6644 | diddle_return_value (do_use_return_reg, NULL); | |
6645 | } | |
6646 | ||
6f086dfc | 6647 | /* Generate RTL for the end of the current function. |
718fe406 | 6648 | FILENAME and LINE are the current position in the source file. |
6f086dfc | 6649 | |
980697fd | 6650 | It is up to language-specific callers to do cleanups for parameters-- |
1be07046 | 6651 | or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */ |
6f086dfc RS |
6652 | |
6653 | void | |
1be07046 | 6654 | expand_function_end (filename, line, end_bindings) |
3b304f5b | 6655 | const char *filename; |
6f086dfc | 6656 | int line; |
1be07046 | 6657 | int end_bindings; |
6f086dfc | 6658 | { |
6f086dfc | 6659 | tree link; |
932f0847 | 6660 | rtx clobber_after; |
6f086dfc | 6661 | |
1e2414db | 6662 | #ifdef TRAMPOLINE_TEMPLATE |
6f086dfc | 6663 | static rtx initial_trampoline; |
1e2414db | 6664 | #endif |
6f086dfc | 6665 | |
49ad7cfa BS |
6666 | finish_expr_for_function (); |
6667 | ||
964be02f RH |
6668 | /* If arg_pointer_save_area was referenced only from a nested |
6669 | function, we will not have initialized it yet. Do that now. */ | |
6670 | if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init) | |
6671 | get_arg_pointer_save_area (cfun); | |
6672 | ||
6f086dfc RS |
6673 | #ifdef NON_SAVING_SETJMP |
6674 | /* Don't put any variables in registers if we call setjmp | |
6675 | on a machine that fails to restore the registers. */ | |
6676 | if (NON_SAVING_SETJMP && current_function_calls_setjmp) | |
6677 | { | |
b88a3142 RK |
6678 | if (DECL_INITIAL (current_function_decl) != error_mark_node) |
6679 | setjmp_protect (DECL_INITIAL (current_function_decl)); | |
6680 | ||
6f086dfc RS |
6681 | setjmp_protect_args (); |
6682 | } | |
6683 | #endif | |
6684 | ||
6f086dfc RS |
6685 | /* Initialize any trampolines required by this function. */ |
6686 | for (link = trampoline_list; link; link = TREE_CHAIN (link)) | |
6687 | { | |
6688 | tree function = TREE_PURPOSE (link); | |
57bed152 | 6689 | rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function); |
6f086dfc | 6690 | rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link)); |
7a87758d | 6691 | #ifdef TRAMPOLINE_TEMPLATE |
1e2414db | 6692 | rtx blktramp; |
7a87758d | 6693 | #endif |
6f086dfc RS |
6694 | rtx seq; |
6695 | ||
1e2414db | 6696 | #ifdef TRAMPOLINE_TEMPLATE |
6f086dfc RS |
6697 | /* First make sure this compilation has a template for |
6698 | initializing trampolines. */ | |
6699 | if (initial_trampoline == 0) | |
86f8eff3 | 6700 | { |
86f8eff3 | 6701 | initial_trampoline |
38a448ca | 6702 | = gen_rtx_MEM (BLKmode, assemble_trampoline_template ()); |
76095e2f RH |
6703 | |
6704 | ggc_add_rtx_root (&initial_trampoline, 1); | |
86f8eff3 | 6705 | } |
1e2414db | 6706 | #endif |
6f086dfc RS |
6707 | |
6708 | /* Generate insns to initialize the trampoline. */ | |
6709 | start_sequence (); | |
1e2414db RK |
6710 | tramp = round_trampoline_addr (XEXP (tramp, 0)); |
6711 | #ifdef TRAMPOLINE_TEMPLATE | |
6712 | blktramp = change_address (initial_trampoline, BLKmode, tramp); | |
6713 | emit_block_move (blktramp, initial_trampoline, | |
6714 | GEN_INT (TRAMPOLINE_SIZE), | |
744bfbfa | 6715 | TRAMPOLINE_ALIGNMENT); |
1e2414db RK |
6716 | #endif |
6717 | INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context); | |
6f086dfc RS |
6718 | seq = get_insns (); |
6719 | end_sequence (); | |
6720 | ||
6721 | /* Put those insns at entry to the containing function (this one). */ | |
6722 | emit_insns_before (seq, tail_recursion_reentry); | |
6723 | } | |
6f086dfc | 6724 | |
11044f66 RK |
6725 | /* If we are doing stack checking and this function makes calls, |
6726 | do a stack probe at the start of the function to ensure we have enough | |
6727 | space for another stack frame. */ | |
6728 | if (flag_stack_check && ! STACK_CHECK_BUILTIN) | |
6729 | { | |
6730 | rtx insn, seq; | |
6731 | ||
6732 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | |
6733 | if (GET_CODE (insn) == CALL_INSN) | |
6734 | { | |
6735 | start_sequence (); | |
6736 | probe_stack_range (STACK_CHECK_PROTECT, | |
6737 | GEN_INT (STACK_CHECK_MAX_FRAME_SIZE)); | |
6738 | seq = get_insns (); | |
6739 | end_sequence (); | |
6740 | emit_insns_before (seq, tail_recursion_reentry); | |
6741 | break; | |
6742 | } | |
6743 | } | |
6744 | ||
db8717d9 | 6745 | /* Warn about unused parms if extra warnings were specified. */ |
078721e1 AC |
6746 | /* Either ``-W -Wunused'' or ``-Wunused-parameter'' enables this |
6747 | warning. WARN_UNUSED_PARAMETER is negative when set by | |
718fe406 | 6748 | -Wunused. */ |
078721e1 AC |
6749 | if (warn_unused_parameter > 0 |
6750 | || (warn_unused_parameter < 0 && extra_warnings)) | |
6f086dfc | 6751 | { |
db8717d9 | 6752 | tree decl; |
6f086dfc RS |
6753 | |
6754 | for (decl = DECL_ARGUMENTS (current_function_decl); | |
6755 | decl; decl = TREE_CHAIN (decl)) | |
497dc802 JM |
6756 | if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL |
6757 | && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl)) | |
6f086dfc RS |
6758 | warning_with_decl (decl, "unused parameter `%s'"); |
6759 | } | |
6f086dfc RS |
6760 | |
6761 | /* Delete handlers for nonlocal gotos if nothing uses them. */ | |
ba716ac9 BS |
6762 | if (nonlocal_goto_handler_slots != 0 |
6763 | && ! current_function_has_nonlocal_label) | |
6f086dfc RS |
6764 | delete_handlers (); |
6765 | ||
6766 | /* End any sequences that failed to be closed due to syntax errors. */ | |
6767 | while (in_sequence_p ()) | |
5f4f0e22 | 6768 | end_sequence (); |
6f086dfc RS |
6769 | |
6770 | /* Outside function body, can't compute type's actual size | |
6771 | until next function's body starts. */ | |
6772 | immediate_size_expand--; | |
6773 | ||
6f086dfc RS |
6774 | clear_pending_stack_adjust (); |
6775 | do_pending_stack_adjust (); | |
6776 | ||
6777 | /* Mark the end of the function body. | |
6778 | If control reaches this insn, the function can drop through | |
6779 | without returning a value. */ | |
6496a589 | 6780 | emit_note (NULL, NOTE_INSN_FUNCTION_END); |
6f086dfc | 6781 | |
82e415a3 DE |
6782 | /* Must mark the last line number note in the function, so that the test |
6783 | coverage code can avoid counting the last line twice. This just tells | |
6784 | the code to ignore the immediately following line note, since there | |
6785 | already exists a copy of this note somewhere above. This line number | |
6786 | note is still needed for debugging though, so we can't delete it. */ | |
6787 | if (flag_test_coverage) | |
6496a589 | 6788 | emit_note (NULL, NOTE_INSN_REPEATED_LINE_NUMBER); |
82e415a3 | 6789 | |
6f086dfc RS |
6790 | /* Output a linenumber for the end of the function. |
6791 | SDB depends on this. */ | |
6792 | emit_line_note_force (filename, line); | |
6793 | ||
fbffc70a GK |
6794 | /* Before the return label (if any), clobber the return |
6795 | registers so that they are not propagated live to the rest of |
6796 | the function. This can only happen with functions that drop | |
6797 | through; if there had been a return statement, there would | |
932f0847 JH |
6798 | have either been a return rtx, or a jump to the return label. |
6799 | ||
6800 | We delay actual code generation until after current_function_value_rtx | |
6801 | is computed. */ | |
6802 | clobber_after = get_last_insn (); | |
fbffc70a | 6803 | |
6f086dfc RS |
6804 | /* Output the label for the actual return from the function, |
6805 | if one is expected. This happens either because a function epilogue | |
6806 | is used instead of a return instruction, or because a return was done | |
6807 | with a goto in order to run local cleanups, or because of pcc-style | |
6808 | structure returning. */ | |
6f086dfc | 6809 | if (return_label) |
fbffc70a | 6810 | emit_label (return_label); |
6f086dfc | 6811 | |
1be07046 RS |
6812 | /* C++ uses this. */ |
6813 | if (end_bindings) | |
6814 | expand_end_bindings (0, 0, 0); | |
6815 | ||
07417085 KR |
6816 | if (current_function_instrument_entry_exit) |
6817 | { | |
6818 | rtx fun = DECL_RTL (current_function_decl); | |
6819 | if (GET_CODE (fun) == MEM) | |
6820 | fun = XEXP (fun, 0); | |
6821 | else | |
6822 | abort (); | |
6823 | emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2, | |
6824 | fun, Pmode, | |
6825 | expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS, | |
6826 | 0, | |
6827 | hard_frame_pointer_rtx), | |
6828 | Pmode); | |
6829 | } | |
6830 | ||
52a11cbf RH |
6831 | /* Let except.c know where it should emit the call to unregister |
6832 | the function context for sjlj exceptions. */ | |
6833 | if (flag_exceptions && USING_SJLJ_EXCEPTIONS) | |
6834 | sjlj_emit_function_exit_after (get_last_insn ()); | |
6835 | ||
6f086dfc RS |
6836 | /* If we had calls to alloca, and this machine needs |
6837 | an accurate stack pointer to exit the function, | |
6838 | insert some code to save and restore the stack pointer. */ | |
6839 | #ifdef EXIT_IGNORE_STACK | |
6840 | if (! EXIT_IGNORE_STACK) | |
6841 | #endif | |
6842 | if (current_function_calls_alloca) | |
6843 | { | |
59257ff7 RK |
6844 | rtx tem = 0; |
6845 | ||
6846 | emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn); | |
5f4f0e22 | 6847 | emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX); |
6f086dfc RS |
6848 | } |
6849 | ||
3e4eac3f RH |
6850 | /* If scalar return value was computed in a pseudo-reg, or was a named |
6851 | return value that got dumped to the stack, copy that to the hard | |
6852 | return register. */ | |
19e7881c | 6853 | if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl))) |
6f086dfc | 6854 | { |
3e4eac3f RH |
6855 | tree decl_result = DECL_RESULT (current_function_decl); |
6856 | rtx decl_rtl = DECL_RTL (decl_result); | |
6857 | ||
6858 | if (REG_P (decl_rtl) | |
6859 | ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER | |
6860 | : DECL_REGISTER (decl_result)) | |
6861 | { | |
6862 | rtx real_decl_rtl; | |
6f086dfc RS |
6863 | |
6864 | #ifdef FUNCTION_OUTGOING_VALUE | |
3e4eac3f RH |
6865 | real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result), |
6866 | current_function_decl); | |
6f086dfc | 6867 | #else |
3e4eac3f RH |
6868 | real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result), |
6869 | current_function_decl); | |
6f086dfc | 6870 | #endif |
3e4eac3f RH |
6871 | REG_FUNCTION_VALUE_P (real_decl_rtl) = 1; |
6872 | ||
6873 | /* If this is a BLKmode structure being returned in registers, | |
6874 | then use the mode computed in expand_return. Note that if | |
6875 | decl_rtl is memory, then its mode may have been changed, | |
6876 | but that of current_function_return_rtx has not. */ | |
6877 | if (GET_MODE (real_decl_rtl) == BLKmode) | |
6878 | PUT_MODE (real_decl_rtl, GET_MODE (current_function_return_rtx)); | |
6879 | ||
6880 | /* If a named return value dumped decl_rtl to memory, then | |
6881 | we may need to re-do the PROMOTE_MODE signed/unsigned | |
6882 | extension. */ | |
6883 | if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl)) | |
6884 | { | |
6885 | int unsignedp = TREE_UNSIGNED (TREE_TYPE (decl_result)); | |
6886 | ||
6887 | #ifdef PROMOTE_FUNCTION_RETURN | |
6888 | promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl), | |
6889 | &unsignedp, 1); | |
6890 | #endif | |
6891 | ||
6892 | convert_move (real_decl_rtl, decl_rtl, unsignedp); | |
6893 | } | |
aa570f54 JW |
6894 | else if (GET_CODE (real_decl_rtl) == PARALLEL) |
6895 | emit_group_load (real_decl_rtl, decl_rtl, | |
6896 | int_size_in_bytes (TREE_TYPE (decl_result)), | |
6897 | TYPE_ALIGN (TREE_TYPE (decl_result))); | |
3e4eac3f RH |
6898 | else |
6899 | emit_move_insn (real_decl_rtl, decl_rtl); | |
6900 | ||
6901 | /* The delay slot scheduler assumes that current_function_return_rtx | |
6902 | holds the hard register containing the return value, not a | |
6903 | temporary pseudo. */ | |
6904 | current_function_return_rtx = real_decl_rtl; | |
6905 | } | |
6f086dfc RS |
6906 | } |
6907 | ||
6908 | /* If returning a structure, arrange to return the address of the value | |
6909 | in a place where debuggers expect to find it. | |
6910 | ||
6911 | If returning a structure PCC style, | |
6912 | the caller also depends on this value. | |
6913 | And current_function_returns_pcc_struct is not necessarily set. */ | |
6914 | if (current_function_returns_struct | |
6915 | || current_function_returns_pcc_struct) | |
6916 | { | |
d1608933 RK |
6917 | rtx value_address |
6918 | = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0); | |
6f086dfc RS |
6919 | tree type = TREE_TYPE (DECL_RESULT (current_function_decl)); |
6920 | #ifdef FUNCTION_OUTGOING_VALUE | |
6921 | rtx outgoing | |
6922 | = FUNCTION_OUTGOING_VALUE (build_pointer_type (type), | |
6923 | current_function_decl); | |
6924 | #else | |
6925 | rtx outgoing | |
d1608933 | 6926 | = FUNCTION_VALUE (build_pointer_type (type), current_function_decl); |
6f086dfc RS |
6927 | #endif |
6928 | ||
6929 | /* Mark this as a function return value so integrate will delete the | |
6930 | assignment and USE below when inlining this function. */ | |
6931 | REG_FUNCTION_VALUE_P (outgoing) = 1; | |
6932 | ||
d1608933 RK |
6933 | #ifdef POINTERS_EXTEND_UNSIGNED |
6934 | /* The address may be ptr_mode and OUTGOING may be Pmode. */ | |
6935 | if (GET_MODE (outgoing) != GET_MODE (value_address)) | |
6936 | value_address = convert_memory_address (GET_MODE (outgoing), | |
6937 | value_address); | |
6938 | #endif | |
6939 | ||
6f086dfc | 6940 | emit_move_insn (outgoing, value_address); |
d1608933 RK |
6941 | |
6942 | /* Show the return register used to hold the result (in this case | |
6943 | the address of the result). */ | |
6944 | current_function_return_rtx = outgoing; | |
6f086dfc RS |
6945 | } |
6946 | ||
52a11cbf RH |
6947 | /* If this is an implementation of throw, do what's necessary to |
6948 | communicate between __builtin_eh_return and the epilogue. */ | |
6949 | expand_eh_return (); | |
6950 | ||
932f0847 JH |
6951 | /* Emit the actual code to clobber return register. */ |
6952 | { | |
6953 | rtx seq, after; | |
6954 | ||
6955 | start_sequence (); | |
6956 | clobber_return_register (); | |
6957 | seq = gen_sequence (); | |
6958 | end_sequence (); | |
6959 | ||
6960 | after = emit_insn_after (seq, clobber_after); | |
6961 | ||
6962 | if (clobber_after != after) | |
6963 | cfun->x_clobber_return_insn = after; | |
6964 | } | |
6965 | ||
c13fde05 RH |
6966 | /* ??? This should no longer be necessary since stupid is no longer with |
6967 | us, but there are some parts of the compiler (e.g. reload_combine, and | |
6968 | sh mach_dep_reorg) that still try to compute their own lifetime info | |
6969 | instead of using the general framework. */ | |
6970 | use_return_register (); | |
6971 | ||
6f086dfc RS |
6972 | /* Fix up any gotos that jumped out to the outermost |
6973 | binding level of the function. | |
6974 | Must follow emitting RETURN_LABEL. */ | |
6975 | ||
6976 | /* If you have any cleanups to do at this point, | |
6977 | and they need to create temporary variables, | |
6978 | then you will lose. */ | |
e15679f8 | 6979 | expand_fixups (get_insns ()); |
6f086dfc | 6980 | } |
278ed218 RH |
6981 | |
6982 | rtx | |
6983 | get_arg_pointer_save_area (f) | |
6984 | struct function *f; | |
6985 | { | |
6986 | rtx ret = f->x_arg_pointer_save_area; | |
6987 | ||
6988 | if (! ret) | |
6989 | { | |
278ed218 RH |
6990 | ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f); |
6991 | f->x_arg_pointer_save_area = ret; | |
964be02f RH |
6992 | } |
6993 | ||
6994 | if (f == cfun && ! f->arg_pointer_save_area_init) | |
6995 | { | |
6996 | rtx seq; | |
278ed218 RH |
6997 | |
6998 | /* Save the arg pointer at the beginning of the function. The | |
964be02f | 6999 | generated stack slot may not be a valid memory address, so we |
278ed218 RH |
7000 | have to check it and fix it if necessary. */ |
7001 | start_sequence (); | |
7002 | emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx); | |
7003 | seq = gen_sequence (); | |
7004 | end_sequence (); | |
7005 | ||
964be02f RH |
7006 | push_topmost_sequence (); |
7007 | emit_insn_after (seq, get_insns ()); | |
7008 | pop_topmost_sequence (); | |
278ed218 RH |
7009 | } |
7010 | ||
7011 | return ret; | |
7012 | } | |
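/* Hypothetical caller sketch (not from this file; assumes GCC's
   internal headers and expansion context): code that clobbers the
   argument pointer can later reload it from the slot created above.
   validize_mem is used because, as noted, the slot's address may not
   be directly valid.  The function name is made up.  */

static void
reload_arg_pointer_sketch (void)
{
  rtx slot = get_arg_pointer_save_area (cfun);

  /* Restore the argument pointer from its save slot.  */
  emit_move_insn (arg_pointer_rtx, validize_mem (slot));
}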
bdac5f58 | 7013 | \f |
0a1c58a2 JL |
7014 | /* Extend a vector that records the INSN_UIDs of INSNS (either a |
7015 | sequence or a single insn). */ | |
bdac5f58 | 7016 | |
0a1c58a2 JL |
7017 | static void |
7018 | record_insns (insns, vecp) | |
bdac5f58 | 7019 | rtx insns; |
0a1c58a2 | 7020 | varray_type *vecp; |
bdac5f58 | 7021 | { |
bdac5f58 TW |
7022 | if (GET_CODE (insns) == SEQUENCE) |
7023 | { | |
7024 | int len = XVECLEN (insns, 0); | |
0a1c58a2 JL |
7025 | int i = VARRAY_SIZE (*vecp); |
7026 | ||
7027 | VARRAY_GROW (*vecp, i + len); | |
bdac5f58 | 7028 | while (--len >= 0) |
0a1c58a2 JL |
7029 | { |
7030 | VARRAY_INT (*vecp, i) = INSN_UID (XVECEXP (insns, 0, len)); | |
7031 | ++i; | |
7032 | } | |
bdac5f58 TW |
7033 | } |
7034 | else | |
7035 | { | |
0a1c58a2 JL |
7036 | int i = VARRAY_SIZE (*vecp); |
7037 | VARRAY_GROW (*vecp, i + 1); | |
7038 | VARRAY_INT (*vecp, i) = INSN_UID (insns); | |
bdac5f58 | 7039 | } |
bdac5f58 TW |
7040 | } |
7041 | ||
10914065 | 7042 | /* Determine how many INSN_UIDs in VEC are part of INSN. */ |
bdac5f58 | 7043 | |
10914065 | 7044 | static int |
bdac5f58 TW |
7045 | contains (insn, vec) |
7046 | rtx insn; | |
0a1c58a2 | 7047 | varray_type vec; |
bdac5f58 | 7048 | { |
b3694847 | 7049 | int i, j; |
bdac5f58 TW |
7050 | |
7051 | if (GET_CODE (insn) == INSN | |
7052 | && GET_CODE (PATTERN (insn)) == SEQUENCE) | |
7053 | { | |
10914065 | 7054 | int count = 0; |
bdac5f58 | 7055 | for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--) |
0a1c58a2 JL |
7056 | for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j) |
7057 | if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j)) | |
10914065 TW |
7058 | count++; |
7059 | return count; | |
bdac5f58 TW |
7060 | } |
7061 | else | |
7062 | { | |
0a1c58a2 JL |
7063 | for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j) |
7064 | if (INSN_UID (insn) == VARRAY_INT (vec, j)) | |
10914065 | 7065 | return 1; |
bdac5f58 TW |
7066 | } |
7067 | return 0; | |
7068 | } | |
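/* Stand-alone sketch of the pattern above, with a plain C vector
   standing in for the varray API (all names here are made up):
   append each generated insn's UID once, then test membership when a
   later pass asks whether an insn belongs to the prologue or
   epilogue.  */

#include <stdlib.h>

struct uid_vec { int *uid; int len, cap; };

static void
record_uid (v, uid)                     /* cf. record_insns */
     struct uid_vec *v;
     int uid;
{
  if (v->len == v->cap)
    {
      v->cap = v->cap ? 2 * v->cap : 16;
      v->uid = (int *) realloc (v->uid, v->cap * sizeof (int));
      if (v->uid == 0)
	abort ();
    }
  v->uid[v->len++] = uid;
}

static int
contains_uid (v, uid)                   /* cf. contains */
     const struct uid_vec *v;
     int uid;
{
  int j;

  for (j = v->len - 1; j >= 0; --j)
    if (v->uid[j] == uid)
      return 1;
  return 0;
}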
5c7675e9 RH |
7069 | |
7070 | int | |
7071 | prologue_epilogue_contains (insn) | |
7072 | rtx insn; | |
7073 | { | |
0a1c58a2 | 7074 | if (contains (insn, prologue)) |
5c7675e9 | 7075 | return 1; |
0a1c58a2 | 7076 | if (contains (insn, epilogue)) |
5c7675e9 RH |
7077 | return 1; |
7078 | return 0; | |
7079 | } | |
bdac5f58 | 7080 | |
0a1c58a2 JL |
7081 | int |
7082 | sibcall_epilogue_contains (insn) | |
718fe406 | 7083 | rtx insn; |
0a1c58a2 JL |
7084 | { |
7085 | if (sibcall_epilogue) | |
7086 | return contains (insn, sibcall_epilogue); | |
7087 | return 0; | |
7088 | } | |
7089 | ||
73ef99fb | 7090 | #ifdef HAVE_return |
69732dcb RH |
7091 | /* Insert gen_return at the end of block BB. This also means updating |
7092 | block_for_insn appropriately. */ | |
7093 | ||
7094 | static void | |
86c82654 | 7095 | emit_return_into_block (bb, line_note) |
69732dcb | 7096 | basic_block bb; |
86c82654 | 7097 | rtx line_note; |
69732dcb RH |
7098 | { |
7099 | rtx p, end; | |
7100 | ||
718fe406 | 7101 | p = NEXT_INSN (bb->end); |
86c82654 RH |
7102 | end = emit_jump_insn_after (gen_return (), bb->end); |
7103 | if (line_note) | |
7104 | emit_line_note_after (NOTE_SOURCE_FILE (line_note), | |
3c030e88 | 7105 | NOTE_LINE_NUMBER (line_note), PREV_INSN (bb->end)); |
69732dcb | 7106 | } |
73ef99fb | 7107 | #endif /* HAVE_return */ |
69732dcb | 7108 | |
7393c642 RK |
7109 | #ifdef HAVE_epilogue |
7110 | ||
7111 | /* Modify SEQ, a SEQUENCE that is part of the epilogue, so that it makes | |
7112 | no modifications to the stack pointer. */ | |
7113 | ||
7114 | static void | |
7115 | keep_stack_depressed (seq) | |
7116 | rtx seq; | |
7117 | { | |
7118 | int i; | |
7119 | rtx sp_from_reg = 0; | |
7120 | int sp_modified_unknown = 0; | |
7121 | ||
7122 | /* If the epilogue is just a single instruction, it's OK as is. */ | |
7123 | ||
8207e7c6 RK |
7124 | if (GET_CODE (seq) != SEQUENCE) |
7125 | return; | |
7393c642 RK |
7126 | |
7127 | /* Scan all insns in SEQ looking for ones that modified the stack | |
7128 | pointer. Record if it modified the stack pointer by copying it | |
7129 | from the frame pointer or if it modified it in some other way. | |
7130 | Then modify any subsequent stack pointer references to take that | |
7131 | into account. We start by only allowing SP to be copied from a | |
7132 | register (presumably FP) and then be subsequently referenced. */ | |
7133 | ||
7134 | for (i = 0; i < XVECLEN (seq, 0); i++) | |
7135 | { | |
7136 | rtx insn = XVECEXP (seq, 0, i); | |
7137 | ||
7138 | if (GET_RTX_CLASS (GET_CODE (insn)) != 'i') | |
7139 | continue; | |
7140 | ||
7141 | if (reg_set_p (stack_pointer_rtx, insn)) | |
7142 | { | |
7143 | rtx set = single_set (insn); | |
7144 | ||
7145 | /* If SP is set as a side-effect, we can't support this. */ | |
7146 | if (set == 0) | |
7147 | abort (); | |
7148 | ||
7149 | if (GET_CODE (SET_SRC (set)) == REG) | |
7150 | sp_from_reg = SET_SRC (set); | |
7151 | else | |
7152 | sp_modified_unknown = 1; | |
7153 | ||
173b24b9 RK |
7154 | /* Don't allow the SP modification to happen. We don't call |
7155 | delete_insn here since INSN isn't in any chain. */ | |
7156 | PUT_CODE (insn, NOTE); | |
7157 | NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; | |
7158 | NOTE_SOURCE_FILE (insn) = 0; | |
7393c642 RK |
7159 | } |
7160 | else if (reg_referenced_p (stack_pointer_rtx, PATTERN (insn))) | |
7161 | { | |
7162 | if (sp_modified_unknown) | |
7163 | abort (); | |
7164 | ||
7165 | else if (sp_from_reg != 0) | |
7166 | PATTERN (insn) | |
7167 | = replace_rtx (PATTERN (insn), stack_pointer_rtx, sp_from_reg); | |
7168 | } | |
7169 | } | |
7170 | } | |
7171 | #endif | |
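/* Toy stand-alone model (plain C over fake "single set" insns, not
   RTL) of the substitution performed above: once SP is seen to be
   copied from some register, delete that copy and rewrite later SP
   uses to read the source register directly.  All names are made up;
   error handling for unknown SP modifications is omitted.  */

struct toy_insn { int dest, src; };  /* toy "dest = src" insns */
#define TOY_SP 0                     /* fake stack pointer reg number */
#define TOY_DELETED (-1)

static void
toy_keep_stack_depressed (seq, n)
     struct toy_insn *seq;
     int n;
{
  int sp_from_reg = -1;
  int i;

  for (i = 0; i < n; i++)
    {
      if (seq[i].dest == TOY_SP)
	{
	  sp_from_reg = seq[i].src;               /* e.g. SP = FP */
	  seq[i].dest = seq[i].src = TOY_DELETED; /* drop the SP write */
	}
      else if (seq[i].src == TOY_SP && sp_from_reg >= 0)
	seq[i].src = sp_from_reg;  /* reference the source reg instead */
    }
}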
7172 | ||
9faa82d8 | 7173 | /* Generate the prologue and epilogue RTL if the machine supports it. Thread |
bdac5f58 TW |
7174 | this into place with notes indicating where the prologue ends and where |
7175 | the epilogue begins. Update the basic block information when possible. */ | |
7176 | ||
7177 | void | |
7178 | thread_prologue_and_epilogue_insns (f) | |
54ea1de9 | 7179 | rtx f ATTRIBUTE_UNUSED; |
bdac5f58 | 7180 | { |
ca1117cc | 7181 | int inserted = 0; |
19d3c25c RH |
7182 | edge e; |
7183 | rtx seq; | |
ca1117cc RH |
7184 | #ifdef HAVE_prologue |
7185 | rtx prologue_end = NULL_RTX; | |
7186 | #endif | |
86c82654 RH |
7187 | #if defined (HAVE_epilogue) || defined(HAVE_return) |
7188 | rtx epilogue_end = NULL_RTX; | |
7189 | #endif | |
e881bb1b | 7190 | |
bdac5f58 TW |
7191 | #ifdef HAVE_prologue |
7192 | if (HAVE_prologue) | |
7193 | { | |
e881bb1b | 7194 | start_sequence (); |
718fe406 | 7195 | seq = gen_prologue (); |
e881bb1b | 7196 | emit_insn (seq); |
bdac5f58 TW |
7197 | |
7198 | /* Retain a map of the prologue insns. */ | |
e881bb1b RH |
7199 | if (GET_CODE (seq) != SEQUENCE) |
7200 | seq = get_insns (); | |
0a1c58a2 | 7201 | record_insns (seq, &prologue); |
ca1117cc | 7202 | prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END); |
9185a8d5 | 7203 | |
e881bb1b RH |
7204 | seq = gen_sequence (); |
7205 | end_sequence (); | |
7206 | ||
75540af0 JH |
7207 | /* Can't deal with multiple successors of the entry block | |
7208 | at the moment. Function should always have at least one | |
7209 | entry point. */ | |
7210 | if (!ENTRY_BLOCK_PTR->succ || ENTRY_BLOCK_PTR->succ->succ_next) | |
7211 | abort (); | |
e881bb1b | 7212 | |
75540af0 JH |
7213 | insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ); |
7214 | inserted = 1; | |
bdac5f58 | 7215 | } |
bdac5f58 | 7216 | #endif |
bdac5f58 | 7217 | |
19d3c25c RH |
7218 | /* If the exit block has no non-fake predecessors, we don't need |
7219 | an epilogue. */ | |
718fe406 | 7220 | for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next) |
19d3c25c RH |
7221 | if ((e->flags & EDGE_FAKE) == 0) |
7222 | break; | |
7223 | if (e == NULL) | |
7224 | goto epilogue_done; | |
7225 | ||
69732dcb RH |
7226 | #ifdef HAVE_return |
7227 | if (optimize && HAVE_return) | |
7228 | { | |
7229 | /* If we're allowed to generate a simple return instruction, | |
7230 | then by definition we don't need a full epilogue. Examine | |
718fe406 KH |
7231 | the block that falls through to EXIT. If it does not |
7232 | contain any code, examine its predecessors and try to | |
69732dcb RH |
7233 | emit (conditional) return instructions. */ |
7234 | ||
7235 | basic_block last; | |
7236 | edge e_next; | |
7237 | rtx label; | |
7238 | ||
718fe406 | 7239 | for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next) |
69732dcb RH |
7240 | if (e->flags & EDGE_FALLTHRU) |
7241 | break; | |
7242 | if (e == NULL) | |
7243 | goto epilogue_done; | |
7244 | last = e->src; | |
7245 | ||
7246 | /* Verify that there are no active instructions in the last block. */ | |
7247 | label = last->end; | |
7248 | while (label && GET_CODE (label) != CODE_LABEL) | |
7249 | { | |
7250 | if (active_insn_p (label)) | |
7251 | break; | |
7252 | label = PREV_INSN (label); | |
7253 | } | |
7254 | ||
7255 | if (last->head == label && GET_CODE (label) == CODE_LABEL) | |
7256 | { | |
718fe406 | 7257 | rtx epilogue_line_note = NULL_RTX; |
86c82654 RH |
7258 | |
7259 | /* Locate the line number associated with the closing brace, | |
7260 | if we can find one. */ | |
7261 | for (seq = get_last_insn (); | |
7262 | seq && ! active_insn_p (seq); | |
7263 | seq = PREV_INSN (seq)) | |
7264 | if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0) | |
7265 | { | |
7266 | epilogue_line_note = seq; | |
7267 | break; | |
7268 | } | |
7269 | ||
718fe406 | 7270 | for (e = last->pred; e; e = e_next) |
69732dcb RH |
7271 | { |
7272 | basic_block bb = e->src; | |
7273 | rtx jump; | |
7274 | ||
7275 | e_next = e->pred_next; | |
7276 | if (bb == ENTRY_BLOCK_PTR) | |
7277 | continue; | |
7278 | ||
7279 | jump = bb->end; | |
a617c13f | 7280 | if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label) |
69732dcb RH |
7281 | continue; |
7282 | ||
7283 | /* If we have an unconditional jump, we can replace that | |
7284 | with a simple return instruction. */ | |
7285 | if (simplejump_p (jump)) | |
7286 | { | |
86c82654 | 7287 | emit_return_into_block (bb, epilogue_line_note); |
53c17031 | 7288 | delete_insn (jump); |
69732dcb RH |
7289 | } |
7290 | ||
7291 | /* If we have a conditional jump, we can try to replace | |
7292 | that with a conditional return instruction. */ | |
7293 | else if (condjump_p (jump)) | |
7294 | { | |
7295 | rtx ret, *loc; | |
7296 | ||
7297 | ret = SET_SRC (PATTERN (jump)); | |
7298 | if (GET_CODE (XEXP (ret, 1)) == LABEL_REF) | |
7299 | loc = &XEXP (ret, 1); | |
7300 | else | |
7301 | loc = &XEXP (ret, 2); | |
7302 | ret = gen_rtx_RETURN (VOIDmode); | |
7303 | ||
7304 | if (! validate_change (jump, loc, ret, 0)) | |
7305 | continue; | |
7306 | if (JUMP_LABEL (jump)) | |
7307 | LABEL_NUSES (JUMP_LABEL (jump))--; | |
718fe406 | 7308 | |
3a75e42e CP |
7309 | /* If this block has only one successor, it both jumps |
7310 | and falls through to the fallthru block, so we can't | |
7311 | delete the edge. */ | |
718fe406 KH |
7312 | if (bb->succ->succ_next == NULL) |
7313 | continue; | |
69732dcb RH |
7314 | } |
7315 | else | |
7316 | continue; | |
7317 | ||
7318 | /* Fix up the CFG for the successful change we just made. */ | |
86c82654 | 7319 | redirect_edge_succ (e, EXIT_BLOCK_PTR); |
69732dcb | 7320 | } |
69732dcb | 7321 | |
2dd8bc01 GK |
7322 | /* Emit a return insn for the exit fallthru block. Whether |
7323 | this is still reachable will be determined later. */ | |
69732dcb | 7324 | |
2dd8bc01 | 7325 | emit_barrier_after (last->end); |
86c82654 RH |
7326 | emit_return_into_block (last, epilogue_line_note); |
7327 | epilogue_end = last->end; | |
ab75d1f1 | 7328 | last->succ->flags &= ~EDGE_FALLTHRU; |
718fe406 | 7329 | goto epilogue_done; |
2dd8bc01 | 7330 | } |
69732dcb RH |
7331 | } |
7332 | #endif | |
bdac5f58 TW |
7333 | #ifdef HAVE_epilogue |
7334 | if (HAVE_epilogue) | |
7335 | { | |
19d3c25c RH |
7336 | /* Find the edge that falls through to EXIT. Other edges may exist |
7337 | due to RETURN instructions, but those don't need epilogues. | |
7338 | There really shouldn't be a mixture -- either all should have | |
7339 | been converted or none, however... */ | |
e881bb1b | 7340 | |
718fe406 | 7341 | for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next) |
19d3c25c RH |
7342 | if (e->flags & EDGE_FALLTHRU) |
7343 | break; | |
7344 | if (e == NULL) | |
7345 | goto epilogue_done; | |
a78bdb38 | 7346 | |
19d3c25c | 7347 | start_sequence (); |
86c82654 | 7348 | epilogue_end = emit_note (NULL, NOTE_INSN_EPILOGUE_BEG); |
a78bdb38 | 7349 | |
19d3c25c | 7350 | seq = gen_epilogue (); |
7393c642 RK |
7351 | |
7352 | /* If this function returns with the stack depressed, massage | |
7353 | the epilogue to actually do that. */ | |
43db0363 RK |
7354 | if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE |
7355 | && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl))) | |
7393c642 RK |
7356 | keep_stack_depressed (seq); |
7357 | ||
19d3c25c | 7358 | emit_jump_insn (seq); |
bdac5f58 | 7359 | |
19d3c25c RH |
7360 | /* Retain a map of the epilogue insns. */ |
7361 | if (GET_CODE (seq) != SEQUENCE) | |
7362 | seq = get_insns (); | |
0a1c58a2 | 7363 | record_insns (seq, &epilogue); |
bdac5f58 | 7364 | |
19d3c25c | 7365 | seq = gen_sequence (); |
718fe406 | 7366 | end_sequence (); |
e881bb1b | 7367 | |
19d3c25c | 7368 | insert_insn_on_edge (seq, e); |
ca1117cc | 7369 | inserted = 1; |
bdac5f58 TW |
7370 | } |
7371 | #endif | |
19d3c25c | 7372 | epilogue_done: |
e881bb1b | 7373 | |
ca1117cc | 7374 | if (inserted) |
e881bb1b | 7375 | commit_edge_insertions (); |
0a1c58a2 JL |
7376 | |
7377 | #ifdef HAVE_sibcall_epilogue | |
7378 | /* Emit sibling epilogues before any sibling call sites. */ | |
718fe406 | 7379 | for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next) |
0a1c58a2 JL |
7380 | { |
7381 | basic_block bb = e->src; | |
7382 | rtx insn = bb->end; | |
7383 | rtx i; | |
1b513b77 | 7384 | rtx newinsn; |
0a1c58a2 JL |
7385 | |
7386 | if (GET_CODE (insn) != CALL_INSN | |
7387 | || ! SIBLING_CALL_P (insn)) | |
7388 | continue; | |
7389 | ||
7390 | start_sequence (); | |
7391 | seq = gen_sibcall_epilogue (); | |
7392 | end_sequence (); | |
7393 | ||
7394 | i = PREV_INSN (insn); | |
1b513b77 | 7395 | newinsn = emit_insn_before (seq, insn); |
0a1c58a2 | 7396 | |
0a1c58a2 JL |
7397 | /* Retain a map of the epilogue insns. Used in life analysis to |
7398 | avoid getting rid of sibcall epilogue insns. */ | |
1b513b77 JH |
7399 | record_insns (GET_CODE (seq) == SEQUENCE |
7400 | ? seq : newinsn, &sibcall_epilogue); | |
0a1c58a2 JL |
7401 | } |
7402 | #endif | |
ca1117cc RH |
7403 | |
7404 | #ifdef HAVE_prologue | |
7405 | if (prologue_end) | |
7406 | { | |
7407 | rtx insn, prev; | |
7408 | ||
7409 | /* GDB handles `break f' by setting a breakpoint on the first | |
30196c1f | 7410 | line note after the prologue. Which means (1) that if |
ca1117cc | 7411 | there are line number notes before where we inserted the |
30196c1f RH |
7412 | prologue we should move them, and (2) we should generate a |
7413 | note before the end of the first basic block, if there isn't | |
016030fe JH |
7414 | one already there. |
7415 | ||
7416 | ??? This behaviour is completely broken when dealing with | |
7417 | multiple entry functions. We simply always place the note | |
7418 | into the first basic block and let alternate entry points | |
7419 | be missed. | |
7420 | */ | |
ca1117cc | 7421 | |
718fe406 | 7422 | for (insn = prologue_end; insn; insn = prev) |
ca1117cc RH |
7423 | { |
7424 | prev = PREV_INSN (insn); | |
7425 | if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0) | |
7426 | { | |
7427 | /* Note that we cannot reorder the first insn in the | |
7428 | chain, since rest_of_compilation relies on that | |
30196c1f | 7429 | remaining constant. */ |
ca1117cc | 7430 | if (prev == NULL) |
30196c1f RH |
7431 | break; |
7432 | reorder_insns (insn, insn, prologue_end); | |
ca1117cc RH |
7433 | } |
7434 | } | |
7435 | ||
30196c1f RH |
7436 | /* Find the last line number note in the first block. */ |
7437 | for (insn = BASIC_BLOCK (0)->end; | |
016030fe | 7438 | insn != prologue_end && insn; |
30196c1f RH |
7439 | insn = PREV_INSN (insn)) |
7440 | if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0) | |
7441 | break; | |
7442 | ||
7443 | /* If we didn't find one, make a copy of the first line number | |
7444 | we run across. */ | |
7445 | if (! insn) | |
ca1117cc | 7446 | { |
30196c1f RH |
7447 | for (insn = next_active_insn (prologue_end); |
7448 | insn; | |
7449 | insn = PREV_INSN (insn)) | |
7450 | if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0) | |
7451 | { | |
7452 | emit_line_note_after (NOTE_SOURCE_FILE (insn), | |
7453 | NOTE_LINE_NUMBER (insn), | |
7454 | prologue_end); | |
7455 | break; | |
7456 | } | |
ca1117cc RH |
7457 | } |
7458 | } | |
7459 | #endif | |
86c82654 RH |
7460 | #ifdef HAVE_epilogue |
7461 | if (epilogue_end) | |
7462 | { | |
7463 | rtx insn, next; | |
7464 | ||
7465 | /* Similarly, move any line notes that appear after the epilogue. | |
7466 | There is no need, however, to be quite so anal about the existence | |
7467 | of such a note. */ | |
718fe406 | 7468 | for (insn = epilogue_end; insn; insn = next) |
86c82654 RH |
7469 | { |
7470 | next = NEXT_INSN (insn); | |
7471 | if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0) | |
7472 | reorder_insns (insn, insn, PREV_INSN (epilogue_end)); | |
7473 | } | |
7474 | } | |
7475 | #endif | |
bdac5f58 TW |
7476 | } |
7477 | ||
7478 | /* Reposition the prologue-end and epilogue-begin notes after instruction | |
7479 | scheduling and delayed branch scheduling. */ | |
7480 | ||
7481 | void | |
7482 | reposition_prologue_and_epilogue_notes (f) | |
79c9824e | 7483 | rtx f ATTRIBUTE_UNUSED; |
bdac5f58 TW |
7484 | { |
7485 | #if defined (HAVE_prologue) || defined (HAVE_epilogue) | |
0a1c58a2 JL |
7486 | int len; |
7487 | ||
7488 | if ((len = VARRAY_SIZE (prologue)) > 0) | |
bdac5f58 | 7489 | { |
b3694847 | 7490 | rtx insn, note = 0; |
bdac5f58 | 7491 | |
0a1c58a2 JL |
7492 | /* Scan from the beginning until we reach the last prologue insn. |
7493 | We apparently can't depend on basic_block_{head,end} after | |
7494 | reorg has run. */ | |
7495 | for (insn = f; len && insn; insn = NEXT_INSN (insn)) | |
bdac5f58 | 7496 | { |
0a1c58a2 | 7497 | if (GET_CODE (insn) == NOTE) |
9392c110 | 7498 | { |
0a1c58a2 JL |
7499 | if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END) |
7500 | note = insn; | |
7501 | } | |
7502 | else if ((len -= contains (insn, prologue)) == 0) | |
7503 | { | |
7504 | rtx next; | |
7505 | /* Find the prologue-end note if we haven't already, and | |
7506 | move it to just after the last prologue insn. */ | |
7507 | if (note == 0) | |
9392c110 | 7508 | { |
0a1c58a2 JL |
7509 | for (note = insn; (note = NEXT_INSN (note));) |
7510 | if (GET_CODE (note) == NOTE | |
7511 | && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END) | |
7512 | break; | |
9392c110 | 7513 | } |
c93b03c2 | 7514 | |
0a1c58a2 | 7515 | next = NEXT_INSN (note); |
c93b03c2 | 7516 | |
718fe406 | 7517 | /* Whether or not we can depend on BLOCK_HEAD, |
0a1c58a2 JL |
7518 | attempt to keep it up-to-date. */ |
7519 | if (BLOCK_HEAD (0) == note) | |
7520 | BLOCK_HEAD (0) = next; | |
c93b03c2 | 7521 | |
0a1c58a2 | 7522 | remove_insn (note); |
016030fe JH |
7523 | /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */ |
7524 | if (GET_CODE (insn) == CODE_LABEL) | |
7525 | insn = NEXT_INSN (insn); | |
0a1c58a2 | 7526 | add_insn_after (note, insn); |
9392c110 | 7527 | } |
bdac5f58 | 7528 | } |
0a1c58a2 JL |
7529 | } |
7530 | ||
7531 | if ((len = VARRAY_SIZE (epilogue)) > 0) | |
7532 | { | |
b3694847 | 7533 | rtx insn, note = 0; |
bdac5f58 | 7534 | |
0a1c58a2 JL |
7535 | /* Scan from the end until we reach the first epilogue insn. |
7536 | We apparently can't depend on basic_block_{head,end} after | |
7537 | reorg has run. */ | |
7538 | for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn)) | |
bdac5f58 | 7539 | { |
0a1c58a2 | 7540 | if (GET_CODE (insn) == NOTE) |
9392c110 | 7541 | { |
0a1c58a2 JL |
7542 | if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG) |
7543 | note = insn; | |
7544 | } | |
7545 | else if ((len -= contains (insn, epilogue)) == 0) | |
7546 | { | |
7547 | /* Find the epilogue-begin note if we haven't already, and | |
7548 | move it to just before the first epilogue insn. */ | |
7549 | if (note == 0) | |
9392c110 | 7550 | { |
0a1c58a2 JL |
7551 | for (note = insn; (note = PREV_INSN (note));) |
7552 | if (GET_CODE (note) == NOTE | |
7553 | && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG) | |
7554 | break; | |
9392c110 | 7555 | } |
c93b03c2 | 7556 | |
718fe406 | 7557 | /* Whether or not we can depend on BLOCK_HEAD, |
0a1c58a2 JL |
7558 | attempt to keep it up-to-date. */ |
7559 | if (n_basic_blocks | |
7560 | && BLOCK_HEAD (n_basic_blocks-1) == insn) | |
7561 | BLOCK_HEAD (n_basic_blocks-1) = note; | |
c93b03c2 | 7562 | |
0a1c58a2 JL |
7563 | remove_insn (note); |
7564 | add_insn_before (note, insn); | |
9392c110 | 7565 | } |
bdac5f58 TW |
7566 | } |
7567 | } | |
7568 | #endif /* HAVE_prologue or HAVE_epilogue */ | |
7569 | } | |
87ff9c8e | 7570 | |
87ff9c8e RH |
7571 | /* Mark P for GC. */ |
7572 | ||
7573 | static void | |
fa51b01b | 7574 | mark_function_status (p) |
87ff9c8e RH |
7575 | struct function *p; |
7576 | { | |
7a80cf9a RK |
7577 | struct var_refs_queue *q; |
7578 | struct temp_slot *t; | |
87ff9c8e RH |
7579 | int i; |
7580 | rtx *r; | |
7581 | ||
7582 | if (p == 0) | |
7583 | return; | |
7584 | ||
7585 | ggc_mark_rtx (p->arg_offset_rtx); | |
7586 | ||
21cd906e MM |
7587 | if (p->x_parm_reg_stack_loc) |
7588 | for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc; | |
7589 | i > 0; --i, ++r) | |
7590 | ggc_mark_rtx (*r); | |
87ff9c8e RH |
7591 | |
7592 | ggc_mark_rtx (p->return_rtx); | |
7593 | ggc_mark_rtx (p->x_cleanup_label); | |
7594 | ggc_mark_rtx (p->x_return_label); | |
7595 | ggc_mark_rtx (p->x_save_expr_regs); | |
7596 | ggc_mark_rtx (p->x_stack_slot_list); | |
7597 | ggc_mark_rtx (p->x_parm_birth_insn); | |
7598 | ggc_mark_rtx (p->x_tail_recursion_label); | |
7599 | ggc_mark_rtx (p->x_tail_recursion_reentry); | |
7600 | ggc_mark_rtx (p->internal_arg_pointer); | |
7601 | ggc_mark_rtx (p->x_arg_pointer_save_area); | |
7602 | ggc_mark_tree (p->x_rtl_expr_chain); | |
7603 | ggc_mark_rtx (p->x_last_parm_insn); | |
7604 | ggc_mark_tree (p->x_context_display); | |
7605 | ggc_mark_tree (p->x_trampoline_list); | |
7606 | ggc_mark_rtx (p->epilogue_delay_list); | |
b313a0fe | 7607 | ggc_mark_rtx (p->x_clobber_return_insn); |
87ff9c8e | 7608 | |
7a80cf9a RK |
7609 | for (t = p->x_temp_slots; t != 0; t = t->next) |
7610 | { | |
7611 | ggc_mark (t); | |
7612 | ggc_mark_rtx (t->slot); | |
7613 | ggc_mark_rtx (t->address); | |
7614 | ggc_mark_tree (t->rtl_expr); | |
7615 | ggc_mark_tree (t->type); | |
7616 | } | |
87ff9c8e | 7617 | |
7a80cf9a RK |
7618 | for (q = p->fixup_var_refs_queue; q != 0; q = q->next) |
7619 | { | |
7620 | ggc_mark (q); | |
7621 | ggc_mark_rtx (q->modified); | |
87ff9c8e | 7622 | } |
87ff9c8e RH |
7623 | |
7624 | ggc_mark_rtx (p->x_nonlocal_goto_handler_slots); | |
afe3d090 | 7625 | ggc_mark_rtx (p->x_nonlocal_goto_handler_labels); |
87ff9c8e RH |
7626 | ggc_mark_rtx (p->x_nonlocal_goto_stack_level); |
7627 | ggc_mark_tree (p->x_nonlocal_labels); | |
c0e7830f DD |
7628 | |
7629 | mark_hard_reg_initial_vals (p); | |
87ff9c8e RH |
7630 | } |
7631 | ||
eb3ae3e1 ZW |
7632 | /* Mark the struct function pointed to by *ARG for GC, if it is not |
7633 | NULL. This is used to mark the current function and the outer | |
7634 | function chain. */ | |
7a80cf9a | 7635 | |
87ff9c8e | 7636 | static void |
eb3ae3e1 | 7637 | maybe_mark_struct_function (arg) |
87ff9c8e RH |
7638 | void *arg; |
7639 | { | |
7640 | struct function *f = *(struct function **) arg; | |
7641 | ||
eb3ae3e1 ZW |
7642 | if (f == 0) |
7643 | return; | |
7644 | ||
7645 | ggc_mark_struct_function (f); | |
7646 | } | |
7647 | ||
7648 | /* Mark a struct function * for GC. This is called from ggc-common.c. */ | |
7a80cf9a | 7649 | |
eb3ae3e1 ZW |
7650 | void |
7651 | ggc_mark_struct_function (f) | |
7652 | struct function *f; | |
7653 | { | |
7a80cf9a | 7654 | ggc_mark (f); |
eb3ae3e1 ZW |
7655 | ggc_mark_tree (f->decl); |
7656 | ||
7657 | mark_function_status (f); | |
7658 | mark_eh_status (f->eh); | |
7659 | mark_stmt_status (f->stmt); | |
7660 | mark_expr_status (f->expr); | |
7661 | mark_emit_status (f->emit); | |
7662 | mark_varasm_status (f->varasm); | |
7663 | ||
7664 | if (mark_machine_status) | |
7665 | (*mark_machine_status) (f); | |
7666 | if (mark_lang_status) | |
7667 | (*mark_lang_status) (f); | |
7668 | ||
7669 | if (f->original_arg_vector) | |
7670 | ggc_mark_rtvec ((rtvec) f->original_arg_vector); | |
7671 | if (f->original_decl_initial) | |
7672 | ggc_mark_tree (f->original_decl_initial); | |
ae6f2a1c ZW |
7673 | if (f->outer) |
7674 | ggc_mark_struct_function (f->outer); | |
87ff9c8e RH |
7675 | } |
7676 | ||
7677 | /* Called once, at initialization, to initialize function.c. */ | |
7678 | ||
7679 | void | |
7680 | init_function_once () | |
7681 | { | |
eb3ae3e1 ZW |
7682 | ggc_add_root (&cfun, 1, sizeof cfun, maybe_mark_struct_function); |
7683 | ggc_add_root (&outer_function_chain, 1, sizeof outer_function_chain, | |
7684 | maybe_mark_struct_function); | |
0a1c58a2 JL |
7685 | |
7686 | VARRAY_INT_INIT (prologue, 0, "prologue"); | |
7687 | VARRAY_INT_INIT (epilogue, 0, "epilogue"); | |
7688 | VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue"); | |
87ff9c8e | 7689 | } |