]>
Commit | Line | Data |
---|---|---|
28dcb9ed | 1 | /* Data flow analysis for GNU compiler. |
9028a007 | 2 | Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, |
77353e4f | 3 | 1999, 2000, 2001, 2002 Free Software Foundation, Inc. |
28dcb9ed | 4 | |
f12b58b3 | 5 | This file is part of GCC. |
28dcb9ed | 6 | |
f12b58b3 | 7 | GCC is free software; you can redistribute it and/or modify it under |
8 | the terms of the GNU General Public License as published by the Free | |
9 | Software Foundation; either version 2, or (at your option) any later | |
10 | version. | |
28dcb9ed | 11 | |
f12b58b3 | 12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 | for more details. | |
28dcb9ed | 16 | |
17 | You should have received a copy of the GNU General Public License | |
f12b58b3 | 18 | along with GCC; see the file COPYING. If not, write to the Free |
19 | Software Foundation, 59 Temple Place - Suite 330, Boston, MA | |
20 | 02111-1307, USA. */ | |
28dcb9ed | 21 | |
71caadc0 | 22 | /* This file contains the data flow analysis pass of the compiler. It |
23 | computes data flow information which tells combine_instructions | |
24 | which insns to consider combining and controls register allocation. | |
28dcb9ed | 25 | |
71caadc0 | 26 | Additional data flow information that is too bulky to record is |
27 | generated during the analysis, and is used at that time to create | |
28 | autoincrement and autodecrement addressing. | |
28dcb9ed | 29 | |
30 | The first step is dividing the function into basic blocks. | |
31 | find_basic_blocks does this. Then life_analysis determines | |
32 | where each register is live and where it is dead. | |
33 | ||
34 | ** find_basic_blocks ** | |
35 | ||
71caadc0 | 36 | find_basic_blocks divides the current function's rtl into basic |
37 | blocks and constructs the CFG. The blocks are recorded in the | |
38 | basic_block_info array; the CFG exists in the edge structures | |
39 | referenced by the blocks. | |
28dcb9ed | 40 | |
71caadc0 | 41 | find_basic_blocks also finds any unreachable loops and deletes them. |
28dcb9ed | 42 | |
43 | ** life_analysis ** | |
44 | ||
45 | life_analysis is called immediately after find_basic_blocks. | |
46 | It uses the basic block information to determine where each | |
47 | hard or pseudo register is live. | |
48 | ||
49 | ** live-register info ** | |
50 | ||
51 | The information about where each register is live is in two parts: | |
71caadc0 | 52 | the REG_NOTES of insns, and the vector basic_block->global_live_at_start. |
28dcb9ed | 53 | |
71caadc0 | 54 | basic_block->global_live_at_start has an element for each basic |
55 | block, and the element is a bit-vector with a bit for each hard or | |
56 | pseudo register. The bit is 1 if the register is live at the | |
57 | beginning of the basic block. | |
28dcb9ed | 58 | |
9028a007 | 59 | Two types of elements can be added to an insn's REG_NOTES. |
28dcb9ed | 60 | A REG_DEAD note is added to an insn's REG_NOTES for any register |
61 | that meets both of two conditions: The value in the register is not | |
62 | needed in subsequent insns and the insn does not replace the value in | |
63 | the register (in the case of multi-word hard registers, the value in | |
64 | each register must be replaced by the insn to avoid a REG_DEAD note). | |
65 | ||
66 | In the vast majority of cases, an object in a REG_DEAD note will be | |
67 | used somewhere in the insn. The (rare) exception to this is if an | |
68 | insn uses a multi-word hard register and only some of the registers are | |
69 | needed in subsequent insns. In that case, REG_DEAD notes will be | |
70 | provided for those hard registers that are not subsequently needed. | |
71 | Partial REG_DEAD notes of this type do not occur when an insn sets | |
72 | only some of the hard registers used in such a multi-word operand; | |
73 | omitting REG_DEAD notes for objects stored in an insn is optional and | |
74 | the desire to do so does not justify the complexity of the partial | |
75 | REG_DEAD notes. | |
76 | ||
77 | REG_UNUSED notes are added for each register that is set by the insn | |
78 | but is unused subsequently (if every register set by the insn is unused | |
79 | and the insn does not reference memory or have some other side-effect, | |
80 | the insn is deleted instead). If only part of a multi-word hard | |
81 | register is used in a subsequent insn, REG_UNUSED notes are made for | |
82 | the parts that will not be used. | |
83 | ||
84 | To determine which registers are live after any insn, one can | |
85 | start from the beginning of the basic block and scan insns, noting | |
86 | which registers are set by each insn and which die there. | |
87 | ||
88 | ** Other actions of life_analysis ** | |
89 | ||
90 | life_analysis sets up the LOG_LINKS fields of insns because the | |
91 | information needed to do so is readily available. | |
92 | ||
93 | life_analysis deletes insns whose only effect is to store a value | |
94 | that is never used. | |
95 | ||
96 | life_analysis notices cases where a reference to a register as | |
97 | a memory address can be combined with a preceding or following | |
98 | incrementation or decrementation of the register. The separate | |
99 | instruction to increment or decrement is deleted and the address | |
100 | is changed to a POST_INC or similar rtx. | |
101 | ||
102 | Each time an incrementing or decrementing address is created, | |
103 | a REG_INC element is added to the insn's REG_NOTES list. | |
104 | ||
105 | life_analysis fills in certain vectors containing information about | |
083a2b5e | 106 | register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH, |
107 | REG_N_CALLS_CROSSED and REG_BASIC_BLOCK. | |
22c8984a | 108 | |
109 | life_analysis sets current_function_sp_is_unchanging if the function | |
110 | doesn't modify the stack pointer. */ | |
71caadc0 | 111 | |
9028a007 | 112 | /* TODO: |
71caadc0 | 113 | |
114 | Split out from life_analysis: | |
115 | - local property discovery (bb->local_live, bb->local_set) | |
116 | - global property computation | |
117 | - log links creation | |
118 | - pre/post modify transformation | |
119 | */ | |
28dcb9ed | 120 | \f |
28dcb9ed | 121 | #include "config.h" |
405711de | 122 | #include "system.h" |
805e22b2 | 123 | #include "coretypes.h" |
124 | #include "tm.h" | |
2d9b9dfe | 125 | #include "tree.h" |
28dcb9ed | 126 | #include "rtl.h" |
7953c610 | 127 | #include "tm_p.h" |
d6cb6164 | 128 | #include "hard-reg-set.h" |
28dcb9ed | 129 | #include "basic-block.h" |
130 | #include "insn-config.h" | |
131 | #include "regs.h" | |
28dcb9ed | 132 | #include "flags.h" |
133 | #include "output.h" | |
304c5bf1 | 134 | #include "function.h" |
037a5228 | 135 | #include "except.h" |
ce1fd7fc | 136 | #include "toplev.h" |
ba1c8484 | 137 | #include "recog.h" |
5a88ea64 | 138 | #include "expr.h" |
1deb248e | 139 | #include "ssa.h" |
6d866f03 | 140 | #include "timevar.h" |
28dcb9ed | 141 | |
142 | #include "obstack.h" | |
c8fbb318 | 143 | #include "splay-tree.h" |
7014838c | 144 | |
71caadc0 | 145 | /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function, |
146 | the stack pointer does not matter. The value is tested only in | |
147 | functions that have frame pointers. | |
148 | No definition is equivalent to always zero. */ | |
149 | #ifndef EXIT_IGNORE_STACK | |
150 | #define EXIT_IGNORE_STACK 0 | |
151 | #endif | |
152 | ||
2d9b9dfe | 153 | #ifndef HAVE_epilogue |
154 | #define HAVE_epilogue 0 | |
155 | #endif | |
2d9b9dfe | 156 | #ifndef HAVE_prologue |
157 | #define HAVE_prologue 0 | |
158 | #endif | |
60ecc450 | 159 | #ifndef HAVE_sibcall_epilogue |
160 | #define HAVE_sibcall_epilogue 0 | |
161 | #endif | |
2d9b9dfe | 162 | |
5490c1ec | 163 | #ifndef LOCAL_REGNO |
164 | #define LOCAL_REGNO(REGNO) 0 | |
165 | #endif | |
166 | #ifndef EPILOGUE_USES | |
167 | #define EPILOGUE_USES(REGNO) 0 | |
168 | #endif | |
1c6bdf07 | 169 | #ifndef EH_USES |
170 | #define EH_USES(REGNO) 0 | |
171 | #endif | |
5490c1ec | 172 | |
204c86f0 | 173 | #ifdef HAVE_conditional_execution |
174 | #ifndef REVERSE_CONDEXEC_PREDICATES_P | |
175 | #define REVERSE_CONDEXEC_PREDICATES_P(x, y) ((x) == reverse_condition (y)) | |
176 | #endif | |
177 | #endif | |
178 | ||
/* Nonzero if the second flow pass has completed.  */
int flow2_completed;

/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Indexed by register number, giving various information about that
   register.  */

varray_type reg_n_info;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;

/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */
rtx regs_may_share;

/* Callback that determines if it's ok for a function to have no
   noreturn attribute.  Consulted by check_function_return_warnings.  */
int (*lang_missing_noreturn_ok_p) PARAMS ((tree));

/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  Initialized by life_analysis from
   ELIMINABLE_REGS (or FRAME_POINTER_REGNUM when that is not defined).  */

static HARD_REG_SET elim_reg_set;
215 | ||
/* Holds information for tracking conditional register life information.
   Entries live in propagate_block_info's reg_cond_dead splay tree,
   one per register that is not unconditionally live or dead.  */
struct reg_cond_life_info
{
  /* A boolean expression of conditions under which a register is dead.  */
  rtx condition;
  /* Conditions under which a register is dead at the basic block end.  */
  rtx orig_condition;

  /* A boolean expression of conditions under which a register has been
     stored into.  */
  rtx stores;

  /* ??? Could store mask of bytes that are dead, so that we could finally
     track lifetimes of multi-word registers accessed via subregs.  */
};
231 | ||
/* For use in communicating between propagate_block and its subroutines.
   Holds all information needed to compute life and def-use information.  */

struct propagate_block_info
{
  /* The basic block we're considering.  */
  basic_block bb;

  /* Bit N is set if register N is conditionally or unconditionally live.  */
  regset reg_live;

  /* Bit N is set if register N is set this insn.  */
  regset new_set;

  /* Element N is the next insn that uses (hard or pseudo) register N
     within the current basic block; or zero, if there is no such insn.  */
  rtx *reg_next_use;

  /* Contains a list of all the MEMs we are tracking for dead store
     elimination.  */
  rtx mem_set_list;

  /* If non-null, record the set of registers set unconditionally in the
     basic block.  */
  regset local_set;

  /* If non-null, record the set of registers set conditionally in the
     basic block.  */
  regset cond_local_set;

#ifdef HAVE_conditional_execution
  /* Indexed by register number, holds a reg_cond_life_info for each
     register that is not unconditionally live or dead.  */
  splay_tree reg_cond_dead;

  /* Bit N is set if register N is in an expression in reg_cond_dead.  */
  regset reg_cond_reg;
#endif

  /* The length of mem_set_list, bounded by MAX_MEM_SET_LIST_LEN.  */
  int mem_set_list_len;

  /* Nonzero if the value of CC0 is live.  */
  int cc0_live;

  /* Flags controlling the set of information propagate_block collects.  */
  int flags;
};
280 | ||
fb20d6fa | 281 | /* Number of dead insns removed. */ |
282 | static int ndead; | |
283 | ||
b4486a24 | 284 | /* Maximum length of pbi->mem_set_list before we start dropping |
285 | new elements on the floor. */ | |
286 | #define MAX_MEM_SET_LIST_LEN 100 | |
287 | ||
28dcb9ed | 288 | /* Forward declarations */ |
621f6678 | 289 | static int verify_wide_reg_1 PARAMS ((rtx *, void *)); |
a9d18fe1 | 290 | static void verify_wide_reg PARAMS ((int, basic_block)); |
621f6678 | 291 | static void verify_local_live_at_start PARAMS ((regset, basic_block)); |
5edecf3c | 292 | static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *)); |
293 | static void notice_stack_pointer_modification PARAMS ((rtx)); | |
2766437e | 294 | static void mark_reg PARAMS ((rtx, void *)); |
621f6678 | 295 | static void mark_regs_live_at_end PARAMS ((regset)); |
8a5b87ad | 296 | static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *)); |
621f6678 | 297 | static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int)); |
fb20d6fa | 298 | static void propagate_block_delete_insn PARAMS ((rtx)); |
268cb459 | 299 | static rtx propagate_block_delete_libcall PARAMS ((rtx, rtx)); |
7771530e | 300 | static int insn_dead_p PARAMS ((struct propagate_block_info *, |
301 | rtx, int, rtx)); | |
302 | static int libcall_dead_p PARAMS ((struct propagate_block_info *, | |
49ef4267 | 303 | rtx, rtx)); |
7771530e | 304 | static void mark_set_regs PARAMS ((struct propagate_block_info *, |
f901bcb1 | 305 | rtx, rtx)); |
7771530e | 306 | static void mark_set_1 PARAMS ((struct propagate_block_info *, |
96c0ab5d | 307 | enum rtx_code, rtx, rtx, |
308 | rtx, int)); | |
a5d181df | 309 | static int find_regno_partial PARAMS ((rtx *, void *)); |
310 | ||
c8fbb318 | 311 | #ifdef HAVE_conditional_execution |
312 | static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *, | |
313 | int, rtx)); | |
314 | static void free_reg_cond_life_info PARAMS ((splay_tree_value)); | |
315 | static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *)); | |
316 | static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *, | |
317 | int)); | |
4f84cefc | 318 | static rtx elim_reg_cond PARAMS ((rtx, unsigned int)); |
319 | static rtx ior_reg_cond PARAMS ((rtx, rtx, int)); | |
c8fbb318 | 320 | static rtx not_reg_cond PARAMS ((rtx)); |
4f84cefc | 321 | static rtx and_reg_cond PARAMS ((rtx, rtx, int)); |
c8fbb318 | 322 | #endif |
99c14947 | 323 | #ifdef AUTO_INC_DEC |
40988080 | 324 | static void attempt_auto_inc PARAMS ((struct propagate_block_info *, |
325 | rtx, rtx, rtx, rtx, rtx)); | |
7771530e | 326 | static void find_auto_inc PARAMS ((struct propagate_block_info *, |
327 | rtx, rtx)); | |
328 | static int try_pre_increment_1 PARAMS ((struct propagate_block_info *, | |
329 | rtx)); | |
621f6678 | 330 | static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT)); |
99c14947 | 331 | #endif |
7771530e | 332 | static void mark_used_reg PARAMS ((struct propagate_block_info *, |
f901bcb1 | 333 | rtx, rtx, rtx)); |
7771530e | 334 | static void mark_used_regs PARAMS ((struct propagate_block_info *, |
f901bcb1 | 335 | rtx, rtx, rtx)); |
621f6678 | 336 | void dump_flow_info PARAMS ((FILE *)); |
337 | void debug_flow_info PARAMS ((void)); | |
054c53d8 | 338 | static void add_to_mem_set_list PARAMS ((struct propagate_block_info *, |
339 | rtx)); | |
638987bd | 340 | static int invalidate_mems_from_autoinc PARAMS ((rtx *, void *)); |
fd10d9d5 | 341 | static void invalidate_mems_from_set PARAMS ((struct propagate_block_info *, |
342 | rtx)); | |
9645fa4f | 343 | static void clear_log_links PARAMS ((sbitmap)); |
28dcb9ed | 344 | \f |
61e82936 | 345 | |
/* Emit warnings related to function return behavior: warn when the
   current function looks like a candidate for the `noreturn' attribute,
   when a `noreturn' function can in fact return, and when control can
   reach the end of a non-void function.  Relies on the CFG being built:
   the function can return iff EXIT_BLOCK_PTR has a predecessor edge.  */

void
check_function_return_warnings ()
{
  if (warn_missing_noreturn
      && !TREE_THIS_VOLATILE (cfun->decl)
      && EXIT_BLOCK_PTR->pred == NULL
      && (lang_missing_noreturn_ok_p
	  && !lang_missing_noreturn_ok_p (cfun->decl)))
    warning ("function might be possible candidate for attribute `noreturn'");

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EXIT_BLOCK_PTR->pred != NULL)
    warning ("`noreturn' function does return");

  /* If the clobber_return_insn appears in some basic block, then we
     do reach the end without returning a value.  */
  else if (warn_return_type
	   && cfun->x_clobber_return_insn != NULL
	   && EXIT_BLOCK_PTR->pred != NULL)
    {
      int max_uid = get_max_uid ();

      /* If clobber_return_insn was excised by jump1, then renumber_insns
	 can make max_uid smaller than the number still recorded in our rtx.
	 That's fine, since this is a quick way of verifying that the insn
	 is no longer in the chain.  */
      if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
	{
	  rtx insn;

	  /* Scan the whole insn chain; warn only if the clobber insn
	     is still present, i.e. was not deleted as unreachable.  */
	  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	    if (insn == cfun->x_clobber_return_insn)
	      {
		warning ("control reaches end of non-void function");
		break;
	      }
	}
    }
}
65f34de5 | 386 | \f |
387 | /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK | |
388 | note associated with the BLOCK. */ | |
389 | ||
390 | rtx | |
391 | first_insn_after_basic_block_note (block) | |
392 | basic_block block; | |
393 | { | |
394 | rtx insn; | |
fbc1c83e | 395 | |
65f34de5 | 396 | /* Get the first instruction in the block. */ |
397 | insn = block->head; | |
cfc185a4 | 398 | |
65f34de5 | 399 | if (insn == NULL_RTX) |
400 | return NULL_RTX; | |
401 | if (GET_CODE (insn) == CODE_LABEL) | |
402 | insn = NEXT_INSN (insn); | |
403 | if (!NOTE_INSN_BASIC_BLOCK_P (insn)) | |
404 | abort (); | |
405 | ||
406 | return NEXT_INSN (insn); | |
407 | } | |
408 | \f | |
/* Perform data flow analysis.
   F is the first insn of the function; FLAGS is a set of PROP_* flags
   to be used in accumulating flow info.  Results are dumped to FILE
   when it is non-null.  */

void
life_analysis (f, file, flags)
     rtx f;
     FILE *file;
     int flags;
{
  int i;
#ifdef ELIMINABLE_REGS
  static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */

  CLEAR_HARD_REG_SET (elim_reg_set);

#ifdef ELIMINABLE_REGS
  for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif


#ifdef CANNOT_CHANGE_MODE_CLASS
  if (flags & PROP_REG_INFO)
    for (i=0; i < NUM_MACHINE_MODES; ++i)
      INIT_REG_SET (&subregs_of_mode[i]);
#endif

  /* Without optimization, the transformations these flags request
     are not worthwhile, so suppress them.  */
  if (! optimize)
    flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC | PROP_ALLOW_CFG_CHANGES);

  /* The post-reload life analysis has (on a global basis) the same
     registers live as was computed by reload itself, taking register
     elimination into account.  Otherwise offsets and such may be
     incorrect.

     Reload will make some registers as live even though they do not
     appear in the rtl.

     We don't want to create new auto-incs after reload, since they
     are unlikely to be useful and can cause problems with shared
     stack slots.  */
  if (reload_completed)
    flags &= ~(PROP_REG_INFO | PROP_AUTOINC);

  /* We want alias analysis information for local dead store elimination.  */
  if (optimize && (flags & PROP_SCAN_DEAD_STORES))
    init_alias_analysis ();

  /* Always remove no-op moves.  Do this before other processing so
     that we don't have to keep re-scanning them.  */
  delete_noop_moves (f);

  /* Some targets can emit simpler epilogues if they know that sp was
     not ever modified during the function.  After reload, of course,
     we've already emitted the epilogue so there's no sense searching.  */
  if (! reload_completed)
    notice_stack_pointer_modification (f);

  /* Allocate and zero out data structures that will record the
     data from lifetime analysis.  */
  allocate_reg_life_data ();
  allocate_bb_life_data ();

  /* Find the set of registers live on function exit.  */
  mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);

  /* "Update" life info from zero.  It'd be nice to begin the
     relaxation with just the exit and noreturn blocks, but that set
     is not immediately handy.  */

  if (flags & PROP_REG_INFO)
    memset (regs_ever_live, 0, sizeof (regs_ever_live));
  update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);

  /* Clean up.  */
  if (optimize && (flags & PROP_SCAN_DEAD_STORES))
    end_alias_analysis ();

  if (file)
    dump_flow_info (file);

  free_basic_block_vars (1);

  /* Removing dead insns should've made jumptables really dead.  */
  delete_dead_jumptables ();
}
b08cd584 | 501 | |
65f34de5 | 502 | /* A subroutine of verify_wide_reg, called through for_each_rtx. |
a9d18fe1 | 503 | Search for REGNO. If found, return 2 if it is not wider than |
504 | word_mode. */ | |
f9eb917b | 505 | |
65f34de5 | 506 | static int |
507 | verify_wide_reg_1 (px, pregno) | |
508 | rtx *px; | |
509 | void *pregno; | |
510 | { | |
511 | rtx x = *px; | |
512 | unsigned int regno = *(int *) pregno; | |
eb429644 | 513 | |
65f34de5 | 514 | if (GET_CODE (x) == REG && REGNO (x) == regno) |
eb429644 | 515 | { |
65f34de5 | 516 | if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD) |
a9d18fe1 | 517 | return 2; |
65f34de5 | 518 | return 1; |
eb429644 | 519 | } |
65f34de5 | 520 | return 0; |
f9eb917b | 521 | } |
522 | ||
/* A subroutine of verify_local_live_at_start.  Search through insns
   of BB looking for register REGNO.  A wide (wider than word_mode)
   reference is acceptable; a narrow one, or no reference at all,
   means the register died unexpectedly, so abort.  */

static void
verify_wide_reg (regno, bb)
     int regno;
     basic_block bb;
{
  rtx head = bb->head, end = bb->end;

  while (1)
    {
      if (INSN_P (head))
	{
	  /* verify_wide_reg_1 returns 1 for a wide reference (OK),
	     2 for a narrow one (failure), 0 if REGNO was not found.  */
	  int r = for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno);
	  if (r == 1)
	    return;
	  if (r == 2)
	    break;
	}
      if (head == end)
	break;
      head = NEXT_INSN (head);
    }

  /* Dump the offending block before aborting, when dumping is on.  */
  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "Register %d died unexpectedly.\n", regno);
      dump_bb (bb, rtl_dump_file);
    }
  abort ();
}
439e86cb | 555 | |
/* A subroutine of update_life_info.  Verify that there are no untoward
   changes in live_at_start during a local update.
   NEW_LIVE_AT_START is the recomputed set for BB; note that it is
   destructively XORed below, so the caller must not reuse it.  */

static void
verify_local_live_at_start (new_live_at_start, bb)
     regset new_live_at_start;
     basic_block bb;
{
  if (reload_completed)
    {
      /* After reload, there are no pseudos, nor subregs of multi-word
	 registers.  The regsets should exactly match.  */
      if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
	{
	  if (rtl_dump_file)
	    {
	      fprintf (rtl_dump_file,
		       "live_at_start mismatch in bb %d, aborting\nNew:\n",
		       bb->index);
	      debug_bitmap_file (rtl_dump_file, new_live_at_start);
	      fputs ("Old:\n", rtl_dump_file);
	      dump_bb (bb, rtl_dump_file);
	    }
	  abort ();
	}
    }
  else
    {
      int i;

      /* Find the set of changed registers.  */
      XOR_REG_SET (new_live_at_start, bb->global_live_at_start);

      EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
	{
	  /* No registers should die.  A changed register that was
	     previously live must have been killed by the update.  */
	  if (REGNO_REG_SET_P (bb->global_live_at_start, i))
	    {
	      if (rtl_dump_file)
		{
		  fprintf (rtl_dump_file,
			   "Register %d died unexpectedly.\n", i);
		  dump_bb (bb, rtl_dump_file);
		}
	      abort ();
	    }

	  /* Verify that the now-live register is wider than word_mode.  */
	  verify_wide_reg (i, bb);
	});
    }
}
28dcb9ed | 608 | |
/* Updates life information starting with the basic blocks set in BLOCKS.
   If BLOCKS is null, consider it to be the universal set.
   Returns the number of dead insns deleted.

   If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
   we are only expecting local modifications to basic blocks.  If we find
   extra registers live at the beginning of a block, then we either killed
   useful data, or we have a broken split that wants data not provided.
   If we find registers removed from live_at_start, that means we have
   a broken peephole that is killing a register it shouldn't.

   ??? This is not true in one situation -- when a pre-reload splitter
   generates subregs of a multi-word pseudo, current life analysis will
   lose the kill.  So we _can_ have a pseudo go live.  How irritating.

   Including PROP_REG_INFO does not properly refresh regs_ever_live
   unless the caller resets it to zero.  */

int
update_life_info (blocks, extent, prop_flags)
     sbitmap blocks;
     enum update_life_extent extent;
     int prop_flags;
{
  regset tmp;
  regset_head tmp_head;
  int i;
  int stabilized_prop_flags = prop_flags;
  basic_block bb;

  tmp = INITIALIZE_REG_SET (tmp_head);
  ndead = 0;

  timevar_push ((extent == UPDATE_LIFE_LOCAL || blocks)
		? TV_LIFE_UPDATE : TV_LIFE);

  /* Changes to the CFG are only allowed when
     doing a global update for the entire CFG.  */
  if ((prop_flags & PROP_ALLOW_CFG_CHANGES)
      && (extent == UPDATE_LIFE_LOCAL || blocks))
    abort ();

  /* For a global update, we go through the relaxation process again.
     This loop iterates until dead-code removal and CFG cleanup reach
     a fixed point.  */
  if (extent != UPDATE_LIFE_LOCAL)
    {
      for ( ; ; )
	{
	  int changed = 0;

	  calculate_global_regs_live (blocks, blocks,
				prop_flags & (PROP_SCAN_DEAD_CODE
					      | PROP_SCAN_DEAD_STORES
					      | PROP_ALLOW_CFG_CHANGES));

	  /* One pass suffices unless we are both killing dead code and
	     allowing the CFG to change in response.  */
	  if ((prop_flags & (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
	      != (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
	    break;

	  /* Removing dead code may allow the CFG to be simplified which
	     in turn may allow for further dead code detection / removal.  */
	  FOR_EACH_BB_REVERSE (bb)
	    {
	      COPY_REG_SET (tmp, bb->global_live_at_end);
	      changed |= propagate_block (bb, tmp, NULL, NULL,
				prop_flags & (PROP_SCAN_DEAD_CODE
					      | PROP_SCAN_DEAD_STORES
					      | PROP_KILL_DEAD_CODE));
	    }

	  /* Don't pass PROP_SCAN_DEAD_CODE or PROP_KILL_DEAD_CODE to
	     subsequent propagate_block calls, since removing or acting as
	     removing dead code can affect global register liveness, which
	     is supposed to be finalized for this call after this loop.  */
	  stabilized_prop_flags
	    &= ~(PROP_SCAN_DEAD_CODE | PROP_SCAN_DEAD_STORES
		 | PROP_KILL_DEAD_CODE);

	  if (! changed)
	    break;

	  /* We repeat regardless of what cleanup_cfg says.  If there were
	     instructions deleted above, that might have been only a
	     partial improvement (see MAX_MEM_SET_LIST_LEN usage).
	     Further improvement may be possible.  */
	  cleanup_cfg (CLEANUP_EXPENSIVE);
	}

      /* If asked, remove notes from the blocks we'll update.  */
      if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
	count_or_remove_death_notes (blocks, 1);
    }

  /* Clear log links in case we are asked to (re)compute them.  */
  if (prop_flags & PROP_LOG_LINKS)
    clear_log_links (blocks);

  /* Propagate liveness through each requested block (or all blocks),
     verifying live_at_start for a local update.  */
  if (blocks)
    {
      EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
	{
	  bb = BASIC_BLOCK (i);

	  COPY_REG_SET (tmp, bb->global_live_at_end);
	  propagate_block (bb, tmp, NULL, NULL, stabilized_prop_flags);

	  if (extent == UPDATE_LIFE_LOCAL)
	    verify_local_live_at_start (tmp, bb);
	});
    }
  else
    {
      FOR_EACH_BB_REVERSE (bb)
	{
	  COPY_REG_SET (tmp, bb->global_live_at_end);

	  propagate_block (bb, tmp, NULL, NULL, stabilized_prop_flags);

	  if (extent == UPDATE_LIFE_LOCAL)
	    verify_local_live_at_start (tmp, bb);
	}
    }

  FREE_REG_SET (tmp);

  if (prop_flags & PROP_REG_INFO)
    {
      /* The only pseudos that are live at the beginning of the function
	 are those that were not set anywhere in the function.  local-alloc
	 doesn't know how to handle these correctly, so mark them as not
	 local to any one basic block.  */
      EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
				 FIRST_PSEUDO_REGISTER, i,
				 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });

      /* We have a problem with any pseudoreg that lives across the setjmp.
	 ANSI says that if a user variable does not change in value between
	 the setjmp and the longjmp, then the longjmp preserves it.  This
	 includes longjmp from a place where the pseudo appears dead.
	 (In principle, the value still exists if it is in scope.)
	 If the pseudo goes in a hard reg, some other value may occupy
	 that hard reg where this pseudo is dead, thus clobbering the pseudo.
	 Conclusion: such a pseudo must not go in a hard reg.  */
      EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
				 FIRST_PSEUDO_REGISTER, i,
				 {
				   if (regno_reg_rtx[i] != 0)
				     {
				       REG_LIVE_LENGTH (i) = -1;
				       REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
				     }
				 });
    }
  timevar_pop ((extent == UPDATE_LIFE_LOCAL || blocks)
	       ? TV_LIFE_UPDATE : TV_LIFE);
  if (ndead && rtl_dump_file)
    fprintf (rtl_dump_file, "deleted %i dead insns\n", ndead);
  return ndead;
}
8faa719f | 766 | |
308f9b79 | 767 | /* Update life information in all blocks where BB_DIRTY is set. */ |
768 | ||
int
update_life_info_in_dirty_blocks (extent, prop_flags)
     enum update_life_extent extent;
     int prop_flags;
{
  /* Bitmap of block indices whose life information should be
     recomputed by update_life_info.  */
  sbitmap update_life_blocks = sbitmap_alloc (last_basic_block);
  int n = 0;
  basic_block bb;
  int retval = 0;

  sbitmap_zero (update_life_blocks);
  FOR_EACH_BB (bb)
    {
      if (extent == UPDATE_LIFE_LOCAL)
	{
	  /* A local update only needs to rescan the dirty blocks.  */
	  if (bb->flags & BB_DIRTY)
	    {
	      SET_BIT (update_life_blocks, bb->index);
	      n++;
	    }
	}
      else
	{
	  /* ??? Bootstrap with -march=pentium4 fails to terminate
	     with only a partial life update.  */
	  SET_BIT (update_life_blocks, bb->index);
	  if (bb->flags & BB_DIRTY)
	    n++;
	}
    }

  /* Only pay for the update when at least one block was dirty.
     RETVAL is the number of dead insns deleted, as reported by
     update_life_info.  */
  if (n)
    retval = update_life_info (update_life_blocks, extent, prop_flags);

  sbitmap_free (update_life_blocks);
  return retval;
}
806 | ||
65f34de5 | 807 | /* Free the variables allocated by find_basic_blocks. |
8faa719f | 808 | |
6ef828f9 | 809 | KEEP_HEAD_END_P is nonzero if basic_block_info is not to be freed. */ |
22548064 | 810 | |
54ae8502 | 811 | void |
65f34de5 | 812 | free_basic_block_vars (keep_head_end_p) |
813 | int keep_head_end_p; | |
22548064 | 814 | { |
65f34de5 | 815 | if (! keep_head_end_p) |
816 | { | |
817 | if (basic_block_info) | |
71caadc0 | 818 | { |
65f34de5 | 819 | clear_edges (); |
820 | VARRAY_FREE (basic_block_info); | |
71caadc0 | 821 | } |
b3d6de89 | 822 | n_basic_blocks = 0; |
f20183e6 | 823 | last_basic_block = 0; |
65f34de5 | 824 | |
825 | ENTRY_BLOCK_PTR->aux = NULL; | |
826 | ENTRY_BLOCK_PTR->global_live_at_end = NULL; | |
827 | EXIT_BLOCK_PTR->aux = NULL; | |
828 | EXIT_BLOCK_PTR->global_live_at_start = NULL; | |
71caadc0 | 829 | } |
22548064 | 830 | } |
831 | ||
65f34de5 | 832 | /* Delete any insns that copy a register to itself. */ |
22548064 | 833 | |
int
delete_noop_moves (f)
     rtx f ATTRIBUTE_UNUSED;
{
  rtx insn, next;
  basic_block bb;
  /* Count of no-op moves deleted; also the return value.  */
  int nnoops = 0;

  FOR_EACH_BB (bb)
    {
      /* NEXT is captured before deletion so the walk survives
	 removing INSN from the chain.  */
      for (insn = bb->head; insn != NEXT_INSN (bb->end); insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (INSN_P (insn) && noop_move_p (insn))
	    {
	      rtx note;

	      /* If we're about to remove the first insn of a libcall
		 then move the libcall note to the next real insn and
		 update the retval note.  */
	      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX))
		  && XEXP (note, 0) != insn)
		{
		  rtx new_libcall_insn = next_real_insn (insn);
		  rtx retval_note = find_reg_note (XEXP (note, 0),
						   REG_RETVAL, NULL_RTX);
		  /* Attach the REG_LIBCALL note to the new first insn
		     of the libcall sequence...  */
		  REG_NOTES (new_libcall_insn)
		    = gen_rtx_INSN_LIST (REG_LIBCALL, XEXP (note, 0),
					 REG_NOTES (new_libcall_insn));
		  /* ...and point the REG_RETVAL note back at it.  */
		  XEXP (retval_note, 0) = new_libcall_insn;
		}

	      delete_insn_and_edges (insn);
	      nnoops++;
	    }
	}
    }
  if (nnoops && rtl_dump_file)
    fprintf (rtl_dump_file, "deleted %i noop moves", nnoops);
  return nnoops;
}
875 | ||
65f34de5 | 876 | /* Delete any jump tables never referenced. We can't delete them at the |
4a82352a | 877 | time of removing tablejump insn as they are referenced by the preceding |
65f34de5 | 878 | insns computing the destination, so we delay deleting and garbagecollect |
879 | them once life information is computed. */ | |
void
delete_dead_jumptables ()
{
  rtx insn, next;
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      /* A jump table is dead when its label's only remaining "uses"
	 come from the LABEL_PRESERVE_P marker, and the label is
	 immediately followed by the dispatch table itself.  */
      if (GET_CODE (insn) == CODE_LABEL
	  && LABEL_NUSES (insn) == LABEL_PRESERVE_P (insn)
	  && GET_CODE (next) == JUMP_INSN
	  && (GET_CODE (PATTERN (next)) == ADDR_VEC
	      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
	{
	  if (rtl_dump_file)
	    fprintf (rtl_dump_file, "Dead jumptable %i removed\n", INSN_UID (insn));
	  /* Delete the table first, then its label, then resume the
	     scan after the deleted pair.  */
	  delete_insn (NEXT_INSN (insn));
	  delete_insn (insn);
	  next = NEXT_INSN (next);
	}
    }
}
901 | ||
65f34de5 | 902 | /* Determine if the stack pointer is constant over the life of the function. |
903 | Only useful before prologues have been emitted. */ | |
71caadc0 | 904 | |
905 | static void | |
65f34de5 | 906 | notice_stack_pointer_modification_1 (x, pat, data) |
907 | rtx x; | |
908 | rtx pat ATTRIBUTE_UNUSED; | |
909 | void *data ATTRIBUTE_UNUSED; | |
71caadc0 | 910 | { |
65f34de5 | 911 | if (x == stack_pointer_rtx |
912 | /* The stack pointer is only modified indirectly as the result | |
913 | of a push until later in flow. See the comments in rtl.texi | |
914 | regarding Embedded Side-Effects on Addresses. */ | |
915 | || (GET_CODE (x) == MEM | |
916 | && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a' | |
917 | && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx)) | |
918 | current_function_sp_is_unchanging = 0; | |
71caadc0 | 919 | } |
d63ea2f2 | 920 | |
static void
notice_stack_pointer_modification (f)
     rtx f;
{
  rtx insn;

  /* Assume that the stack pointer is unchanging if alloca hasn't
     been used.  */
  current_function_sp_is_unchanging = !current_function_calls_alloca;
  if (! current_function_sp_is_unchanging)
    return;

  /* Scan every insn in the chain starting at F; the first store that
     touches the stack pointer settles the question, so we can stop
     as soon as the flag goes to zero.  */
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  /* Check if insn modifies the stack pointer.  */
	  note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
		       NULL);
	  if (! current_function_sp_is_unchanging)
	    return;
	}
    }
}
ecb7b891 | 945 | |
65f34de5 | 946 | /* Mark a register in SET. Hard registers in large modes get all |
947 | of their component registers set as well. */ | |
ecb7b891 | 948 | |
65f34de5 | 949 | static void |
950 | mark_reg (reg, xset) | |
951 | rtx reg; | |
952 | void *xset; | |
ecb7b891 | 953 | { |
65f34de5 | 954 | regset set = (regset) xset; |
955 | int regno = REGNO (reg); | |
ecb7b891 | 956 | |
65f34de5 | 957 | if (GET_MODE (reg) == BLKmode) |
958 | abort (); | |
ecb7b891 | 959 | |
65f34de5 | 960 | SET_REGNO_REG_SET (set, regno); |
961 | if (regno < FIRST_PSEUDO_REGISTER) | |
ecb7b891 | 962 | { |
65f34de5 | 963 | int n = HARD_REGNO_NREGS (regno, GET_MODE (reg)); |
964 | while (--n > 0) | |
965 | SET_REGNO_REG_SET (set, regno + n); | |
ecb7b891 | 966 | } |
ecb7b891 | 967 | } |
b7bef132 | 968 | |
65f34de5 | 969 | /* Mark those regs which are needed at the end of the function as live |
970 | at the end of the last basic block. */ | |
b7bef132 | 971 | |
static void
mark_regs_live_at_end (set)
     regset set;
{
  unsigned int i;

  /* If exiting needs the right stack value, consider the stack pointer
     live at the end of the function.  */
  if ((HAVE_epilogue && reload_completed)
      || ! EXIT_IGNORE_STACK
      || (! FRAME_POINTER_REQUIRED
	  && ! current_function_calls_alloca
	  && flag_omit_frame_pointer)
      || current_function_sp_is_unchanging)
    {
      SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
    }

  /* Mark the frame pointer if needed at the end of the function.  If
     we end up eliminating it, it will be removed from the live list
     of each basic block by reload.  */

  if (! reload_completed || frame_pointer_needed)
    {
      SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
      if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
	SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
#endif
    }

#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
  /* Many architectures have a GP register even without flag_pic.
     Assume the pic register is not in use, or will be handled by
     other means, if it is not fixed.  */
  if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
    SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
#endif

  /* Mark all global registers, and all registers used by the epilogue
     as being live at the end of the function since they may be
     referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      SET_REGNO_REG_SET (set, i);

  if (HAVE_epilogue && reload_completed)
    {
      /* Mark all call-saved registers that we actually used.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (regs_ever_live[i] && ! LOCAL_REGNO (i)
	    && ! TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
	  SET_REGNO_REG_SET (set, i);
    }

#ifdef EH_RETURN_DATA_REGNO
  /* Mark the registers that will contain data for the handler.  */
  if (reload_completed && current_function_calls_eh_return)
    for (i = 0; ; ++i)
      {
	unsigned regno = EH_RETURN_DATA_REGNO(i);
	if (regno == INVALID_REGNUM)
	  break;
	SET_REGNO_REG_SET (set, regno);
      }
#endif
#ifdef EH_RETURN_STACKADJ_RTX
  /* If the eh-return stack adjustment lives in a register, keep it
     live when no epilogue will restore it.  */
  if ((! HAVE_epilogue || ! reload_completed)
      && current_function_calls_eh_return)
    {
      rtx tmp = EH_RETURN_STACKADJ_RTX;
      if (tmp && REG_P (tmp))
	mark_reg (tmp, set);
    }
#endif
#ifdef EH_RETURN_HANDLER_RTX
  /* Likewise for a register holding the eh-return handler address.  */
  if ((! HAVE_epilogue || ! reload_completed)
      && current_function_calls_eh_return)
    {
      rtx tmp = EH_RETURN_HANDLER_RTX;
      if (tmp && REG_P (tmp))
	mark_reg (tmp, set);
    }
#endif

  /* Mark function return value.  */
  diddle_return_value (mark_reg, set);
}
1062 | ||
65f34de5 | 1063 | /* Callback function for for_each_successor_phi. DATA is a regset. |
1064 | Sets the SRC_REGNO, the regno of the phi alternative for phi node | |
1065 | INSN, in the regset. */ | |
532c3782 | 1066 | |
65f34de5 | 1067 | static int |
1068 | set_phi_alternative_reg (insn, dest_regno, src_regno, data) | |
1069 | rtx insn ATTRIBUTE_UNUSED; | |
1070 | int dest_regno ATTRIBUTE_UNUSED; | |
1071 | int src_regno; | |
1072 | void *data; | |
532c3782 | 1073 | { |
65f34de5 | 1074 | regset live = (regset) data; |
1075 | SET_REGNO_REG_SET (live, src_regno); | |
1076 | return 0; | |
532c3782 | 1077 | } |
1078 | ||
65f34de5 | 1079 | /* Propagate global life info around the graph of basic blocks. Begin |
1080 | considering blocks with their corresponding bit set in BLOCKS_IN. | |
1081 | If BLOCKS_IN is null, consider it the universal set. | |
d62d9931 | 1082 | |
65f34de5 | 1083 | BLOCKS_OUT is set for every block that was changed. */ |
d62d9931 | 1084 | |
static void
calculate_global_regs_live (blocks_in, blocks_out, flags)
     sbitmap blocks_in, blocks_out;
     int flags;
{
  basic_block *queue, *qhead, *qtail, *qend, bb;
  regset tmp, new_live_at_end, invalidated_by_call;
  regset_head tmp_head, invalidated_by_call_head;
  regset_head new_live_at_end_head;
  int i;

  /* Some passes used to forget clear aux field of basic block causing
     sick behavior here.  */
#ifdef ENABLE_CHECKING
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    if (bb->aux)
      abort ();
#endif

  tmp = INITIALIZE_REG_SET (tmp_head);
  new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
  invalidated_by_call = INITIALIZE_REG_SET (invalidated_by_call_head);

  /* Inconveniently, this is only readily available in hard reg set form.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
      SET_REGNO_REG_SET (invalidated_by_call, i);

  /* Create a worklist.  Allocate an extra slot for ENTRY_BLOCK, and one
     because the `head == tail' style test for an empty queue doesn't
     work with a full queue.  */
  queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
  qtail = queue;
  qhead = qend = queue + n_basic_blocks + 2;

  /* Queue the blocks set in the initial mask.  Do this in reverse block
     number order so that we are more likely for the first round to do
     useful work.  We use AUX non-null to flag that the block is queued.  */
  if (blocks_in)
    {
      FOR_EACH_BB (bb)
	if (TEST_BIT (blocks_in, bb->index))
	  {
	    *--qhead = bb;
	    bb->aux = bb;
	  }
    }
  else
    {
      FOR_EACH_BB (bb)
	{
	  *--qhead = bb;
	  bb->aux = bb;
	}
    }

  /* We clean aux when we remove the initially-enqueued bbs, but we
     don't enqueue ENTRY and EXIT initially, so clean them upfront and
     unconditionally.  */
  ENTRY_BLOCK_PTR->aux = EXIT_BLOCK_PTR->aux = NULL;

  if (blocks_out)
    sbitmap_zero (blocks_out);

  /* We work through the queue until there are no more blocks.  What
     is live at the end of this block is precisely the union of what
     is live at the beginning of all its successors.  So, we set its
     GLOBAL_LIVE_AT_END field based on the GLOBAL_LIVE_AT_START field
     for its successors.  Then, we compute GLOBAL_LIVE_AT_START for
     this block by walking through the instructions in this block in
     reverse order and updating as we go.  If that changed
     GLOBAL_LIVE_AT_START, we add the predecessors of the block to the
     queue; they will now need to recalculate GLOBAL_LIVE_AT_END.

     We are guaranteed to terminate, because GLOBAL_LIVE_AT_START
     never shrinks.  If a register appears in GLOBAL_LIVE_AT_START, it
     must either be live at the end of the block, or used within the
     block.  In the latter case, it will certainly never disappear
     from GLOBAL_LIVE_AT_START.  In the former case, the register
     could go away only if it disappeared from GLOBAL_LIVE_AT_START
     for one of the successor blocks.  By induction, that cannot
     occur.  */
  while (qhead != qtail)
    {
      int rescan, changed;
      basic_block bb;
      edge e;

      /* Dequeue from the circular buffer and mark BB as no longer
	 queued.  */
      bb = *qhead++;
      if (qhead == qend)
	qhead = queue;
      bb->aux = NULL;

      /* Begin by propagating live_at_start from the successor blocks.  */
      CLEAR_REG_SET (new_live_at_end);

      if (bb->succ)
	for (e = bb->succ; e; e = e->succ_next)
	  {
	    basic_block sb = e->dest;

	    /* Call-clobbered registers die across exception and
	       call edges.  */
	    /* ??? Abnormal call edges ignored for the moment, as this gets
	       confused by sibling call edges, which crashes reg-stack.  */
	    if (e->flags & EDGE_EH)
	      {
		bitmap_operation (tmp, sb->global_live_at_start,
				  invalidated_by_call, BITMAP_AND_COMPL);
		IOR_REG_SET (new_live_at_end, tmp);
	      }
	    else
	      IOR_REG_SET (new_live_at_end, sb->global_live_at_start);

	    /* If a target saves one register in another (instead of on
	       the stack) the save register will need to be live for EH.  */
	    if (e->flags & EDGE_EH)
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (EH_USES (i))
		  SET_REGNO_REG_SET (new_live_at_end, i);
	  }
      else
	{
	  /* This might be a noreturn function that throws.  And
	     even if it isn't, getting the unwind info right helps
	     debugging.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (EH_USES (i))
	      SET_REGNO_REG_SET (new_live_at_end, i);
	}

      /* The all-important stack pointer must always be live.  */
      SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);

      /* Before reload, there are a few registers that must be forced
	 live everywhere -- which might not already be the case for
	 blocks within infinite loops.  */
      if (! reload_completed)
	{
	  /* Any reference to any pseudo before reload is a potential
	     reference of the frame pointer.  */
	  SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  /* Pseudos with argument area equivalences may require
	     reloading via the argument pointer.  */
	  if (fixed_regs[ARG_POINTER_REGNUM])
	    SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
#endif

	  /* Any constant, or pseudo with constant equivalences, may
	     require reloading from memory using the pic register.  */
	  if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	      && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	    SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
	}

      /* Regs used in phi nodes are not included in
	 global_live_at_start, since they are live only along a
	 particular edge.  Set those regs that are live because of a
	 phi node alternative corresponding to this particular block.  */
      if (in_ssa_form)
	for_each_successor_phi (bb, &set_phi_alternative_reg,
				new_live_at_end);

      /* ENTRY only carries a live_at_end set; there is no body to
	 propagate through and no predecessors to requeue.  */
      if (bb == ENTRY_BLOCK_PTR)
	{
	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
	  continue;
	}

      /* On our first pass through this block, we'll go ahead and continue.
	 Recognize first pass by local_set NULL.  On subsequent passes, we
	 get to skip out early if live_at_end wouldn't have changed.  */

      if (bb->local_set == NULL)
	{
	  bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
	  bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
	  rescan = 1;
	}
      else
	{
	  /* If any bits were removed from live_at_end, we'll have to
	     rescan the block.  This wouldn't be necessary if we had
	     precalculated local_live, however with PROP_SCAN_DEAD_CODE
	     local_live is really dependent on live_at_end.  */
	  CLEAR_REG_SET (tmp);
	  rescan = bitmap_operation (tmp, bb->global_live_at_end,
				     new_live_at_end, BITMAP_AND_COMPL);

	  if (! rescan)
	    {
	      /* If any of the registers in the new live_at_end set are
		 conditionally set in this basic block, we must rescan.
		 This is because conditional lifetimes at the end of the
		 block do not just take the live_at_end set into account,
		 but also the liveness at the start of each successor
		 block.  We can miss changes in those sets if we only
		 compare the new live_at_end against the previous one.  */
	      CLEAR_REG_SET (tmp);
	      rescan = bitmap_operation (tmp, new_live_at_end,
					 bb->cond_local_set, BITMAP_AND);
	    }

	  if (! rescan)
	    {
	      /* Find the set of changed bits.  Take this opportunity
		 to notice that this set is empty and early out.  */
	      CLEAR_REG_SET (tmp);
	      changed = bitmap_operation (tmp, bb->global_live_at_end,
					  new_live_at_end, BITMAP_XOR);
	      if (! changed)
		continue;

	      /* If any of the changed bits overlap with local_set,
		 we'll have to rescan the block.  Detect overlap by
		 the AND with ~local_set turning off bits.  */
	      rescan = bitmap_operation (tmp, tmp, bb->local_set,
					 BITMAP_AND_COMPL);
	    }
	}

      /* Let our caller know that BB changed enough to require its
	 death notes updated.  */
      if (blocks_out)
	SET_BIT (blocks_out, bb->index);

      if (! rescan)
	{
	  /* Add to live_at_start the set of all registers in
	     new_live_at_end that aren't in the old live_at_end.  */

	  bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
			    BITMAP_AND_COMPL);
	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);

	  changed = bitmap_operation (bb->global_live_at_start,
				      bb->global_live_at_start,
				      tmp, BITMAP_IOR);
	  if (! changed)
	    continue;
	}
      else
	{
	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);

	  /* Rescan the block insn by insn to turn (a copy of) live_at_end
	     into live_at_start.  */
	  propagate_block (bb, new_live_at_end, bb->local_set,
			   bb->cond_local_set, flags);

	  /* If live_at start didn't change, no need to go farther.  */
	  if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
	    continue;

	  COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
	}

      /* Queue all predecessors of BB so that we may re-examine
	 their live_at_end.  */
      for (e = bb->pred; e; e = e->pred_next)
	{
	  basic_block pb = e->src;
	  if (pb->aux == NULL)
	    {
	      *qtail++ = pb;
	      if (qtail == qend)
		qtail = queue;
	      pb->aux = pb;
	    }
	}
    }

  FREE_REG_SET (tmp);
  FREE_REG_SET (new_live_at_end);
  FREE_REG_SET (invalidated_by_call);

  /* Release the per-block scratch sets allocated during the solve.  */
  if (blocks_out)
    {
      EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
	{
	  basic_block bb = BASIC_BLOCK (i);
	  FREE_REG_SET (bb->local_set);
	  FREE_REG_SET (bb->cond_local_set);
	});
    }
  else
    {
      FOR_EACH_BB (bb)
	{
	  FREE_REG_SET (bb->local_set);
	  FREE_REG_SET (bb->cond_local_set);
	}
    }

  free (queue);
}
a5d181df | 1383 | |
1384 | \f | |
edc2a478 | 1385 | /* This structure is used to pass parameters to and from the |
031ed459 | 1386 | the function find_regno_partial(). It is used to pass in the |
1387 | register number we are looking, as well as to return any rtx | |
a5d181df | 1388 | we find. */ |
1389 | ||
typedef struct {
  unsigned regno_to_find;	/* In: register number to look for.  */
  rtx retval;			/* Out: partial-reference rtx found, or NULL_RTX.  */
} find_regno_partial_param;
1394 | ||
1395 | ||
1396 | /* Find the rtx for the reg numbers specified in 'data' if it is | |
1397 | part of an expression which only uses part of the register. Return | |
1398 | it in the structure passed in. */ | |
static int
find_regno_partial (ptr, data)
     rtx *ptr;
     void *data;
{
  /* for_each_rtx callback; DATA is a find_regno_partial_param.
     Returning 1 stops the traversal, leaving the match in
     param->retval.  */
  find_regno_partial_param *param = (find_regno_partial_param *)data;
  unsigned reg = param->regno_to_find;
  param->retval = NULL_RTX;

  if (*ptr == NULL_RTX)
    return 0;

  switch (GET_CODE (*ptr))
    {
    /* Bitfield extracts and strict_low_part only reference part of
       the register.  */
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
    case STRICT_LOW_PART:
      if (GET_CODE (XEXP (*ptr, 0)) == REG && REGNO (XEXP (*ptr, 0)) == reg)
	{
	  param->retval = XEXP (*ptr, 0);
	  return 1;
	}
      break;

    case SUBREG:
      if (GET_CODE (SUBREG_REG (*ptr)) == REG
	  && REGNO (SUBREG_REG (*ptr)) == reg)
	{
	  param->retval = SUBREG_REG (*ptr);
	  return 1;
	}
      break;

    default:
      break;
    }

  return 0;
}
1438 | ||
1439 | /* Process all immediate successors of the entry block looking for pseudo | |
031ed459 | 1440 | registers which are live on entry. Find all of those whose first |
1441 | instance is a partial register reference of some kind, and initialize | |
a5d181df | 1442 | them to 0 after the entry block. This will prevent bit sets within |
031ed459 | 1443 | registers whose value is unknown, and may contain some kind of sticky |
a5d181df | 1444 | bits we don't want. */ |
1445 | ||
int
initialize_uninitialized_subregs ()
{
  rtx insn;
  edge e;
  /* Nonzero (and the return value) when at least one clearing move
     was inserted on an entry edge.  */
  int reg, did_something = 0;
  find_regno_partial_param param;

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      basic_block bb = e->dest;
      regset map = bb->global_live_at_start;
      EXECUTE_IF_SET_IN_REG_SET (map,
				 FIRST_PSEUDO_REGISTER, reg,
	{
	  int uid = REGNO_FIRST_UID (reg);
	  rtx i;

	  /* Find an insn which mentions the register we are looking for.
	     Its preferable to have an instance of the register's rtl since
	     there may be various flags set which we need to duplicate.
	     If we can't find it, its probably an automatic whose initial
	     value doesn't matter, or hopefully something we don't care about.  */
	  for (i = get_insns (); i && INSN_UID (i) != uid; i = NEXT_INSN (i))
	    ;
	  if (i != NULL_RTX)
	    {
	      /* Found the insn, now get the REG rtx, if we can.  */
	      param.regno_to_find = reg;
	      for_each_rtx (&i, find_regno_partial, &param);
	      if (param.retval != NULL_RTX)
		{
		  /* Emit REG = 0 on the entry edge so the later partial
		     write starts from a known value.  */
		  insn = gen_move_insn (param.retval,
					CONST0_RTX (GET_MODE (param.retval)));
		  insert_insn_on_edge (insn, e);
		  did_something = 1;
		}
	    }
	});
    }

  if (did_something)
    commit_edge_insertions ();
  return did_something;
}
1491 | ||
65f34de5 | 1492 | \f |
1493 | /* Subroutines of life analysis. */ | |
71caadc0 | 1494 | |
65f34de5 | 1495 | /* Allocate the permanent data structures that represent the results |
1496 | of life analysis. Not static since used also for stupid life analysis. */ | |
71caadc0 | 1497 | |
void
allocate_bb_life_data ()
{
  basic_block bb;

  /* Allocate live-register sets for every block, including the fake
     ENTRY block (FOR_BB_BETWEEN starts there).  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
    }

  regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
}
4cef0eda | 1511 | |
void
allocate_reg_life_data ()
{
  int i;

  max_regno = max_reg_num ();

  /* Recalculate the register space, in case it has grown.  Old style
     vector oriented regsets would set regset_{size,bytes} here also.  */
  allocate_reg_info (max_regno, FALSE, FALSE);

  /* Reset all the data we'll collect in propagate_block and its
     subroutines.  */
  for (i = 0; i < max_regno; i++)
    {
      REG_N_SETS (i) = 0;
      REG_N_REFS (i) = 0;
      REG_N_DEATHS (i) = 0;
      REG_N_CALLS_CROSSED (i) = 0;
      REG_LIVE_LENGTH (i) = 0;
      REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
    }
}
4cef0eda | 1535 | |
65f34de5 | 1536 | /* Delete dead instructions for propagate_block. */ |
2f0bfe72 | 1537 | |
65f34de5 | 1538 | static void |
fb20d6fa | 1539 | propagate_block_delete_insn (insn) |
65f34de5 | 1540 | rtx insn; |
1541 | { | |
1542 | rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX); | |
2f0bfe72 | 1543 | |
65f34de5 | 1544 | /* If the insn referred to a label, and that label was attached to |
1545 | an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's | |
1546 | pretty much mandatory to delete it, because the ADDR_VEC may be | |
1547 | referencing labels that no longer exist. | |
2f0bfe72 | 1548 | |
65f34de5 | 1549 | INSN may reference a deleted label, particularly when a jump |
1550 | table has been optimized into a direct jump. There's no | |
1551 | real good way to fix up the reference to the deleted label | |
6ce48f9b | 1552 | when the label is deleted, so we just allow it here. */ |
4cef0eda | 1553 | |
65f34de5 | 1554 | if (inote && GET_CODE (inote) == CODE_LABEL) |
4cef0eda | 1555 | { |
65f34de5 | 1556 | rtx label = XEXP (inote, 0); |
1557 | rtx next; | |
4cef0eda | 1558 | |
65f34de5 | 1559 | /* The label may be forced if it has been put in the constant |
1560 | pool. If that is the only use we must discard the table | |
1561 | jump following it, but not the label itself. */ | |
1562 | if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label) | |
1563 | && (next = next_nonnote_insn (label)) != NULL | |
1564 | && GET_CODE (next) == JUMP_INSN | |
1565 | && (GET_CODE (PATTERN (next)) == ADDR_VEC | |
1566 | || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC)) | |
4cef0eda | 1567 | { |
65f34de5 | 1568 | rtx pat = PATTERN (next); |
1569 | int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC; | |
1570 | int len = XVECLEN (pat, diff_vec_p); | |
1571 | int i; | |
2f0bfe72 | 1572 | |
65f34de5 | 1573 | for (i = 0; i < len; i++) |
1574 | LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--; | |
4cef0eda | 1575 | |
fb20d6fa | 1576 | delete_insn_and_edges (next); |
1577 | ndead++; | |
4cef0eda | 1578 | } |
1579 | } | |
1580 | ||
fb20d6fa | 1581 | delete_insn_and_edges (insn); |
1582 | ndead++; | |
4cef0eda | 1583 | } |
71caadc0 | 1584 | |
65f34de5 | 1585 | /* Delete dead libcalls for propagate_block. Return the insn |
1586 | before the libcall. */ | |
71caadc0 | 1587 | |
65f34de5 | 1588 | static rtx |
268cb459 | 1589 | propagate_block_delete_libcall ( insn, note) |
65f34de5 | 1590 | rtx insn, note; |
1591 | { | |
1592 | rtx first = XEXP (note, 0); | |
1593 | rtx before = PREV_INSN (first); | |
71caadc0 | 1594 | |
fb20d6fa | 1595 | delete_insn_chain_and_edges (first, insn); |
1596 | ndead++; | |
65f34de5 | 1597 | return before; |
c6c8ab23 | 1598 | } |
1599 | ||
/* Update the life-status of regs for one insn.  Return the previous insn.

   PBI carries the running liveness state (PBI->reg_live is the set of
   registers live *after* INSN; on return it reflects liveness *before*
   INSN).  Depending on PBI->flags this may also delete dead insns,
   record autoincrement opportunities, and accumulate per-register
   statistics.  */

rtx
propagate_one_insn (pbi, insn)
     struct propagate_block_info *pbi;
     rtx insn;
{
  rtx prev = PREV_INSN (insn);
  int flags = pbi->flags;
  int insn_is_dead = 0;
  int libcall_is_dead = 0;
  rtx note;
  int i;

  /* Notes, barriers, etc. carry no liveness information.  */
  if (! INSN_P (insn))
    return prev;

  /* NOTE is the REG_RETVAL note if INSN ends a libcall sequence.  */
  note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
  if (flags & PROP_SCAN_DEAD_CODE)
    {
      insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
      libcall_is_dead = (insn_is_dead && note != 0
			 && libcall_dead_p (pbi, note, insn));
    }

  /* If an instruction consists of just dead store(s) on final pass,
     delete it.  */
  if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
    {
      /* If we're trying to delete a prologue or epilogue instruction
	 that isn't flagged as possibly being dead, something is wrong.
	 But if we are keeping the stack pointer depressed, we might well
	 be deleting insns that are used to compute the amount to update
	 it by, so they are fine.  */
      if (reload_completed
	  && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
	       && (TYPE_RETURNS_STACK_DEPRESSED
		   (TREE_TYPE (current_function_decl))))
	  && (((HAVE_epilogue || HAVE_prologue)
	       && prologue_epilogue_contains (insn))
	      || (HAVE_sibcall_epilogue
		  && sibcall_epilogue_contains (insn)))
	  && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
	fatal_insn ("Attempt to delete prologue/epilogue insn:", insn);

      /* Record sets.  Do this even for dead instructions, since they
	 would have killed the values if they hadn't been deleted.  */
      mark_set_regs (pbi, PATTERN (insn), insn);

      /* CC0 is now known to be dead.  Either this insn used it,
	 in which case it doesn't anymore, or clobbered it,
	 so the next insn can't use it.  */
      pbi->cc0_live = 0;

      if (libcall_is_dead)
	prev = propagate_block_delete_libcall ( insn, note);
      else
	{

	  /* If INSN contains a RETVAL note and is dead, but the libcall
	     as a whole is not dead, then we want to remove INSN, but
	     not the whole libcall sequence.

	     However, we need to also remove the dangling REG_LIBCALL
	     note so that we do not have mis-matched LIBCALL/RETVAL
	     notes.  In theory we could find a new location for the
	     REG_RETVAL note, but it hardly seems worth the effort.

	     NOTE at this point will be the RETVAL note if it exists.  */
	  if (note)
	    {
	      rtx libcall_note;

	      libcall_note
		= find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
	      remove_note (XEXP (note, 0), libcall_note);
	    }

	  /* Similarly if INSN contains a LIBCALL note, remove the
	     dangling REG_RETVAL note.  */
	  note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
	  if (note)
	    {
	      rtx retval_note;

	      retval_note
		= find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
	      remove_note (XEXP (note, 0), retval_note);
	    }

	  /* Now delete INSN.  */
	  propagate_block_delete_insn (insn);
	}

      return prev;
    }

  /* See if this is an increment or decrement that can be merged into
     a following memory address.  */
#ifdef AUTO_INC_DEC
  {
    rtx x = single_set (insn);

    /* Does this instruction increment or decrement a register?  */
    if ((flags & PROP_AUTOINC)
	&& x != 0
	&& GET_CODE (SET_DEST (x)) == REG
	&& (GET_CODE (SET_SRC (x)) == PLUS
	    || GET_CODE (SET_SRC (x)) == MINUS)
	&& XEXP (SET_SRC (x), 0) == SET_DEST (x)
	&& GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
	/* Ok, look for a following memory ref we can combine with.
	   If one is found, change the memory ref to a PRE_INC
	   or PRE_DEC, cancel this insn, and return 1.
	   Return 0 if nothing has been done.  */
	&& try_pre_increment_1 (pbi, insn))
      return prev;
  }
#endif /* AUTO_INC_DEC */

  CLEAR_REG_SET (pbi->new_set);

  /* If this is not the final pass, and this insn is copying the value of
     a library call and it's dead, don't scan the insns that perform the
     library call, so that the call's arguments are not marked live.  */
  if (libcall_is_dead)
    {
      /* Record the death of the dest reg.  */
      mark_set_regs (pbi, PATTERN (insn), insn);

      /* Skip the whole libcall sequence: resume scanning just before
	 its first insn (NOTE is the REG_RETVAL note found above).  */
      insn = XEXP (note, 0);
      return PREV_INSN (insn);
    }
  else if (GET_CODE (PATTERN (insn)) == SET
	   && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
	   && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
	   && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
	   && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
    /* We have an insn to pop a constant amount off the stack.
       (Such insns use PLUS regardless of the direction of the stack,
       and any insn to adjust the stack by a constant is always a pop.)
       These insns, if not dead stores, have no effect on life, though
       they do have an effect on the memory stores we are tracking.  */
    invalidate_mems_from_set (pbi, stack_pointer_rtx);
  else
    {
      rtx note;  /* Shadows the outer NOTE; used for REG_EQUAL below.  */
      /* Any regs live at the time of a call instruction must not go
	 in a register clobbered by calls.  Find all regs now live and
	 record this for them.  */

      if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
	EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
				   { REG_N_CALLS_CROSSED (i)++; });

      /* Record sets.  Do this even for dead instructions, since they
	 would have killed the values if they hadn't been deleted.  */
      mark_set_regs (pbi, PATTERN (insn), insn);

      if (GET_CODE (insn) == CALL_INSN)
	{
	  int i;
	  rtx note, cond;

	  cond = NULL_RTX;
	  if (GET_CODE (PATTERN (insn)) == COND_EXEC)
	    cond = COND_EXEC_TEST (PATTERN (insn));

	  /* Non-constant calls clobber memory, constant calls do not
	     clobber memory, though they may clobber outgoing arguments
	     on the stack.  */
	  if (! CONST_OR_PURE_CALL_P (insn))
	    {
	      /* Forget every memory store we were tracking.  */
	      free_EXPR_LIST_list (&pbi->mem_set_list);
	      pbi->mem_set_list_len = 0;
	    }
	  else
	    invalidate_mems_from_set (pbi, stack_pointer_rtx);

	  /* There may be extra registers to be clobbered.  */
	  for (note = CALL_INSN_FUNCTION_USAGE (insn);
	       note;
	       note = XEXP (note, 1))
	    if (GET_CODE (XEXP (note, 0)) == CLOBBER)
	      mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
			  cond, insn, pbi->flags);

	  /* Calls change all call-used and global registers.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
	      {
		/* We do not want REG_UNUSED notes for these registers.  */
		mark_set_1 (pbi, CLOBBER, regno_reg_rtx[i], cond, insn,
			    pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
	      }
	}

      /* If an insn doesn't use CC0, it becomes dead since we assume
	 that every insn clobbers it.  So show it dead here;
	 mark_used_regs will set it live if it is referenced.  */
      pbi->cc0_live = 0;

      /* Record uses.  */
      if (! insn_is_dead)
	mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
      if ((flags & PROP_EQUAL_NOTES)
	  && ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX))
	      || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX))))
	mark_used_regs (pbi, XEXP (note, 0), NULL_RTX, insn);

      /* Sometimes we may have inserted something before INSN (such as a move)
	 when we make an auto-inc.  So ensure we will scan those insns.  */
#ifdef AUTO_INC_DEC
      prev = PREV_INSN (insn);
#endif

      if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
	{
	  int i;
	  rtx note, cond;

	  cond = NULL_RTX;
	  if (GET_CODE (PATTERN (insn)) == COND_EXEC)
	    cond = COND_EXEC_TEST (PATTERN (insn));

	  /* Calls use their arguments.  */
	  for (note = CALL_INSN_FUNCTION_USAGE (insn);
	       note;
	       note = XEXP (note, 1))
	    if (GET_CODE (XEXP (note, 0)) == USE)
	      mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
			      cond, insn);

	  /* The stack ptr is used (honorarily) by a CALL insn.  */
	  SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);

	  /* Calls may also reference any of the global registers,
	     so they are made live.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      mark_used_reg (pbi, regno_reg_rtx[i], cond, insn);
	}
    }

  /* On final pass, update counts of how many insns in which each reg
     is live.  */
  if (flags & PROP_REG_INFO)
    EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
			       { REG_LIVE_LENGTH (i)++; });

  return prev;
}
1852 | ||
/* Initialize a propagate_block_info struct for public consumption.
   Note that the structure itself is opaque to this file, but that
   the user can use the regsets provided here.

   BB is the block to be scanned; LIVE is the regset of registers live
   at its end (updated in place by propagate_one_insn).  LOCAL_SET and
   COND_LOCAL_SET, if non-null, collect unconditionally resp.
   conditionally killed registers.  FLAGS is a mask of PROP_* bits.  */

struct propagate_block_info *
init_propagate_block_info (bb, live, local_set, cond_local_set, flags)
     basic_block bb;
     regset live, local_set, cond_local_set;
     int flags;
{
  struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));

  pbi->bb = bb;
  pbi->reg_live = live;
  pbi->mem_set_list = NULL_RTX;
  pbi->mem_set_list_len = 0;
  pbi->local_set = local_set;
  pbi->cond_local_set = cond_local_set;
  pbi->cc0_live = 0;
  pbi->flags = flags;

  /* reg_next_use is only needed when we track log links or look for
     autoincrement opportunities.  */
  if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
    pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
  else
    pbi->reg_next_use = NULL;

  pbi->new_set = BITMAP_XMALLOC ();

#ifdef HAVE_conditional_execution
  pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
				       free_reg_cond_life_info);
  pbi->reg_cond_reg = BITMAP_XMALLOC ();

  /* If this block ends in a conditional branch, for each register live
     from one side of the branch and not the other, record the register
     as conditionally dead.  */
  if (GET_CODE (bb->end) == JUMP_INSN
      && any_condjump_p (bb->end))
    {
      regset_head diff_head;
      regset diff = INITIALIZE_REG_SET (diff_head);
      basic_block bb_true, bb_false;
      rtx cond_true, cond_false, set_src;
      int i;

      /* Identify the successor blocks.  */
      bb_true = bb->succ->dest;
      if (bb->succ->succ_next != NULL)
	{
	  bb_false = bb->succ->succ_next->dest;

	  /* Make BB_TRUE the branch-taken successor: the fallthrough
	     edge is the false arm.  */
	  if (bb->succ->flags & EDGE_FALLTHRU)
	    {
	      basic_block t = bb_false;
	      bb_false = bb_true;
	      bb_true = t;
	    }
	  else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
	    abort ();
	}
      else
	{
	  /* This can happen with a conditional jump to the next insn.  */
	  if (JUMP_LABEL (bb->end) != bb_true->head)
	    abort ();

	  /* Simplest way to do nothing.  */
	  bb_false = bb_true;
	}

      /* Extract the condition from the branch.  */
      set_src = SET_SRC (pc_set (bb->end));
      cond_true = XEXP (set_src, 0);
      cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
				   GET_MODE (cond_true), XEXP (cond_true, 0),
				   XEXP (cond_true, 1));
      /* If the branch falls through on the true condition, swap the
	 two conditions so COND_TRUE matches BB_TRUE.  */
      if (GET_CODE (XEXP (set_src, 1)) == PC)
	{
	  rtx t = cond_false;
	  cond_false = cond_true;
	  cond_true = t;
	}

      /* Compute which registers lead different lives in the successors.  */
      if (bitmap_operation (diff, bb_true->global_live_at_start,
			    bb_false->global_live_at_start, BITMAP_XOR))
	{
	  rtx reg = XEXP (cond_true, 0);

	  if (GET_CODE (reg) == SUBREG)
	    reg = SUBREG_REG (reg);

	  if (GET_CODE (reg) != REG)
	    abort ();

	  SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));

	  /* For each such register, mark it conditionally dead.  */
	  EXECUTE_IF_SET_IN_REG_SET
	    (diff, 0, i,
	     {
	       struct reg_cond_life_info *rcli;
	       rtx cond;

	       rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));

	       /* Dead under the condition of the arm where it is NOT
		  live at entry.  */
	       if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
		 cond = cond_false;
	       else
		 cond = cond_true;
	       rcli->condition = cond;
	       rcli->stores = const0_rtx;
	       rcli->orig_condition = cond;

	       splay_tree_insert (pbi->reg_cond_dead, i,
				  (splay_tree_value) rcli);
	     });
	}

      FREE_REG_SET (diff);
    }
#endif

  /* If this block has no successors, any stores to the frame that aren't
     used later in the block are dead.  So make a pass over the block
     recording any such that are made and show them dead at the end.  We do
     a very conservative and simple job here.  */
  if (optimize
      && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
	    && (TYPE_RETURNS_STACK_DEPRESSED
		(TREE_TYPE (current_function_decl))))
      && (flags & PROP_SCAN_DEAD_STORES)
      && (bb->succ == NULL
	  || (bb->succ->succ_next == NULL
	      && bb->succ->dest == EXIT_BLOCK_PTR
	      && ! current_function_calls_eh_return)))
    {
      rtx insn, set;
      for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
	if (GET_CODE (insn) == INSN
	    && (set = single_set (insn))
	    && GET_CODE (SET_DEST (set)) == MEM)
	  {
	    rtx mem = SET_DEST (set);
	    rtx canon_mem = canon_rtx (mem);

	    /* This optimization is performed by faking a store to the
	       memory at the end of the block.  This doesn't work for
	       unchanging memories because multiple stores to unchanging
	       memory is illegal and alias analysis doesn't consider it.  */
	    if (RTX_UNCHANGING_P (canon_mem))
	      continue;

	    /* Only frame-relative addresses with constant offsets are
	       handled; anything else is left alone (conservative).  */
	    if (XEXP (canon_mem, 0) == frame_pointer_rtx
		|| (GET_CODE (XEXP (canon_mem, 0)) == PLUS
		    && XEXP (XEXP (canon_mem, 0), 0) == frame_pointer_rtx
		    && GET_CODE (XEXP (XEXP (canon_mem, 0), 1)) == CONST_INT))
	      add_to_mem_set_list (pbi, canon_mem);
	  }
    }

  return pbi;
}
2016 | ||
65f34de5 | 2017 | /* Release a propagate_block_info struct. */ |
e05cb078 | 2018 | |
65f34de5 | 2019 | void |
2020 | free_propagate_block_info (pbi) | |
2021 | struct propagate_block_info *pbi; | |
e05cb078 | 2022 | { |
65f34de5 | 2023 | free_EXPR_LIST_list (&pbi->mem_set_list); |
e05cb078 | 2024 | |
65f34de5 | 2025 | BITMAP_XFREE (pbi->new_set); |
e05cb078 | 2026 | |
65f34de5 | 2027 | #ifdef HAVE_conditional_execution |
2028 | splay_tree_delete (pbi->reg_cond_dead); | |
2029 | BITMAP_XFREE (pbi->reg_cond_reg); | |
2030 | #endif | |
e05cb078 | 2031 | |
65f34de5 | 2032 | if (pbi->reg_next_use) |
2033 | free (pbi->reg_next_use); | |
e05cb078 | 2034 | |
65f34de5 | 2035 | free (pbi); |
2036 | } | |
0e21c32a | 2037 | |
65f34de5 | 2038 | /* Compute the registers live at the beginning of a basic block BB from |
2039 | those live at the end. | |
9028a007 | 2040 | |
65f34de5 | 2041 | When called, REG_LIVE contains those live at the end. On return, it |
2042 | contains those live at the beginning. | |
ecb643e2 | 2043 | |
65f34de5 | 2044 | LOCAL_SET, if non-null, will be set with all registers killed |
2045 | unconditionally by this basic block. | |
2046 | Likewise, COND_LOCAL_SET, if non-null, will be set with all registers | |
2047 | killed conditionally by this basic block. If there is any unconditional | |
2048 | set of a register, then the corresponding bit will be set in LOCAL_SET | |
2049 | and cleared in COND_LOCAL_SET. | |
2050 | It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this | |
2051 | case, the resulting set will be equal to the union of the two sets that | |
2052 | would otherwise be computed. | |
e05cb078 | 2053 | |
6ef828f9 | 2054 | Return nonzero if an INSN is deleted (i.e. by dead code removal). */ |
e05cb078 | 2055 | |
65f34de5 | 2056 | int |
2057 | propagate_block (bb, live, local_set, cond_local_set, flags) | |
2058 | basic_block bb; | |
2059 | regset live; | |
2060 | regset local_set; | |
2061 | regset cond_local_set; | |
2062 | int flags; | |
e05cb078 | 2063 | { |
65f34de5 | 2064 | struct propagate_block_info *pbi; |
2065 | rtx insn, prev; | |
2066 | int changed; | |
e05cb078 | 2067 | |
65f34de5 | 2068 | pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags); |
74b0991d | 2069 | |
65f34de5 | 2070 | if (flags & PROP_REG_INFO) |
74b0991d | 2071 | { |
19cb6b50 | 2072 | int i; |
e05cb078 | 2073 | |
65f34de5 | 2074 | /* Process the regs live at the end of the block. |
2075 | Mark them as not local to any one basic block. */ | |
2076 | EXECUTE_IF_SET_IN_REG_SET (live, 0, i, | |
2077 | { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; }); | |
2078 | } | |
e05cb078 | 2079 | |
65f34de5 | 2080 | /* Scan the block an insn at a time from end to beginning. */ |
e05cb078 | 2081 | |
65f34de5 | 2082 | changed = 0; |
2083 | for (insn = bb->end;; insn = prev) | |
2084 | { | |
2085 | /* If this is a call to `setjmp' et al, warn if any | |
2086 | non-volatile datum is live. */ | |
2087 | if ((flags & PROP_REG_INFO) | |
2088 | && GET_CODE (insn) == CALL_INSN | |
2089 | && find_reg_note (insn, REG_SETJMP, NULL)) | |
2090 | IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live); | |
e05cb078 | 2091 | |
65f34de5 | 2092 | prev = propagate_one_insn (pbi, insn); |
2093 | changed |= NEXT_INSN (prev) != insn; | |
0e21c32a | 2094 | |
65f34de5 | 2095 | if (insn == bb->head) |
2096 | break; | |
0e21c32a | 2097 | } |
2098 | ||
65f34de5 | 2099 | free_propagate_block_info (pbi); |
2100 | ||
2101 | return changed; | |
e05cb078 | 2102 | } |
65f34de5 | 2103 | \f |
/* Return 1 if X (the body of an insn, or part of it) is just dead stores
   (SET expressions whose destinations are registers dead after the insn).
   NEEDED is the regset that says which regs are alive after the insn.

   Unless CALL_OK is nonzero, an insn is needed if it contains a CALL.

   If X is the entire body of an insn, NOTES contains the reg notes
   pertaining to the insn.

   NOTE(review): "NEEDED" above appears to refer to PBI->reg_live — the
   parameter list carries PBI rather than a bare regset; confirm against
   the header's documentation of this function.  */

static int
insn_dead_p (pbi, x, call_ok, notes)
     struct propagate_block_info *pbi;
     rtx x;
     int call_ok;
     rtx notes ATTRIBUTE_UNUSED;
{
  enum rtx_code code = GET_CODE (x);

  /* Don't eliminate insns that may trap.  */
  if (flag_non_call_exceptions && may_trap_p (x))
    return 0;

#ifdef AUTO_INC_DEC
  /* As flow is invoked after combine, we must take existing AUTO_INC
     expressions into account.  */
  for (; notes; notes = XEXP (notes, 1))
    {
      if (REG_NOTE_KIND (notes) == REG_INC)
	{
	  int regno = REGNO (XEXP (notes, 0));

	  /* Don't delete insns to set global regs.  */
	  if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
	      || REGNO_REG_SET_P (pbi->reg_live, regno))
	    return 0;
	}
    }
#endif

  /* If setting something that's a reg or part of one,
     see if that register's altered value will be live.  */

  if (code == SET)
    {
      rtx r = SET_DEST (x);

#ifdef HAVE_cc0
      /* A set of CC0 is dead exactly when no later insn reads CC0.  */
      if (GET_CODE (r) == CC0)
	return ! pbi->cc0_live;
#endif

      /* A SET that is a subroutine call cannot be dead.  */
      if (GET_CODE (SET_SRC (x)) == CALL)
	{
	  if (! call_ok)
	    return 0;
	}

      /* Don't eliminate loads from volatile memory or volatile asms.  */
      else if (volatile_refs_p (SET_SRC (x)))
	return 0;

      if (GET_CODE (r) == MEM)
	{
	  rtx temp, canon_r;

	  if (MEM_VOLATILE_P (r) || GET_MODE (r) == BLKmode)
	    return 0;

	  canon_r = canon_rtx (r);

	  /* Walk the set of memory locations we are currently tracking
	     and see if one is an identical match to this memory location.
	     If so, this memory write is dead (remember, we're walking
	     backwards from the end of the block to the start).  Since
	     rtx_equal_p does not check the alias set or flags, we also
	     must have the potential for them to conflict (anti_dependence).  */
	  for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
	    if (anti_dependence (r, XEXP (temp, 0)))
	      {
		rtx mem = XEXP (temp, 0);

		/* Dead if a later, tracked store covers at least as
		   many bytes at the same canonical address.  */
		if (rtx_equal_p (XEXP (canon_r, 0), XEXP (mem, 0))
		    && (GET_MODE_SIZE (GET_MODE (canon_r))
			<= GET_MODE_SIZE (GET_MODE (mem))))
		  return 1;

#ifdef AUTO_INC_DEC
		/* Check if memory reference matches an auto increment.  Only
		   post increment/decrement or modify are valid.  */
		if (GET_MODE (mem) == GET_MODE (r)
		    && (GET_CODE (XEXP (mem, 0)) == POST_DEC
			|| GET_CODE (XEXP (mem, 0)) == POST_INC
			|| GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
		    && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
		    && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
		  return 1;
#endif
	      }
	}
      else
	{
	  /* Strip wrappers to reach the underlying register, if any.  */
	  while (GET_CODE (r) == SUBREG
		 || GET_CODE (r) == STRICT_LOW_PART
		 || GET_CODE (r) == ZERO_EXTRACT)
	    r = XEXP (r, 0);

	  if (GET_CODE (r) == REG)
	    {
	      int regno = REGNO (r);

	      /* Obvious.  */
	      if (REGNO_REG_SET_P (pbi->reg_live, regno))
		return 0;

	      /* If this is a hard register, verify that subsequent
		 words are not needed.  */
	      if (regno < FIRST_PSEUDO_REGISTER)
		{
		  int n = HARD_REGNO_NREGS (regno, GET_MODE (r));

		  while (--n > 0)
		    if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
		      return 0;
		}

	      /* Don't delete insns to set global regs.  */
	      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
		return 0;

	      /* Make sure insns to set the stack pointer aren't deleted.  */
	      if (regno == STACK_POINTER_REGNUM)
		return 0;

	      /* ??? These bits might be redundant with the force live bits
		 in calculate_global_regs_live.  We would delete from
		 sequential sets; whether this actually affects real code
		 for anything but the stack pointer I don't know.  */
	      /* Make sure insns to set the frame pointer aren't deleted.  */
	      if (regno == FRAME_POINTER_REGNUM
		  && (! reload_completed || frame_pointer_needed))
		return 0;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      if (regno == HARD_FRAME_POINTER_REGNUM
		  && (! reload_completed || frame_pointer_needed))
		return 0;
#endif

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      /* Make sure insns to set arg pointer are never deleted
		 (if the arg pointer isn't fixed, there will be a USE
		 for it, so we can treat it normally).  */
	      if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
		return 0;
#endif

	      /* Otherwise, the set is dead.  */
	      return 1;
	    }
	}
    }

  /* If performing several activities, insn is dead if each activity
     is individually dead.  Also, CLOBBERs and USEs can be ignored; a
     CLOBBER or USE that's inside a PARALLEL doesn't make the insn
     worth keeping.  */
  else if (code == PARALLEL)
    {
      int i = XVECLEN (x, 0);

      for (i--; i >= 0; i--)
	if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
	    && GET_CODE (XVECEXP (x, 0, i)) != USE
	    && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
	  return 0;

      return 1;
    }

  /* A CLOBBER of a pseudo-register that is dead serves no purpose.  That
     is not necessarily true for hard registers.  */
  else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
	   && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
	   && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
    return 1;

  /* We do not check other CLOBBER or USE here.  An insn consisting of just
     a CLOBBER or just a USE should not be deleted.  */
  return 0;
}
a0d79d69 | 2294 | |
/* If INSN is the last insn in a libcall, and assuming INSN is dead,
   return 1 if the entire library call is dead.
   This is true if INSN copies a register (hard or pseudo)
   and if the hard return reg of the call insn is dead.
   (The caller should have tested the destination of the SET inside
   INSN already for death.)

   If this insn doesn't just copy a register, then we don't
   have an ordinary libcall.  In that case, cse could not have
   managed to substitute the source for the dest later on,
   so we can assume the libcall is dead.

   PBI is the block info giving pseudoregs live before this insn.
   NOTE is the REG_RETVAL note of the insn.  */

static int
libcall_dead_p (pbi, note, insn)
     struct propagate_block_info *pbi;
     rtx note;
     rtx insn;
{
  /* single_set returns the sole SET of the insn, or NULL if the insn
     is not a single assignment.  */
  rtx x = single_set (insn);

  if (x)
    {
      rtx r = SET_SRC (x);

      /* Only a plain register copy qualifies as the tail of an
	 ordinary libcall sequence.  */
      if (GET_CODE (r) == REG)
	{
	  rtx call = XEXP (note, 0);
	  rtx call_pat;
	  int i;

	  /* Find the call insn.  The REG_RETVAL note points at the
	     start of the libcall sequence; scan forward from there.  */
	  while (call != insn && GET_CODE (call) != CALL_INSN)
	    call = NEXT_INSN (call);

	  /* If there is none, do nothing special,
	     since ordinary death handling can understand these insns.  */
	  if (call == insn)
	    return 0;

	  /* See if the hard reg holding the value is dead.
	     If this is a PARALLEL, find the call within it.  */
	  call_pat = PATTERN (call);
	  if (GET_CODE (call_pat) == PARALLEL)
	    {
	      for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
		if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
		    && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
		  break;

	      /* This may be a library call that is returning a value
		 via invisible pointer.  Do nothing special, since
		 ordinary death handling can understand these insns.  */
	      if (i < 0)
		return 0;

	      call_pat = XVECEXP (call_pat, 0, i);
	    }

	  /* The whole libcall is dead iff the SET performed by the call
	     itself is dead.  Pass call_ok = 1 since this SET_SRC is a
	     CALL by construction.  */
	  return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
	}
    }
  /* Not a register copy: cse could not have propagated the result, so
     the libcall sequence serves no purpose — report it dead.  */
  return 1;
}
a0d79d69 | 2361 | |
65f34de5 | 2362 | /* Return 1 if register REGNO was used before it was set, i.e. if it is |
2363 | live at function entry. Don't count global register variables, variables | |
2364 | in registers that can be used for function arg passing, or variables in | |
2365 | fixed hard registers. */ | |
1fece65d | 2366 | |
65f34de5 | 2367 | int |
2368 | regno_uninitialized (regno) | |
03030d4d | 2369 | unsigned int regno; |
65f34de5 | 2370 | { |
b3d6de89 | 2371 | if (n_basic_blocks == 0 |
65f34de5 | 2372 | || (regno < FIRST_PSEUDO_REGISTER |
2373 | && (global_regs[regno] | |
2374 | || fixed_regs[regno] | |
2375 | || FUNCTION_ARG_REGNO_P (regno)))) | |
2376 | return 0; | |
a0d79d69 | 2377 | |
345ac34a | 2378 | return REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, regno); |
a0d79d69 | 2379 | } |
2380 | ||
65f34de5 | 2381 | /* 1 if register REGNO was alive at a place where `setjmp' was called |
2382 | and was set more than once or is an argument. | |
2383 | Such regs may be clobbered by `longjmp'. */ | |
1fece65d | 2384 | |
65f34de5 | 2385 | int |
2386 | regno_clobbered_at_setjmp (regno) | |
2387 | int regno; | |
2388 | { | |
b3d6de89 | 2389 | if (n_basic_blocks == 0) |
65f34de5 | 2390 | return 0; |
2391 | ||
2392 | return ((REG_N_SETS (regno) > 1 | |
345ac34a | 2393 | || REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, regno)) |
65f34de5 | 2394 | && REGNO_REG_SET_P (regs_live_at_setjmp, regno)); |
2395 | } | |
2396 | \f | |
/* Add MEM to PBI->MEM_SET_LIST.  MEM should be canonical.  Respect the
   maximal list size; look for overlaps in mode and select the largest.  */

static void
add_to_mem_set_list (pbi, mem)
     struct propagate_block_info *pbi;
     rtx mem;
{
  rtx i;

  /* We don't know how large a BLKmode store is, so we must not
     take them into consideration.  */
  if (GET_MODE (mem) == BLKmode)
    return;

  /* Look for an existing entry with the same address; if found, keep
     whichever of the two MEMs covers more bytes.  */
  for (i = pbi->mem_set_list; i ; i = XEXP (i, 1))
    {
      rtx e = XEXP (i, 0);
      if (rtx_equal_p (XEXP (mem, 0), XEXP (e, 0)))
	{
	  if (GET_MODE_SIZE (GET_MODE (mem)) > GET_MODE_SIZE (GET_MODE (e)))
	    {
#ifdef AUTO_INC_DEC
	      /* If we must store a copy of the mem, we can just modify
		 the mode of the stored copy.  */
	      if (pbi->flags & PROP_AUTOINC)
		PUT_MODE (e, GET_MODE (mem));
	      else
#endif
		XEXP (i, 0) = mem;
	    }
	  /* Either way the address is already represented; done.  */
	  return;
	}
    }

  /* New address: append, but only while the list is below its cap so
     dead-store scanning stays cheap.  */
  if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN)
    {
#ifdef AUTO_INC_DEC
      /* Store a copy of mem, otherwise the address may be
	 scrogged by find_auto_inc.  */
      if (pbi->flags & PROP_AUTOINC)
	mem = shallow_copy_rtx (mem);
#endif
      pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
      pbi->mem_set_list_len++;
    }
}
2443 | ||
65f34de5 | 2444 | /* INSN references memory, possibly using autoincrement addressing modes. |
2445 | Find any entries on the mem_set_list that need to be invalidated due | |
2446 | to an address change. */ | |
1fece65d | 2447 | |
638987bd | 2448 | static int |
2449 | invalidate_mems_from_autoinc (px, data) | |
2450 | rtx *px; | |
2451 | void *data; | |
a0d79d69 | 2452 | { |
638987bd | 2453 | rtx x = *px; |
2454 | struct propagate_block_info *pbi = data; | |
2455 | ||
2456 | if (GET_RTX_CLASS (GET_CODE (x)) == 'a') | |
2457 | { | |
2458 | invalidate_mems_from_set (pbi, XEXP (x, 0)); | |
2459 | return -1; | |
2460 | } | |
2461 | ||
2462 | return 0; | |
65f34de5 | 2463 | } |
a0d79d69 | 2464 | |
424da949 | 2465 | /* EXP is a REG. Remove any dependent entries from pbi->mem_set_list. */ |
a0d79d69 | 2466 | |
65f34de5 | 2467 | static void |
2468 | invalidate_mems_from_set (pbi, exp) | |
2469 | struct propagate_block_info *pbi; | |
2470 | rtx exp; | |
2471 | { | |
2472 | rtx temp = pbi->mem_set_list; | |
2473 | rtx prev = NULL_RTX; | |
2474 | rtx next; | |
a0d79d69 | 2475 | |
65f34de5 | 2476 | while (temp) |
a0d79d69 | 2477 | { |
65f34de5 | 2478 | next = XEXP (temp, 1); |
2479 | if (reg_overlap_mentioned_p (exp, XEXP (temp, 0))) | |
a0d79d69 | 2480 | { |
65f34de5 | 2481 | /* Splice this entry out of the list. */ |
2482 | if (prev) | |
2483 | XEXP (prev, 1) = next; | |
2484 | else | |
2485 | pbi->mem_set_list = next; | |
2486 | free_EXPR_LIST_node (temp); | |
2487 | pbi->mem_set_list_len--; | |
a0d79d69 | 2488 | } |
a0d79d69 | 2489 | else |
65f34de5 | 2490 | prev = temp; |
2491 | temp = next; | |
a0d79d69 | 2492 | } |
65f34de5 | 2493 | } |
a0d79d69 | 2494 | |
/* Process the registers that are set within X.  Their bits are set to
   1 in the regset DEAD, because they are dead prior to this insn.

   If INSN is nonzero, it is the insn being processed.

   FLAGS is the set of operations to perform.  */

static void
mark_set_regs (pbi, x, insn)
     struct propagate_block_info *pbi;
     rtx x, insn;
{
  rtx cond = NULL_RTX;
  rtx link;
  enum rtx_code code;

  /* REG_INC notes record autoincrement side effects; treat each one as
     a SET of the incremented register, conditionalized if the whole
     pattern is a COND_EXEC.  */
  if (insn)
    for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
      {
	if (REG_NOTE_KIND (link) == REG_INC)
	  mark_set_1 (pbi, SET, XEXP (link, 0),
		      (GET_CODE (x) == COND_EXEC
		       ? COND_EXEC_TEST (x) : NULL_RTX),
		      insn, pbi->flags);
      }
 retry:
  switch (code = GET_CODE (x))
    {
    case SET:
    case CLOBBER:
      mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
      return;

    case COND_EXEC:
      /* Unwrap the conditional and reprocess its body with COND set.  */
      cond = COND_EXEC_TEST (x);
      x = COND_EXEC_CODE (x);
      goto retry;

    case PARALLEL:
      {
	int i;

	/* Process each element; a COND_EXEC element conditionalizes
	   only the SET/CLOBBER nested directly inside it.  */
	for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	  {
	    rtx sub = XVECEXP (x, 0, i);
	    switch (code = GET_CODE (sub))
	      {
	      case COND_EXEC:
		/* Nested COND_EXEC inside a COND_EXEC pattern is not a
		   form this pass handles.  */
		if (cond != NULL_RTX)
		  abort ();

		cond = COND_EXEC_TEST (sub);
		sub = COND_EXEC_CODE (sub);
		if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
		  break;
		/* Fall through.  */

	      case SET:
	      case CLOBBER:
		mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
		break;

	      default:
		break;
	      }
	  }
	break;
      }

    default:
      break;
    }
}
2568 | ||
/* Process a single set, which appears in INSN.  REG (which may not
   actually be a REG, it may also be a SUBREG, PARALLEL, etc.) is
   being set using the CODE (which may be SET, CLOBBER, or COND_EXEC).
   If the set is conditional (because it appear in a COND_EXEC), COND
   will be the condition.  */

static void
mark_set_1 (pbi, code, reg, cond, insn, flags)
     struct propagate_block_info *pbi;
     enum rtx_code code;
     rtx reg, cond, insn;
     int flags;
{
  /* Hard-register range [regno_first, regno_last] covered by this set;
     stays -1 for non-REG destinations (e.g. MEM).  */
  int regno_first = -1, regno_last = -1;
  /* Bitmask over that range: bit (i - regno_first) set means word I is
     NOT actually dead (partial set or conditional death).  */
  unsigned long not_dead = 0;
  int i;

  /* Modifying just one hardware register of a multi-reg value or just a
     byte field of a register does not mean the value from before this insn
     is now dead.  Of course, if it was dead after it's unused now.  */

  switch (GET_CODE (reg))
    {
    case PARALLEL:
      /* Some targets place small structures in registers for return values of
	 functions.  We have to detect this case specially here to get correct
	 flow information.  */
      for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
	if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
	  mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
		      flags);
      return;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
    case STRICT_LOW_PART:
      /* ??? Assumes STRICT_LOW_PART not used on multi-word registers.  */
      do
	reg = XEXP (reg, 0);
      while (GET_CODE (reg) == SUBREG
	     || GET_CODE (reg) == ZERO_EXTRACT
	     || GET_CODE (reg) == SIGN_EXTRACT
	     || GET_CODE (reg) == STRICT_LOW_PART);
      if (GET_CODE (reg) == MEM)
	break;
      /* A partial store: the old value is still partly live if the
	 register was live before.  */
      not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
      /* Fall through.  */

    case REG:
      regno_last = regno_first = REGNO (reg);
      if (regno_first < FIRST_PSEUDO_REGISTER)
	regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
      break;

    case SUBREG:
      if (GET_CODE (SUBREG_REG (reg)) == REG)
	{
	  enum machine_mode outer_mode = GET_MODE (reg);
	  enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));

	  /* Identify the range of registers affected.  This is moderately
	     tricky for hard registers.  See alter_subreg.  */

	  regno_last = regno_first = REGNO (SUBREG_REG (reg));
	  if (regno_first < FIRST_PSEUDO_REGISTER)
	    {
	      regno_first += subreg_regno_offset (regno_first, inner_mode,
						  SUBREG_BYTE (reg),
						  outer_mode);
	      regno_last = (regno_first
			    + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);

	      /* Since we've just adjusted the register number ranges, make
		 sure REG matches.  Otherwise some_was_live will be clear
		 when it shouldn't have been, and we'll create incorrect
		 REG_UNUSED notes.  */
	      reg = gen_rtx_REG (outer_mode, regno_first);
	    }
	  else
	    {
	      /* If the number of words in the subreg is less than the number
		 of words in the full register, we have a well-defined partial
		 set.  Otherwise the high bits are undefined.

		 This is only really applicable to pseudos, since we just took
		 care of multi-word hard registers.  */
	      if (((GET_MODE_SIZE (outer_mode)
		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
		  < ((GET_MODE_SIZE (inner_mode)
		      + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
		not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live,
							    regno_first);

	      reg = SUBREG_REG (reg);
	    }
	}
      else
	reg = SUBREG_REG (reg);
      break;

    default:
      break;
    }

  /* If this set is a MEM, then it kills any aliased writes.
     If this set is a REG, then it kills any MEMs which use the reg.  */
  if (optimize && (flags & PROP_SCAN_DEAD_STORES))
    {
      if (GET_CODE (reg) == REG)
	invalidate_mems_from_set (pbi, reg);

      /* If the memory reference had embedded side effects (autoincrement
	 address modes), then we may need to kill some entries on the
	 memory set list.  */
      if (insn && GET_CODE (reg) == MEM)
	for_each_rtx (&PATTERN (insn), invalidate_mems_from_autoinc, pbi);

      if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
	  /* ??? With more effort we could track conditional memory life.  */
	  && ! cond)
	add_to_mem_set_list (pbi, canon_rtx (reg));
    }

  /* Frame/arg pointer sets are kept live artificially (see the parallel
     tests in insn_dead_p); skip the death bookkeeping for them.  */
  if (GET_CODE (reg) == REG
      && ! (regno_first == FRAME_POINTER_REGNUM
	    && (! reload_completed || frame_pointer_needed))
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      && ! (regno_first == HARD_FRAME_POINTER_REGNUM
	    && (! reload_completed || frame_pointer_needed))
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
#endif
      )
    {
      int some_was_live = 0, some_was_dead = 0;

      for (i = regno_first; i <= regno_last; ++i)
	{
	  int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
	  if (pbi->local_set)
	    {
	      /* Order of the set operation matters here since both
		 sets may be the same.  */
	      CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
	      if (cond != NULL_RTX
		  && ! REGNO_REG_SET_P (pbi->local_set, i))
		SET_REGNO_REG_SET (pbi->cond_local_set, i);
	      else
		SET_REGNO_REG_SET (pbi->local_set, i);
	    }
	  if (code != CLOBBER)
	    SET_REGNO_REG_SET (pbi->new_set, i);

	  some_was_live |= needed_regno;
	  some_was_dead |= ! needed_regno;
	}

#ifdef HAVE_conditional_execution
      /* Consider conditional death in deciding that the register needs
	 a death note.  */
      if (some_was_live && ! not_dead
	  /* The stack pointer is never dead.  Well, not strictly true,
	     but it's very difficult to tell from here.  Hopefully
	     combine_stack_adjustments will fix up the most egregious
	     errors.  */
	  && regno_first != STACK_POINTER_REGNUM)
	{
	  for (i = regno_first; i <= regno_last; ++i)
	    if (! mark_regno_cond_dead (pbi, i, cond))
	      not_dead |= ((unsigned long) 1) << (i - regno_first);
	}
#endif

      /* Additional data to record if this is the final pass.  */
      if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
		   | PROP_DEATH_NOTES | PROP_AUTOINC))
	{
	  rtx y;
	  int blocknum = pbi->bb->index;

	  y = NULL_RTX;
	  if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
	    {
	      /* Remember the next use before clearing it; it is needed
		 below when building LOG_LINKS.  */
	      y = pbi->reg_next_use[regno_first];

	      /* The next use is no longer next, since a store intervenes.  */
	      for (i = regno_first; i <= regno_last; ++i)
		pbi->reg_next_use[i] = 0;
	    }

	  if (flags & PROP_REG_INFO)
	    {
	      for (i = regno_first; i <= regno_last; ++i)
		{
		  /* Count (weighted) references, stores, etc.  This counts a
		     register twice if it is modified, but that is correct.  */
		  REG_N_SETS (i) += 1;
		  REG_N_REFS (i) += 1;
		  REG_FREQ (i) += REG_FREQ_FROM_BB (pbi->bb);

		  /* The insns where a reg is live are normally counted
		     elsewhere, but we want the count to include the insn
		     where the reg is set, and the normal counting mechanism
		     would not count it.  */
		  REG_LIVE_LENGTH (i) += 1;
		}

	      /* If this is a hard reg, record this function uses the reg.  */
	      if (regno_first < FIRST_PSEUDO_REGISTER)
		{
		  for (i = regno_first; i <= regno_last; i++)
		    regs_ever_live[i] = 1;
		}
	      else
		{
		  /* Keep track of which basic blocks each reg appears in.  */
		  if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
		    REG_BASIC_BLOCK (regno_first) = blocknum;
		  else if (REG_BASIC_BLOCK (regno_first) != blocknum)
		    REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
		}
	    }

	  if (! some_was_dead)
	    {
	      if (flags & PROP_LOG_LINKS)
		{
		  /* Make a logical link from the next following insn
		     that uses this register, back to this insn.
		     The following insns have already been processed.

		     We don't build a LOG_LINK for hard registers containing
		     in ASM_OPERANDs.  If these registers get replaced,
		     we might wind up changing the semantics of the insn,
		     even if reload can make what appear to be valid
		     assignments later.  */
		  if (y && (BLOCK_NUM (y) == blocknum)
		      && (regno_first >= FIRST_PSEUDO_REGISTER
			  || asm_noperands (PATTERN (y)) < 0))
		    LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
		}
	    }
	  else if (not_dead)
	    ;
	  else if (! some_was_live)
	    {
	      if (flags & PROP_REG_INFO)
		REG_N_DEATHS (regno_first) += 1;

	      if (flags & PROP_DEATH_NOTES)
		{
		  /* Note that dead stores have already been deleted
		     when possible.  If we get here, we have found a
		     dead store that cannot be eliminated (because the
		     same insn does something useful).  Indicate this
		     by marking the reg being set as dying here.  */
		  REG_NOTES (insn)
		    = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
		}
	    }
	  else
	    {
	      if (flags & PROP_DEATH_NOTES)
		{
		  /* This is a case where we have a multi-word hard register
		     and some, but not all, of the words of the register are
		     needed in subsequent insns.  Write REG_UNUSED notes
		     for those parts that were not needed.  This case should
		     be rare.  */

		  for (i = regno_first; i <= regno_last; ++i)
		    if (! REGNO_REG_SET_P (pbi->reg_live, i))
		      REG_NOTES (insn)
			= alloc_EXPR_LIST (REG_UNUSED,
					   regno_reg_rtx[i],
					   REG_NOTES (insn));
		}
	    }
	}

      /* Mark the register as being dead.  */
      if (some_was_live
	  /* The stack pointer is never dead.  Well, not strictly true,
	     but it's very difficult to tell from here.  Hopefully
	     combine_stack_adjustments will fix up the most egregious
	     errors.  */
	  && regno_first != STACK_POINTER_REGNUM)
	{
	  for (i = regno_first; i <= regno_last; ++i)
	    if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
	      CLEAR_REGNO_REG_SET (pbi->reg_live, i);
	}
    }
  else if (GET_CODE (reg) == REG)
    {
      /* One of the always-live pointer registers: only invalidate the
	 next-use information.  */
      if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
	pbi->reg_next_use[regno_first] = 0;
    }

  /* If this is the last pass and this is a SCRATCH, show it will be dying
     here and count it.  */
  else if (GET_CODE (reg) == SCRATCH)
    {
      if (flags & PROP_DEATH_NOTES)
	REG_NOTES (insn)
	  = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
    }
}
2878 | \f | |
2879 | #ifdef HAVE_conditional_execution | |
/* Mark REGNO conditionally dead.
   Return true if the register is now unconditionally dead.  */

static int
mark_regno_cond_dead (pbi, regno, cond)
     struct propagate_block_info *pbi;
     int regno;
     rtx cond;
{
  /* If this is a store to a predicate register, the value of the
     predicate is changing, we don't know that the predicate as seen
     before is the same as that seen after.  Flush all dependent
     conditions from reg_cond_dead.  This will make all such
     conditionally live registers unconditionally live.  */
  if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
    flush_reg_cond_reg (pbi, regno);

  /* If this is an unconditional store, remove any conditional
     life that may have existed.  */
  if (cond == NULL_RTX)
    splay_tree_remove (pbi->reg_cond_dead, regno);
  else
    {
      splay_tree_node node;
      struct reg_cond_life_info *rcli;
      rtx ncond;

      /* Otherwise this is a conditional set.  Record that fact.
	 It may have been conditionally used, or there may be a
	 subsequent set with a complementary condition.  */

      node = splay_tree_lookup (pbi->reg_cond_dead, regno);
      if (node == NULL)
	{
	  /* The register was unconditionally live previously.
	     Record the current condition as the condition under
	     which it is dead.  */
	  rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
	  rcli->condition = cond;
	  rcli->stores = cond;
	  rcli->orig_condition = const0_rtx;
	  splay_tree_insert (pbi->reg_cond_dead, regno,
			     (splay_tree_value) rcli);

	  /* Remember that the condition's register participates in
	     predication, so stores to it flush dependent conditions.  */
	  SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));

	  /* Not unconditionally dead.  */
	  return 0;
	}
      else
	{
	  /* The register was conditionally live previously.
	     Add the new condition to the old.  */
	  rcli = (struct reg_cond_life_info *) node->value;
	  ncond = rcli->condition;
	  ncond = ior_reg_cond (ncond, cond, 1);
	  if (rcli->stores == const0_rtx)
	    rcli->stores = cond;
	  else if (rcli->stores != const1_rtx)
	    rcli->stores = ior_reg_cond (rcli->stores, cond, 1);

	  /* If the register is now unconditionally dead, remove the entry
	     in the splay_tree.  A register is unconditionally dead if the
	     dead condition ncond is true.  A register is also unconditionally
	     dead if the sum of all conditional stores is an unconditional
	     store (stores is true), and the dead condition is identically the
	     same as the original dead condition initialized at the end of
	     the block.  This is a pointer compare, not an rtx_equal_p
	     compare.  */
	  if (ncond == const1_rtx
	      || (ncond == rcli->orig_condition && rcli->stores == const1_rtx))
	    splay_tree_remove (pbi->reg_cond_dead, regno);
	  else
	    {
	      rcli->condition = ncond;

	      SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));

	      /* Not unconditionally dead.  */
	      return 0;
	    }
	}
    }

  /* Unconditionally dead.  */
  return 1;
}
39f0a72c | 2966 | |
65f34de5 | 2967 | /* Called from splay_tree_delete for pbi->reg_cond_life. */ |
eca1c9c0 | 2968 | |
65f34de5 | 2969 | static void |
2970 | free_reg_cond_life_info (value) | |
2971 | splay_tree_value value; | |
2972 | { | |
2973 | struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value; | |
2974 | free (rcli); | |
2975 | } | |
5da03558 | 2976 | |
/* Helper function for flush_reg_cond_reg.  Called via splay_tree_foreach;
   DATA is an int pair: [0] the register being flushed, [1] the key of the
   last node flagged for removal (-1 initially).  Returns -1 to abort the
   traversal when the current node must be deleted.  */

static int
flush_reg_cond_reg_1 (node, data)
     splay_tree_node node;
     void *data;
{
  struct reg_cond_life_info *rcli;
  int *xdata = (int *) data;
  unsigned int regno = xdata[0];

  /* Don't need to search if last flushed value was farther on in
     the in-order traversal.  */
  if (xdata[1] >= (int) node->key)
    return 0;

  /* Splice out portions of the expression that refer to regno.  */
  rcli = (struct reg_cond_life_info *) node->value;
  rcli->condition = elim_reg_cond (rcli->condition, regno);
  if (rcli->stores != const0_rtx && rcli->stores != const1_rtx)
    rcli->stores = elim_reg_cond (rcli->stores, regno);

  /* If the entire condition is now false, signal the node to be removed.  */
  if (rcli->condition == const0_rtx)
    {
      /* Record which node to delete; the caller removes it and
	 restarts the traversal.  */
      xdata[1] = node->key;
      return -1;
    }
  else if (rcli->condition == const1_rtx)
    /* A true dead-condition should have been removed already.  */
    abort ();

  return 0;
}
26d63a15 | 3010 | |
65f34de5 | 3011 | /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE. */ |
26d63a15 | 3012 | |
65f34de5 | 3013 | static void |
3014 | flush_reg_cond_reg (pbi, regno) | |
3015 | struct propagate_block_info *pbi; | |
3016 | int regno; | |
3017 | { | |
3018 | int pair[2]; | |
26d63a15 | 3019 | |
65f34de5 | 3020 | pair[0] = regno; |
3021 | pair[1] = -1; | |
3022 | while (splay_tree_foreach (pbi->reg_cond_dead, | |
3023 | flush_reg_cond_reg_1, pair) == -1) | |
3024 | splay_tree_remove (pbi->reg_cond_dead, pair[1]); | |
26d63a15 | 3025 | |
65f34de5 | 3026 | CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno); |
3027 | } | |
26d63a15 | 3028 | |
/* Logical arithmetic on predicate conditions.  IOR, NOT and AND.
   For ior/and, the ADD flag determines whether we want to add the new
   condition X to the old one unconditionally.  If it is zero, we will
   only return a new expression if X allows us to simplify part of
   OLD, otherwise we return NULL to the caller.
   If ADD is nonzero, we will return a new condition in all cases.  The
   toplevel caller of one of these functions should always pass 1 for
   ADD.  */

static rtx
ior_reg_cond (old, x, add)
     rtx old, x;
     int add;
{
  rtx op0, op1;

  /* Base case: OLD is a single comparison.  */
  if (GET_RTX_CLASS (GET_CODE (old)) == '<')
    {
      /* X is the reverse comparison of OLD on the same register:
	 (p | !p) is always true.  */
      if (GET_RTX_CLASS (GET_CODE (x)) == '<'
	  && REVERSE_CONDEXEC_PREDICATES_P (GET_CODE (x), GET_CODE (old))
	  && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
	return const1_rtx;
      /* X is the same comparison: (p | p) is just p.  */
      if (GET_CODE (x) == GET_CODE (old)
	  && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
	return old;
      if (! add)
	return NULL;
      return gen_rtx_IOR (0, old, x);
    }

  switch (GET_CODE (old))
    {
    case IOR:
      /* Try to simplify each arm with X; NULL means "no change".  */
      op0 = ior_reg_cond (XEXP (old, 0), x, 0);
      op1 = ior_reg_cond (XEXP (old, 1), x, 0);
      if (op0 != NULL || op1 != NULL)
	{
	  if (op0 == const0_rtx)
	    return op1 ? op1 : gen_rtx_IOR (0, XEXP (old, 1), x);
	  if (op1 == const0_rtx)
	    return op0 ? op0 : gen_rtx_IOR (0, XEXP (old, 0), x);
	  if (op0 == const1_rtx || op1 == const1_rtx)
	    return const1_rtx;
	  if (op0 == NULL)
	    op0 = gen_rtx_IOR (0, XEXP (old, 0), x);
	  else if (rtx_equal_p (x, op0))
	    /* (x | A) | x ~ (x | A).  */
	    return old;
	  if (op1 == NULL)
	    op1 = gen_rtx_IOR (0, XEXP (old, 1), x);
	  else if (rtx_equal_p (x, op1))
	    /* (A | x) | x ~ (A | x).  */
	    return old;
	  return gen_rtx_IOR (0, op0, op1);
	}
      if (! add)
	return NULL;
      return gen_rtx_IOR (0, old, x);

    case AND:
      /* Distribute X over the AND: (A & B) | x = (A | x) & (B | x).  */
      op0 = ior_reg_cond (XEXP (old, 0), x, 0);
      op1 = ior_reg_cond (XEXP (old, 1), x, 0);
      if (op0 != NULL || op1 != NULL)
	{
	  if (op0 == const1_rtx)
	    return op1 ? op1 : gen_rtx_IOR (0, XEXP (old, 1), x);
	  if (op1 == const1_rtx)
	    return op0 ? op0 : gen_rtx_IOR (0, XEXP (old, 0), x);
	  if (op0 == const0_rtx || op1 == const0_rtx)
	    return const0_rtx;
	  if (op0 == NULL)
	    op0 = gen_rtx_IOR (0, XEXP (old, 0), x);
	  else if (rtx_equal_p (x, op0))
	    /* (x & A) | x ~ x.  */
	    return op0;
	  if (op1 == NULL)
	    op1 = gen_rtx_IOR (0, XEXP (old, 1), x);
	  else if (rtx_equal_p (x, op1))
	    /* (A & x) | x ~ x.  */
	    return op1;
	  return gen_rtx_AND (0, op0, op1);
	}
      if (! add)
	return NULL;
      return gen_rtx_IOR (0, old, x);

    case NOT:
      /* !A | x = !(A & !x), by De Morgan.  */
      op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
      if (op0 != NULL)
	return not_reg_cond (op0);
      if (! add)
	return NULL;
      return gen_rtx_IOR (0, old, x);

    default:
      abort ();
    }
}
3127 | ||
65f34de5 | 3128 | static rtx |
3129 | not_reg_cond (x) | |
3130 | rtx x; | |
26d63a15 | 3131 | { |
65f34de5 | 3132 | enum rtx_code x_code; |
26d63a15 | 3133 | |
65f34de5 | 3134 | if (x == const0_rtx) |
3135 | return const1_rtx; | |
3136 | else if (x == const1_rtx) | |
3137 | return const0_rtx; | |
3138 | x_code = GET_CODE (x); | |
3139 | if (x_code == NOT) | |
3140 | return XEXP (x, 0); | |
3141 | if (GET_RTX_CLASS (x_code) == '<' | |
3142 | && GET_CODE (XEXP (x, 0)) == REG) | |
26d63a15 | 3143 | { |
65f34de5 | 3144 | if (XEXP (x, 1) != const0_rtx) |
3145 | abort (); | |
26d63a15 | 3146 | |
65f34de5 | 3147 | return gen_rtx_fmt_ee (reverse_condition (x_code), |
3148 | VOIDmode, XEXP (x, 0), const0_rtx); | |
26d63a15 | 3149 | } |
65f34de5 | 3150 | return gen_rtx_NOT (0, x); |
26d63a15 | 3151 | } |
3152 | ||
65f34de5 | 3153 | static rtx |
3154 | and_reg_cond (old, x, add) | |
3155 | rtx old, x; | |
3156 | int add; | |
26d63a15 | 3157 | { |
65f34de5 | 3158 | rtx op0, op1; |
26d63a15 | 3159 | |
65f34de5 | 3160 | if (GET_RTX_CLASS (GET_CODE (old)) == '<') |
26d63a15 | 3161 | { |
65f34de5 | 3162 | if (GET_RTX_CLASS (GET_CODE (x)) == '<' |
3163 | && GET_CODE (x) == reverse_condition (GET_CODE (old)) | |
3164 | && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0))) | |
3165 | return const0_rtx; | |
3166 | if (GET_CODE (x) == GET_CODE (old) | |
3167 | && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0))) | |
3168 | return old; | |
3169 | if (! add) | |
0e06376e | 3170 | return NULL; |
65f34de5 | 3171 | return gen_rtx_AND (0, old, x); |
3172 | } | |
26d63a15 | 3173 | |
65f34de5 | 3174 | switch (GET_CODE (old)) |
3175 | { | |
3176 | case IOR: | |
3177 | op0 = and_reg_cond (XEXP (old, 0), x, 0); | |
3178 | op1 = and_reg_cond (XEXP (old, 1), x, 0); | |
0e06376e | 3179 | if (op0 != NULL || op1 != NULL) |
26d63a15 | 3180 | { |
65f34de5 | 3181 | if (op0 == const0_rtx) |
0e06376e | 3182 | return op1 ? op1 : gen_rtx_AND (0, XEXP (old, 1), x); |
65f34de5 | 3183 | if (op1 == const0_rtx) |
0e06376e | 3184 | return op0 ? op0 : gen_rtx_AND (0, XEXP (old, 0), x); |
65f34de5 | 3185 | if (op0 == const1_rtx || op1 == const1_rtx) |
3186 | return const1_rtx; | |
0e06376e | 3187 | if (op0 == NULL) |
3188 | op0 = gen_rtx_AND (0, XEXP (old, 0), x); | |
3189 | else if (rtx_equal_p (x, op0)) | |
3190 | /* (x | A) & x ~ x. */ | |
3191 | return op0; | |
3192 | if (op1 == NULL) | |
3193 | op1 = gen_rtx_AND (0, XEXP (old, 1), x); | |
3194 | else if (rtx_equal_p (x, op1)) | |
3195 | /* (A | x) & x ~ x. */ | |
3196 | return op1; | |
65f34de5 | 3197 | return gen_rtx_IOR (0, op0, op1); |
26d63a15 | 3198 | } |
65f34de5 | 3199 | if (! add) |
0e06376e | 3200 | return NULL; |
65f34de5 | 3201 | return gen_rtx_AND (0, old, x); |
3202 | ||
3203 | case AND: | |
3204 | op0 = and_reg_cond (XEXP (old, 0), x, 0); | |
3205 | op1 = and_reg_cond (XEXP (old, 1), x, 0); | |
0e06376e | 3206 | if (op0 != NULL || op1 != NULL) |
26d63a15 | 3207 | { |
65f34de5 | 3208 | if (op0 == const1_rtx) |
0e06376e | 3209 | return op1 ? op1 : gen_rtx_AND (0, XEXP (old, 1), x); |
65f34de5 | 3210 | if (op1 == const1_rtx) |
0e06376e | 3211 | return op0 ? op0 : gen_rtx_AND (0, XEXP (old, 0), x); |
65f34de5 | 3212 | if (op0 == const0_rtx || op1 == const0_rtx) |
3213 | return const0_rtx; | |
0e06376e | 3214 | if (op0 == NULL) |
3215 | op0 = gen_rtx_AND (0, XEXP (old, 0), x); | |
3216 | else if (rtx_equal_p (x, op0)) | |
3217 | /* (x & A) & x ~ (x & A). */ | |
3218 | return old; | |
3219 | if (op1 == NULL) | |
3220 | op1 = gen_rtx_AND (0, XEXP (old, 1), x); | |
3221 | else if (rtx_equal_p (x, op1)) | |
3222 | /* (A & x) & x ~ (A & x). */ | |
3223 | return old; | |
65f34de5 | 3224 | return gen_rtx_AND (0, op0, op1); |
26d63a15 | 3225 | } |
65f34de5 | 3226 | if (! add) |
0e06376e | 3227 | return NULL; |
65f34de5 | 3228 | return gen_rtx_AND (0, old, x); |
26d63a15 | 3229 | |
65f34de5 | 3230 | case NOT: |
3231 | op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0); | |
0e06376e | 3232 | if (op0 != NULL) |
65f34de5 | 3233 | return not_reg_cond (op0); |
3234 | if (! add) | |
0e06376e | 3235 | return NULL; |
65f34de5 | 3236 | return gen_rtx_AND (0, old, x); |
26d63a15 | 3237 | |
65f34de5 | 3238 | default: |
3239 | abort (); | |
9028a007 | 3240 | } |
26d63a15 | 3241 | } |
3242 | ||
65f34de5 | 3243 | /* Given a condition X, remove references to reg REGNO and return the |
3244 | new condition. The removal will be done so that all conditions | |
3245 | involving REGNO are considered to evaluate to false. This function | |
3246 | is used when the value of REGNO changes. */ | |
9028a007 | 3247 | |
65f34de5 | 3248 | static rtx |
3249 | elim_reg_cond (x, regno) | |
3250 | rtx x; | |
3251 | unsigned int regno; | |
26d63a15 | 3252 | { |
65f34de5 | 3253 | rtx op0, op1; |
3254 | ||
3255 | if (GET_RTX_CLASS (GET_CODE (x)) == '<') | |
26d63a15 | 3256 | { |
65f34de5 | 3257 | if (REGNO (XEXP (x, 0)) == regno) |
3258 | return const0_rtx; | |
3259 | return x; | |
26d63a15 | 3260 | } |
9028a007 | 3261 | |
65f34de5 | 3262 | switch (GET_CODE (x)) |
3263 | { | |
3264 | case AND: | |
3265 | op0 = elim_reg_cond (XEXP (x, 0), regno); | |
3266 | op1 = elim_reg_cond (XEXP (x, 1), regno); | |
3267 | if (op0 == const0_rtx || op1 == const0_rtx) | |
3268 | return const0_rtx; | |
3269 | if (op0 == const1_rtx) | |
3270 | return op1; | |
3271 | if (op1 == const1_rtx) | |
3272 | return op0; | |
3273 | if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1)) | |
3274 | return x; | |
3275 | return gen_rtx_AND (0, op0, op1); | |
61001c97 | 3276 | |
65f34de5 | 3277 | case IOR: |
3278 | op0 = elim_reg_cond (XEXP (x, 0), regno); | |
3279 | op1 = elim_reg_cond (XEXP (x, 1), regno); | |
3280 | if (op0 == const1_rtx || op1 == const1_rtx) | |
3281 | return const1_rtx; | |
3282 | if (op0 == const0_rtx) | |
3283 | return op1; | |
3284 | if (op1 == const0_rtx) | |
3285 | return op0; | |
3286 | if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1)) | |
3287 | return x; | |
3288 | return gen_rtx_IOR (0, op0, op1); | |
61001c97 | 3289 | |
65f34de5 | 3290 | case NOT: |
3291 | op0 = elim_reg_cond (XEXP (x, 0), regno); | |
3292 | if (op0 == const0_rtx) | |
3293 | return const1_rtx; | |
3294 | if (op0 == const1_rtx) | |
3295 | return const0_rtx; | |
3296 | if (op0 != XEXP (x, 0)) | |
3297 | return not_reg_cond (op0); | |
3298 | return x; | |
61001c97 | 3299 | |
65f34de5 | 3300 | default: |
3301 | abort (); | |
3302 | } | |
61001c97 | 3303 | } |
65f34de5 | 3304 | #endif /* HAVE_conditional_execution */ |
3305 | \f | |
3306 | #ifdef AUTO_INC_DEC | |
61001c97 | 3307 | |
65f34de5 | 3308 | /* Try to substitute the auto-inc expression INC as the address inside |
3309 | MEM which occurs in INSN. Currently, the address of MEM is an expression | |
3310 | involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn | |
3311 | that has a single set whose source is a PLUS of INCR_REG and something | |
3312 | else. */ | |
9028a007 | 3313 | |
/* Try to rewrite the address of MEM (which occurs in INSN) as the
   auto-inc expression INC.  INCR is a later insn in the same block whose
   single set adds something to INCR_REG; Q below is that set's
   destination and Y its PLUS source.  On success the increment insn is
   turned into a plain copy (or deleted outright if it becomes a self
   copy) and a REG_INC note is attached to INSN.  All changes go through
   validate_change so nothing is committed unless the target accepts the
   new address.  */

static void
attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
     struct propagate_block_info *pbi;
     rtx inc, insn, mem, incr, incr_reg;
{
  int regno = REGNO (incr_reg);
  rtx set = single_set (incr);
  rtx q = SET_DEST (set);
  rtx y = SET_SRC (set);
  /* Which operand of the PLUS in Y is INCR_REG.  */
  int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;

  /* Make sure this reg appears only once in this insn.  */
  if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
    return;

  if (dead_or_set_p (incr, incr_reg)
      /* Mustn't autoinc an eliminable register.  */
      && (regno >= FIRST_PSEUDO_REGISTER
	  || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
    {
      /* This is the simple case.  Try to make the auto-inc.  If
	 we can't, we are done.  Otherwise, we will do any
	 needed updates below.  */
      if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
	return;
    }
  else if (GET_CODE (q) == REG
	   /* PREV_INSN used here to check the semi-open interval
	      [insn,incr).  */
	   && ! reg_used_between_p (q, PREV_INSN (insn), incr)
	   /* We must also check for sets of q as q may be
	      a call clobbered hard register and there may
	      be a call between PREV_INSN (insn) and incr.  */
	   && ! reg_set_between_p (q, PREV_INSN (insn), incr))
    {
      /* We have *p followed sometime later by q = p+size.
	 Both p and q must be live afterward,
	 and q is not used between INSN and its assignment.
	 Change it to q = p, ...*q..., q = q+size.
	 Then fall into the usual case.  */
      rtx insns, temp;

      /* Build the q = p copy in a sequence so it can be emitted
	 before INSN only if the whole transformation validates.  */
      start_sequence ();
      emit_move_insn (q, incr_reg);
      insns = get_insns ();
      end_sequence ();

      /* If we can't make the auto-inc, or can't make the
	 replacement into Y, exit.  There's no point in making
	 the change below if we can't do the auto-inc and doing
	 so is not correct in the pre-inc case.  */

      XEXP (inc, 0) = q;
      validate_change (insn, &XEXP (mem, 0), inc, 1);
      validate_change (incr, &XEXP (y, opnum), q, 1);
      if (! apply_change_group ())
	return;

      /* We now know we'll be doing this change, so emit the
	 new insn(s) and do the updates.  */
      emit_insn_before (insns, insn);

      /* Keep the basic block's head pointer valid if INSN was first.  */
      if (pbi->bb->head == insn)
	pbi->bb->head = insns;

      /* INCR will become a NOTE and INSN won't contain a
	 use of INCR_REG.  If a use of INCR_REG was just placed in
	 the insn before INSN, make that the next use.
	 Otherwise, invalidate it.  */
      if (GET_CODE (PREV_INSN (insn)) == INSN
	  && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
	  && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
	pbi->reg_next_use[regno] = PREV_INSN (insn);
      else
	pbi->reg_next_use[regno] = 0;

      /* From here on, all bookkeeping is for Q, not the old register.  */
      incr_reg = q;
      regno = REGNO (q);

      /* REGNO is now used in INCR which is below INSN, but
	 it previously wasn't live here.  If we don't mark
	 it as live, we'll put a REG_DEAD note for it
	 on this insn, which is incorrect.  */
      SET_REGNO_REG_SET (pbi->reg_live, regno);

      /* If there are any calls between INSN and INCR, show
	 that REGNO now crosses them.  */
      for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
	if (GET_CODE (temp) == CALL_INSN)
	  REG_N_CALLS_CROSSED (regno)++;

      /* Invalidate alias info for Q since we just changed its value.  */
      clear_reg_alias_info (q);
    }
  else
    return;

  /* If we haven't returned, it means we were able to make the
     auto-inc, so update the status.  First, record that this insn
     has an implicit side effect.  */

  REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));

  /* Modify the old increment-insn to simply copy
     the already-incremented value of our register.  */
  if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
    abort ();

  /* If that makes it a no-op (copying the register into itself) delete
     it so it won't appear to be a "use" and a "set" of this
     register.  */
  if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
    {
      /* If the original source was dead, it's dead now.  */
      rtx note;

      while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
	{
	  remove_note (incr, note);
	  if (XEXP (note, 0) != incr_reg)
	    CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
	}

      /* Turn the now-redundant copy into a deleted-insn note.  */
      PUT_CODE (incr, NOTE);
      NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
      NOTE_SOURCE_FILE (incr) = 0;
    }

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      /* Count an extra reference to the reg.  When a reg is
	 incremented, spilling it is worse, so we want to make
	 that less likely.  */
      REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);

      /* Count the increment as a setting of the register,
	 even though it isn't a SET in rtl.  */
      REG_N_SETS (regno)++;
    }
}
65f34de5 | 3454 | |
3455 | /* X is a MEM found in INSN. See if we can convert it into an auto-increment | |
3456 | reference. */ | |
9028a007 | 3457 | |
/* X is a MEM found in INSN.  If X's address is a register (possibly
   plus a constant offset) whose next use in this block is an increment
   or decrement, dispatch to attempt_auto_inc with whichever auto-inc
   form (POST_INC/POST_DEC/PRE_INC/PRE_DEC/POST_MODIFY) the target
   supports and the offset/step combination permits.  */

static void
find_auto_inc (pbi, x, insn)
     struct propagate_block_info *pbi;
     rtx x;
     rtx insn;
{
  rtx addr = XEXP (x, 0);
  HOST_WIDE_INT offset = 0;
  rtx set, y, incr, inc_val;
  int regno;
  /* Step size that a matching POST_INC/PRE_INC would add.  */
  int size = GET_MODE_SIZE (GET_MODE (x));

  /* Jumps are not candidates for auto-inc rewriting.  */
  if (GET_CODE (insn) == JUMP_INSN)
    return;

  /* Here we detect use of an index register which might be good for
     postincrement, postdecrement, preincrement, or predecrement.  */

  /* Strip a constant displacement, remembering it in OFFSET.  */
  if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);

  if (GET_CODE (addr) != REG)
    return;

  regno = REGNO (addr);

  /* Is the next use an increment that might make auto-increment? */
  incr = pbi->reg_next_use[regno];
  if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
    return;
  set = single_set (incr);
  if (set == 0 || GET_CODE (set) != SET)
    return;
  y = SET_SRC (set);

  if (GET_CODE (y) != PLUS)
    return;

  /* The register must be one operand of the PLUS; the other operand
     is the increment amount.  */
  if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
    inc_val = XEXP (y, 1);
  else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
    inc_val = XEXP (y, 0);
  else
    return;

  if (GET_CODE (inc_val) == CONST_INT)
    {
      /* Constant step: pick the addressing form whose implicit
	 adjustment matches the step and the displacement seen in the
	 original address.  */
      if (HAVE_POST_INCREMENT
	  && (INTVAL (inc_val) == size && offset == 0))
	attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
			  incr, addr);
      else if (HAVE_POST_DECREMENT
	       && (INTVAL (inc_val) == -size && offset == 0))
	attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
			  incr, addr);
      else if (HAVE_PRE_INCREMENT
	       && (INTVAL (inc_val) == size && offset == size))
	attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
			  incr, addr);
      else if (HAVE_PRE_DECREMENT
	       && (INTVAL (inc_val) == -size && offset == -size))
	attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
			  incr, addr);
      else if (HAVE_POST_MODIFY_DISP && offset == 0)
	attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
						    gen_rtx_PLUS (Pmode,
								  addr,
								  inc_val)),
			  insn, x, incr, addr);
    }
  else if (GET_CODE (inc_val) == REG
	   && ! reg_set_between_p (inc_val, PREV_INSN (insn),
				   NEXT_INSN (incr)))

    {
      /* Register step: only POST_MODIFY with a register addend can
	 express it, and the step register must not change between the
	 use and the increment.  */
      if (HAVE_POST_MODIFY_REG && offset == 0)
	attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
						    gen_rtx_PLUS (Pmode,
								  addr,
								  inc_val)),
			  insn, x, incr, addr);
    }
}
9028a007 | 3541 | |
65f34de5 | 3542 | #endif /* AUTO_INC_DEC */ |
3543 | \f | |
/* Record a use of register REG in INSN during the backward life scan.
   COND, when HAVE_conditional_execution is defined and non-NULL, is the
   condition under which the use happens.  Depending on PBI->flags this
   updates next-use chains, per-register statistics, REG_DEAD notes, and
   the live-register set; for hard registers all words of a multi-word
   register are processed.  */

static void
mark_used_reg (pbi, reg, cond, insn)
     struct propagate_block_info *pbi;
     rtx reg;
     rtx cond ATTRIBUTE_UNUSED;
     rtx insn;
{
  unsigned int regno_first, regno_last, i;
  int some_was_live, some_was_dead, some_not_set;

  /* For a multi-word hard register, cover every constituent regno.  */
  regno_last = regno_first = REGNO (reg);
  if (regno_first < FIRST_PSEUDO_REGISTER)
    regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;

  /* Find out if any of this register is live after this instruction.  */
  some_was_live = some_was_dead = 0;
  for (i = regno_first; i <= regno_last; ++i)
    {
      int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
      some_was_live |= needed_regno;
      some_was_dead |= ! needed_regno;
    }

  /* Find out if any of the register was set this insn.  */
  some_not_set = 0;
  for (i = regno_first; i <= regno_last; ++i)
    some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, i);

  if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
    {
      /* Record where each reg is used, so when the reg is set we know
	 the next insn that uses it.  */
      pbi->reg_next_use[regno_first] = insn;
    }

  if (pbi->flags & PROP_REG_INFO)
    {
      if (regno_first < FIRST_PSEUDO_REGISTER)
	{
	  /* If this is a register we are going to try to eliminate,
	     don't mark it live here.  If we are successful in
	     eliminating it, it need not be live unless it is used for
	     pseudos, in which case it will have been set live when it
	     was allocated to the pseudos.  If the register will not
	     be eliminated, reload will set it live at that point.

	     Otherwise, record that this function uses this register.  */
	  /* ??? The PPC backend tries to "eliminate" on the pic
	     register to itself.  This should be fixed.  In the mean
	     time, hack around it.  */

	  if (! (TEST_HARD_REG_BIT (elim_reg_set, regno_first)
		 && (regno_first == FRAME_POINTER_REGNUM
		     || regno_first == ARG_POINTER_REGNUM)))
	    for (i = regno_first; i <= regno_last; ++i)
	      regs_ever_live[i] = 1;
	}
      else
	{
	  /* Keep track of which basic block each reg appears in.  */

	  int blocknum = pbi->bb->index;
	  if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
	    REG_BASIC_BLOCK (regno_first) = blocknum;
	  else if (REG_BASIC_BLOCK (regno_first) != blocknum)
	    REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;

	  /* Count (weighted) number of uses of each reg.  */
	  REG_FREQ (regno_first) += REG_FREQ_FROM_BB (pbi->bb);
	  REG_N_REFS (regno_first)++;
	}
    }

  /* Record and count the insns in which a reg dies.  If it is used in
     this insn and was dead below the insn then it dies in this insn.
     If it was set in this insn, we do not make a REG_DEAD note;
     likewise if we already made such a note.  */
  if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
      && some_was_dead
      && some_not_set)
    {
      /* Check for the case where the register dying partially
	 overlaps the register set by this insn.  */
      if (regno_first != regno_last)
	for (i = regno_first; i <= regno_last; ++i)
	  some_was_live |= REGNO_REG_SET_P (pbi->new_set, i);

      /* If none of the words in X is needed, make a REG_DEAD note.
	 Otherwise, we must make partial REG_DEAD notes.  */
      if (! some_was_live)
	{
	  if ((pbi->flags & PROP_DEATH_NOTES)
	      && ! find_regno_note (insn, REG_DEAD, regno_first))
	    REG_NOTES (insn)
	      = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));

	  if (pbi->flags & PROP_REG_INFO)
	    REG_N_DEATHS (regno_first)++;
	}
      else
	{
	  /* Don't make a REG_DEAD note for a part of a register
	     that is set in the insn.  */
	  for (i = regno_first; i <= regno_last; ++i)
	    if (! REGNO_REG_SET_P (pbi->reg_live, i)
		&& ! dead_or_set_regno_p (insn, i))
	      REG_NOTES (insn)
		= alloc_EXPR_LIST (REG_DEAD,
				   regno_reg_rtx[i],
				   REG_NOTES (insn));
	}
    }

  /* Mark the register as being live.  */
  for (i = regno_first; i <= regno_last; ++i)
    {
#ifdef HAVE_conditional_execution
      /* Capture the prior liveness before it is overwritten below.  */
      int this_was_live = REGNO_REG_SET_P (pbi->reg_live, i);
#endif

      SET_REGNO_REG_SET (pbi->reg_live, i);

#ifdef HAVE_conditional_execution
      /* If this is a conditional use, record that fact.  If it is later
	 conditionally set, we'll know to kill the register.  */
      if (cond != NULL_RTX)
	{
	  splay_tree_node node;
	  struct reg_cond_life_info *rcli;
	  rtx ncond;

	  if (this_was_live)
	    {
	      node = splay_tree_lookup (pbi->reg_cond_dead, i);
	      if (node == NULL)
		{
		  /* The register was unconditionally live previously.
		     No need to do anything.  */
		}
	      else
		{
		  /* The register was conditionally live previously.
		     Subtract the new life cond from the old death cond.  */
		  rcli = (struct reg_cond_life_info *) node->value;
		  ncond = rcli->condition;
		  ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);

		  /* If the register is now unconditionally live,
		     remove the entry in the splay_tree.  */
		  if (ncond == const0_rtx)
		    splay_tree_remove (pbi->reg_cond_dead, i);
		  else
		    {
		      rcli->condition = ncond;
		      SET_REGNO_REG_SET (pbi->reg_cond_reg,
					 REGNO (XEXP (cond, 0)));
		    }
		}
	    }
	  else
	    {
	      /* The register was not previously live at all.  Record
		 the condition under which it is still dead.  */
	      rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
	      rcli->condition = not_reg_cond (cond);
	      rcli->stores = const0_rtx;
	      rcli->orig_condition = const0_rtx;
	      splay_tree_insert (pbi->reg_cond_dead, i,
				 (splay_tree_value) rcli);

	      SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
	    }
	}
      else if (this_was_live)
	{
	  /* The register may have been conditionally live previously, but
	     is now unconditionally live.  Remove it from the conditionally
	     dead list, so that a conditional set won't cause us to think
	     it dead.  */
	  splay_tree_remove (pbi->reg_cond_dead, i);
	}
#endif
    }
}
3728 | ||
65f34de5 | 3729 | /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses. |
3730 | This is done assuming the registers needed from X are those that | |
3731 | have 1-bits in PBI->REG_LIVE. | |
0437fa92 | 3732 | |
65f34de5 | 3733 | INSN is the containing instruction. If INSN is dead, this function |
3734 | is not called. */ | |
4076d5a5 | 3735 | |
65f34de5 | 3736 | static void |
3737 | mark_used_regs (pbi, x, cond, insn) | |
3738 | struct propagate_block_info *pbi; | |
3739 | rtx x, cond, insn; | |
4076d5a5 | 3740 | { |
19cb6b50 | 3741 | RTX_CODE code; |
3742 | int regno; | |
65f34de5 | 3743 | int flags = pbi->flags; |
4076d5a5 | 3744 | |
65f34de5 | 3745 | retry: |
992c7138 | 3746 | if (!x) |
3747 | return; | |
65f34de5 | 3748 | code = GET_CODE (x); |
3749 | switch (code) | |
4076d5a5 | 3750 | { |
65f34de5 | 3751 | case LABEL_REF: |
3752 | case SYMBOL_REF: | |
3753 | case CONST_INT: | |
3754 | case CONST: | |
3755 | case CONST_DOUBLE: | |
886cfd4f | 3756 | case CONST_VECTOR: |
65f34de5 | 3757 | case PC: |
3758 | case ADDR_VEC: | |
3759 | case ADDR_DIFF_VEC: | |
3760 | return; | |
a4d05baf | 3761 | |
65f34de5 | 3762 | #ifdef HAVE_cc0 |
3763 | case CC0: | |
3764 | pbi->cc0_live = 1; | |
3765 | return; | |
3766 | #endif | |
a4d05baf | 3767 | |
65f34de5 | 3768 | case CLOBBER: |
3769 | /* If we are clobbering a MEM, mark any registers inside the address | |
3770 | as being used. */ | |
3771 | if (GET_CODE (XEXP (x, 0)) == MEM) | |
3772 | mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn); | |
3773 | return; | |
a4d05baf | 3774 | |
65f34de5 | 3775 | case MEM: |
3776 | /* Don't bother watching stores to mems if this is not the | |
3777 | final pass. We'll not be deleting dead stores this round. */ | |
2d30935d | 3778 | if (optimize && (flags & PROP_SCAN_DEAD_STORES)) |
a4d05baf | 3779 | { |
65f34de5 | 3780 | /* Invalidate the data for the last MEM stored, but only if MEM is |
3781 | something that can be stored into. */ | |
3782 | if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF | |
3783 | && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0))) | |
3784 | /* Needn't clear the memory set list. */ | |
3785 | ; | |
3786 | else | |
a4d05baf | 3787 | { |
65f34de5 | 3788 | rtx temp = pbi->mem_set_list; |
3789 | rtx prev = NULL_RTX; | |
3790 | rtx next; | |
3791 | ||
3792 | while (temp) | |
3793 | { | |
3794 | next = XEXP (temp, 1); | |
3795 | if (anti_dependence (XEXP (temp, 0), x)) | |
3796 | { | |
3797 | /* Splice temp out of the list. */ | |
3798 | if (prev) | |
3799 | XEXP (prev, 1) = next; | |
3800 | else | |
3801 | pbi->mem_set_list = next; | |
3802 | free_EXPR_LIST_node (temp); | |
3803 | pbi->mem_set_list_len--; | |
3804 | } | |
3805 | else | |
3806 | prev = temp; | |
3807 | temp = next; | |
3808 | } | |
a4d05baf | 3809 | } |
65f34de5 | 3810 | |
3811 | /* If the memory reference had embedded side effects (autoincrement | |
3812 | address modes. Then we may need to kill some entries on the | |
3813 | memory set list. */ | |
3814 | if (insn) | |
638987bd | 3815 | for_each_rtx (&PATTERN (insn), invalidate_mems_from_autoinc, pbi); |
a4d05baf | 3816 | } |
a4d05baf | 3817 | |
65f34de5 | 3818 | #ifdef AUTO_INC_DEC |
3819 | if (flags & PROP_AUTOINC) | |
d3371fcd | 3820 | find_auto_inc (pbi, x, insn); |
65f34de5 | 3821 | #endif |
3822 | break; | |
2f95707a | 3823 | |
65f34de5 | 3824 | case SUBREG: |
897118e8 | 3825 | #ifdef CANNOT_CHANGE_MODE_CLASS |
65f34de5 | 3826 | if (GET_CODE (SUBREG_REG (x)) == REG |
897118e8 | 3827 | && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER) |
3828 | SET_REGNO_REG_SET (&subregs_of_mode[GET_MODE (x)], | |
3829 | REGNO (SUBREG_REG (x))); | |
65f34de5 | 3830 | #endif |
2f95707a | 3831 | |
65f34de5 | 3832 | /* While we're here, optimize this case. */ |
3833 | x = SUBREG_REG (x); | |
3834 | if (GET_CODE (x) != REG) | |
3835 | goto retry; | |
3836 | /* Fall through. */ | |
2f95707a | 3837 | |
65f34de5 | 3838 | case REG: |
3839 | /* See a register other than being set => mark it as needed. */ | |
3840 | mark_used_reg (pbi, x, cond, insn); | |
3841 | return; | |
2f95707a | 3842 | |
65f34de5 | 3843 | case SET: |
3844 | { | |
19cb6b50 | 3845 | rtx testreg = SET_DEST (x); |
65f34de5 | 3846 | int mark_dest = 0; |
2f95707a | 3847 | |
65f34de5 | 3848 | /* If storing into MEM, don't show it as being used. But do |
3849 | show the address as being used. */ | |
3850 | if (GET_CODE (testreg) == MEM) | |
3851 | { | |
3852 | #ifdef AUTO_INC_DEC | |
3853 | if (flags & PROP_AUTOINC) | |
3854 | find_auto_inc (pbi, testreg, insn); | |
3855 | #endif | |
3856 | mark_used_regs (pbi, XEXP (testreg, 0), cond, insn); | |
3857 | mark_used_regs (pbi, SET_SRC (x), cond, insn); | |
3858 | return; | |
3859 | } | |
2f95707a | 3860 | |
65f34de5 | 3861 | /* Storing in STRICT_LOW_PART is like storing in a reg |
3862 | in that this SET might be dead, so ignore it in TESTREG. | |
3863 | but in some other ways it is like using the reg. | |
2f95707a | 3864 | |
65f34de5 | 3865 | Storing in a SUBREG or a bit field is like storing the entire |
3866 | register in that if the register's value is not used | |
3867 | then this SET is not needed. */ | |
3868 | while (GET_CODE (testreg) == STRICT_LOW_PART | |
3869 | || GET_CODE (testreg) == ZERO_EXTRACT | |
3870 | || GET_CODE (testreg) == SIGN_EXTRACT | |
3871 | || GET_CODE (testreg) == SUBREG) | |
3872 | { | |
897118e8 | 3873 | #ifdef CANNOT_CHANGE_MODE_CLASS |
65f34de5 | 3874 | if (GET_CODE (testreg) == SUBREG |
3875 | && GET_CODE (SUBREG_REG (testreg)) == REG | |
897118e8 | 3876 | && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER) |
3877 | SET_REGNO_REG_SET (&subregs_of_mode[GET_MODE (testreg)], | |
3878 | REGNO (SUBREG_REG (testreg))); | |
65f34de5 | 3879 | #endif |
2f95707a | 3880 | |
65f34de5 | 3881 | /* Modifying a single register in an alternate mode |
3882 | does not use any of the old value. But these other | |
3883 | ways of storing in a register do use the old value. */ | |
3884 | if (GET_CODE (testreg) == SUBREG | |
eda949d5 | 3885 | && !((REG_BYTES (SUBREG_REG (testreg)) |
3886 | + UNITS_PER_WORD - 1) / UNITS_PER_WORD | |
3887 | > (REG_BYTES (testreg) | |
3888 | + UNITS_PER_WORD - 1) / UNITS_PER_WORD)) | |
65f34de5 | 3889 | ; |
3890 | else | |
3891 | mark_dest = 1; | |
2f95707a | 3892 | |
65f34de5 | 3893 | testreg = XEXP (testreg, 0); |
3894 | } | |
2f95707a | 3895 | |
65f34de5 | 3896 | /* If this is a store into a register or group of registers, |
3897 | recursively scan the value being stored. */ | |
2f95707a | 3898 | |
65f34de5 | 3899 | if ((GET_CODE (testreg) == PARALLEL |
3900 | && GET_MODE (testreg) == BLKmode) | |
3901 | || (GET_CODE (testreg) == REG | |
3902 | && (regno = REGNO (testreg), | |
3903 | ! (regno == FRAME_POINTER_REGNUM | |
3904 | && (! reload_completed || frame_pointer_needed))) | |
3905 | #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM | |
3906 | && ! (regno == HARD_FRAME_POINTER_REGNUM | |
3907 | && (! reload_completed || frame_pointer_needed)) | |
3908 | #endif | |
3909 | #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM | |
3910 | && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno]) | |
3911 | #endif | |
3912 | )) | |
3913 | { | |
3914 | if (mark_dest) | |
3915 | mark_used_regs (pbi, SET_DEST (x), cond, insn); | |
3916 | mark_used_regs (pbi, SET_SRC (x), cond, insn); | |
3917 | return; | |
3918 | } | |
3919 | } | |
3920 | break; | |
9028a007 | 3921 | |
65f34de5 | 3922 | case ASM_OPERANDS: |
3923 | case UNSPEC_VOLATILE: | |
3924 | case TRAP_IF: | |
3925 | case ASM_INPUT: | |
3926 | { | |
3927 | /* Traditional and volatile asm instructions must be considered to use | |
3928 | and clobber all hard registers, all pseudo-registers and all of | |
3929 | memory. So must TRAP_IF and UNSPEC_VOLATILE operations. | |
a4d05baf | 3930 | |
65f34de5 | 3931 | Consider for instance a volatile asm that changes the fpu rounding |
3932 | mode. An insn should not be moved across this even if it only uses | |
3933 | pseudo-regs because it might give an incorrectly rounded result. | |
a4d05baf | 3934 | |
65f34de5 | 3935 | ?!? Unfortunately, marking all hard registers as live causes massive |
3936 | problems for the register allocator and marking all pseudos as live | |
3937 | creates mountains of uninitialized variable warnings. | |
a4d05baf | 3938 | |
65f34de5 | 3939 | So for now, just clear the memory set list and mark any regs |
3940 | we can find in ASM_OPERANDS as used. */ | |
3941 | if (code != ASM_OPERANDS || MEM_VOLATILE_P (x)) | |
3942 | { | |
3943 | free_EXPR_LIST_list (&pbi->mem_set_list); | |
3944 | pbi->mem_set_list_len = 0; | |
3945 | } | |
9028a007 | 3946 | |
65f34de5 | 3947 | /* For all ASM_OPERANDS, we must traverse the vector of input operands. |
3948 | We can not just fall through here since then we would be confused | |
3949 | by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate | |
3950 | traditional asms unlike their normal usage. */ | |
3951 | if (code == ASM_OPERANDS) | |
3952 | { | |
3953 | int j; | |
570f0cf1 | 3954 | |
65f34de5 | 3955 | for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++) |
3956 | mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn); | |
3957 | } | |
3958 | break; | |
3959 | } | |
570f0cf1 | 3960 | |
65f34de5 | 3961 | case COND_EXEC: |
3962 | if (cond != NULL_RTX) | |
3963 | abort (); | |
9028a007 | 3964 | |
65f34de5 | 3965 | mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn); |
9028a007 | 3966 | |
65f34de5 | 3967 | cond = COND_EXEC_TEST (x); |
3968 | x = COND_EXEC_CODE (x); | |
3969 | goto retry; | |
570f0cf1 | 3970 | |
65f34de5 | 3971 | case PHI: |
3972 | /* We _do_not_ want to scan operands of phi nodes. Operands of | |
3973 | a phi function are evaluated only when control reaches this | |
3974 | block along a particular edge. Therefore, regs that appear | |
3975 | as arguments to phi should not be added to the global live at | |
3976 | start. */ | |
3977 | return; | |
9028a007 | 3978 | |
65f34de5 | 3979 | default: |
3980 | break; | |
a4d05baf | 3981 | } |
570f0cf1 | 3982 | |
65f34de5 | 3983 | /* Recursively scan the operands of this expression. */ |
a4d05baf | 3984 | |
65f34de5 | 3985 | { |
19cb6b50 | 3986 | const char * const fmt = GET_RTX_FORMAT (code); |
3987 | int i; | |
a4d05baf | 3988 | |
65f34de5 | 3989 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
3990 | { | |
3991 | if (fmt[i] == 'e') | |
3992 | { | |
3993 | /* Tail recursive case: save a function call level. */ | |
3994 | if (i == 0) | |
3995 | { | |
3996 | x = XEXP (x, 0); | |
3997 | goto retry; | |
3998 | } | |
3999 | mark_used_regs (pbi, XEXP (x, i), cond, insn); | |
4000 | } | |
4001 | else if (fmt[i] == 'E') | |
4002 | { | |
19cb6b50 | 4003 | int j; |
65f34de5 | 4004 | for (j = 0; j < XVECLEN (x, i); j++) |
4005 | mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn); | |
4006 | } | |
4007 | } | |
4008 | } | |
a4d05baf | 4009 | } |
65f34de5 | 4010 | \f |
4011 | #ifdef AUTO_INC_DEC | |
a4d05baf | 4012 | |
65f34de5 | 4013 | static int |
4014 | try_pre_increment_1 (pbi, insn) | |
4015 | struct propagate_block_info *pbi; | |
4016 | rtx insn; | |
4017 | { | |
4018 | /* Find the next use of this reg. If in same basic block, | |
4019 | make it do pre-increment or pre-decrement if appropriate. */ | |
4020 | rtx x = single_set (insn); | |
4021 | HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1) | |
4022 | * INTVAL (XEXP (SET_SRC (x), 1))); | |
4023 | int regno = REGNO (SET_DEST (x)); | |
4024 | rtx y = pbi->reg_next_use[regno]; | |
4025 | if (y != 0 | |
4026 | && SET_DEST (x) != stack_pointer_rtx | |
4027 | && BLOCK_NUM (y) == BLOCK_NUM (insn) | |
4028 | /* Don't do this if the reg dies, or gets set in y; a standard addressing | |
4029 | mode would be better. */ | |
4030 | && ! dead_or_set_p (y, SET_DEST (x)) | |
4031 | && try_pre_increment (y, SET_DEST (x), amount)) | |
4032 | { | |
4033 | /* We have found a suitable auto-increment and already changed | |
4034 | insn Y to do it. So flush this increment instruction. */ | |
fb20d6fa | 4035 | propagate_block_delete_insn (insn); |
1deb248e | 4036 | |
65f34de5 | 4037 | /* Count a reference to this reg for the increment insn we are |
4038 | deleting. When a reg is incremented, spilling it is worse, | |
4039 | so we want to make that less likely. */ | |
4040 | if (regno >= FIRST_PSEUDO_REGISTER) | |
4041 | { | |
4042 | REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb); | |
4043 | REG_N_SETS (regno)++; | |
4044 | } | |
1deb248e | 4045 | |
65f34de5 | 4046 | /* Flush any remembered memories depending on the value of |
4047 | the incremented register. */ | |
4048 | invalidate_mems_from_set (pbi, SET_DEST (x)); | |
1deb248e | 4049 | |
65f34de5 | 4050 | return 1; |
4051 | } | |
4052 | return 0; | |
4053 | } | |
1deb248e | 4054 | |
65f34de5 | 4055 | /* Try to change INSN so that it does pre-increment or pre-decrement |
4056 | addressing on register REG in order to add AMOUNT to REG. | |
4057 | AMOUNT is negative for pre-decrement. | |
4058 | Returns 1 if the change could be made. | |
4059 | This checks all about the validity of the result of modifying INSN. */ | |
1deb248e | 4060 | |
65f34de5 | 4061 | static int |
4062 | try_pre_increment (insn, reg, amount) | |
4063 | rtx insn, reg; | |
4064 | HOST_WIDE_INT amount; | |
1deb248e | 4065 | { |
19cb6b50 | 4066 | rtx use; |
1deb248e | 4067 | |
65f34de5 | 4068 | /* Nonzero if we can try to make a pre-increment or pre-decrement. |
4069 | For example, addl $4,r1; movl (r1),... can become movl +(r1),... */ | |
4070 | int pre_ok = 0; | |
4071 | /* Nonzero if we can try to make a post-increment or post-decrement. | |
4072 | For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,... | |
4073 | It is possible for both PRE_OK and POST_OK to be nonzero if the machine | |
4074 | supports both pre-inc and post-inc, or both pre-dec and post-dec. */ | |
4075 | int post_ok = 0; | |
1deb248e | 4076 | |
65f34de5 | 4077 | /* Nonzero if the opportunity actually requires post-inc or post-dec. */ |
4078 | int do_post = 0; | |
1deb248e | 4079 | |
65f34de5 | 4080 | /* From the sign of increment, see which possibilities are conceivable |
4081 | on this target machine. */ | |
4082 | if (HAVE_PRE_INCREMENT && amount > 0) | |
4083 | pre_ok = 1; | |
4084 | if (HAVE_POST_INCREMENT && amount > 0) | |
4085 | post_ok = 1; | |
1deb248e | 4086 | |
65f34de5 | 4087 | if (HAVE_PRE_DECREMENT && amount < 0) |
4088 | pre_ok = 1; | |
4089 | if (HAVE_POST_DECREMENT && amount < 0) | |
4090 | post_ok = 1; | |
1deb248e | 4091 | |
65f34de5 | 4092 | if (! (pre_ok || post_ok)) |
4093 | return 0; | |
1deb248e | 4094 | |
65f34de5 | 4095 | /* It is not safe to add a side effect to a jump insn |
4096 | because if the incremented register is spilled and must be reloaded | |
4097 | there would be no way to store the incremented value back in memory. */ | |
9028a007 | 4098 | |
65f34de5 | 4099 | if (GET_CODE (insn) == JUMP_INSN) |
4100 | return 0; | |
1deb248e | 4101 | |
65f34de5 | 4102 | use = 0; |
4103 | if (pre_ok) | |
4104 | use = find_use_as_address (PATTERN (insn), reg, 0); | |
be051fd5 | 4105 | if (post_ok && (use == 0 || use == (rtx) (size_t) 1)) |
1deb248e | 4106 | { |
65f34de5 | 4107 | use = find_use_as_address (PATTERN (insn), reg, -amount); |
4108 | do_post = 1; | |
1deb248e | 4109 | } |
4110 | ||
be051fd5 | 4111 | if (use == 0 || use == (rtx) (size_t) 1) |
65f34de5 | 4112 | return 0; |
4113 | ||
4114 | if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount)) | |
4115 | return 0; | |
1deb248e | 4116 | |
65f34de5 | 4117 | /* See if this combination of instruction and addressing mode exists. */ |
4118 | if (! validate_change (insn, &XEXP (use, 0), | |
4119 | gen_rtx_fmt_e (amount > 0 | |
4120 | ? (do_post ? POST_INC : PRE_INC) | |
4121 | : (do_post ? POST_DEC : PRE_DEC), | |
4122 | Pmode, reg), 0)) | |
4123 | return 0; | |
1deb248e | 4124 | |
65f34de5 | 4125 | /* Record that this insn now has an implicit side effect on X. */ |
4126 | REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn)); | |
4127 | return 1; | |
1deb248e | 4128 | } |
4129 | ||
65f34de5 | 4130 | #endif /* AUTO_INC_DEC */ |
4131 | \f | |
4132 | /* Find the place in the rtx X where REG is used as a memory address. | |
4133 | Return the MEM rtx that so uses it. | |
4134 | If PLUSCONST is nonzero, search instead for a memory address equivalent to | |
4135 | (plus REG (const_int PLUSCONST)). | |
0ed5c697 | 4136 | |
65f34de5 | 4137 | If such an address does not appear, return 0. |
4138 | If REG appears more than once, or is used other than in such an address, | |
be051fd5 | 4139 | return (rtx) 1. */ |
0ed5c697 | 4140 | |
65f34de5 | 4141 | rtx |
4142 | find_use_as_address (x, reg, plusconst) | |
19cb6b50 | 4143 | rtx x; |
65f34de5 | 4144 | rtx reg; |
4145 | HOST_WIDE_INT plusconst; | |
0ed5c697 | 4146 | { |
65f34de5 | 4147 | enum rtx_code code = GET_CODE (x); |
4148 | const char * const fmt = GET_RTX_FORMAT (code); | |
19cb6b50 | 4149 | int i; |
4150 | rtx value = 0; | |
4151 | rtx tem; | |
1f080ed5 | 4152 | |
65f34de5 | 4153 | if (code == MEM && XEXP (x, 0) == reg && plusconst == 0) |
4154 | return x; | |
0ed5c697 | 4155 | |
65f34de5 | 4156 | if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS |
4157 | && XEXP (XEXP (x, 0), 0) == reg | |
4158 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
4159 | && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst) | |
4160 | return x; | |
69b917b7 | 4161 | |
65f34de5 | 4162 | if (code == SIGN_EXTRACT || code == ZERO_EXTRACT) |
0ed5c697 | 4163 | { |
65f34de5 | 4164 | /* If REG occurs inside a MEM used in a bit-field reference, |
4165 | that is unacceptable. */ | |
4166 | if (find_use_as_address (XEXP (x, 0), reg, 0) != 0) | |
be051fd5 | 4167 | return (rtx) (size_t) 1; |
0ed5c697 | 4168 | } |
0ed5c697 | 4169 | |
65f34de5 | 4170 | if (x == reg) |
be051fd5 | 4171 | return (rtx) (size_t) 1; |
a4d05baf | 4172 | |
65f34de5 | 4173 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
a4d05baf | 4174 | { |
65f34de5 | 4175 | if (fmt[i] == 'e') |
4176 | { | |
4177 | tem = find_use_as_address (XEXP (x, i), reg, plusconst); | |
4178 | if (value == 0) | |
4179 | value = tem; | |
4180 | else if (tem != 0) | |
be051fd5 | 4181 | return (rtx) (size_t) 1; |
65f34de5 | 4182 | } |
4183 | else if (fmt[i] == 'E') | |
a4d05baf | 4184 | { |
19cb6b50 | 4185 | int j; |
65f34de5 | 4186 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
a4d05baf | 4187 | { |
65f34de5 | 4188 | tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst); |
4189 | if (value == 0) | |
4190 | value = tem; | |
4191 | else if (tem != 0) | |
be051fd5 | 4192 | return (rtx) (size_t) 1; |
a4d05baf | 4193 | } |
4194 | } | |
4195 | } | |
a4d05baf | 4196 | |
65f34de5 | 4197 | return value; |
4198 | } | |
4199 | \f | |
4200 | /* Write information about registers and basic blocks into FILE. | |
4201 | This is part of making a debugging dump. */ | |
9028a007 | 4202 | |
65f34de5 | 4203 | void |
4204 | dump_regset (r, outf) | |
4205 | regset r; | |
4206 | FILE *outf; | |
a4d05baf | 4207 | { |
65f34de5 | 4208 | int i; |
4209 | if (r == NULL) | |
052f57a6 | 4210 | { |
65f34de5 | 4211 | fputs (" (nil)", outf); |
052f57a6 | 4212 | return; |
4213 | } | |
a4d05baf | 4214 | |
65f34de5 | 4215 | EXECUTE_IF_SET_IN_REG_SET (r, 0, i, |
a4d05baf | 4216 | { |
65f34de5 | 4217 | fprintf (outf, " %d", i); |
4218 | if (i < FIRST_PSEUDO_REGISTER) | |
4219 | fprintf (outf, " [%s]", | |
4220 | reg_names[i]); | |
4221 | }); | |
a4d05baf | 4222 | } |
4223 | ||
65f34de5 | 4224 | /* Print a human-reaable representation of R on the standard error |
4225 | stream. This function is designed to be used from within the | |
4226 | debugger. */ | |
9028a007 | 4227 | |
65f34de5 | 4228 | void |
4229 | debug_regset (r) | |
4230 | regset r; | |
a4d05baf | 4231 | { |
65f34de5 | 4232 | dump_regset (r, stderr); |
4233 | putc ('\n', stderr); | |
a4d05baf | 4234 | } |
4235 | ||
/* Recompute register set/reference counts immediately prior to register
   allocation.

   This avoids problems with set/reference counts changing to/from values
   which have special meanings to the register allocators.

   Additionally, the reference counts are the primary component used by the
   register allocators to prioritize pseudos for allocation to hard regs.
   More accurate reference counts generally lead to better register allocation.

   F is the first insn to be scanned.

   LOOP_STEP denotes how much loop_depth should be incremented per
   loop nesting level in order to increase the ref count more for
   references in a loop.

   Note that both parameters are currently ignored (ATTRIBUTE_UNUSED):
   the work is done entirely by a local update_life_info pass below.

   It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
   possibly other information which is used by the register allocators.  */

void
recompute_reg_usage (f, loop_step)
     rtx f ATTRIBUTE_UNUSED;
     int loop_step ATTRIBUTE_UNUSED;
{
  /* Reallocate the per-register life data, then rescan with only the
     register-info properties requested, refreshing the counts.  */
  allocate_reg_life_data ();
  update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
}
4263 | ||
/* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
   blocks.  If BLOCKS is NULL, assume the universal set.  Returns a count
   of the number of registers that died.

   BLOCKS selects which basic blocks to scan, by block index.  If KILL is
   nonzero, the REG_DEAD and REG_UNUSED notes are deleted as well as
   counted.  A REG_DEAD note for a hard register contributes one count per
   hard register it spans; a note for a pseudo contributes one.  */

int
count_or_remove_death_notes (blocks, kill)
     sbitmap blocks;
     int kill;
{
  int count = 0;
  basic_block bb;

  FOR_EACH_BB_REVERSE (bb)
    {
      rtx insn;

      /* Skip blocks not selected by BLOCKS, when BLOCKS is given.  */
      if (blocks && ! TEST_BIT (blocks, bb->index))
	continue;

      /* Walk every insn of the block, from its head through its end.  */
      for (insn = bb->head;; insn = NEXT_INSN (insn))
	{
	  if (INSN_P (insn))
	    {
	      /* PPREV points at the link field that holds LINK, so a
		 note can be spliced out of the list in place.  */
	      rtx *pprev = &REG_NOTES (insn);
	      rtx link = *pprev;

	      while (link)
		{
		  switch (REG_NOTE_KIND (link))
		    {
		    case REG_DEAD:
		      if (GET_CODE (XEXP (link, 0)) == REG)
			{
			  rtx reg = XEXP (link, 0);
			  int n;

			  /* A hard register may occupy several machine
			     registers; count each one it spans.  */
			  if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
			    n = 1;
			  else
			    n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
			  count += n;
			}
		      /* Fall through.  */

		    case REG_UNUSED:
		      if (kill)
			{
			  /* Unlink and free this note; LINK advances to
			     the following note without moving PPREV.  */
			  rtx next = XEXP (link, 1);
			  free_EXPR_LIST_node (link);
			  *pprev = link = next;
			  break;
			}
		      /* Fall through.  */

		    default:
		      /* Keep this note: step PPREV and LINK past it.  */
		      pprev = &XEXP (link, 1);
		      link = *pprev;
		      break;
		    }
		}
	    }

	  if (insn == bb->end)
	    break;
	}
    }

  return count;
}
9645fa4f | 4333 | /* Clear LOG_LINKS fields of insns in a selected blocks or whole chain |
4334 | if blocks is NULL. */ | |
d6cb6164 | 4335 | |
9645fa4f | 4336 | static void |
4337 | clear_log_links (blocks) | |
4338 | sbitmap blocks; | |
dadde703 | 4339 | { |
9645fa4f | 4340 | rtx insn; |
4341 | int i; | |
06a125d8 | 4342 | |
9645fa4f | 4343 | if (!blocks) |
06a125d8 | 4344 | { |
9645fa4f | 4345 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) |
4346 | if (INSN_P (insn)) | |
43bc0539 | 4347 | free_INSN_LIST_list (&LOG_LINKS (insn)); |
06a125d8 | 4348 | } |
9645fa4f | 4349 | else |
4350 | EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i, | |
4351 | { | |
4352 | basic_block bb = BASIC_BLOCK (i); | |
044c7691 | 4353 | |
9645fa4f | 4354 | for (insn = bb->head; insn != NEXT_INSN (bb->end); |
4355 | insn = NEXT_INSN (insn)) | |
4356 | if (INSN_P (insn)) | |
43bc0539 | 4357 | free_INSN_LIST_list (&LOG_LINKS (insn)); |
9645fa4f | 4358 | }); |
dadde703 | 4359 | } |
d6cb6164 | 4360 | |
/* Given a register bitmap, turn on the bits in a HARD_REG_SET that
   correspond to the hard registers, if any, set in that map.  This
   could be done far more efficiently by having all sorts of special-cases
   with moving single words, but probably isn't worth the trouble.  */

void
reg_set_to_hard_reg_set (to, from)
     HARD_REG_SET *to;
     bitmap from;
{
  int i;

  EXECUTE_IF_SET_IN_BITMAP
    (from, 0, i,
     {
       /* Registers numbered FIRST_PSEUDO_REGISTER and above are
	  pseudos, so no further hard-register bits can follow.
	  NOTE(review): the early return assumes the bitmap is
	  traversed in ascending bit order -- confirm against the
	  EXECUTE_IF_SET_IN_BITMAP definition.  */
       if (i >= FIRST_PSEUDO_REGISTER)
	 return;
       SET_HARD_REG_BIT (*to, i);
     });
}