]>
Commit | Line | Data |
---|---|---|
14d66741 | 1 | /* Definitions for computing resource usage of specific insns. |
10a15ee4 | 2 | Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, |
71e45bc2 | 3 | 2009, 2010, 2011, 2012 Free Software Foundation, Inc. |
14d66741 | 4 | |
f12b58b3 | 5 | This file is part of GCC. |
14d66741 | 6 | |
f12b58b3 | 7 | GCC is free software; you can redistribute it and/or modify it under |
8 | the terms of the GNU General Public License as published by the Free | |
8c4c00c1 | 9 | Software Foundation; either version 3, or (at your option) any later |
f12b58b3 | 10 | version. |
14d66741 | 11 | |
f12b58b3 | 12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 | for more details. | |
14d66741 | 16 | |
17 | You should have received a copy of the GNU General Public License | |
8c4c00c1 | 18 | along with GCC; see the file COPYING3. If not see |
19 | <http://www.gnu.org/licenses/>. */ | |
14d66741 | 20 | |
29bd1808 | 21 | #include "config.h" |
27560af1 | 22 | #include "system.h" |
805e22b2 | 23 | #include "coretypes.h" |
24 | #include "tm.h" | |
0b205f4c | 25 | #include "diagnostic-core.h" |
29bd1808 | 26 | #include "rtl.h" |
7953c610 | 27 | #include "tm_p.h" |
29bd1808 | 28 | #include "hard-reg-set.h" |
0a893c29 | 29 | #include "function.h" |
29bd1808 | 30 | #include "regs.h" |
31 | #include "flags.h" | |
32 | #include "output.h" | |
33 | #include "resource.h" | |
05806416 | 34 | #include "except.h" |
d8c9779c | 35 | #include "insn-attr.h" |
98d5e888 | 36 | #include "params.h" |
3072d30e | 37 | #include "df.h" |
29bd1808 | 38 | |
/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save them in a hash table rather than recomputing them
   each time.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info.  */
};

/* Number of buckets in target_hash_table; a prime so INSN_UID keys
   spread evenly.  NOTE(review): the lookup/insert code is outside this
   chunk -- confirm the hashing scheme there.  */
#define TARGET_HASH_PRIME 257

/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Define the hash table itself.  */
static struct target_info **target_hash_table = NULL;

/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;

/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Also used by update_live_status.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   Also only used by the next two functions.  */

static HARD_REG_SET pending_dead_regs;

/* Forward declarations for the static helpers defined below.  */
static void update_live_status (rtx, const_rtx, void *);
static int find_basic_block (rtx, int);
static rtx next_insn_no_annul (rtx);
static rtx find_dead_or_set_registers (rtx, struct resources*,
				       rtx*, int, struct resources,
				       struct resources);
14d66741 | 87 | \f |
29bd1808 | 88 | /* Utility function called from mark_target_live_regs via note_stores. |
89 | It deadens any CLOBBERed registers and livens any SET registers. */ | |
90 | ||
91 | static void | |
81a410b1 | 92 | update_live_status (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED) |
29bd1808 | 93 | { |
94 | int first_regno, last_regno; | |
95 | int i; | |
96 | ||
8ad4c111 | 97 | if (!REG_P (dest) |
98 | && (GET_CODE (dest) != SUBREG || !REG_P (SUBREG_REG (dest)))) | |
29bd1808 | 99 | return; |
100 | ||
101 | if (GET_CODE (dest) == SUBREG) | |
fe2ebfc8 | 102 | { |
103 | first_regno = subreg_regno (dest); | |
104 | last_regno = first_regno + subreg_nregs (dest); | |
29bd1808 | 105 | |
fe2ebfc8 | 106 | } |
107 | else | |
108 | { | |
109 | first_regno = REGNO (dest); | |
a2c6f0b7 | 110 | last_regno = END_HARD_REGNO (dest); |
fe2ebfc8 | 111 | } |
29bd1808 | 112 | |
113 | if (GET_CODE (x) == CLOBBER) | |
114 | for (i = first_regno; i < last_regno; i++) | |
115 | CLEAR_HARD_REG_BIT (current_live_regs, i); | |
116 | else | |
117 | for (i = first_regno; i < last_regno; i++) | |
118 | { | |
119 | SET_HARD_REG_BIT (current_live_regs, i); | |
120 | CLEAR_HARD_REG_BIT (pending_dead_regs, i); | |
121 | } | |
122 | } | |
98d5e888 | 123 | |
/* Find the number of the basic block with correct live register
   information that starts closest to INSN.  Return -1 if we couldn't
   find such a basic block or the beginning is more than
   SEARCH_LIMIT instructions before INSN.  Use SEARCH_LIMIT = -1 for
   an unlimited search.

   The delay slot filling code destroys the control-flow graph so,
   instead of finding the basic block containing INSN, we search
   backwards toward a BARRIER where the live register information is
   correct.  */

static int
find_basic_block (rtx insn, int search_limit)
{
  /* Scan backwards to the previous BARRIER.  Then see if we can find a
     label that starts a basic block.  Return the basic block number.
     With SEARCH_LIMIT == -1 the counter decrements past zero without
     ever equalling it, so the scan is effectively unbounded.  */
  for (insn = prev_nonnote_insn (insn);
       insn && !BARRIER_P (insn) && search_limit != 0;
       insn = prev_nonnote_insn (insn), --search_limit)
    ;

  /* The closest BARRIER is too far away.  Note this check comes first:
     even if the loop stopped on an insn, an exhausted budget wins.  */
  if (search_limit == 0)
    return -1;

  /* The start of the function.  */
  else if (insn == 0)
    return ENTRY_BLOCK_PTR->next_bb->index;

  /* See if any of the upcoming CODE_LABELs start a basic block.  If we reach
     anything other than a CODE_LABEL or note, we can't find this code.  */
  for (insn = next_nonnote_insn (insn);
       insn && LABEL_P (insn);
       insn = next_nonnote_insn (insn))
    if (BLOCK_FOR_INSN (insn))
      return BLOCK_FOR_INSN (insn)->index;

  return -1;
}
29bd1808 | 163 | \f |
164 | /* Similar to next_insn, but ignores insns in the delay slots of | |
165 | an annulled branch. */ | |
166 | ||
167 | static rtx | |
3ad4992f | 168 | next_insn_no_annul (rtx insn) |
29bd1808 | 169 | { |
170 | if (insn) | |
171 | { | |
172 | /* If INSN is an annulled branch, skip any insns from the target | |
173 | of the branch. */ | |
7e66a69e | 174 | if (JUMP_P (insn) |
00f2bb6a | 175 | && INSN_ANNULLED_BRANCH_P (insn) |
29bd1808 | 176 | && NEXT_INSN (PREV_INSN (insn)) != insn) |
00f2bb6a | 177 | { |
178 | rtx next = NEXT_INSN (insn); | |
179 | enum rtx_code code = GET_CODE (next); | |
180 | ||
181 | while ((code == INSN || code == JUMP_INSN || code == CALL_INSN) | |
182 | && INSN_FROM_TARGET_P (next)) | |
183 | { | |
184 | insn = next; | |
185 | next = NEXT_INSN (insn); | |
186 | code = GET_CODE (next); | |
187 | } | |
188 | } | |
29bd1808 | 189 | |
190 | insn = NEXT_INSN (insn); | |
6d7dc5b9 | 191 | if (insn && NONJUMP_INSN_P (insn) |
29bd1808 | 192 | && GET_CODE (PATTERN (insn)) == SEQUENCE) |
193 | insn = XVECEXP (PATTERN (insn), 0, 0); | |
194 | } | |
195 | ||
196 | return insn; | |
197 | } | |
198 | \f | |
/* Given X, some rtl, and RES, a pointer to a `struct resource', mark
   which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.

   Note that the recursion passes INCLUDE_DELAYED_EFFECTS = false for
   sub-expressions; it only propagates when descending from an insn to
   its own PATTERN.  */

void
mark_referenced_resources (rtx x, struct resources *res,
			   bool include_delayed_effects)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    CASE_CONST_ANY:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
      /* Constants and addresses reference no machine resources.  */
      return;

    case SUBREG:
      if (!REG_P (SUBREG_REG (x)))
	mark_referenced_resources (SUBREG_REG (x), res, false);
      else
	{
	  /* A SUBREG of a hard register references the hard registers
	     it actually covers.  */
	  unsigned int regno = subreg_regno (x);
	  unsigned int last_regno = regno + subreg_nregs (x);

	  gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	  for (r = regno; r < last_regno; r++)
	    SET_HARD_REG_BIT (res->regs, r);
	}
      return;

    case REG:
      gcc_assert (HARD_REGISTER_P (x));
      add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
	 memory.  */
      if (MEM_READONLY_P (x))
	res->unch_memory = 1;
      else
	res->memory = 1;
      res->volatil |= MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CC0:
      res->cc = 1;
      return;

    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  Break, rather than
	 return, so the generic sub-expression walk below still runs.  */
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We can not just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, false);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
	 memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, false);
      mark_referenced_resources (XEXP (x, 1), res, false);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
	 registers used to access memory are referenced.  SET_DEST is
	 also referenced if it is a ZERO_EXTRACT or STRICT_LOW_PART.  */

      mark_referenced_resources (SET_SRC (x), res, false);

      x = SET_DEST (x);
      if (GET_CODE (x) == ZERO_EXTRACT
	  || GET_CODE (x) == STRICT_LOW_PART)
	mark_referenced_resources (x, res, false);
      else if (GET_CODE (x) == SUBREG)
	x = SUBREG_REG (x);
      if (MEM_P (x))
	mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CLOBBER:
      /* A CLOBBER sets, not references; nothing to record here.  */
      return;

    case CALL_INSN:
      if (include_delayed_effects)
	{
	  /* A CALL references memory, the frame pointer if it exists, the
	     stack pointer, any global registers and any registers given in
	     USE insns immediately in front of the CALL.

	     However, we may have moved some of the parameter loading insns
	     into the delay slot of this CALL.  If so, the USE's for them
	     don't count and should be skipped.  */
	  rtx insn = PREV_INSN (x);
	  rtx sequence = 0;
	  int seq_size = 0;
	  int i;

	  /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
	  if (NEXT_INSN (insn) != x)
	    {
	      sequence = PATTERN (NEXT_INSN (insn));
	      seq_size = XVECLEN (sequence, 0);
	      gcc_assert (GET_CODE (sequence) == SEQUENCE);
	    }

	  res->memory = 1;
	  SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)
	    {
	      SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
	      SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
#endif
	    }

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can need any register.

	     This is done to be more conservative about how we handle setjmp.
	     We assume that they both use and set all registers.  Using all
	     registers ensures that a register will not be considered dead
	     just because it crosses a setjmp call.  A register should be
	     considered dead only if the setjmp call returns nonzero.  */
	  if (find_reg_note (x, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);

	  {
	    rtx link;

	    /* Mark each USE'd argument register, unless the value was
	       actually loaded in one of our own delay slots (slot 1..n
	       of SEQUENCE; slot 0 is this call itself).  */
	    for (link = CALL_INSN_FUNCTION_USAGE (x);
		 link;
		 link = XEXP (link, 1))
	      if (GET_CODE (XEXP (link, 0)) == USE)
		{
		  for (i = 1; i < seq_size; i++)
		    {
		      rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
		      if (GET_CODE (slot_pat) == SET
			  && rtx_equal_p (SET_DEST (slot_pat),
					  XEXP (XEXP (link, 0), 0)))
			break;
		    }
		  if (i >= seq_size)
		    mark_referenced_resources (XEXP (XEXP (link, 0), 0),
					       res, false);
		}
	  }
	}

      /* ... fall through to other INSN processing ...  */

    case INSN:
    case JUMP_INSN:

#ifdef INSN_REFERENCES_ARE_DELAYED
      if (! include_delayed_effects
	  && INSN_REFERENCES_ARE_DELAYED (x))
	return;
#endif

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_referenced_resources (XVECEXP (x, i, j), res,
				     include_delayed_effects);
	break;
      }
}
412 | \f | |
/* A subroutine of mark_target_live_regs.  Search forward from TARGET
   looking for registers that are set before they are used.  These are dead.
   Stop after passing a few conditional jumps, and/or a small
   number of unconditional branches.

   SET and NEEDED are passed by value deliberately: each branch path
   explored gets its own private copy to accumulate into.  JUMP_COUNT
   is the budget already spent; an unconditional jump costs 1 and a
   conditional jump costs 4, against a total budget of 10.

   Returns the unconditional jump insn we followed, or 0.  */

static rtx
find_dead_or_set_registers (rtx target, struct resources *res,
			    rtx *jump_target, int jump_count,
			    struct resources set, struct resources needed)
{
  HARD_REG_SET scratch;
  rtx insn, next;
  rtx jump_insn = 0;
  int i;

  for (insn = target; insn; insn = next)
    {
      rtx this_jump_insn = insn;

      next = NEXT_INSN (insn);

      /* If this instruction can throw an exception, then we don't
	 know where we might end up next.  That means that we have to
	 assume that whatever we have already marked as live really is
	 live.  */
      if (can_throw_internal (insn))
	break;

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  /* After a label, any pending dead registers that weren't yet
	     used can be made dead.  */
	  AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
	  AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
	  CLEAR_HARD_REG_SET (pending_dead_regs);

	  continue;

	case BARRIER:
	case NOTE:
	  continue;

	case INSN:
	  if (GET_CODE (PATTERN (insn)) == USE)
	    {
	      /* If INSN is a USE made by update_block, we care about the
		 underlying insn.  Any registers set by the underlying insn
		 are live since the insn is being done somewhere else.  */
	      if (INSN_P (XEXP (PATTERN (insn), 0)))
		mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
				    MARK_SRC_DEST_CALL);

	      /* All other USE insns are to be ignored.  */
	      continue;
	    }
	  else if (GET_CODE (PATTERN (insn)) == CLOBBER)
	    continue;
	  else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	    {
	      /* An unconditional jump can be used to fill the delay slot
		 of a call, so search for a JUMP_INSN in any position.  */
	      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
		{
		  this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
		  if (JUMP_P (this_jump_insn))
		    break;
		}
	    }
	  /* Fall through.  */

	default:
	  break;
	}

      if (JUMP_P (this_jump_insn))
	{
	  if (jump_count++ < 10)
	    {
	      if (any_uncondjump_p (this_jump_insn)
		  || ANY_RETURN_P (PATTERN (this_jump_insn)))
		{
		  /* Follow the unconditional jump; a RETURN ends the
		     scan (next becomes NULL).  */
		  next = JUMP_LABEL (this_jump_insn);
		  if (ANY_RETURN_P (next))
		    next = NULL_RTX;
		  if (jump_insn == 0)
		    {
		      jump_insn = insn;
		      if (jump_target)
			*jump_target = JUMP_LABEL (this_jump_insn);
		    }
		}
	      else if (any_condjump_p (this_jump_insn))
		{
		  struct resources target_set, target_res;
		  struct resources fallthrough_res;

		  /* We can handle conditional branches here by following
		     both paths, and then IOR the results of the two paths
		     together, which will give us registers that are dead
		     on both paths.  Since this is expensive, we give it
		     a much higher cost than unconditional branches.  The
		     cost was chosen so that we will follow at most 1
		     conditional branch.  */

		  jump_count += 4;
		  if (jump_count >= 10)
		    break;

		  mark_referenced_resources (insn, &needed, true);

		  /* For an annulled branch, mark_set_resources ignores slots
		     filled by instructions from the target.  This is correct
		     if the branch is not taken.  Since we are following both
		     paths from the branch, we must also compute correct info
		     if the branch is taken.  We do this by inverting all of
		     the INSN_FROM_TARGET_P bits, calling mark_set_resources,
		     and then inverting the INSN_FROM_TARGET_P bits again.  */

		  if (GET_CODE (PATTERN (insn)) == SEQUENCE
		      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
		    {
		      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
			INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
			  = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

		      target_set = set;
		      mark_set_resources (insn, &target_set, 0,
					  MARK_SRC_DEST_CALL);

		      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
			INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
			  = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		    }
		  else
		    {
		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		      target_set = set;
		    }

		  /* Registers dead on the taken path: set there but not
		     needed.  */
		  target_res = *res;
		  COPY_HARD_REG_SET (scratch, target_set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (target_res.regs, scratch);

		  /* Likewise for the fall-through path.  */
		  fallthrough_res = *res;
		  COPY_HARD_REG_SET (scratch, set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);

		  if (!ANY_RETURN_P (JUMP_LABEL (this_jump_insn)))
		    find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
						&target_res, 0, jump_count,
						target_set, needed);
		  find_dead_or_set_registers (next,
					      &fallthrough_res, 0, jump_count,
					      set, needed);
		  /* Live if live on either path.  */
		  IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
		  AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
		  break;
		}
	      else
		break;
	    }
	  else
	    {
	      /* Don't try this optimization if we expired our jump count
		 above, since that would mean there may be an infinite loop
		 in the function being compiled.  */
	      jump_insn = 0;
	      break;
	    }
	}

      mark_referenced_resources (insn, &needed, true);
      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

      /* Registers set before any use are dead here.  */
      COPY_HARD_REG_SET (scratch, set.regs);
      AND_COMPL_HARD_REG_SET (scratch, needed.regs);
      AND_COMPL_HARD_REG_SET (res->regs, scratch);
    }

  return jump_insn;
}
598 | \f | |
599 | /* Given X, a part of an insn, and a pointer to a `struct resource', | |
600 | RES, indicate which resources are modified by the insn. If | |
d2137327 | 601 | MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially |
fbb30aa4 | 602 | set by the called routine. |
29bd1808 | 603 | |
604 | If IN_DEST is nonzero, it means we are inside a SET. Otherwise, | |
605 | objects are being referenced instead of set. | |
606 | ||
607 | We never mark the insn as modifying the condition code unless it explicitly | |
608 | SETs CC0 even though this is not totally correct. The reason for this is | |
609 | that we require a SET of CC0 to immediately precede the reference to CC0. | |
610 | So if some other insn sets CC0 as a side-effect, we know it cannot affect | |
1e625a2e | 611 | our computation and thus may be placed in a delay slot. */ |
29bd1808 | 612 | |
613 | void | |
3ad4992f | 614 | mark_set_resources (rtx x, struct resources *res, int in_dest, |
615 | enum mark_resource_type mark_type) | |
29bd1808 | 616 | { |
02e7a332 | 617 | enum rtx_code code; |
618 | int i, j; | |
619 | unsigned int r; | |
620 | const char *format_ptr; | |
29bd1808 | 621 | |
622 | restart: | |
623 | ||
624 | code = GET_CODE (x); | |
625 | ||
626 | switch (code) | |
627 | { | |
628 | case NOTE: | |
629 | case BARRIER: | |
630 | case CODE_LABEL: | |
631 | case USE: | |
0349edce | 632 | CASE_CONST_ANY: |
29bd1808 | 633 | case LABEL_REF: |
634 | case SYMBOL_REF: | |
635 | case CONST: | |
636 | case PC: | |
637 | /* These don't set any resources. */ | |
638 | return; | |
639 | ||
640 | case CC0: | |
641 | if (in_dest) | |
642 | res->cc = 1; | |
643 | return; | |
644 | ||
645 | case CALL_INSN: | |
646 | /* Called routine modifies the condition code, memory, any registers | |
647 | that aren't saved across calls, global registers and anything | |
648 | explicitly CLOBBERed immediately after the CALL_INSN. */ | |
649 | ||
d2137327 | 650 | if (mark_type == MARK_SRC_DEST_CALL) |
29bd1808 | 651 | { |
29bd1808 | 652 | rtx link; |
653 | ||
654 | res->cc = res->memory = 1; | |
84131f22 | 655 | |
656 | IOR_HARD_REG_SET (res->regs, regs_invalidated_by_call); | |
29bd1808 | 657 | |
29bd1808 | 658 | for (link = CALL_INSN_FUNCTION_USAGE (x); |
659 | link; link = XEXP (link, 1)) | |
660 | if (GET_CODE (XEXP (link, 0)) == CLOBBER) | |
d2137327 | 661 | mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1, |
662 | MARK_SRC_DEST); | |
29bd1808 | 663 | |
9239aee6 | 664 | /* Check for a REG_SETJMP. If it exists, then we must |
29bd1808 | 665 | assume that this call can clobber any register. */ |
9239aee6 | 666 | if (find_reg_note (x, REG_SETJMP, NULL)) |
29bd1808 | 667 | SET_HARD_REG_SET (res->regs); |
668 | } | |
669 | ||
670 | /* ... and also what its RTL says it modifies, if anything. */ | |
671 | ||
672 | case JUMP_INSN: | |
673 | case INSN: | |
674 | ||
675 | /* An insn consisting of just a CLOBBER (or USE) is just for flow | |
676 | and doesn't actually do anything, so we ignore it. */ | |
677 | ||
678 | #ifdef INSN_SETS_ARE_DELAYED | |
d2137327 | 679 | if (mark_type != MARK_SRC_DEST_CALL |
29bd1808 | 680 | && INSN_SETS_ARE_DELAYED (x)) |
681 | return; | |
682 | #endif | |
683 | ||
684 | x = PATTERN (x); | |
685 | if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER) | |
686 | goto restart; | |
687 | return; | |
688 | ||
689 | case SET: | |
690 | /* If the source of a SET is a CALL, this is actually done by | |
691 | the called routine. So only include it if we are to include the | |
692 | effects of the calling routine. */ | |
693 | ||
694 | mark_set_resources (SET_DEST (x), res, | |
d2137327 | 695 | (mark_type == MARK_SRC_DEST_CALL |
29bd1808 | 696 | || GET_CODE (SET_SRC (x)) != CALL), |
d2137327 | 697 | mark_type); |
29bd1808 | 698 | |
fbb30aa4 | 699 | mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST); |
29bd1808 | 700 | return; |
701 | ||
702 | case CLOBBER: | |
d2137327 | 703 | mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST); |
29bd1808 | 704 | return; |
2617fe26 | 705 | |
29bd1808 | 706 | case SEQUENCE: |
7e66a69e | 707 | { |
708 | rtx control = XVECEXP (x, 0, 0); | |
709 | bool annul_p = JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control); | |
710 | ||
711 | mark_set_resources (control, res, 0, mark_type); | |
712 | for (i = XVECLEN (x, 0) - 1; i >= 0; --i) | |
713 | { | |
714 | rtx elt = XVECEXP (x, 0, i); | |
715 | if (!annul_p && INSN_FROM_TARGET_P (elt)) | |
716 | mark_set_resources (elt, res, 0, mark_type); | |
717 | } | |
718 | } | |
29bd1808 | 719 | return; |
720 | ||
721 | case POST_INC: | |
722 | case PRE_INC: | |
723 | case POST_DEC: | |
724 | case PRE_DEC: | |
d2137327 | 725 | mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST); |
29bd1808 | 726 | return; |
727 | ||
40988080 | 728 | case PRE_MODIFY: |
729 | case POST_MODIFY: | |
a3da8215 | 730 | mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST); |
731 | mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, MARK_SRC_DEST); | |
732 | mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, MARK_SRC_DEST); | |
40988080 | 733 | return; |
734 | ||
d2137327 | 735 | case SIGN_EXTRACT: |
29bd1808 | 736 | case ZERO_EXTRACT: |
fbb30aa4 | 737 | mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST); |
738 | mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST); | |
739 | mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST); | |
29bd1808 | 740 | return; |
741 | ||
742 | case MEM: | |
743 | if (in_dest) | |
744 | { | |
745 | res->memory = 1; | |
b04fab2a | 746 | res->unch_memory |= MEM_READONLY_P (x); |
c64322a3 | 747 | res->volatil |= MEM_VOLATILE_P (x); |
29bd1808 | 748 | } |
749 | ||
d2137327 | 750 | mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST); |
29bd1808 | 751 | return; |
752 | ||
753 | case SUBREG: | |
754 | if (in_dest) | |
755 | { | |
8ad4c111 | 756 | if (!REG_P (SUBREG_REG (x))) |
d2137327 | 757 | mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type); |
29bd1808 | 758 | else |
759 | { | |
701e46d0 | 760 | unsigned int regno = subreg_regno (x); |
fe2ebfc8 | 761 | unsigned int last_regno = regno + subreg_nregs (x); |
02e7a332 | 762 | |
04e579b6 | 763 | gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER); |
02e7a332 | 764 | for (r = regno; r < last_regno; r++) |
765 | SET_HARD_REG_BIT (res->regs, r); | |
29bd1808 | 766 | } |
767 | } | |
768 | return; | |
769 | ||
770 | case REG: | |
771 | if (in_dest) | |
cb690619 | 772 | { |
a2c6f0b7 | 773 | gcc_assert (HARD_REGISTER_P (x)); |
774 | add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x)); | |
cb690619 | 775 | } |
29bd1808 | 776 | return; |
777 | ||
64511c32 | 778 | case UNSPEC_VOLATILE: |
779 | case ASM_INPUT: | |
780 | /* Traditional asm's are always volatile. */ | |
781 | res->volatil = 1; | |
782 | return; | |
783 | ||
784 | case TRAP_IF: | |
785 | res->volatil = 1; | |
786 | break; | |
787 | ||
788 | case ASM_OPERANDS: | |
c64322a3 | 789 | res->volatil |= MEM_VOLATILE_P (x); |
64511c32 | 790 | |
791 | /* For all ASM_OPERANDS, we must traverse the vector of input operands. | |
792 | We can not just fall through here since then we would be confused | |
793 | by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate | |
794 | traditional asms unlike their normal usage. */ | |
2617fe26 | 795 | |
64511c32 | 796 | for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++) |
d2137327 | 797 | mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest, |
798 | MARK_SRC_DEST); | |
64511c32 | 799 | return; |
800 | ||
29bd1808 | 801 | default: |
802 | break; | |
803 | } | |
804 | ||
805 | /* Process each sub-expression and flag what it needs. */ | |
806 | format_ptr = GET_RTX_FORMAT (code); | |
807 | for (i = 0; i < GET_RTX_LENGTH (code); i++) | |
808 | switch (*format_ptr++) | |
809 | { | |
810 | case 'e': | |
d2137327 | 811 | mark_set_resources (XEXP (x, i), res, in_dest, mark_type); |
29bd1808 | 812 | break; |
813 | ||
814 | case 'E': | |
815 | for (j = 0; j < XVECLEN (x, i); j++) | |
d2137327 | 816 | mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type); |
29bd1808 | 817 | break; |
818 | } | |
819 | } | |
820 | \f | |
3657db9e | 821 | /* Return TRUE if INSN is a return, possibly with a filled delay slot. */ |
822 | ||
823 | static bool | |
7ecb5bb2 | 824 | return_insn_p (const_rtx insn) |
3657db9e | 825 | { |
9cb2517e | 826 | if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn))) |
3657db9e | 827 | return true; |
828 | ||
1c14a50e | 829 | if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE) |
3657db9e | 830 | return return_insn_p (XVECEXP (PATTERN (insn), 0, 0)); |
831 | ||
832 | return false; | |
833 | } | |
834 | ||
/* Set the resources that are live at TARGET.

   If TARGET is zero, we refer to the end of the current function and can
   return our precomputed value.

   Otherwise, we try to find out what is live by consulting the basic block
   information.  This is tricky, because we must consider the actions of
   reload and jump optimization, which occur after the basic block information
   has been computed.

   Accordingly, we proceed as follows::

   We find the previous BARRIER and look at all immediately following labels
   (with no intervening active insns) to see if any of them start a basic
   block.  If we hit the start of the function first, we use block 0.

   Once we have found a basic block and a corresponding first insn, we can
   accurately compute the live status (by starting at a label following a
   BARRIER, we are immune to actions taken by reload and jump.)  Then we
   scan all insns between that point and our target.  For each CLOBBER (or
   for call-clobbered regs when we pass a CALL_INSN), mark the appropriate
   registers are dead.  For a SET, mark them as live.

   We have to be careful when using REG_DEAD notes because they are not
   updated by such things as find_equiv_reg.  So keep track of registers
   marked as dead that haven't been assigned to, and mark them dead at the
   next CODE_LABEL since reload and jump won't propagate values across labels.

   If we cannot find the start of a basic block (should be a very rare
   case, if it can happen at all), mark everything as potentially live.

   Next, scan forward from TARGET looking for things set or clobbered
   before they are used.  These are not live.

   Because we can be called many times on the same target, save our results
   in a hash table indexed by INSN_UID.  This is only done if the function
   init_resource_info () was invoked before we are called.  */

void
mark_target_live_regs (rtx insns, rtx target, struct resources *res)
{
  int b = -1;
  unsigned int i;
  struct target_info *tinfo = NULL;
  rtx insn;
  /* Set to an unconditional branch found by find_dead_or_set_registers,
     with JUMP_TARGET receiving its destination.  */
  rtx jump_insn = 0;
  rtx jump_target;
  HARD_REG_SET scratch;
  struct resources set, needed;

  /* Handle end of function.  */
  if (target == 0 || ANY_RETURN_P (target))
    {
      *res = end_of_function_needs;
      return;
    }

  /* Handle return insn.  Start from the precomputed end-of-function needs
     and add whatever the return insn itself references.  */
  else if (return_insn_p (target))
    {
      *res = end_of_function_needs;
      mark_referenced_resources (target, res, false);
      return;
    }

  /* We have to assume memory is needed, but the CC isn't.  */
  res->memory = 1;
  res->volatil = res->unch_memory = 0;
  res->cc = 0;

  /* See if we have computed this value already.  */
  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (target))
	  break;

      /* Start by getting the basic block number.  If we have saved
	 information, we can get it from there unless the insn at the
	 start of the basic block has been deleted.  */
      if (tinfo && tinfo->block != -1
	  && ! INSN_DELETED_P (BB_HEAD (BASIC_BLOCK (tinfo->block))))
	b = tinfo->block;
    }

  if (b == -1)
    b = find_basic_block (target, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (target_hash_table != NULL)
    {
      if (tinfo)
	{
	  /* If the information is up-to-date, use it.  Otherwise, we will
	     update it below.  The bb_tick comparison detects blocks whose
	     cached liveness has been invalidated by incr_ticks_for_insn.  */
	  if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
	    {
	      COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
	      return;
	    }
	}
      else
	{
	  /* Allocate a place to put our results and chain it into the
	     hash table.  */
	  tinfo = XNEW (struct target_info);
	  tinfo->uid = INSN_UID (target);
	  tinfo->block = b;
	  tinfo->next
	    = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	  target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
	}
    }

  CLEAR_HARD_REG_SET (pending_dead_regs);

  /* If we found a basic block, get the live registers from it and update
     them with anything set or killed between its start and the insn before
     TARGET; this custom life analysis is really about registers so we need
     to use the LR problem.  Otherwise, we must assume everything is live.  */
  if (b != -1)
    {
      regset regs_live = DF_LR_IN (BASIC_BLOCK (b));
      rtx start_insn, stop_insn;

      /* Compute hard regs live at start of block.  */
      REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);

      /* Get starting and ending insn, handling the case where each might
	 be a SEQUENCE.  */
      start_insn = (b == ENTRY_BLOCK_PTR->next_bb->index ?
		    insns : BB_HEAD (BASIC_BLOCK (b)));
      stop_insn = target;

      if (NONJUMP_INSN_P (start_insn)
	  && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
	start_insn = XVECEXP (PATTERN (start_insn), 0, 0);

      /* NOTE(review): presumably this re-derives the first insn inside the
	 SEQUENCE, since next_insn steps into SEQUENCE bodies — confirm.  */
      if (NONJUMP_INSN_P (stop_insn)
	  && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
	stop_insn = next_insn (PREV_INSN (stop_insn));

      for (insn = start_insn; insn != stop_insn;
	   insn = next_insn_no_annul (insn))
	{
	  rtx link;
	  rtx real_insn = insn;
	  enum rtx_code code = GET_CODE (insn);

	  /* Debug insns are skipped; they do not participate in this
	     liveness computation.  */
	  if (DEBUG_INSN_P (insn))
	    continue;

	  /* If this insn is from the target of a branch, it isn't going to
	     be used in the sequel.  If it is used in both cases, this
	     test will not be true.  */
	  if ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
	      && INSN_FROM_TARGET_P (insn))
	    continue;

	  /* If this insn is a USE made by update_block, we care about the
	     underlying insn.  */
	  if (code == INSN && GET_CODE (PATTERN (insn)) == USE
	      && INSN_P (XEXP (PATTERN (insn), 0)))
	    real_insn = XEXP (PATTERN (insn), 0);

	  if (CALL_P (real_insn))
	    {
	      /* CALL clobbers all call-used regs that aren't fixed except
		 sp, ap, and fp.  Do this before setting the result of the
		 call live.  */
	      AND_COMPL_HARD_REG_SET (current_live_regs,
				      regs_invalidated_by_call);

	      /* A CALL_INSN sets any global register live, since it may
		 have been modified by the call.  */
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (global_regs[i])
		  SET_HARD_REG_BIT (current_live_regs, i);
	    }

	  /* Mark anything killed in an insn to be deadened at the next
	     label.  Ignore USE insns; the only REG_DEAD notes will be for
	     parameters.  But they might be early.  A CALL_INSN will usually
	     clobber registers used for parameters.  It isn't worth bothering
	     with the unlikely case when it won't.  */
	  if ((NONJUMP_INSN_P (real_insn)
	       && GET_CODE (PATTERN (real_insn)) != USE
	       && GET_CODE (PATTERN (real_insn)) != CLOBBER)
	      || JUMP_P (real_insn)
	      || CALL_P (real_insn))
	    {
	      /* Queue hard regs with REG_DEAD notes; they are only actually
		 removed from the live set at the next CODE_LABEL (see the
		 LABEL_P arm below).  */
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD
		    && REG_P (XEXP (link, 0))
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  add_to_hard_reg_set (&pending_dead_regs,
				       GET_MODE (XEXP (link, 0)),
				       REGNO (XEXP (link, 0)));

	      /* Let update_live_status fold this insn's stores into the
		 tracked live set.  */
	      note_stores (PATTERN (real_insn), update_live_status, NULL);

	      /* If any registers were unused after this insn, kill them.
		 These notes will always be accurate.  */
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_UNUSED
		    && REG_P (XEXP (link, 0))
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  remove_from_hard_reg_set (&current_live_regs,
					    GET_MODE (XEXP (link, 0)),
					    REGNO (XEXP (link, 0)));
	    }

	  else if (LABEL_P (real_insn))
	    {
	      basic_block bb;

	      /* A label clobbers the pending dead registers since neither
		 reload nor jump will propagate a value across a label.  */
	      AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
	      CLEAR_HARD_REG_SET (pending_dead_regs);

	      /* We must conservatively assume that all registers that used
		 to be live here still are.  The fallthrough edge may have
		 left a live register uninitialized.  */
	      bb = BLOCK_FOR_INSN (real_insn);
	      if (bb)
		{
		  HARD_REG_SET extra_live;

		  REG_SET_TO_HARD_REG_SET (extra_live, DF_LR_IN (bb));
		  IOR_HARD_REG_SET (current_live_regs, extra_live);
		}
	    }

	  /* The beginning of the epilogue corresponds to the end of the
	     RTL chain when there are no epilogue insns.  Certain resources
	     are implicitly required at that point.  */
	  else if (NOTE_P (real_insn)
		   && NOTE_KIND (real_insn) == NOTE_INSN_EPILOGUE_BEG)
	    IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
	}

      COPY_HARD_REG_SET (res->regs, current_live_regs);
      if (tinfo != NULL)
	{
	  tinfo->block = b;
	  tinfo->bb_tick = bb_ticks[b];
	}
    }
  else
    /* We didn't find the start of a basic block.  Assume everything
       in use.  This should happen only extremely rarely.  */
    SET_HARD_REG_SET (res->regs);

  CLEAR_RESOURCE (&set);
  CLEAR_RESOURCE (&needed);

  jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
					  set, needed);

  /* If we hit an unconditional branch, we have another way of finding out
     what is live: we can see what is live at the branch target and include
     anything used but not set before the branch.  We add the live
     resources found using the test below to those found until now.  */

  if (jump_insn)
    {
      struct resources new_resources;
      rtx stop_insn = next_active_insn (jump_insn);

      if (!ANY_RETURN_P (jump_target))
	jump_target = next_active_insn (jump_target);
      mark_target_live_regs (insns, jump_target, &new_resources);
      CLEAR_RESOURCE (&set);
      CLEAR_RESOURCE (&needed);

      /* Include JUMP_INSN in the needed registers.  For each insn up to
	 the branch, count as needed-at-target anything it references that
	 was not already set earlier in this span.  */
      for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
	{
	  mark_referenced_resources (insn, &needed, true);

	  COPY_HARD_REG_SET (scratch, needed.regs);
	  AND_COMPL_HARD_REG_SET (scratch, set.regs);
	  IOR_HARD_REG_SET (new_resources.regs, scratch);

	  mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
	}

      IOR_HARD_REG_SET (res->regs, new_resources.regs);
    }

  /* Cache the final register set for future queries on this target.  */
  if (tinfo != NULL)
    {
      COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
    }
}
1131 | \f | |
/* Initialize the resources required by mark_target_live_regs ().
   This should be invoked before the first call to mark_target_live_regs.

   EPILOGUE_INSN is scanned forward (via next_nonnote_insn) to accumulate
   resources set by epilogue instructions — presumably it is positioned at
   the start of the epilogue; confirm against the caller.  */

void
init_resource_info (rtx epilogue_insn)
{
  int i;
  basic_block bb;

  /* Indicate what resources are required to be valid at the end of the current
     function.  The condition code never is and memory always is.
     The stack pointer is needed unless EXIT_IGNORE_STACK is true
     and there is an epilogue that restores the original stack pointer
     from the frame pointer.  Registers used to return the function value
     are needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  end_of_function_needs.unch_memory = 0;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
#endif
    }
  if (!(frame_pointer_needed
	&& EXIT_IGNORE_STACK
	&& epilogue_insn
	&& !crtl->sp_is_unchanging))
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  /* Registers carrying the return value are needed at function end.  */
  if (crtl->return_rtx != 0)
    mark_referenced_resources (crtl->return_rtx,
			       &end_of_function_needs, true);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i]
#ifdef EPILOGUE_USES
	|| EPILOGUE_USES (i)
#endif
	)
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);

  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  /* Snapshot BEFORE the epilogue scan below adds epilogue-set registers
     to end_of_function_needs; the two sets intentionally differ.  */
  start_of_epilogue_needs = end_of_function_needs;

  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    {
      mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
			  MARK_SRC_DEST_CALL);
      if (return_insn_p (epilogue_insn))
	break;
    }

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table = XCNEWVEC (struct target_info *, TARGET_HASH_PRIME);
  bb_ticks = XCNEWVEC (int, last_basic_block);

  /* Set the BLOCK_FOR_INSN of each label that starts a basic block.
     (Undone again by free_resource_info.)  */
  FOR_EACH_BB (bb)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = bb;
}
1214 | \f | |
de132707 | 1215 | /* Free up the resources allocated to mark_target_live_regs (). This |
29bd1808 | 1216 | should be invoked after the last call to mark_target_live_regs (). */ |
1217 | ||
1218 | void | |
3ad4992f | 1219 | free_resource_info (void) |
29bd1808 | 1220 | { |
075f6052 | 1221 | basic_block bb; |
1222 | ||
29bd1808 | 1223 | if (target_hash_table != NULL) |
1224 | { | |
d7c47c0e | 1225 | int i; |
2617fe26 | 1226 | |
1227 | for (i = 0; i < TARGET_HASH_PRIME; ++i) | |
d7c47c0e | 1228 | { |
1229 | struct target_info *ti = target_hash_table[i]; | |
1230 | ||
2617fe26 | 1231 | while (ti) |
d7c47c0e | 1232 | { |
1233 | struct target_info *next = ti->next; | |
1234 | free (ti); | |
1235 | ti = next; | |
1236 | } | |
1237 | } | |
1238 | ||
29bd1808 | 1239 | free (target_hash_table); |
1240 | target_hash_table = NULL; | |
1241 | } | |
1242 | ||
1243 | if (bb_ticks != NULL) | |
1244 | { | |
1245 | free (bb_ticks); | |
1246 | bb_ticks = NULL; | |
1247 | } | |
075f6052 | 1248 | |
1249 | FOR_EACH_BB (bb) | |
1250 | if (LABEL_P (BB_HEAD (bb))) | |
1251 | BLOCK_FOR_INSN (BB_HEAD (bb)) = NULL; | |
29bd1808 | 1252 | } |
1253 | \f | |
1254 | /* Clear any hashed information that we have stored for INSN. */ | |
1255 | ||
1256 | void | |
3ad4992f | 1257 | clear_hashed_info_for_insn (rtx insn) |
29bd1808 | 1258 | { |
1259 | struct target_info *tinfo; | |
2617fe26 | 1260 | |
29bd1808 | 1261 | if (target_hash_table != NULL) |
1262 | { | |
1263 | for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME]; | |
1264 | tinfo; tinfo = tinfo->next) | |
1265 | if (tinfo->uid == INSN_UID (insn)) | |
1266 | break; | |
1267 | ||
1268 | if (tinfo) | |
1269 | tinfo->block = -1; | |
1270 | } | |
1271 | } | |
1272 | \f | |
1273 | /* Increment the tick count for the basic block that contains INSN. */ | |
1274 | ||
1275 | void | |
3ad4992f | 1276 | incr_ticks_for_insn (rtx insn) |
29bd1808 | 1277 | { |
98d5e888 | 1278 | int b = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH); |
29bd1808 | 1279 | |
1280 | if (b != -1) | |
1281 | bb_ticks[b]++; | |
1282 | } | |
1283 | \f | |
/* Add TRIAL to the set of resources used at the end of the current
   function (end_of_function_needs).  INCLUDE_DELAYED_EFFECTS is passed
   straight through to mark_referenced_resources.  */
void
mark_end_of_function_resources (rtx trial, bool include_delayed_effects)
{
  mark_referenced_resources (trial, &end_of_function_needs,
			     include_delayed_effects);
}