]>
Commit | Line | Data |
---|---|---|
14d66741 | 1 | /* Definitions for computing resource usage of specific insns. |
3ad4992f | 2 | Copyright (C) 1999, 2000, 2001, 2002, 2003 |
3 | Free Software Foundation, Inc. | |
14d66741 | 4 | |
f12b58b3 | 5 | This file is part of GCC. |
14d66741 | 6 | |
f12b58b3 | 7 | GCC is free software; you can redistribute it and/or modify it under |
8 | the terms of the GNU General Public License as published by the Free | |
9 | Software Foundation; either version 2, or (at your option) any later | |
10 | version. | |
14d66741 | 11 | |
f12b58b3 | 12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 | for more details. | |
14d66741 | 16 | |
17 | You should have received a copy of the GNU General Public License | |
f12b58b3 | 18 | along with GCC; see the file COPYING. If not, write to the Free |
19 | Software Foundation, 59 Temple Place - Suite 330, Boston, MA | |
20 | 02111-1307, USA. */ | |
14d66741 | 21 | |
29bd1808 | 22 | #include "config.h" |
27560af1 | 23 | #include "system.h" |
805e22b2 | 24 | #include "coretypes.h" |
25 | #include "tm.h" | |
d3b64f2d | 26 | #include "toplev.h" |
29bd1808 | 27 | #include "rtl.h" |
7953c610 | 28 | #include "tm_p.h" |
29bd1808 | 29 | #include "hard-reg-set.h" |
29bd1808 | 30 | #include "basic-block.h" |
0a893c29 | 31 | #include "function.h" |
29bd1808 | 32 | #include "regs.h" |
33 | #include "flags.h" | |
34 | #include "output.h" | |
35 | #include "resource.h" | |
05806416 | 36 | #include "except.h" |
d8c9779c | 37 | #include "insn-attr.h" |
98d5e888 | 38 | #include "params.h" |
29bd1808 | 39 | |
/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save them in a hash table rather than recomputing them
   each time.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info.  */
};

/* Number of buckets in the INSN_UID-keyed hash table below.  */
#define TARGET_HASH_PRIME 257

/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Define the hash table itself; an array of TARGET_HASH_PRIME bucket
   chains of struct target_info, keyed by INSN_UID.  */
static struct target_info **target_hash_table = NULL;

/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;

/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Also used by update_live_status.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   Also only used by the next two functions.  */

static HARD_REG_SET pending_dead_regs;

static void update_live_status (rtx, rtx, void *);
static int find_basic_block (rtx, int);
static rtx next_insn_no_annul (rtx);
static rtx find_dead_or_set_registers (rtx, struct resources*,
				       rtx*, int, struct resources,
				       struct resources);
14d66741 | 88 | \f |
29bd1808 | 89 | /* Utility function called from mark_target_live_regs via note_stores. |
90 | It deadens any CLOBBERed registers and livens any SET registers. */ | |
91 | ||
92 | static void | |
3ad4992f | 93 | update_live_status (rtx dest, rtx x, void *data ATTRIBUTE_UNUSED) |
29bd1808 | 94 | { |
95 | int first_regno, last_regno; | |
96 | int i; | |
97 | ||
98 | if (GET_CODE (dest) != REG | |
99 | && (GET_CODE (dest) != SUBREG || GET_CODE (SUBREG_REG (dest)) != REG)) | |
100 | return; | |
101 | ||
102 | if (GET_CODE (dest) == SUBREG) | |
701e46d0 | 103 | first_regno = subreg_regno (dest); |
29bd1808 | 104 | else |
105 | first_regno = REGNO (dest); | |
106 | ||
107 | last_regno = first_regno + HARD_REGNO_NREGS (first_regno, GET_MODE (dest)); | |
108 | ||
109 | if (GET_CODE (x) == CLOBBER) | |
110 | for (i = first_regno; i < last_regno; i++) | |
111 | CLEAR_HARD_REG_BIT (current_live_regs, i); | |
112 | else | |
113 | for (i = first_regno; i < last_regno; i++) | |
114 | { | |
115 | SET_HARD_REG_BIT (current_live_regs, i); | |
116 | CLEAR_HARD_REG_BIT (pending_dead_regs, i); | |
117 | } | |
118 | } | |
98d5e888 | 119 | |
120 | /* Find the number of the basic block with correct live register | |
121 | information that starts closest to INSN. Return -1 if we couldn't | |
122 | find such a basic block or the beginning is more than | |
123 | SEARCH_LIMIT instructions before INSN. Use SEARCH_LIMIT = -1 for | |
124 | an unlimited search. | |
125 | ||
126 | The delay slot filling code destroys the control-flow graph so, | |
127 | instead of finding the basic block containing INSN, we search | |
128 | backwards toward a BARRIER where the live register information is | |
129 | correct. */ | |
f770745a | 130 | |
131 | static int | |
3ad4992f | 132 | find_basic_block (rtx insn, int search_limit) |
f770745a | 133 | { |
4c26117a | 134 | basic_block bb; |
f770745a | 135 | |
136 | /* Scan backwards to the previous BARRIER. Then see if we can find a | |
137 | label that starts a basic block. Return the basic block number. */ | |
f770745a | 138 | for (insn = prev_nonnote_insn (insn); |
98d5e888 | 139 | insn && GET_CODE (insn) != BARRIER && search_limit != 0; |
140 | insn = prev_nonnote_insn (insn), --search_limit) | |
f770745a | 141 | ; |
142 | ||
98d5e888 | 143 | /* The closest BARRIER is too far away. */ |
144 | if (search_limit == 0) | |
145 | return -1; | |
146 | ||
345ac34a | 147 | /* The start of the function. */ |
98d5e888 | 148 | else if (insn == 0) |
345ac34a | 149 | return ENTRY_BLOCK_PTR->next_bb->index; |
f770745a | 150 | |
151 | /* See if any of the upcoming CODE_LABELs start a basic block. If we reach | |
152 | anything other than a CODE_LABEL or note, we can't find this code. */ | |
153 | for (insn = next_nonnote_insn (insn); | |
154 | insn && GET_CODE (insn) == CODE_LABEL; | |
155 | insn = next_nonnote_insn (insn)) | |
156 | { | |
4c26117a | 157 | FOR_EACH_BB (bb) |
158 | if (insn == bb->head) | |
159 | return bb->index; | |
f770745a | 160 | } |
161 | ||
162 | return -1; | |
163 | } | |
29bd1808 | 164 | \f |
165 | /* Similar to next_insn, but ignores insns in the delay slots of | |
166 | an annulled branch. */ | |
167 | ||
168 | static rtx | |
3ad4992f | 169 | next_insn_no_annul (rtx insn) |
29bd1808 | 170 | { |
171 | if (insn) | |
172 | { | |
173 | /* If INSN is an annulled branch, skip any insns from the target | |
174 | of the branch. */ | |
5fdf542c | 175 | if ((GET_CODE (insn) == JUMP_INSN |
176 | || GET_CODE (insn) == CALL_INSN | |
177 | || GET_CODE (insn) == INSN) | |
00f2bb6a | 178 | && INSN_ANNULLED_BRANCH_P (insn) |
29bd1808 | 179 | && NEXT_INSN (PREV_INSN (insn)) != insn) |
00f2bb6a | 180 | { |
181 | rtx next = NEXT_INSN (insn); | |
182 | enum rtx_code code = GET_CODE (next); | |
183 | ||
184 | while ((code == INSN || code == JUMP_INSN || code == CALL_INSN) | |
185 | && INSN_FROM_TARGET_P (next)) | |
186 | { | |
187 | insn = next; | |
188 | next = NEXT_INSN (insn); | |
189 | code = GET_CODE (next); | |
190 | } | |
191 | } | |
29bd1808 | 192 | |
193 | insn = NEXT_INSN (insn); | |
194 | if (insn && GET_CODE (insn) == INSN | |
195 | && GET_CODE (PATTERN (insn)) == SEQUENCE) | |
196 | insn = XVECEXP (PATTERN (insn), 0, 0); | |
197 | } | |
198 | ||
199 | return insn; | |
200 | } | |
201 | \f | |
202 | /* Given X, some rtl, and RES, a pointer to a `struct resource', mark | |
92a50326 | 203 | which resources are referenced by the insn. If INCLUDE_DELAYED_EFFECTS |
29bd1808 | 204 | is TRUE, resources used by the called routine will be included for |
205 | CALL_INSNs. */ | |
206 | ||
207 | void | |
3ad4992f | 208 | mark_referenced_resources (rtx x, struct resources *res, |
209 | int include_delayed_effects) | |
29bd1808 | 210 | { |
02e7a332 | 211 | enum rtx_code code = GET_CODE (x); |
212 | int i, j; | |
213 | unsigned int r; | |
19cb6b50 | 214 | const char *format_ptr; |
29bd1808 | 215 | |
216 | /* Handle leaf items for which we set resource flags. Also, special-case | |
217 | CALL, SET and CLOBBER operators. */ | |
218 | switch (code) | |
219 | { | |
220 | case CONST: | |
221 | case CONST_INT: | |
222 | case CONST_DOUBLE: | |
886cfd4f | 223 | case CONST_VECTOR: |
29bd1808 | 224 | case PC: |
225 | case SYMBOL_REF: | |
226 | case LABEL_REF: | |
227 | return; | |
228 | ||
229 | case SUBREG: | |
230 | if (GET_CODE (SUBREG_REG (x)) != REG) | |
231 | mark_referenced_resources (SUBREG_REG (x), res, 0); | |
232 | else | |
233 | { | |
701e46d0 | 234 | unsigned int regno = subreg_regno (x); |
02e7a332 | 235 | unsigned int last_regno |
236 | = regno + HARD_REGNO_NREGS (regno, GET_MODE (x)); | |
237 | ||
cb690619 | 238 | if (last_regno > FIRST_PSEUDO_REGISTER) |
239 | abort (); | |
02e7a332 | 240 | for (r = regno; r < last_regno; r++) |
241 | SET_HARD_REG_BIT (res->regs, r); | |
29bd1808 | 242 | } |
243 | return; | |
244 | ||
245 | case REG: | |
cb690619 | 246 | { |
247 | unsigned int regno = REGNO (x); | |
248 | unsigned int last_regno | |
249 | = regno + HARD_REGNO_NREGS (regno, GET_MODE (x)); | |
250 | ||
251 | if (last_regno > FIRST_PSEUDO_REGISTER) | |
252 | abort (); | |
253 | for (r = regno; r < last_regno; r++) | |
254 | SET_HARD_REG_BIT (res->regs, r); | |
255 | } | |
29bd1808 | 256 | return; |
257 | ||
258 | case MEM: | |
259 | /* If this memory shouldn't change, it really isn't referencing | |
260 | memory. */ | |
261 | if (RTX_UNCHANGING_P (x)) | |
262 | res->unch_memory = 1; | |
263 | else | |
264 | res->memory = 1; | |
c64322a3 | 265 | res->volatil |= MEM_VOLATILE_P (x); |
29bd1808 | 266 | |
267 | /* Mark registers used to access memory. */ | |
268 | mark_referenced_resources (XEXP (x, 0), res, 0); | |
269 | return; | |
270 | ||
271 | case CC0: | |
272 | res->cc = 1; | |
273 | return; | |
274 | ||
275 | case UNSPEC_VOLATILE: | |
276 | case ASM_INPUT: | |
277 | /* Traditional asm's are always volatile. */ | |
278 | res->volatil = 1; | |
279 | return; | |
280 | ||
281 | case TRAP_IF: | |
282 | res->volatil = 1; | |
283 | break; | |
284 | ||
285 | case ASM_OPERANDS: | |
c64322a3 | 286 | res->volatil |= MEM_VOLATILE_P (x); |
29bd1808 | 287 | |
288 | /* For all ASM_OPERANDS, we must traverse the vector of input operands. | |
289 | We can not just fall through here since then we would be confused | |
290 | by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate | |
291 | traditional asms unlike their normal usage. */ | |
2617fe26 | 292 | |
29bd1808 | 293 | for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++) |
294 | mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, 0); | |
295 | return; | |
296 | ||
297 | case CALL: | |
298 | /* The first operand will be a (MEM (xxx)) but doesn't really reference | |
299 | memory. The second operand may be referenced, though. */ | |
300 | mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, 0); | |
301 | mark_referenced_resources (XEXP (x, 1), res, 0); | |
302 | return; | |
303 | ||
304 | case SET: | |
305 | /* Usually, the first operand of SET is set, not referenced. But | |
306 | registers used to access memory are referenced. SET_DEST is | |
307 | also referenced if it is a ZERO_EXTRACT or SIGN_EXTRACT. */ | |
308 | ||
309 | mark_referenced_resources (SET_SRC (x), res, 0); | |
310 | ||
311 | x = SET_DEST (x); | |
9913cf73 | 312 | if (GET_CODE (x) == SIGN_EXTRACT |
313 | || GET_CODE (x) == ZERO_EXTRACT | |
314 | || GET_CODE (x) == STRICT_LOW_PART) | |
29bd1808 | 315 | mark_referenced_resources (x, res, 0); |
316 | else if (GET_CODE (x) == SUBREG) | |
317 | x = SUBREG_REG (x); | |
318 | if (GET_CODE (x) == MEM) | |
319 | mark_referenced_resources (XEXP (x, 0), res, 0); | |
320 | return; | |
321 | ||
322 | case CLOBBER: | |
323 | return; | |
324 | ||
325 | case CALL_INSN: | |
326 | if (include_delayed_effects) | |
327 | { | |
328 | /* A CALL references memory, the frame pointer if it exists, the | |
329 | stack pointer, any global registers and any registers given in | |
330 | USE insns immediately in front of the CALL. | |
331 | ||
332 | However, we may have moved some of the parameter loading insns | |
333 | into the delay slot of this CALL. If so, the USE's for them | |
334 | don't count and should be skipped. */ | |
335 | rtx insn = PREV_INSN (x); | |
336 | rtx sequence = 0; | |
337 | int seq_size = 0; | |
29bd1808 | 338 | int i; |
339 | ||
340 | /* If we are part of a delay slot sequence, point at the SEQUENCE. */ | |
341 | if (NEXT_INSN (insn) != x) | |
342 | { | |
29bd1808 | 343 | sequence = PATTERN (NEXT_INSN (insn)); |
344 | seq_size = XVECLEN (sequence, 0); | |
345 | if (GET_CODE (sequence) != SEQUENCE) | |
346 | abort (); | |
347 | } | |
348 | ||
349 | res->memory = 1; | |
350 | SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM); | |
351 | if (frame_pointer_needed) | |
352 | { | |
353 | SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM); | |
354 | #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM | |
355 | SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM); | |
356 | #endif | |
357 | } | |
358 | ||
359 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) | |
360 | if (global_regs[i]) | |
361 | SET_HARD_REG_BIT (res->regs, i); | |
362 | ||
9239aee6 | 363 | /* Check for a REG_SETJMP. If it exists, then we must |
29bd1808 | 364 | assume that this call can need any register. |
365 | ||
366 | This is done to be more conservative about how we handle setjmp. | |
367 | We assume that they both use and set all registers. Using all | |
368 | registers ensures that a register will not be considered dead | |
369 | just because it crosses a setjmp call. A register should be | |
7fd957fe | 370 | considered dead only if the setjmp call returns nonzero. */ |
9239aee6 | 371 | if (find_reg_note (x, REG_SETJMP, NULL)) |
29bd1808 | 372 | SET_HARD_REG_SET (res->regs); |
373 | ||
374 | { | |
375 | rtx link; | |
376 | ||
377 | for (link = CALL_INSN_FUNCTION_USAGE (x); | |
378 | link; | |
379 | link = XEXP (link, 1)) | |
380 | if (GET_CODE (XEXP (link, 0)) == USE) | |
381 | { | |
382 | for (i = 1; i < seq_size; i++) | |
383 | { | |
384 | rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i)); | |
385 | if (GET_CODE (slot_pat) == SET | |
386 | && rtx_equal_p (SET_DEST (slot_pat), | |
2edca339 | 387 | XEXP (XEXP (link, 0), 0))) |
29bd1808 | 388 | break; |
389 | } | |
390 | if (i >= seq_size) | |
2edca339 | 391 | mark_referenced_resources (XEXP (XEXP (link, 0), 0), |
29bd1808 | 392 | res, 0); |
393 | } | |
394 | } | |
395 | } | |
396 | ||
397 | /* ... fall through to other INSN processing ... */ | |
398 | ||
399 | case INSN: | |
400 | case JUMP_INSN: | |
401 | ||
402 | #ifdef INSN_REFERENCES_ARE_DELAYED | |
403 | if (! include_delayed_effects | |
404 | && INSN_REFERENCES_ARE_DELAYED (x)) | |
405 | return; | |
406 | #endif | |
407 | ||
408 | /* No special processing, just speed up. */ | |
409 | mark_referenced_resources (PATTERN (x), res, include_delayed_effects); | |
410 | return; | |
411 | ||
412 | default: | |
413 | break; | |
414 | } | |
415 | ||
416 | /* Process each sub-expression and flag what it needs. */ | |
417 | format_ptr = GET_RTX_FORMAT (code); | |
418 | for (i = 0; i < GET_RTX_LENGTH (code); i++) | |
419 | switch (*format_ptr++) | |
420 | { | |
421 | case 'e': | |
422 | mark_referenced_resources (XEXP (x, i), res, include_delayed_effects); | |
423 | break; | |
424 | ||
425 | case 'E': | |
426 | for (j = 0; j < XVECLEN (x, i); j++) | |
427 | mark_referenced_resources (XVECEXP (x, i, j), res, | |
428 | include_delayed_effects); | |
429 | break; | |
430 | } | |
431 | } | |
432 | \f | |
/* A subroutine of mark_target_live_regs.  Search forward from TARGET
   looking for registers that are set before they are used.  These are dead.
   Stop after passing a few conditional jumps, and/or a small
   number of unconditional branches.

   RES holds the registers currently believed live; bits are cleared
   here as registers are proven dead.  JUMP_TARGET, if non-null,
   receives the label of the first unconditional jump followed.
   JUMP_COUNT is the budget already consumed; SET and NEEDED are the
   resources set / needed so far on this path (passed by value since
   each path accumulates its own copies).

   Returns the first unconditional jump insn followed, or 0.  */

static rtx
find_dead_or_set_registers (rtx target, struct resources *res,
			    rtx *jump_target, int jump_count,
			    struct resources set, struct resources needed)
{
  HARD_REG_SET scratch;
  rtx insn, next;
  rtx jump_insn = 0;
  int i;

  for (insn = target; insn; insn = next)
    {
      rtx this_jump_insn = insn;

      next = NEXT_INSN (insn);

      /* If this instruction can throw an exception, then we don't
	 know where we might end up next.  That means that we have to
	 assume that whatever we have already marked as live really is
	 live.  */
      if (can_throw_internal (insn))
	break;

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  /* After a label, any pending dead registers that weren't yet
	     used can be made dead.  */
	  AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
	  AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
	  CLEAR_HARD_REG_SET (pending_dead_regs);

	  continue;

	case BARRIER:
	case NOTE:
	  continue;

	case INSN:
	  if (GET_CODE (PATTERN (insn)) == USE)
	    {
	      /* If INSN is a USE made by update_block, we care about the
		 underlying insn.  Any registers set by the underlying insn
		 are live since the insn is being done somewhere else.  */
	      if (INSN_P (XEXP (PATTERN (insn), 0)))
		mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
				    MARK_SRC_DEST_CALL);

	      /* All other USE insns are to be ignored.  */
	      continue;
	    }
	  else if (GET_CODE (PATTERN (insn)) == CLOBBER)
	    continue;
	  else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	    {
	      /* An unconditional jump can be used to fill the delay slot
		 of a call, so search for a JUMP_INSN in any position.  */
	      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
		{
		  this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
		  if (GET_CODE (this_jump_insn) == JUMP_INSN)
		    break;
		}
	    }
	  /* Fall through.  */

	default:
	  break;
	}

      if (GET_CODE (this_jump_insn) == JUMP_INSN)
	{
	  if (jump_count++ < 10)
	    {
	      if (any_uncondjump_p (this_jump_insn)
		  || GET_CODE (PATTERN (this_jump_insn)) == RETURN)
		{
		  /* Follow the unconditional jump: continue scanning at
		     its target label.  Record only the first such jump.  */
		  next = JUMP_LABEL (this_jump_insn);
		  if (jump_insn == 0)
		    {
		      jump_insn = insn;
		      if (jump_target)
			*jump_target = JUMP_LABEL (this_jump_insn);
		    }
		}
	      else if (any_condjump_p (this_jump_insn))
		{
		  struct resources target_set, target_res;
		  struct resources fallthrough_res;

		  /* We can handle conditional branches here by following
		     both paths, and then IOR the results of the two paths
		     together, which will give us registers that are dead
		     on both paths.  Since this is expensive, we give it
		     a much higher cost than unconditional branches.  The
		     cost was chosen so that we will follow at most 1
		     conditional branch.  */

		  jump_count += 4;
		  if (jump_count >= 10)
		    break;

		  mark_referenced_resources (insn, &needed, 1);

		  /* For an annulled branch, mark_set_resources ignores slots
		     filled by instructions from the target.  This is correct
		     if the branch is not taken.  Since we are following both
		     paths from the branch, we must also compute correct info
		     if the branch is taken.  We do this by inverting all of
		     the INSN_FROM_TARGET_P bits, calling mark_set_resources,
		     and then inverting the INSN_FROM_TARGET_P bits again.  */

		  if (GET_CODE (PATTERN (insn)) == SEQUENCE
		      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
		    {
		      /* First flip: delay slots now look like they come
			 from the fallthrough path, so the taken-path SET
			 info includes them.  */
		      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
			INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
			  = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

		      target_set = set;
		      mark_set_resources (insn, &target_set, 0,
					  MARK_SRC_DEST_CALL);

		      /* Second flip: restore the original bits.  */
		      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
			INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
			  = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		    }
		  else
		    {
		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		      target_set = set;
		    }

		  /* Registers set but not needed are dead on the taken
		     path.  */
		  target_res = *res;
		  COPY_HARD_REG_SET (scratch, target_set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (target_res.regs, scratch);

		  /* Likewise for the fallthrough path.  */
		  fallthrough_res = *res;
		  COPY_HARD_REG_SET (scratch, set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);

		  /* Recurse down each path, then keep only registers that
		     end up live on at least one path (IOR), intersected
		     with what we believed live so far (AND).  */
		  find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
					      &target_res, 0, jump_count,
					      target_set, needed);
		  find_dead_or_set_registers (next,
					      &fallthrough_res, 0, jump_count,
					      set, needed);
		  IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
		  AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
		  break;
		}
	      else
		break;
	    }
	  else
	    {
	      /* Don't try this optimization if we expired our jump count
		 above, since that would mean there may be an infinite loop
		 in the function being compiled.  */
	      jump_insn = 0;
	      break;
	    }
	}

      /* Ordinary insn: whatever it sets without first needing it is
	 dead from here on.  */
      mark_referenced_resources (insn, &needed, 1);
      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

      COPY_HARD_REG_SET (scratch, set.regs);
      AND_COMPL_HARD_REG_SET (scratch, needed.regs);
      AND_COMPL_HARD_REG_SET (res->regs, scratch);
    }

  return jump_insn;
}
615 | \f | |
/* Given X, a part of an insn, and a pointer to a `struct resource',
   RES, indicate which resources are modified by the insn.  If
   MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
   set by the called routine.  If MARK_TYPE is MARK_DEST, only mark SET_DESTs.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */

void
mark_set_resources (rtx x, struct resources *res, int in_dest,
		    enum mark_resource_type mark_type)
{
  enum rtx_code code;
  int i, j;
  unsigned int r;
  const char *format_ptr;

 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
      /* These don't set any resources.  */
      return;

    case CC0:
      if (in_dest)
	res->cc = 1;
      return;

    case CALL_INSN:
      /* Called routine modifies the condition code, memory, any registers
	 that aren't saved across calls, global registers and anything
	 explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (mark_type == MARK_SRC_DEST_CALL)
	{
	  rtx link;

	  res->cc = res->memory = 1;
	  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	    if (call_used_regs[r] || global_regs[r])
	      SET_HARD_REG_BIT (res->regs, r);

	  /* Mark anything explicitly CLOBBERed in the call usage chain.  */
	  for (link = CALL_INSN_FUNCTION_USAGE (x);
	       link; link = XEXP (link, 1))
	    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
	      mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
				  MARK_SRC_DEST);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can clobber any register.  */
	  if (find_reg_note (x, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);
	}

      /* ... and also what its RTL says it modifies, if anything.  */
      /* Fall through.  */

    case JUMP_INSN:
    case INSN:

	/* An insn consisting of just a CLOBBER (or USE) is just for flow
	   and doesn't actually do anything, so we ignore it.  */

#ifdef INSN_SETS_ARE_DELAYED
      if (mark_type != MARK_SRC_DEST_CALL
	  && INSN_SETS_ARE_DELAYED (x))
	return;
#endif

      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
	goto restart;
      return;

    case SET:
      /* If the source of a SET is a CALL, this is actually done by
	 the called routine.  So only include it if we are to include the
	 effects of the calling routine.  */

      mark_set_resources (SET_DEST (x), res,
			  (mark_type == MARK_SRC_DEST_CALL
			   || GET_CODE (SET_SRC (x)) != CALL),
			  mark_type);

      if (mark_type != MARK_DEST)
	mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case SEQUENCE:
      /* Element 0 is the branch; skip any slot insns filled from an
	 annulled branch's target, since they don't execute on the
	 fallthrough path.  */
      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
	       && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
	  mark_set_resources (XVECEXP (x, 0, i), res, 0, mark_type);
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      /* Auto-increment addressing sets the register it modifies.  */
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case PRE_MODIFY:
    case POST_MODIFY:
      /* The modified register is set; the expression operands are only
	 referenced.  */
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, MARK_SRC_DEST);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      if (! (mark_type == MARK_DEST && in_dest))
	{
	  mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
	  mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
	  mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
	}
      return;

    case MEM:
      if (in_dest)
	{
	  res->memory = 1;
	  res->unch_memory |= RTX_UNCHANGING_P (x);
	  res->volatil |= MEM_VOLATILE_P (x);
	}

      /* The address registers are referenced, not set.  */
      mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
      return;

    case SUBREG:
      if (in_dest)
	{
	  if (GET_CODE (SUBREG_REG (x)) != REG)
	    mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
	  else
	    {
	      /* A SUBREG of a hard register sets the covered regs.  */
	      unsigned int regno = subreg_regno (x);
	      unsigned int last_regno
		= regno + HARD_REGNO_NREGS (regno, GET_MODE (x));

	      if (last_regno > FIRST_PSEUDO_REGISTER)
		abort ();
	      for (r = regno; r < last_regno; r++)
		SET_HARD_REG_BIT (res->regs, r);
	    }
	}
      return;

    case REG:
      if (in_dest)
	{
	  unsigned int regno = REGNO (x);
	  unsigned int last_regno
	    = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));

	  if (last_regno > FIRST_PSEUDO_REGISTER)
	    abort ();
	  for (r = regno; r < last_regno; r++)
	    SET_HARD_REG_BIT (res->regs, r);
	}
      return;

    case STRICT_LOW_PART:
      if (! (mark_type == MARK_DEST && in_dest))
	{
	  mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
	  return;
	}
      /* NOTE(review): when mark_type == MARK_DEST and in_dest, control
	 falls through to the UNSPEC_VOLATILE case and sets res->volatil.
	 This fallthrough is in the original code; confirm it is
	 intentional rather than a missing `return'.  */

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We can not just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
			    MARK_SRC_DEST);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
	break;
      }
}
851 | \f | |
852 | /* Set the resources that are live at TARGET. | |
853 | ||
854 | If TARGET is zero, we refer to the end of the current function and can | |
855 | return our precomputed value. | |
856 | ||
857 | Otherwise, we try to find out what is live by consulting the basic block | |
858 | information. This is tricky, because we must consider the actions of | |
859 | reload and jump optimization, which occur after the basic block information | |
860 | has been computed. | |
861 | ||
   Accordingly, we proceed as follows:
863 | ||
864 | We find the previous BARRIER and look at all immediately following labels | |
865 | (with no intervening active insns) to see if any of them start a basic | |
866 | block. If we hit the start of the function first, we use block 0. | |
867 | ||
868 | Once we have found a basic block and a corresponding first insns, we can | |
869 | accurately compute the live status from basic_block_live_regs and | |
870 | reg_renumber. (By starting at a label following a BARRIER, we are immune | |
871 | to actions taken by reload and jump.) Then we scan all insns between | |
872 | that point and our target. For each CLOBBER (or for call-clobbered regs | |
   when we pass a CALL_INSN), mark the appropriate registers as dead.  For
874 | a SET, mark them as live. | |
875 | ||
876 | We have to be careful when using REG_DEAD notes because they are not | |
877 | updated by such things as find_equiv_reg. So keep track of registers | |
878 | marked as dead that haven't been assigned to, and mark them dead at the | |
879 | next CODE_LABEL since reload and jump won't propagate values across labels. | |
880 | ||
881 | If we cannot find the start of a basic block (should be a very rare | |
882 | case, if it can happen at all), mark everything as potentially live. | |
883 | ||
884 | Next, scan forward from TARGET looking for things set or clobbered | |
885 | before they are used. These are not live. | |
886 | ||
887 | Because we can be called many times on the same target, save our results | |
888 | in a hash table indexed by INSN_UID. This is only done if the function | |
889 | init_resource_info () was invoked before we are called. */ | |
890 | ||
void
mark_target_live_regs (rtx insns, rtx target, struct resources *res)
{
  int b = -1;			/* Basic block containing TARGET, or -1.  */
  unsigned int i;
  struct target_info *tinfo = NULL;	/* Cached result for TARGET, if any.  */
  rtx insn;
  rtx jump_insn = 0;		/* Set if we hit an unconditional branch.  */
  rtx jump_target;
  HARD_REG_SET scratch;
  struct resources set, needed;

  /* Handle end of function.  */
  if (target == 0)
    {
      *res = end_of_function_needs;
      return;
    }

  /* We have to assume memory is needed, but the CC isn't.  */
  res->memory = 1;
  res->volatil = res->unch_memory = 0;
  res->cc = 0;

  /* See if we have computed this value already.  */
  if (target_hash_table != NULL)
    {
      /* Walk the chain for TARGET's hash bucket looking for its entry.  */
      for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (target))
	  break;

      /* Start by getting the basic block number.  If we have saved
	 information, we can get it from there unless the insn at the
	 start of the basic block has been deleted.  */
      if (tinfo && tinfo->block != -1
	  && ! INSN_DELETED_P (BLOCK_HEAD (tinfo->block)))
	b = tinfo->block;
    }

  /* No usable cached block number; search for the enclosing basic block,
     bounded by MAX_DELAY_SLOT_LIVE_SEARCH.  */
  if (b == -1)
    b = find_basic_block (target, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (target_hash_table != NULL)
    {
      if (tinfo)
	{
	  /* If the information is up-to-date, use it.  Otherwise, we will
	     update it below.  The bb_tick comparison detects that the block
	     was modified (see incr_ticks_for_insn) since the entry was
	     cached.  */
	  if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
	    {
	      COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
	      return;
	    }
	}
      else
	{
	  /* Allocate a place to put our results and chain it into the
	     hash table.  */
	  tinfo = (struct target_info *) xmalloc (sizeof (struct target_info));
	  tinfo->uid = INSN_UID (target);
	  tinfo->block = b;
	  /* NOTE(review): bb_tick is not initialized here; it is filled in
	     below only when B != -1.  The lookup above guards with b != -1,
	     so the uninitialized value appears unreachable -- confirm.  */
	  tinfo->next
	    = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	  target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
	}
    }

  /* Registers seen in REG_DEAD notes but not yet stored to; they are
     killed at the next CODE_LABEL encountered during the scan below.  */
  CLEAR_HARD_REG_SET (pending_dead_regs);

  /* If we found a basic block, get the live registers from it and update
     them with anything set or killed between its start and the insn before
     TARGET.  Otherwise, we must assume everything is live.  */
  if (b != -1)
    {
      regset regs_live = BASIC_BLOCK (b)->global_live_at_start;
      unsigned int j;
      unsigned int regno;
      rtx start_insn, stop_insn;

      /* Compute hard regs live at start of block -- this is the real hard regs
	 marked live, plus live pseudo regs that have been renumbered to
	 hard regs.  */

      REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);

      EXECUTE_IF_SET_IN_REG_SET
	(regs_live, FIRST_PSEUDO_REGISTER, i,
	 {
	   if (reg_renumber[i] >= 0)
	     {
	       regno = reg_renumber[i];
	       /* Mark every hard reg occupied by this renumbered pseudo.  */
	       for (j = regno;
		    j < regno + HARD_REGNO_NREGS (regno,
						  PSEUDO_REGNO_MODE (i));
		    j++)
		 SET_HARD_REG_BIT (current_live_regs, j);
	     }
	 });

      /* Get starting and ending insn, handling the case where each might
	 be a SEQUENCE.  */
      start_insn = (b == 0 ? insns : BLOCK_HEAD (b));
      stop_insn = target;

      if (GET_CODE (start_insn) == INSN
	  && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
	start_insn = XVECEXP (PATTERN (start_insn), 0, 0);

      if (GET_CODE (stop_insn) == INSN
	  && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
	stop_insn = next_insn (PREV_INSN (stop_insn));

      /* Scan forward from the block head to TARGET, updating
	 current_live_regs for every set/clobber/death we see.  */
      for (insn = start_insn; insn != stop_insn;
	   insn = next_insn_no_annul (insn))
	{
	  rtx link;
	  rtx real_insn = insn;
	  enum rtx_code code = GET_CODE (insn);

	  /* If this insn is from the target of a branch, it isn't going to
	     be used in the sequel.  If it is used in both cases, this
	     test will not be true.  */
	  if ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
	      && INSN_FROM_TARGET_P (insn))
	    continue;

	  /* If this insn is a USE made by update_block, we care about the
	     underlying insn.  */
	  if (code == INSN && GET_CODE (PATTERN (insn)) == USE
	      && INSN_P (XEXP (PATTERN (insn), 0)))
	    real_insn = XEXP (PATTERN (insn), 0);

	  if (GET_CODE (real_insn) == CALL_INSN)
	    {
	      /* CALL clobbers all call-used regs that aren't fixed except
		 sp, ap, and fp.  Do this before setting the result of the
		 call live.  */
	      AND_COMPL_HARD_REG_SET (current_live_regs,
				      regs_invalidated_by_call);

	      /* A CALL_INSN sets any global register live, since it may
		 have been modified by the call.  */
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (global_regs[i])
		  SET_HARD_REG_BIT (current_live_regs, i);
	    }

	  /* Mark anything killed in an insn to be deadened at the next
	     label.  Ignore USE insns; the only REG_DEAD notes will be for
	     parameters.  But they might be early.  A CALL_INSN will usually
	     clobber registers used for parameters.  It isn't worth bothering
	     with the unlikely case when it won't.  */
	  if ((GET_CODE (real_insn) == INSN
	       && GET_CODE (PATTERN (real_insn)) != USE
	       && GET_CODE (PATTERN (real_insn)) != CLOBBER)
	      || GET_CODE (real_insn) == JUMP_INSN
	      || GET_CODE (real_insn) == CALL_INSN)
	    {
	      /* Queue hard regs with REG_DEAD notes; they die at the next
		 label rather than immediately (see pending_dead_regs).  */
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD
		    && GET_CODE (XEXP (link, 0)) == REG
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  {
		    unsigned int first_regno = REGNO (XEXP (link, 0));
		    unsigned int last_regno
		      = (first_regno
			 + HARD_REGNO_NREGS (first_regno,
					     GET_MODE (XEXP (link, 0))));

		    for (i = first_regno; i < last_regno; i++)
		      SET_HARD_REG_BIT (pending_dead_regs, i);
		  }

	      /* Apply this insn's SETs and CLOBBERs to current_live_regs.  */
	      note_stores (PATTERN (real_insn), update_live_status, NULL);

	      /* If any registers were unused after this insn, kill them.
		 These notes will always be accurate.  */
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_UNUSED
		    && GET_CODE (XEXP (link, 0)) == REG
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  {
		    unsigned int first_regno = REGNO (XEXP (link, 0));
		    unsigned int last_regno
		      = (first_regno
			 + HARD_REGNO_NREGS (first_regno,
					     GET_MODE (XEXP (link, 0))));

		    for (i = first_regno; i < last_regno; i++)
		      CLEAR_HARD_REG_BIT (current_live_regs, i);
		  }
	    }

	  else if (GET_CODE (real_insn) == CODE_LABEL)
	    {
	      /* A label clobbers the pending dead registers since neither
		 reload nor jump will propagate a value across a label.  */
	      AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
	      CLEAR_HARD_REG_SET (pending_dead_regs);
	    }

	  /* The beginning of the epilogue corresponds to the end of the
	     RTL chain when there are no epilogue insns.  Certain resources
	     are implicitly required at that point.  */
	  else if (GET_CODE (real_insn) == NOTE
		   && NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
	    IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
	}

      COPY_HARD_REG_SET (res->regs, current_live_regs);
      if (tinfo != NULL)
	{
	  tinfo->block = b;
	  tinfo->bb_tick = bb_ticks[b];
	}
    }
  else
    /* We didn't find the start of a basic block.  Assume everything
       in use.  This should happen only extremely rarely.  */
    SET_HARD_REG_SET (res->regs);

  CLEAR_RESOURCE (&set);
  CLEAR_RESOURCE (&needed);

  /* Refine RES by scanning forward from TARGET for registers that are set
     or clobbered before any use; those are not live.  Returns a jump insn
     if the scan ended at an unconditional branch.  */
  jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
					  set, needed);

  /* If we hit an unconditional branch, we have another way of finding out
     what is live: we can see what is live at the branch target and include
     anything used but not set before the branch.  We add the live
     resources found using the test below to those found until now.  */

  if (jump_insn)
    {
      struct resources new_resources;
      rtx stop_insn = next_active_insn (jump_insn);

      /* Recurse on the branch target's liveness.  */
      mark_target_live_regs (insns, next_active_insn (jump_target),
			     &new_resources);
      CLEAR_RESOURCE (&set);
      CLEAR_RESOURCE (&needed);

      /* Include JUMP_INSN in the needed registers.  */
      for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
	{
	  mark_referenced_resources (insn, &needed, 1);

	  /* Registers needed here but not already set: live across the
	     branch, so add them to the target's liveness.  */
	  COPY_HARD_REG_SET (scratch, needed.regs);
	  AND_COMPL_HARD_REG_SET (scratch, set.regs);
	  IOR_HARD_REG_SET (new_resources.regs, scratch);

	  mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
	}

      IOR_HARD_REG_SET (res->regs, new_resources.regs);
    }

  /* Save the final answer in the hash table for later calls.  */
  if (tinfo != NULL)
    {
      COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
    }
}
1154 | \f | |
1155 | /* Initialize the resources required by mark_target_live_regs (). | |
1156 | This should be invoked before the first call to mark_target_live_regs. */ | |
1157 | ||
void
init_resource_info (rtx epilogue_insn)
{
  int i;

  /* Indicate what resources are required to be valid at the end of the current
     function.  The condition code never is and memory always is.  If the
     frame pointer is needed, it is and so is the stack pointer unless
     EXIT_IGNORE_STACK is nonzero.  If the frame pointer is not needed, the
     stack pointer is.  Registers used to return the function value are
     needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  end_of_function_needs.unch_memory = 0;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      /* Targets with a separate hard frame pointer need it live too.  */
      SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
#endif
#ifdef EXIT_IGNORE_STACK
      /* If the target does not ignore the stack pointer on exit, or the
	 function keeps sp unchanging, sp must stay live.  If
	 EXIT_IGNORE_STACK is undefined, sp is unconditionally live.  */
      if (! EXIT_IGNORE_STACK
	  || current_function_sp_is_unchanging)
#endif
	SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
    }
  else
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  /* Registers carrying the return value are needed at function end.  */
  if (current_function_return_rtx != 0)
    mark_referenced_resources (current_function_return_rtx,
			       &end_of_function_needs, 1);

  /* Global registers, and any register the target's epilogue uses, are
     needed at function end.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i]
#ifdef EPILOGUE_USES
	|| EPILOGUE_USES (i)
#endif
	)
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);

  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  start_of_epilogue_needs = end_of_function_needs;

  /* Fold everything set by the epilogue insns themselves into the
     end-of-function requirements.  */
  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
			MARK_SRC_DEST_CALL);

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table = (struct target_info **)
    xcalloc (TARGET_HASH_PRIME, sizeof (struct target_info *));
  bb_ticks = (int *) xcalloc (last_basic_block, sizeof (int));
}
1230 | \f | |
de132707 | 1231 | /* Free up the resources allocated to mark_target_live_regs (). This |
29bd1808 | 1232 | should be invoked after the last call to mark_target_live_regs (). */ |
1233 | ||
1234 | void | |
3ad4992f | 1235 | free_resource_info (void) |
29bd1808 | 1236 | { |
1237 | if (target_hash_table != NULL) | |
1238 | { | |
d7c47c0e | 1239 | int i; |
2617fe26 | 1240 | |
1241 | for (i = 0; i < TARGET_HASH_PRIME; ++i) | |
d7c47c0e | 1242 | { |
1243 | struct target_info *ti = target_hash_table[i]; | |
1244 | ||
2617fe26 | 1245 | while (ti) |
d7c47c0e | 1246 | { |
1247 | struct target_info *next = ti->next; | |
1248 | free (ti); | |
1249 | ti = next; | |
1250 | } | |
1251 | } | |
1252 | ||
29bd1808 | 1253 | free (target_hash_table); |
1254 | target_hash_table = NULL; | |
1255 | } | |
1256 | ||
1257 | if (bb_ticks != NULL) | |
1258 | { | |
1259 | free (bb_ticks); | |
1260 | bb_ticks = NULL; | |
1261 | } | |
1262 | } | |
1263 | \f | |
1264 | /* Clear any hashed information that we have stored for INSN. */ | |
1265 | ||
1266 | void | |
3ad4992f | 1267 | clear_hashed_info_for_insn (rtx insn) |
29bd1808 | 1268 | { |
1269 | struct target_info *tinfo; | |
2617fe26 | 1270 | |
29bd1808 | 1271 | if (target_hash_table != NULL) |
1272 | { | |
1273 | for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME]; | |
1274 | tinfo; tinfo = tinfo->next) | |
1275 | if (tinfo->uid == INSN_UID (insn)) | |
1276 | break; | |
1277 | ||
1278 | if (tinfo) | |
1279 | tinfo->block = -1; | |
1280 | } | |
1281 | } | |
1282 | \f | |
1283 | /* Increment the tick count for the basic block that contains INSN. */ | |
1284 | ||
1285 | void | |
3ad4992f | 1286 | incr_ticks_for_insn (rtx insn) |
29bd1808 | 1287 | { |
98d5e888 | 1288 | int b = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH); |
29bd1808 | 1289 | |
1290 | if (b != -1) | |
1291 | bb_ticks[b]++; | |
1292 | } | |
1293 | \f | |
1294 | /* Add TRIAL to the set of resources used at the end of the current | |
aa40f561 | 1295 | function. */ |
29bd1808 | 1296 | void |
3ad4992f | 1297 | mark_end_of_function_resources (rtx trial, int include_delayed_effects) |
29bd1808 | 1298 | { |
1299 | mark_referenced_resources (trial, &end_of_function_needs, | |
1300 | include_delayed_effects); | |
1301 | } |