]>
Commit | Line | Data |
---|---|---|
6a73406e | 1 | /* Definitions for computing resource usage of specific insns. |
b87c6d55 | 2 | Copyright (C) 1999, 2000 Free Software Foundation, Inc. |
6a73406e RH |
3 | |
4 | This file is part of GNU CC. | |
5 | ||
6 | GNU CC is free software; you can redistribute it and/or modify | |
7 | it under the terms of the GNU General Public License as published by | |
8 | the Free Software Foundation; either version 2, or (at your option) | |
9 | any later version. | |
10 | ||
11 | GNU CC is distributed in the hope that it will be useful, | |
12 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
14 | GNU General Public License for more details. | |
15 | ||
16 | You should have received a copy of the GNU General Public License | |
17 | along with GNU CC; see the file COPYING. If not, write to | |
18 | the Free Software Foundation, 59 Temple Place - Suite 330, | |
19 | Boston, MA 02111-1307, USA. */ | |
20 | ||
ca545bb5 | 21 | #include "config.h" |
8ce25479 | 22 | #include "system.h" |
01198c2f | 23 | #include "toplev.h" |
ca545bb5 | 24 | #include "rtl.h" |
6baf1cc8 | 25 | #include "tm_p.h" |
ca545bb5 | 26 | #include "hard-reg-set.h" |
ca545bb5 | 27 | #include "basic-block.h" |
49ad7cfa | 28 | #include "function.h" |
ca545bb5 BM |
29 | #include "regs.h" |
30 | #include "flags.h" | |
31 | #include "output.h" | |
32 | #include "resource.h" | |
7bdb32b9 | 33 | #include "insn-attr.h" |
ca545bb5 BM |
34 | |
/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save them in a hash table rather than recomputing them
   each time.  Entries are chained per-bucket; see target_hash_table and
   TARGET_HASH_PRIME below for the table itself.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info.  */
};
48 | ||
49 | #define TARGET_HASH_PRIME 257 | |
50 | ||
51 | /* Indicates what resources are required at the beginning of the epilogue. */ | |
52 | static struct resources start_of_epilogue_needs; | |
53 | ||
54 | /* Indicates what resources are required at function end. */ | |
55 | static struct resources end_of_function_needs; | |
56 | ||
57 | /* Define the hash table itself. */ | |
58 | static struct target_info **target_hash_table = NULL; | |
59 | ||
60 | /* For each basic block, we maintain a generation number of its basic | |
61 | block info, which is updated each time we move an insn from the | |
62 | target of a jump. This is the generation number indexed by block | |
63 | number. */ | |
64 | ||
65 | static int *bb_ticks; | |
66 | ||
67 | /* Marks registers possibly live at the current place being scanned by | |
68 | mark_target_live_regs.  Used only by the next two functions. |
69 | ||
70 | static HARD_REG_SET current_live_regs; | |
71 | ||
72 | /* Marks registers for which we have seen a REG_DEAD note but no assignment. | |
73 | Also only used by the next two functions. */ | |
74 | ||
75 | static HARD_REG_SET pending_dead_regs; | |
6a73406e | 76 | \f |
957e4763 KG |
77 | static void update_live_status PARAMS ((rtx, rtx, void *)); |
78 | static int find_basic_block PARAMS ((rtx)); | |
79 | static rtx next_insn_no_annul PARAMS ((rtx)); | |
80 | static rtx find_dead_or_set_registers PARAMS ((rtx, struct resources*, | |
6a73406e RH |
81 | rtx*, int, struct resources, |
82 | struct resources)); | |
83 | \f | |
ca545bb5 BM |
84 | /* Utility function called from mark_target_live_regs via note_stores. |
85 | It deadens any CLOBBERed registers and livens any SET registers. */ | |
86 | ||
87 | static void | |
84832317 | 88 | update_live_status (dest, x, data) |
ca545bb5 BM |
89 | rtx dest; |
90 | rtx x; | |
84832317 | 91 | void *data ATTRIBUTE_UNUSED; |
ca545bb5 BM |
92 | { |
93 | int first_regno, last_regno; | |
94 | int i; | |
95 | ||
96 | if (GET_CODE (dest) != REG | |
97 | && (GET_CODE (dest) != SUBREG || GET_CODE (SUBREG_REG (dest)) != REG)) | |
98 | return; | |
99 | ||
100 | if (GET_CODE (dest) == SUBREG) | |
101 | first_regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest); | |
102 | else | |
103 | first_regno = REGNO (dest); | |
104 | ||
105 | last_regno = first_regno + HARD_REGNO_NREGS (first_regno, GET_MODE (dest)); | |
106 | ||
107 | if (GET_CODE (x) == CLOBBER) | |
108 | for (i = first_regno; i < last_regno; i++) | |
109 | CLEAR_HARD_REG_BIT (current_live_regs, i); | |
110 | else | |
111 | for (i = first_regno; i < last_regno; i++) | |
112 | { | |
113 | SET_HARD_REG_BIT (current_live_regs, i); | |
114 | CLEAR_HARD_REG_BIT (pending_dead_regs, i); | |
115 | } | |
116 | } | |
925fa227 RH |
117 | /* Find the number of the basic block that starts closest to INSN. Return -1 |
118 | if we couldn't find such a basic block. */ | |
119 | ||
120 | static int | |
121 | find_basic_block (insn) | |
122 | rtx insn; | |
123 | { | |
124 | int i; | |
125 | ||
126 | /* Scan backwards to the previous BARRIER. Then see if we can find a | |
127 | label that starts a basic block. Return the basic block number. */ | |
128 | ||
129 | for (insn = prev_nonnote_insn (insn); | |
130 | insn && GET_CODE (insn) != BARRIER; | |
131 | insn = prev_nonnote_insn (insn)) | |
132 | ; | |
133 | ||
134 | /* The start of the function is basic block zero. */ | |
135 | if (insn == 0) | |
136 | return 0; | |
137 | ||
138 | /* See if any of the upcoming CODE_LABELs start a basic block. If we reach | |
139 | anything other than a CODE_LABEL or note, we can't find this code. */ | |
140 | for (insn = next_nonnote_insn (insn); | |
141 | insn && GET_CODE (insn) == CODE_LABEL; | |
142 | insn = next_nonnote_insn (insn)) | |
143 | { | |
144 | for (i = 0; i < n_basic_blocks; i++) | |
145 | if (insn == BLOCK_HEAD (i)) | |
146 | return i; | |
147 | } | |
148 | ||
149 | return -1; | |
150 | } | |
ca545bb5 BM |
151 | \f |
152 | /* Similar to next_insn, but ignores insns in the delay slots of | |
153 | an annulled branch. */ | |
154 | ||
155 | static rtx | |
156 | next_insn_no_annul (insn) | |
157 | rtx insn; | |
158 | { | |
159 | if (insn) | |
160 | { | |
161 | /* If INSN is an annulled branch, skip any insns from the target | |
162 | of the branch. */ | |
163 | if (INSN_ANNULLED_BRANCH_P (insn) | |
164 | && NEXT_INSN (PREV_INSN (insn)) != insn) | |
165 | while (INSN_FROM_TARGET_P (NEXT_INSN (insn))) | |
166 | insn = NEXT_INSN (insn); | |
167 | ||
168 | insn = NEXT_INSN (insn); | |
169 | if (insn && GET_CODE (insn) == INSN | |
170 | && GET_CODE (PATTERN (insn)) == SEQUENCE) | |
171 | insn = XVECEXP (PATTERN (insn), 0, 0); | |
172 | } | |
173 | ||
174 | return insn; | |
175 | } | |
176 | \f | |
177 | /* Given X, some rtl, and RES, a pointer to a `struct resource', mark | |
178 | which resources are references by the insn. If INCLUDE_DELAYED_EFFECTS | |
179 | is TRUE, resources used by the called routine will be included for | |
180 | CALL_INSNs. */ | |
181 | ||
182 | void | |
183 | mark_referenced_resources (x, res, include_delayed_effects) | |
184 | register rtx x; | |
185 | register struct resources *res; | |
186 | register int include_delayed_effects; | |
187 | { | |
770ae6cc RK |
188 | enum rtx_code code = GET_CODE (x); |
189 | int i, j; | |
190 | unsigned int r; | |
6f7d635c | 191 | register const char *format_ptr; |
ca545bb5 BM |
192 | |
193 | /* Handle leaf items for which we set resource flags. Also, special-case | |
194 | CALL, SET and CLOBBER operators. */ | |
195 | switch (code) | |
196 | { | |
197 | case CONST: | |
198 | case CONST_INT: | |
199 | case CONST_DOUBLE: | |
200 | case PC: | |
201 | case SYMBOL_REF: | |
202 | case LABEL_REF: | |
203 | return; | |
204 | ||
205 | case SUBREG: | |
206 | if (GET_CODE (SUBREG_REG (x)) != REG) | |
207 | mark_referenced_resources (SUBREG_REG (x), res, 0); | |
208 | else | |
209 | { | |
770ae6cc RK |
210 | unsigned int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x); |
211 | unsigned int last_regno | |
212 | = regno + HARD_REGNO_NREGS (regno, GET_MODE (x)); | |
213 | ||
214 | for (r = regno; r < last_regno; r++) | |
215 | SET_HARD_REG_BIT (res->regs, r); | |
ca545bb5 BM |
216 | } |
217 | return; | |
218 | ||
219 | case REG: | |
770ae6cc RK |
220 | for (r = 0; r < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); r++) |
221 | SET_HARD_REG_BIT (res->regs, REGNO (x) + r); | |
ca545bb5 BM |
222 | return; |
223 | ||
224 | case MEM: | |
225 | /* If this memory shouldn't change, it really isn't referencing | |
226 | memory. */ | |
227 | if (RTX_UNCHANGING_P (x)) | |
228 | res->unch_memory = 1; | |
229 | else | |
230 | res->memory = 1; | |
a5045352 | 231 | res->volatil |= MEM_VOLATILE_P (x); |
ca545bb5 BM |
232 | |
233 | /* Mark registers used to access memory. */ | |
234 | mark_referenced_resources (XEXP (x, 0), res, 0); | |
235 | return; | |
236 | ||
237 | case CC0: | |
238 | res->cc = 1; | |
239 | return; | |
240 | ||
241 | case UNSPEC_VOLATILE: | |
242 | case ASM_INPUT: | |
243 | /* Traditional asm's are always volatile. */ | |
244 | res->volatil = 1; | |
245 | return; | |
246 | ||
247 | case TRAP_IF: | |
248 | res->volatil = 1; | |
249 | break; | |
250 | ||
251 | case ASM_OPERANDS: | |
a5045352 | 252 | res->volatil |= MEM_VOLATILE_P (x); |
ca545bb5 BM |
253 | |
254 | /* For all ASM_OPERANDS, we must traverse the vector of input operands. | |
255 | We can not just fall through here since then we would be confused | |
256 | by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate | |
257 | traditional asms unlike their normal usage. */ | |
258 | ||
259 | for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++) | |
260 | mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, 0); | |
261 | return; | |
262 | ||
263 | case CALL: | |
264 | /* The first operand will be a (MEM (xxx)) but doesn't really reference | |
265 | memory. The second operand may be referenced, though. */ | |
266 | mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, 0); | |
267 | mark_referenced_resources (XEXP (x, 1), res, 0); | |
268 | return; | |
269 | ||
270 | case SET: | |
271 | /* Usually, the first operand of SET is set, not referenced. But | |
272 | registers used to access memory are referenced. SET_DEST is | |
273 | also referenced if it is a ZERO_EXTRACT or SIGN_EXTRACT. */ | |
274 | ||
275 | mark_referenced_resources (SET_SRC (x), res, 0); | |
276 | ||
277 | x = SET_DEST (x); | |
07570c39 AO |
278 | if (GET_CODE (x) == SIGN_EXTRACT |
279 | || GET_CODE (x) == ZERO_EXTRACT | |
280 | || GET_CODE (x) == STRICT_LOW_PART) | |
ca545bb5 BM |
281 | mark_referenced_resources (x, res, 0); |
282 | else if (GET_CODE (x) == SUBREG) | |
283 | x = SUBREG_REG (x); | |
284 | if (GET_CODE (x) == MEM) | |
285 | mark_referenced_resources (XEXP (x, 0), res, 0); | |
286 | return; | |
287 | ||
288 | case CLOBBER: | |
289 | return; | |
290 | ||
291 | case CALL_INSN: | |
292 | if (include_delayed_effects) | |
293 | { | |
294 | /* A CALL references memory, the frame pointer if it exists, the | |
295 | stack pointer, any global registers and any registers given in | |
296 | USE insns immediately in front of the CALL. | |
297 | ||
298 | However, we may have moved some of the parameter loading insns | |
299 | into the delay slot of this CALL. If so, the USE's for them | |
300 | don't count and should be skipped. */ | |
301 | rtx insn = PREV_INSN (x); | |
302 | rtx sequence = 0; | |
303 | int seq_size = 0; | |
304 | rtx next = NEXT_INSN (x); | |
305 | int i; | |
306 | ||
307 | /* If we are part of a delay slot sequence, point at the SEQUENCE. */ | |
308 | if (NEXT_INSN (insn) != x) | |
309 | { | |
310 | next = NEXT_INSN (NEXT_INSN (insn)); | |
311 | sequence = PATTERN (NEXT_INSN (insn)); | |
312 | seq_size = XVECLEN (sequence, 0); | |
313 | if (GET_CODE (sequence) != SEQUENCE) | |
314 | abort (); | |
315 | } | |
316 | ||
317 | res->memory = 1; | |
318 | SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM); | |
319 | if (frame_pointer_needed) | |
320 | { | |
321 | SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM); | |
322 | #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM | |
323 | SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM); | |
324 | #endif | |
325 | } | |
326 | ||
327 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) | |
328 | if (global_regs[i]) | |
329 | SET_HARD_REG_BIT (res->regs, i); | |
330 | ||
331 | /* Check for a NOTE_INSN_SETJMP. If it exists, then we must | |
332 | assume that this call can need any register. | |
333 | ||
334 | This is done to be more conservative about how we handle setjmp. | |
335 | We assume that they both use and set all registers. Using all | |
336 | registers ensures that a register will not be considered dead | |
337 | just because it crosses a setjmp call. A register should be | |
338 | considered dead only if the setjmp call returns non-zero. */ | |
339 | if (next && GET_CODE (next) == NOTE | |
340 | && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP) | |
341 | SET_HARD_REG_SET (res->regs); | |
342 | ||
343 | { | |
344 | rtx link; | |
345 | ||
346 | for (link = CALL_INSN_FUNCTION_USAGE (x); | |
347 | link; | |
348 | link = XEXP (link, 1)) | |
349 | if (GET_CODE (XEXP (link, 0)) == USE) | |
350 | { | |
351 | for (i = 1; i < seq_size; i++) | |
352 | { | |
353 | rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i)); | |
354 | if (GET_CODE (slot_pat) == SET | |
355 | && rtx_equal_p (SET_DEST (slot_pat), | |
0cb5d81c | 356 | XEXP (XEXP (link, 0), 0))) |
ca545bb5 BM |
357 | break; |
358 | } | |
359 | if (i >= seq_size) | |
0cb5d81c | 360 | mark_referenced_resources (XEXP (XEXP (link, 0), 0), |
ca545bb5 BM |
361 | res, 0); |
362 | } | |
363 | } | |
364 | } | |
365 | ||
366 | /* ... fall through to other INSN processing ... */ | |
367 | ||
368 | case INSN: | |
369 | case JUMP_INSN: | |
370 | ||
371 | #ifdef INSN_REFERENCES_ARE_DELAYED | |
372 | if (! include_delayed_effects | |
373 | && INSN_REFERENCES_ARE_DELAYED (x)) | |
374 | return; | |
375 | #endif | |
376 | ||
377 | /* No special processing, just speed up. */ | |
378 | mark_referenced_resources (PATTERN (x), res, include_delayed_effects); | |
379 | return; | |
380 | ||
381 | default: | |
382 | break; | |
383 | } | |
384 | ||
385 | /* Process each sub-expression and flag what it needs. */ | |
386 | format_ptr = GET_RTX_FORMAT (code); | |
387 | for (i = 0; i < GET_RTX_LENGTH (code); i++) | |
388 | switch (*format_ptr++) | |
389 | { | |
390 | case 'e': | |
391 | mark_referenced_resources (XEXP (x, i), res, include_delayed_effects); | |
392 | break; | |
393 | ||
394 | case 'E': | |
395 | for (j = 0; j < XVECLEN (x, i); j++) | |
396 | mark_referenced_resources (XVECEXP (x, i, j), res, | |
397 | include_delayed_effects); | |
398 | break; | |
399 | } | |
400 | } | |
401 | \f | |
/* A subroutine of mark_target_live_regs.  Search forward from TARGET
   looking for registers that are set before they are used.  These are dead.
   Stop after passing a few conditional jumps, and/or a small
   number of unconditional branches.

   RES holds the live-register set being narrowed; registers proven dead
   are cleared from RES->regs.  SET and NEEDED accumulate (by value) the
   resources set and needed along the path scanned so far.  JUMP_COUNT
   budgets how many branches we will still follow.  If JUMP_TARGET is
   non-null, it receives the label of the first simple jump followed.
   Returns that first followed jump insn, or 0 if none was followed.  */

static rtx
find_dead_or_set_registers (target, res, jump_target, jump_count, set, needed)
     rtx target;
     struct resources *res;
     rtx *jump_target;
     int jump_count;
     struct resources set, needed;
{
  HARD_REG_SET scratch;
  rtx insn, next;
  rtx jump_insn = 0;
  int i;

  for (insn = target; insn; insn = next)
    {
      rtx this_jump_insn = insn;

      next = NEXT_INSN (insn);
      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  /* After a label, any pending dead registers that weren't yet
	     used can be made dead.  */
	  AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
	  AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
	  CLEAR_HARD_REG_SET (pending_dead_regs);

	  continue;

	case BARRIER:
	case NOTE:
	  continue;

	case INSN:
	  if (GET_CODE (PATTERN (insn)) == USE)
	    {
	      /* If INSN is a USE made by update_block, we care about the
		 underlying insn.  Any registers set by the underlying insn
		 are live since the insn is being done somewhere else.  */
	      if (GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
		mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
				    MARK_SRC_DEST_CALL);

	      /* All other USE insns are to be ignored.  */
	      continue;
	    }
	  else if (GET_CODE (PATTERN (insn)) == CLOBBER)
	    continue;
	  else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	    {
	      /* An unconditional jump can be used to fill the delay slot
		 of a call, so search for a JUMP_INSN in any position.  */
	      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
		{
		  this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
		  if (GET_CODE (this_jump_insn) == JUMP_INSN)
		    break;
		}
	    }

	  /* fall through */
	default:
	  break;
	}

      if (GET_CODE (this_jump_insn) == JUMP_INSN)
	{
	  /* Each followed branch uses some of our budget of 10; a
	     conditional branch costs 5 total (1 here + 4 below).  */
	  if (jump_count++ < 10)
	    {
	      if (simplejump_p (this_jump_insn)
		  || GET_CODE (PATTERN (this_jump_insn)) == RETURN)
		{
		  /* Follow an unconditional jump: resume the scan at its
		     target label.  */
		  next = JUMP_LABEL (this_jump_insn);
		  if (jump_insn == 0)
		    {
		      jump_insn = insn;
		      if (jump_target)
			*jump_target = JUMP_LABEL (this_jump_insn);
		    }
		}
	      else if (condjump_p (this_jump_insn)
		       || condjump_in_parallel_p (this_jump_insn))
		{
		  struct resources target_set, target_res;
		  struct resources fallthrough_res;

		  /* We can handle conditional branches here by following
		     both paths, and then IOR the results of the two paths
		     together, which will give us registers that are dead
		     on both paths.  Since this is expensive, we give it
		     a much higher cost than unconditional branches.  The
		     cost was chosen so that we will follow at most 1
		     conditional branch.  */

		  jump_count += 4;
		  if (jump_count >= 10)
		    break;

		  mark_referenced_resources (insn, &needed, 1);

		  /* For an annulled branch, mark_set_resources ignores slots
		     filled by instructions from the target.  This is correct
		     if the branch is not taken.  Since we are following both
		     paths from the branch, we must also compute correct info
		     if the branch is taken.  We do this by inverting all of
		     the INSN_FROM_TARGET_P bits, calling mark_set_resources,
		     and then inverting the INSN_FROM_TARGET_P bits again.  */

		  if (GET_CODE (PATTERN (insn)) == SEQUENCE
		      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
		    {
		      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
			INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
			  = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

		      /* TARGET_SET gets the "branch taken" view.  */
		      target_set = set;
		      mark_set_resources (insn, &target_set, 0,
					  MARK_SRC_DEST_CALL);

		      /* Restore the INSN_FROM_TARGET_P bits.  */
		      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
			INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
			  = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		    }
		  else
		    {
		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		      target_set = set;
		    }

		  /* Registers set but not needed are dead on the taken
		     path ...  */
		  target_res = *res;
		  COPY_HARD_REG_SET (scratch, target_set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (target_res.regs, scratch);

		  /* ... and likewise on the fallthrough path.  */
		  fallthrough_res = *res;
		  COPY_HARD_REG_SET (scratch, set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);

		  /* Recurse down both paths, then keep only registers
		     live on at least one of them.  */
		  find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
					      &target_res, 0, jump_count,
					      target_set, needed);
		  find_dead_or_set_registers (next,
					      &fallthrough_res, 0, jump_count,
					      set, needed);
		  IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
		  AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
		  break;
		}
	      else
		break;
	    }
	  else
	    {
	      /* Don't try this optimization if we expired our jump count
		 above, since that would mean there may be an infinite loop
		 in the function being compiled.  */
	      jump_insn = 0;
	      break;
	    }
	}

      /* Ordinary insn: accumulate what it needs and sets, then kill
	 registers set before any use.  */
      mark_referenced_resources (insn, &needed, 1);
      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

      COPY_HARD_REG_SET (scratch, set.regs);
      AND_COMPL_HARD_REG_SET (scratch, needed.regs);
      AND_COMPL_HARD_REG_SET (res->regs, scratch);
    }

  return jump_insn;
}
580 | \f | |
/* Given X, a part of an insn, and a pointer to a `struct resources',
   RES, indicate which resources are modified by the insn.  If
   MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
   set by the called routine.  If MARK_TYPE is MARK_DEST, only mark
   SET_DESTs.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */

void
mark_set_resources (x, res, in_dest, mark_type)
     register rtx x;
     register struct resources *res;
     int in_dest;
     enum mark_resource_type mark_type;
{
  enum rtx_code code;
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* Tail-recursion elimination: SET/CLOBBER-free insn patterns loop
     back here instead of recursing.  */
 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    case CONST_INT:
    case CONST_DOUBLE:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
      /* These don't set any resources.  */
      return;

    case CC0:
      if (in_dest)
	res->cc = 1;
      return;

    case CALL_INSN:
      /* Called routine modifies the condition code, memory, any registers
	 that aren't saved across calls, global registers and anything
	 explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (mark_type == MARK_SRC_DEST_CALL)
	{
	  rtx next = NEXT_INSN (x);
	  rtx prev = PREV_INSN (x);
	  rtx link;

	  res->cc = res->memory = 1;
	  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	    if (call_used_regs[r] || global_regs[r])
	      SET_HARD_REG_BIT (res->regs, r);

	  /* If X is part of a delay slot sequence, then NEXT should be
	     the first insn after the sequence.  */
	  if (NEXT_INSN (prev) != x)
	    next = NEXT_INSN (NEXT_INSN (prev));

	  for (link = CALL_INSN_FUNCTION_USAGE (x);
	       link; link = XEXP (link, 1))
	    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
	      mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
				  MARK_SRC_DEST);

	  /* Check for a NOTE_INSN_SETJMP.  If it exists, then we must
	     assume that this call can clobber any register.  */
	  if (next && GET_CODE (next) == NOTE
	      && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
	    SET_HARD_REG_SET (res->regs);
	}

      /* ... and also what its RTL says it modifies, if anything.  */

      /* fall through */
    case JUMP_INSN:
    case INSN:

	/* An insn consisting of just a CLOBBER (or USE) is just for flow
	   and doesn't actually do anything, so we ignore it.  */

#ifdef INSN_SETS_ARE_DELAYED
      if (mark_type != MARK_SRC_DEST_CALL
	  && INSN_SETS_ARE_DELAYED (x))
	return;
#endif

      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
	goto restart;
      return;

    case SET:
      /* If the source of a SET is a CALL, this is actually done by
	 the called routine.  So only include it if we are to include the
	 effects of the calling routine.  */

      mark_set_resources (SET_DEST (x), res,
			  (mark_type == MARK_SRC_DEST_CALL
			   || GET_CODE (SET_SRC (x)) != CALL),
			  mark_type);

      if (mark_type != MARK_DEST)
	mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case SEQUENCE:
      /* Delay-slot sequence: skip slots annulled from the target (slot 0
	 is the branch itself, whose annul bit is tested).  */
      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
	       && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
	  mark_set_resources (XVECEXP (x, 0, i), res, 0, mark_type);
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      /* Auto-increment addressing modifies the address register.  */
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      if (! (mark_type == MARK_DEST && in_dest))
	{
	  mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
	  mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
	  mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
	}
      return;

    case MEM:
      if (in_dest)
	{
	  res->memory = 1;
	  res->unch_memory |= RTX_UNCHANGING_P (x);
	  res->volatil |= MEM_VOLATILE_P (x);
	}

      /* Address registers are referenced, not set.  */
      mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
      return;

    case SUBREG:
      if (in_dest)
	{
	  if (GET_CODE (SUBREG_REG (x)) != REG)
	    mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
	  else
	    {
	      unsigned int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
	      unsigned int last_regno
		= regno + HARD_REGNO_NREGS (regno, GET_MODE (x));

	      for (r = regno; r < last_regno; r++)
		SET_HARD_REG_BIT (res->regs, r);
	    }
	}
      return;

    case REG:
      if (in_dest)
	for (r = 0; r < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); r++)
	  SET_HARD_REG_BIT (res->regs, REGNO (x) + r);
      return;

    case STRICT_LOW_PART:
      if (! (mark_type == MARK_DEST && in_dest))
	{
	  mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
	  return;
	}

      /* NOTE(review): when MARK_TYPE == MARK_DEST and IN_DEST, control
	 falls through to the volatile cases below, setting res->volatil
	 — confirm this fallthrough is intentional.  */
    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We can not just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
			    MARK_SRC_DEST);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
	break;
      }
}
809 | \f | |
810 | /* Set the resources that are live at TARGET. | |
811 | ||
812 | If TARGET is zero, we refer to the end of the current function and can | |
813 | return our precomputed value. | |
814 | ||
815 | Otherwise, we try to find out what is live by consulting the basic block | |
816 | information. This is tricky, because we must consider the actions of | |
817 | reload and jump optimization, which occur after the basic block information | |
818 | has been computed. | |
819 | ||
820 | Accordingly, we proceed as follows:: | |
821 | ||
822 | We find the previous BARRIER and look at all immediately following labels | |
823 | (with no intervening active insns) to see if any of them start a basic | |
824 | block. If we hit the start of the function first, we use block 0. | |
825 | ||
826 | Once we have found a basic block and a corresponding first insns, we can | |
827 | accurately compute the live status from basic_block_live_regs and | |
828 | reg_renumber. (By starting at a label following a BARRIER, we are immune | |
829 | to actions taken by reload and jump.) Then we scan all insns between | |
830 | that point and our target. For each CLOBBER (or for call-clobbered regs | |
831 | when we pass a CALL_INSN), mark the appropriate registers are dead. For | |
832 | a SET, mark them as live. | |
833 | ||
834 | We have to be careful when using REG_DEAD notes because they are not | |
835 | updated by such things as find_equiv_reg. So keep track of registers | |
836 | marked as dead that haven't been assigned to, and mark them dead at the | |
837 | next CODE_LABEL since reload and jump won't propagate values across labels. | |
838 | ||
839 | If we cannot find the start of a basic block (should be a very rare | |
840 | case, if it can happen at all), mark everything as potentially live. | |
841 | ||
842 | Next, scan forward from TARGET looking for things set or clobbered | |
843 | before they are used. These are not live. | |
844 | ||
845 | Because we can be called many times on the same target, save our results | |
846 | in a hash table indexed by INSN_UID. This is only done if the function | |
847 | init_resource_info () was invoked before we are called. */ | |
848 | ||
849 | void | |
850 | mark_target_live_regs (insns, target, res) | |
851 | rtx insns; | |
852 | rtx target; | |
853 | struct resources *res; | |
854 | { | |
855 | int b = -1; | |
856 | int i; | |
857 | struct target_info *tinfo = NULL; | |
858 | rtx insn; | |
859 | rtx jump_insn = 0; | |
860 | rtx jump_target; | |
861 | HARD_REG_SET scratch; | |
862 | struct resources set, needed; | |
863 | ||
864 | /* Handle end of function. */ | |
865 | if (target == 0) | |
866 | { | |
867 | *res = end_of_function_needs; | |
868 | return; | |
869 | } | |
870 | ||
871 | /* We have to assume memory is needed, but the CC isn't. */ | |
872 | res->memory = 1; | |
873 | res->volatil = res->unch_memory = 0; | |
874 | res->cc = 0; | |
875 | ||
876 | /* See if we have computed this value already. */ | |
877 | if (target_hash_table != NULL) | |
878 | { | |
879 | for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME]; | |
880 | tinfo; tinfo = tinfo->next) | |
881 | if (tinfo->uid == INSN_UID (target)) | |
882 | break; | |
883 | ||
884 | /* Start by getting the basic block number. If we have saved | |
885 | information, we can get it from there unless the insn at the | |
886 | start of the basic block has been deleted. */ | |
887 | if (tinfo && tinfo->block != -1 | |
888 | && ! INSN_DELETED_P (BLOCK_HEAD (tinfo->block))) | |
889 | b = tinfo->block; | |
890 | } | |
891 | ||
925fa227 RH |
892 | if (b == -1) |
893 | b = find_basic_block (target); | |
ca545bb5 BM |
894 | |
895 | if (target_hash_table != NULL) | |
896 | { | |
897 | if (tinfo) | |
898 | { | |
899 | /* If the information is up-to-date, use it. Otherwise, we will | |
900 | update it below. */ | |
901 | if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b]) | |
902 | { | |
903 | COPY_HARD_REG_SET (res->regs, tinfo->live_regs); | |
904 | return; | |
905 | } | |
906 | } | |
907 | else | |
908 | { | |
909 | /* Allocate a place to put our results and chain it into the | |
910 | hash table. */ | |
911 | tinfo = (struct target_info *) oballoc (sizeof (struct target_info)); | |
912 | tinfo->uid = INSN_UID (target); | |
913 | tinfo->block = b; | |
914 | tinfo->next = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME]; | |
915 | target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo; | |
916 | } | |
917 | } | |
918 | ||
919 | CLEAR_HARD_REG_SET (pending_dead_regs); | |
920 | ||
921 | /* If we found a basic block, get the live registers from it and update | |
922 | them with anything set or killed between its start and the insn before | |
923 | TARGET. Otherwise, we must assume everything is live. */ | |
924 | if (b != -1) | |
925 | { | |
e881bb1b | 926 | regset regs_live = BASIC_BLOCK (b)->global_live_at_start; |
770ae6cc RK |
927 | unsigned int j; |
928 | unsigned int regno; | |
ca545bb5 BM |
929 | rtx start_insn, stop_insn; |
930 | ||
931 | /* Compute hard regs live at start of block -- this is the real hard regs | |
932 | marked live, plus live pseudo regs that have been renumbered to | |
933 | hard regs. */ | |
934 | ||
935 | REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live); | |
936 | ||
937 | EXECUTE_IF_SET_IN_REG_SET | |
938 | (regs_live, FIRST_PSEUDO_REGISTER, i, | |
939 | { | |
770ae6cc RK |
940 | if (reg_renumber[i] >= 0) |
941 | { | |
942 | regno = reg_renumber[i]; | |
943 | for (j = regno; | |
944 | j < regno + HARD_REGNO_NREGS (regno, | |
945 | PSEUDO_REGNO_MODE (i)); | |
946 | j++) | |
947 | SET_HARD_REG_BIT (current_live_regs, j); | |
948 | } | |
ca545bb5 BM |
949 | }); |
950 | ||
951 | /* Get starting and ending insn, handling the case where each might | |
952 | be a SEQUENCE. */ | |
953 | start_insn = (b == 0 ? insns : BLOCK_HEAD (b)); | |
954 | stop_insn = target; | |
955 | ||
956 | if (GET_CODE (start_insn) == INSN | |
957 | && GET_CODE (PATTERN (start_insn)) == SEQUENCE) | |
958 | start_insn = XVECEXP (PATTERN (start_insn), 0, 0); | |
959 | ||
960 | if (GET_CODE (stop_insn) == INSN | |
961 | && GET_CODE (PATTERN (stop_insn)) == SEQUENCE) | |
962 | stop_insn = next_insn (PREV_INSN (stop_insn)); | |
963 | ||
964 | for (insn = start_insn; insn != stop_insn; | |
965 | insn = next_insn_no_annul (insn)) | |
966 | { | |
967 | rtx link; | |
968 | rtx real_insn = insn; | |
969 | ||
970 | /* If this insn is from the target of a branch, it isn't going to | |
971 | be used in the sequel. If it is used in both cases, this | |
972 | test will not be true. */ | |
973 | if (INSN_FROM_TARGET_P (insn)) | |
974 | continue; | |
975 | ||
976 | /* If this insn is a USE made by update_block, we care about the | |
977 | underlying insn. */ | |
978 | if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE | |
979 | && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i') | |
980 | real_insn = XEXP (PATTERN (insn), 0); | |
981 | ||
982 | if (GET_CODE (real_insn) == CALL_INSN) | |
983 | { | |
984 | /* CALL clobbers all call-used regs that aren't fixed except | |
985 | sp, ap, and fp. Do this before setting the result of the | |
986 | call live. */ | |
987 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) | |
988 | if (call_used_regs[i] | |
989 | && i != STACK_POINTER_REGNUM && i != FRAME_POINTER_REGNUM | |
990 | && i != ARG_POINTER_REGNUM | |
991 | #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM | |
992 | && i != HARD_FRAME_POINTER_REGNUM | |
993 | #endif | |
994 | #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM | |
995 | && ! (i == ARG_POINTER_REGNUM && fixed_regs[i]) | |
996 | #endif | |
8328efa3 | 997 | #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED) |
ca545bb5 BM |
998 | && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic) |
999 | #endif | |
1000 | ) | |
1001 | CLEAR_HARD_REG_BIT (current_live_regs, i); | |
1002 | ||
1003 | /* A CALL_INSN sets any global register live, since it may | |
1004 | have been modified by the call. */ | |
1005 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) | |
1006 | if (global_regs[i]) | |
1007 | SET_HARD_REG_BIT (current_live_regs, i); | |
1008 | } | |
1009 | ||
1010 | /* Mark anything killed in an insn to be deadened at the next | |
1011 | label. Ignore USE insns; the only REG_DEAD notes will be for | |
1012 | parameters. But they might be early. A CALL_INSN will usually | |
1013 | clobber registers used for parameters. It isn't worth bothering | |
1014 | with the unlikely case when it won't. */ | |
1015 | if ((GET_CODE (real_insn) == INSN | |
1016 | && GET_CODE (PATTERN (real_insn)) != USE | |
1017 | && GET_CODE (PATTERN (real_insn)) != CLOBBER) | |
1018 | || GET_CODE (real_insn) == JUMP_INSN | |
1019 | || GET_CODE (real_insn) == CALL_INSN) | |
1020 | { | |
1021 | for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1)) | |
1022 | if (REG_NOTE_KIND (link) == REG_DEAD | |
1023 | && GET_CODE (XEXP (link, 0)) == REG | |
1024 | && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER) | |
1025 | { | |
1026 | int first_regno = REGNO (XEXP (link, 0)); | |
1027 | int last_regno | |
1028 | = (first_regno | |
1029 | + HARD_REGNO_NREGS (first_regno, | |
1030 | GET_MODE (XEXP (link, 0)))); | |
1031 | ||
1032 | for (i = first_regno; i < last_regno; i++) | |
1033 | SET_HARD_REG_BIT (pending_dead_regs, i); | |
1034 | } | |
1035 | ||
84832317 | 1036 | note_stores (PATTERN (real_insn), update_live_status, NULL); |
ca545bb5 BM |
1037 | |
1038 | /* If any registers were unused after this insn, kill them. | |
1039 | These notes will always be accurate. */ | |
1040 | for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1)) | |
1041 | if (REG_NOTE_KIND (link) == REG_UNUSED | |
1042 | && GET_CODE (XEXP (link, 0)) == REG | |
1043 | && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER) | |
1044 | { | |
1045 | int first_regno = REGNO (XEXP (link, 0)); | |
1046 | int last_regno | |
1047 | = (first_regno | |
1048 | + HARD_REGNO_NREGS (first_regno, | |
1049 | GET_MODE (XEXP (link, 0)))); | |
1050 | ||
1051 | for (i = first_regno; i < last_regno; i++) | |
1052 | CLEAR_HARD_REG_BIT (current_live_regs, i); | |
1053 | } | |
1054 | } | |
1055 | ||
1056 | else if (GET_CODE (real_insn) == CODE_LABEL) | |
1057 | { | |
1058 | /* A label clobbers the pending dead registers since neither | |
1059 | reload nor jump will propagate a value across a label. */ | |
1060 | AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs); | |
1061 | CLEAR_HARD_REG_SET (pending_dead_regs); | |
1062 | } | |
1063 | ||
1064 | /* The beginning of the epilogue corresponds to the end of the | |
1065 | RTL chain when there are no epilogue insns. Certain resources | |
1066 | are implicitly required at that point. */ | |
1067 | else if (GET_CODE (real_insn) == NOTE | |
1068 | && NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG) | |
1069 | IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs); | |
1070 | } | |
1071 | ||
1072 | COPY_HARD_REG_SET (res->regs, current_live_regs); | |
1073 | if (tinfo != NULL) | |
1074 | { | |
1075 | tinfo->block = b; | |
1076 | tinfo->bb_tick = bb_ticks[b]; | |
1077 | } | |
1078 | } | |
1079 | else | |
1080 | /* We didn't find the start of a basic block. Assume everything | |
1081 | in use. This should happen only extremely rarely. */ | |
1082 | SET_HARD_REG_SET (res->regs); | |
1083 | ||
1084 | CLEAR_RESOURCE (&set); | |
1085 | CLEAR_RESOURCE (&needed); | |
1086 | ||
1087 | jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0, | |
1088 | set, needed); | |
1089 | ||
1090 | /* If we hit an unconditional branch, we have another way of finding out | |
1091 | what is live: we can see what is live at the branch target and include | |
96e9c98d DN |
1092 | anything used but not set before the branch. We add the live |
1093 | resources found using the test below to those found until now. */ | |
ca545bb5 BM |
1094 | |
1095 | if (jump_insn) | |
1096 | { | |
1097 | struct resources new_resources; | |
1098 | rtx stop_insn = next_active_insn (jump_insn); | |
1099 | ||
1100 | mark_target_live_regs (insns, next_active_insn (jump_target), | |
1101 | &new_resources); | |
1102 | CLEAR_RESOURCE (&set); | |
1103 | CLEAR_RESOURCE (&needed); | |
1104 | ||
1105 | /* Include JUMP_INSN in the needed registers. */ | |
1106 | for (insn = target; insn != stop_insn; insn = next_active_insn (insn)) | |
1107 | { | |
1108 | mark_referenced_resources (insn, &needed, 1); | |
1109 | ||
1110 | COPY_HARD_REG_SET (scratch, needed.regs); | |
1111 | AND_COMPL_HARD_REG_SET (scratch, set.regs); | |
1112 | IOR_HARD_REG_SET (new_resources.regs, scratch); | |
1113 | ||
73780b74 | 1114 | mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL); |
ca545bb5 BM |
1115 | } |
1116 | ||
96e9c98d | 1117 | IOR_HARD_REG_SET (res->regs, new_resources.regs); |
ca545bb5 BM |
1118 | } |
1119 | ||
1120 | if (tinfo != NULL) | |
1121 | { | |
1122 | COPY_HARD_REG_SET (tinfo->live_regs, res->regs); | |
1123 | } | |
1124 | } | |
1125 | \f | |
1126 | /* Initialize the resources required by mark_target_live_regs (). | |
1127 | This should be invoked before the first call to mark_target_live_regs. */ | |
1128 | ||
void
init_resource_info (epilogue_insn)
     rtx epilogue_insn;	/* NOTE(review): presumably the insn at the start of
			   the epilogue; scanning continues from its successor
			   to the end of the chain -- confirm with callers.  */
{
  int i;

  /* Indicate what resources are required to be valid at the end of the current
     function.  The condition code never is and memory always is.  If the
     frame pointer is needed, it is and so is the stack pointer unless
     EXIT_IGNORE_STACK is non-zero.  If the frame pointer is not needed, the
     stack pointer is.  Registers used to return the function value are
     needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  end_of_function_needs.unch_memory = 0;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
#endif
#ifdef EXIT_IGNORE_STACK
      if (! EXIT_IGNORE_STACK
	  || current_function_sp_is_unchanging)
#endif
	SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
    }
  else
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  /* The return value register(s) stay live to the end of the function.  */
  if (current_function_return_rtx != 0)
    mark_referenced_resources (current_function_return_rtx,
			       &end_of_function_needs, 1);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i]
#ifdef EPILOGUE_USES
	|| EPILOGUE_USES (i)
#endif
	)
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);

  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  start_of_epilogue_needs = end_of_function_needs;

  /* Anything the epilogue itself sets must also be live at the very end.  */
  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
			MARK_SRC_DEST_CALL);

  /* Allocate and initialize the tables used by mark_target_live_regs.
     xcalloc zero-fills, so all hash chains start empty and all tick
     counts start at zero.  */
  target_hash_table = (struct target_info **)
    xcalloc (TARGET_HASH_PRIME, sizeof (struct target_info *));
  bb_ticks = (int *) xcalloc (n_basic_blocks, sizeof (int));
}
1202 | \f | |
 1203 | /* Free up the resources allocated to mark_target_live_regs (). This | |
1204 | should be invoked after the last call to mark_target_live_regs (). */ | |
1205 | ||
1206 | void | |
1207 | free_resource_info () | |
1208 | { | |
1209 | if (target_hash_table != NULL) | |
1210 | { | |
1211 | free (target_hash_table); | |
1212 | target_hash_table = NULL; | |
1213 | } | |
1214 | ||
1215 | if (bb_ticks != NULL) | |
1216 | { | |
1217 | free (bb_ticks); | |
1218 | bb_ticks = NULL; | |
1219 | } | |
1220 | } | |
1221 | \f | |
1222 | /* Clear any hashed information that we have stored for INSN. */ | |
1223 | ||
1224 | void | |
1225 | clear_hashed_info_for_insn (insn) | |
1226 | rtx insn; | |
1227 | { | |
1228 | struct target_info *tinfo; | |
1229 | ||
1230 | if (target_hash_table != NULL) | |
1231 | { | |
1232 | for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME]; | |
1233 | tinfo; tinfo = tinfo->next) | |
1234 | if (tinfo->uid == INSN_UID (insn)) | |
1235 | break; | |
1236 | ||
1237 | if (tinfo) | |
1238 | tinfo->block = -1; | |
1239 | } | |
1240 | } | |
1241 | \f | |
1242 | /* Increment the tick count for the basic block that contains INSN. */ | |
1243 | ||
1244 | void | |
1245 | incr_ticks_for_insn (insn) | |
1246 | rtx insn; | |
1247 | { | |
925fa227 | 1248 | int b = find_basic_block (insn); |
ca545bb5 BM |
1249 | |
1250 | if (b != -1) | |
1251 | bb_ticks[b]++; | |
1252 | } | |
1253 | \f | |
1254 | /* Add TRIAL to the set of resources used at the end of the current | |
1255 | function. */ | |
void
mark_end_of_function_resources (trial, include_delayed_effects)
     rtx trial;		/* Insn or rtx whose referenced resources to record.  */
     int include_delayed_effects;	/* Passed through to
					   mark_referenced_resources; presumably
					   nonzero to include delay-slot effects
					   -- TODO confirm against its
					   definition.  */
{
  /* Fold everything TRIAL references into end_of_function_needs, so that
     mark_target_live_regs () treats those resources as live at the end of
     the current function.  */
  mark_referenced_resources (trial, &end_of_function_needs,
			     include_delayed_effects);
}
1264 | \f | |
ede7cd44 RH |
1265 | /* Try to find a hard register of mode MODE, matching the register class in |
1266 | CLASS_STR, which is available at the beginning of insn CURRENT_INSN and | |
1267 | remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX, | |
1268 | in which case the only condition is that the register must be available | |
1269 | before CURRENT_INSN. | |
1270 | Registers that already have bits set in REG_SET will not be considered. | |
ca545bb5 BM |
1271 | |
1272 | If an appropriate register is available, it will be returned and the | |
1273 | corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is | |
1274 | returned. */ | |
1275 | ||
rtx
find_free_register (current_insn, last_insn, class_str, mode, reg_set)
     rtx current_insn, last_insn;
     const char *class_str;	/* Constraint letter; 'r' means GENERAL_REGS.  */
     int mode;			/* NOTE(review): used as an enum machine_mode
				   value throughout -- the int declaration
				   looks historical.  */
     HARD_REG_SET *reg_set;
{
  int i, j;
  struct resources used;
  unsigned char clet = class_str[0];
  enum reg_class class
    = (clet == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (clet));

  /* Start from the registers live at CURRENT_INSN ...  */
  mark_target_live_regs (get_insns (), current_insn, &used);
  if (last_insn)
    while (current_insn != last_insn)
      {
	/* Exclude anything set in this insn.  */
	mark_set_resources (PATTERN (current_insn), &used, 0,
			    MARK_SRC_DEST_CALL);
	current_insn = next_nonnote_insn (current_insn);
      }


  /* Try each hard register in allocation order, skipping any that fails
     one of the filters below.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int regno;
      int success;

#ifdef REG_ALLOC_ORDER
      regno = reg_alloc_order [i];
#else
      regno = i;
#endif

      /* Don't allocate fixed registers.  */
      if (fixed_regs[regno])
	continue;
      /* Make sure the register is of the right class.  */
      if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
	continue;
      /* And can support the mode we need.  */
      if (! HARD_REGNO_MODE_OK (regno, mode))
	continue;
      /* And that we don't create an extra save/restore.  */
      if (! call_used_regs[regno] && ! regs_ever_live[regno])
	continue;
      /* And we don't clobber traceback for noreturn functions.  */
      if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
	  && (! reload_completed || frame_pointer_needed))
	continue;

      /* Every hard register the MODE-sized value would occupy must be
	 free both in *REG_SET and in the computed live set.  */
      success = 1;
      for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
	{
	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
	      || TEST_HARD_REG_BIT (used.regs, regno + j))
	    {
	      success = 0;
	      break;
	    }
	}
      if (success)
	{
	  /* Claim the register(s) in *REG_SET so a later call will not
	     hand out the same register, then return it as an rtx.  */
	  for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
	    {
	      SET_HARD_REG_BIT (*reg_set, regno + j);
	    }
	  return gen_rtx_REG (mode, regno);
	}
    }
  return NULL_RTX;
}
f2a1bc02 BM |
1349 | |
1350 | /* Return true if REG is dead at CURRENT_INSN. */ | |
1351 | ||
1352 | int | |
1353 | reg_dead_p (current_insn, reg) | |
1354 | rtx current_insn, reg; | |
1355 | { | |
1356 | struct resources used; | |
1357 | int regno, j; | |
1358 | ||
1359 | mark_target_live_regs (get_insns (), current_insn, &used); | |
1360 | ||
1361 | regno = REGNO (reg); | |
1362 | for (j = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1; j >= 0; j--) | |
1363 | { | |
1364 | if (TEST_HARD_REG_BIT (used.regs, regno + j)) | |
1365 | return 0; | |
1366 | } | |
1367 | ||
1368 | return 1; | |
1369 | } |