1 /* Definitions for computing resource usage of specific insns.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "toplev.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "hard-reg-set.h"
29 #include "function.h"
30 #include "regs.h"
31 #include "flags.h"
32 #include "output.h"
33 #include "resource.h"
34 #include "except.h"
35 #include "insn-attr.h"
36 #include "params.h"
37 #include "df.h"
38
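/* Throughout this file we manipulate `struct resources' objects; the
   structure itself is declared in resource.h.  As a rough sketch based on
   how the fields are used below (not a verbatim copy of the declaration):

     struct resources
     {
       char memory;        -- insn sets or needs a memory location
       char unch_memory;   -- insn sets or needs an unchanging (read-only) MEM
       char volatil;       -- insn sets or needs something volatile
       char cc;            -- insn sets or needs the condition code
       HARD_REG_SET regs;  -- hard registers set or needed
     };  */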
39 /* This structure is used to record liveness information at the targets or
40 fallthrough insns of branches.  We will most likely need the information
41 at targets again, so we save it in a hash table rather than recomputing it
42 each time.  */
43
44 struct target_info
45 {
46 int uid; /* INSN_UID of target. */
47 struct target_info *next; /* Next info for same hash bucket. */
48 HARD_REG_SET live_regs; /* Registers live at target. */
49 int block; /* Basic block number containing target. */
50 int bb_tick; /* Generation count of basic block info. */
51 };
52
53 #define TARGET_HASH_PRIME 257
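/* Entries are hashed by INSN_UID modulo TARGET_HASH_PRIME and chained
   through the `next' field, so a lookup follows this sketch (the real
   lookups appear in mark_target_live_regs and clear_hashed_info_for_insn
   below):

     struct target_info *tinfo;

     for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
          tinfo; tinfo = tinfo->next)
       if (tinfo->uid == INSN_UID (target))
         break;
*/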
54
55 /* Indicates what resources are required at the beginning of the epilogue. */
56 static struct resources start_of_epilogue_needs;
57
58 /* Indicates what resources are required at function end. */
59 static struct resources end_of_function_needs;
60
61 /* Define the hash table itself. */
62 static struct target_info **target_hash_table = NULL;
63
64 /* For each basic block, we maintain a generation number of its basic
65 block info, which is updated each time we move an insn from the
66 target of a jump. This is the generation number indexed by block
67 number. */
68
69 static int *bb_ticks;
70
71 /* Marks registers possibly live at the current place being scanned by
72 mark_target_live_regs. Also used by update_live_status. */
73
74 static HARD_REG_SET current_live_regs;
75
76 /* Marks registers for which we have seen a REG_DEAD note but no assignment.
77 Also only used by the next two functions. */
78
79 static HARD_REG_SET pending_dead_regs;
80 \f
81 static void update_live_status (rtx, const_rtx, void *);
82 static int find_basic_block (rtx, int);
83 static rtx next_insn_no_annul (rtx);
84 static rtx find_dead_or_set_registers (rtx, struct resources*,
85 rtx*, int, struct resources,
86 struct resources);
87 \f
88 /* Utility function called from mark_target_live_regs via note_stores.
89 It deadens any CLOBBERed registers and livens any SET registers. */
90
91 static void
92 update_live_status (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
93 {
94 int first_regno, last_regno;
95 int i;
96
97 if (!REG_P (dest)
98 && (GET_CODE (dest) != SUBREG || !REG_P (SUBREG_REG (dest))))
99 return;
100
101 if (GET_CODE (dest) == SUBREG)
102 {
103 first_regno = subreg_regno (dest);
104 last_regno = first_regno + subreg_nregs (dest);
105
106 }
107 else
108 {
109 first_regno = REGNO (dest);
110 last_regno = END_HARD_REGNO (dest);
111 }
112
113 if (GET_CODE (x) == CLOBBER)
114 for (i = first_regno; i < last_regno; i++)
115 CLEAR_HARD_REG_BIT (current_live_regs, i);
116 else
117 for (i = first_regno; i < last_regno; i++)
118 {
119 SET_HARD_REG_BIT (current_live_regs, i);
120 CLEAR_HARD_REG_BIT (pending_dead_regs, i);
121 }
122 }
123
124 /* Find the number of the basic block with correct live register
125 information that starts closest to INSN. Return -1 if we couldn't
126 find such a basic block or the beginning is more than
127 SEARCH_LIMIT instructions before INSN. Use SEARCH_LIMIT = -1 for
128 an unlimited search.
129
130 The delay slot filling code destroys the control-flow graph so,
131 instead of finding the basic block containing INSN, we search
132 backwards toward a BARRIER where the live register information is
133 correct. */
134
135 static int
136 find_basic_block (rtx insn, int search_limit)
137 {
138 basic_block bb;
139
140 /* Scan backwards to the previous BARRIER. Then see if we can find a
141 label that starts a basic block. Return the basic block number. */
142 for (insn = prev_nonnote_insn (insn);
143 insn && !BARRIER_P (insn) && search_limit != 0;
144 insn = prev_nonnote_insn (insn), --search_limit)
145 ;
146
147 /* The closest BARRIER is too far away. */
148 if (search_limit == 0)
149 return -1;
150
151 /* The start of the function. */
152 else if (insn == 0)
153 return ENTRY_BLOCK_PTR->next_bb->index;
154
155 /* See if any of the upcoming CODE_LABELs start a basic block. If we reach
156 anything other than a CODE_LABEL or note, we can't find this code. */
157 for (insn = next_nonnote_insn (insn);
158 insn && LABEL_P (insn);
159 insn = next_nonnote_insn (insn))
160 {
161 FOR_EACH_BB (bb)
162 if (insn == BB_HEAD (bb))
163 return bb->index;
164 }
165
166 return -1;
167 }
168 \f
169 /* Similar to next_insn, but ignores insns in the delay slots of
170 an annulled branch. */
171
172 static rtx
173 next_insn_no_annul (rtx insn)
174 {
175 if (insn)
176 {
177 /* If INSN is an annulled branch, skip any insns from the target
178 of the branch. */
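	 /* INSN_FROM_TARGET_P marks a delay-slot insn that was copied from
	    the branch target and so is executed only when the branch is
	    taken; the NEXT_INSN (PREV_INSN (insn)) test below appears to be
	    the usual check that INSN heads a filled delay-slot SEQUENCE
	    rather than standing alone in the insn chain.  */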
179 if (INSN_P (insn)
180 && INSN_ANNULLED_BRANCH_P (insn)
181 && NEXT_INSN (PREV_INSN (insn)) != insn)
182 {
183 rtx next = NEXT_INSN (insn);
184 enum rtx_code code = GET_CODE (next);
185
186 while ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
187 && INSN_FROM_TARGET_P (next))
188 {
189 insn = next;
190 next = NEXT_INSN (insn);
191 code = GET_CODE (next);
192 }
193 }
194
195 insn = NEXT_INSN (insn);
196 if (insn && NONJUMP_INSN_P (insn)
197 && GET_CODE (PATTERN (insn)) == SEQUENCE)
198 insn = XVECEXP (PATTERN (insn), 0, 0);
199 }
200
201 return insn;
202 }
203 \f
204 /* Given X, some rtl, and RES, a pointer to a `struct resources', mark
205 which resources are referenced by the insn. If INCLUDE_DELAYED_EFFECTS
206 is TRUE, resources used by the called routine will be included for
207 CALL_INSNs. */
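/* Note that mark_referenced_resources only ever accumulates into *RES (it
   sets flags and register bits, never clears them), so callers are expected
   to CLEAR_RESOURCE the structure first, as find_dead_or_set_registers and
   mark_target_live_regs do below.  */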
208
209 void
210 mark_referenced_resources (rtx x, struct resources *res,
211 int include_delayed_effects)
212 {
213 enum rtx_code code = GET_CODE (x);
214 int i, j;
215 unsigned int r;
216 const char *format_ptr;
217
218 /* Handle leaf items for which we set resource flags. Also, special-case
219 CALL, SET and CLOBBER operators. */
220 switch (code)
221 {
222 case CONST:
223 case CONST_INT:
224 case CONST_DOUBLE:
225 case CONST_FIXED:
226 case CONST_VECTOR:
227 case PC:
228 case SYMBOL_REF:
229 case LABEL_REF:
230 return;
231
232 case SUBREG:
233 if (!REG_P (SUBREG_REG (x)))
234 mark_referenced_resources (SUBREG_REG (x), res, 0);
235 else
236 {
237 unsigned int regno = subreg_regno (x);
238 unsigned int last_regno = regno + subreg_nregs (x);
239
240 gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
241 for (r = regno; r < last_regno; r++)
242 SET_HARD_REG_BIT (res->regs, r);
243 }
244 return;
245
246 case REG:
247 gcc_assert (HARD_REGISTER_P (x));
248 add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
249 return;
250
251 case MEM:
252 /* If this memory shouldn't change, it really isn't referencing
253 memory. */
254 if (MEM_READONLY_P (x))
255 res->unch_memory = 1;
256 else
257 res->memory = 1;
258 res->volatil |= MEM_VOLATILE_P (x);
259
260 /* Mark registers used to access memory. */
261 mark_referenced_resources (XEXP (x, 0), res, 0);
262 return;
263
264 case CC0:
265 res->cc = 1;
266 return;
267
268 case UNSPEC_VOLATILE:
269 case ASM_INPUT:
270 /* Traditional asm's are always volatile. */
271 res->volatil = 1;
272 return;
273
274 case TRAP_IF:
275 res->volatil = 1;
276 break;
277
278 case ASM_OPERANDS:
279 res->volatil |= MEM_VOLATILE_P (x);
280
281 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
282 	 We cannot just fall through here since then we would be confused
283 	 by the ASM_INPUT rtx inside ASM_OPERANDS, which, unlike its normal
284 	 usage, does not indicate a traditional asm.  */
285
286 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
287 mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, 0);
288 return;
289
290 case CALL:
291 /* The first operand will be a (MEM (xxx)) but doesn't really reference
292 memory. The second operand may be referenced, though. */
293 mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, 0);
294 mark_referenced_resources (XEXP (x, 1), res, 0);
295 return;
296
297 case SET:
298 /* Usually, the first operand of SET is set, not referenced. But
299 registers used to access memory are referenced. SET_DEST is
300 also referenced if it is a ZERO_EXTRACT. */
301
302 mark_referenced_resources (SET_SRC (x), res, 0);
303
304 x = SET_DEST (x);
305 if (GET_CODE (x) == ZERO_EXTRACT
306 || GET_CODE (x) == STRICT_LOW_PART)
307 mark_referenced_resources (x, res, 0);
308 else if (GET_CODE (x) == SUBREG)
309 x = SUBREG_REG (x);
310 if (MEM_P (x))
311 mark_referenced_resources (XEXP (x, 0), res, 0);
312 return;
313
314 case CLOBBER:
315 return;
316
317 case CALL_INSN:
318 if (include_delayed_effects)
319 {
320 /* A CALL references memory, the frame pointer if it exists, the
321 stack pointer, any global registers and any registers given in
322 USE insns immediately in front of the CALL.
323
324 However, we may have moved some of the parameter loading insns
325 into the delay slot of this CALL. If so, the USE's for them
326 don't count and should be skipped. */
327 rtx insn = PREV_INSN (x);
328 rtx sequence = 0;
329 int seq_size = 0;
330 int i;
331
332 /* If we are part of a delay slot sequence, point at the SEQUENCE. */
333 if (NEXT_INSN (insn) != x)
334 {
335 sequence = PATTERN (NEXT_INSN (insn));
336 seq_size = XVECLEN (sequence, 0);
337 gcc_assert (GET_CODE (sequence) == SEQUENCE);
338 }
339
340 res->memory = 1;
341 SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
342 if (frame_pointer_needed)
343 {
344 SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
345 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
346 SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
347 #endif
348 }
349
350 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
351 if (global_regs[i])
352 SET_HARD_REG_BIT (res->regs, i);
353
354 /* Check for a REG_SETJMP. If it exists, then we must
355 assume that this call can need any register.
356
357 This is done to be more conservative about how we handle setjmp.
358 We assume that they both use and set all registers. Using all
359 registers ensures that a register will not be considered dead
360 just because it crosses a setjmp call. A register should be
361 considered dead only if the setjmp call returns nonzero. */
362 if (find_reg_note (x, REG_SETJMP, NULL))
363 SET_HARD_REG_SET (res->regs);
364
365 {
366 rtx link;
367
368 for (link = CALL_INSN_FUNCTION_USAGE (x);
369 link;
370 link = XEXP (link, 1))
371 if (GET_CODE (XEXP (link, 0)) == USE)
372 {
373 for (i = 1; i < seq_size; i++)
374 {
375 rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
376 if (GET_CODE (slot_pat) == SET
377 && rtx_equal_p (SET_DEST (slot_pat),
378 XEXP (XEXP (link, 0), 0)))
379 break;
380 }
381 if (i >= seq_size)
382 mark_referenced_resources (XEXP (XEXP (link, 0), 0),
383 res, 0);
384 }
385 }
386 }
387
388 /* ... fall through to other INSN processing ... */
389
390 case INSN:
391 case JUMP_INSN:
392
393 #ifdef INSN_REFERENCES_ARE_DELAYED
394 if (! include_delayed_effects
395 && INSN_REFERENCES_ARE_DELAYED (x))
396 return;
397 #endif
398
399 /* No special processing, just speed up. */
400 mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
401 return;
402
403 default:
404 break;
405 }
406
407 /* Process each sub-expression and flag what it needs. */
408 format_ptr = GET_RTX_FORMAT (code);
409 for (i = 0; i < GET_RTX_LENGTH (code); i++)
410 switch (*format_ptr++)
411 {
412 case 'e':
413 mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
414 break;
415
416 case 'E':
417 for (j = 0; j < XVECLEN (x, i); j++)
418 mark_referenced_resources (XVECEXP (x, i, j), res,
419 include_delayed_effects);
420 break;
421 }
422 }
423 \f
424 /* A subroutine of mark_target_live_regs. Search forward from TARGET
425 looking for registers that are set before they are used. These are dead.
426 Stop after passing a few conditional jumps, and/or a small
427 number of unconditional branches. */
428
429 static rtx
430 find_dead_or_set_registers (rtx target, struct resources *res,
431 rtx *jump_target, int jump_count,
432 struct resources set, struct resources needed)
433 {
434 HARD_REG_SET scratch;
435 rtx insn, next;
436 rtx jump_insn = 0;
437 int i;
438
439 for (insn = target; insn; insn = next)
440 {
441 rtx this_jump_insn = insn;
442
443 next = NEXT_INSN (insn);
444
445 /* If this instruction can throw an exception, then we don't
446 know where we might end up next. That means that we have to
447 assume that whatever we have already marked as live really is
448 live. */
449 if (can_throw_internal (insn))
450 break;
451
452 switch (GET_CODE (insn))
453 {
454 case CODE_LABEL:
455 /* After a label, any pending dead registers that weren't yet
456 used can be made dead. */
457 AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
458 AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
459 CLEAR_HARD_REG_SET (pending_dead_regs);
460
461 continue;
462
463 case BARRIER:
464 case NOTE:
465 continue;
466
467 case INSN:
468 if (GET_CODE (PATTERN (insn)) == USE)
469 {
470 /* If INSN is a USE made by update_block, we care about the
471 underlying insn. Any registers set by the underlying insn
472 are live since the insn is being done somewhere else. */
473 if (INSN_P (XEXP (PATTERN (insn), 0)))
474 mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
475 MARK_SRC_DEST_CALL);
476
477 /* All other USE insns are to be ignored. */
478 continue;
479 }
480 else if (GET_CODE (PATTERN (insn)) == CLOBBER)
481 continue;
482 else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
483 {
484 /* An unconditional jump can be used to fill the delay slot
485 of a call, so search for a JUMP_INSN in any position. */
486 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
487 {
488 this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
489 if (JUMP_P (this_jump_insn))
490 break;
491 }
492 }
493
494 default:
495 break;
496 }
497
498 if (JUMP_P (this_jump_insn))
499 {
500 if (jump_count++ < 10)
501 {
502 if (any_uncondjump_p (this_jump_insn)
503 || GET_CODE (PATTERN (this_jump_insn)) == RETURN)
504 {
505 next = JUMP_LABEL (this_jump_insn);
506 if (jump_insn == 0)
507 {
508 jump_insn = insn;
509 if (jump_target)
510 *jump_target = JUMP_LABEL (this_jump_insn);
511 }
512 }
513 else if (any_condjump_p (this_jump_insn))
514 {
515 struct resources target_set, target_res;
516 struct resources fallthrough_res;
517
518 /* We can handle conditional branches here by following
519 both paths, and then IOR the results of the two paths
520 together, which will give us registers that are dead
521 on both paths. Since this is expensive, we give it
522 a much higher cost than unconditional branches. The
523 cost was chosen so that we will follow at most 1
524 conditional branch. */
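		 /* Concretely: the jump_count++ in the test above has already
		    charged 1 for this branch, so the += 4 here leaves the
		    count at 5 or more after the first conditional branch; a
		    second one would reach at least 10 and trip the cutoff
		    just below.  */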
525
526 jump_count += 4;
527 if (jump_count >= 10)
528 break;
529
530 mark_referenced_resources (insn, &needed, 1);
531
532 /* For an annulled branch, mark_set_resources ignores slots
533 filled by instructions from the target. This is correct
534 if the branch is not taken. Since we are following both
535 paths from the branch, we must also compute correct info
536 if the branch is taken. We do this by inverting all of
537 the INSN_FROM_TARGET_P bits, calling mark_set_resources,
538 and then inverting the INSN_FROM_TARGET_P bits again. */
539
540 if (GET_CODE (PATTERN (insn)) == SEQUENCE
541 && INSN_ANNULLED_BRANCH_P (this_jump_insn))
542 {
543 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
544 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
545 = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));
546
547 target_set = set;
548 mark_set_resources (insn, &target_set, 0,
549 MARK_SRC_DEST_CALL);
550
551 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
552 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
553 = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));
554
555 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
556 }
557 else
558 {
559 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
560 target_set = set;
561 }
562
563 target_res = *res;
564 COPY_HARD_REG_SET (scratch, target_set.regs);
565 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
566 AND_COMPL_HARD_REG_SET (target_res.regs, scratch);
567
568 fallthrough_res = *res;
569 COPY_HARD_REG_SET (scratch, set.regs);
570 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
571 AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);
572
573 find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
574 &target_res, 0, jump_count,
575 target_set, needed);
576 find_dead_or_set_registers (next,
577 &fallthrough_res, 0, jump_count,
578 set, needed);
579 IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
580 AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
581 break;
582 }
583 else
584 break;
585 }
586 else
587 {
588 /* Don't try this optimization if we expired our jump count
589 above, since that would mean there may be an infinite loop
590 in the function being compiled. */
591 jump_insn = 0;
592 break;
593 }
594 }
595
596 mark_referenced_resources (insn, &needed, 1);
597 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
598
599 COPY_HARD_REG_SET (scratch, set.regs);
600 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
601 AND_COMPL_HARD_REG_SET (res->regs, scratch);
602 }
603
604 return jump_insn;
605 }
606 \f
607 /* Given X, a part of an insn, and a pointer to a `struct resources',
608 RES, indicate which resources are modified by the insn. If
609 MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
610 set by the called routine.
611
612 If IN_DEST is nonzero, it means we are inside a SET. Otherwise,
613 objects are being referenced instead of set.
614
615 We never mark the insn as modifying the condition code unless it explicitly
616 SETs CC0 even though this is not totally correct. The reason for this is
617 that we require a SET of CC0 to immediately precede the reference to CC0.
618 So if some other insn sets CC0 as a side-effect, we know it cannot affect
619 our computation and thus may be placed in a delay slot. */
620
621 void
622 mark_set_resources (rtx x, struct resources *res, int in_dest,
623 enum mark_resource_type mark_type)
624 {
625 enum rtx_code code;
626 int i, j;
627 unsigned int r;
628 const char *format_ptr;
629
630 restart:
631
632 code = GET_CODE (x);
633
634 switch (code)
635 {
636 case NOTE:
637 case BARRIER:
638 case CODE_LABEL:
639 case USE:
640 case CONST_INT:
641 case CONST_DOUBLE:
642 case CONST_FIXED:
643 case CONST_VECTOR:
644 case LABEL_REF:
645 case SYMBOL_REF:
646 case CONST:
647 case PC:
648 /* These don't set any resources. */
649 return;
650
651 case CC0:
652 if (in_dest)
653 res->cc = 1;
654 return;
655
656 case CALL_INSN:
657 /* Called routine modifies the condition code, memory, any registers
658 that aren't saved across calls, global registers and anything
659 explicitly CLOBBERed immediately after the CALL_INSN. */
660
661 if (mark_type == MARK_SRC_DEST_CALL)
662 {
663 rtx link;
664
665 res->cc = res->memory = 1;
666
667 IOR_HARD_REG_SET (res->regs, regs_invalidated_by_call);
668
669 for (link = CALL_INSN_FUNCTION_USAGE (x);
670 link; link = XEXP (link, 1))
671 if (GET_CODE (XEXP (link, 0)) == CLOBBER)
672 mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
673 MARK_SRC_DEST);
674
675 /* Check for a REG_SETJMP. If it exists, then we must
676 assume that this call can clobber any register. */
677 if (find_reg_note (x, REG_SETJMP, NULL))
678 SET_HARD_REG_SET (res->regs);
679 }
680
681 /* ... and also what its RTL says it modifies, if anything. */
682
683 case JUMP_INSN:
684 case INSN:
685
686 /* An insn consisting of just a CLOBBER (or USE) is just for flow
687 and doesn't actually do anything, so we ignore it. */
688
689 #ifdef INSN_SETS_ARE_DELAYED
690 if (mark_type != MARK_SRC_DEST_CALL
691 && INSN_SETS_ARE_DELAYED (x))
692 return;
693 #endif
694
695 x = PATTERN (x);
696 if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
697 goto restart;
698 return;
699
700 case SET:
701 /* If the source of a SET is a CALL, this is actually done by
702 the called routine. So only include it if we are to include the
703 effects of the calling routine. */
704
705 mark_set_resources (SET_DEST (x), res,
706 (mark_type == MARK_SRC_DEST_CALL
707 || GET_CODE (SET_SRC (x)) != CALL),
708 mark_type);
709
710 mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
711 return;
712
713 case CLOBBER:
714 mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
715 return;
716
717 case SEQUENCE:
718 for (i = 0; i < XVECLEN (x, 0); i++)
719 if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
720 && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
721 mark_set_resources (XVECEXP (x, 0, i), res, 0, mark_type);
722 return;
723
724 case POST_INC:
725 case PRE_INC:
726 case POST_DEC:
727 case PRE_DEC:
728 mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
729 return;
730
731 case PRE_MODIFY:
732 case POST_MODIFY:
733 mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
734 mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, MARK_SRC_DEST);
735 mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, MARK_SRC_DEST);
736 return;
737
738 case SIGN_EXTRACT:
739 case ZERO_EXTRACT:
740 mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
741 mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
742 mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
743 return;
744
745 case MEM:
746 if (in_dest)
747 {
748 res->memory = 1;
749 res->unch_memory |= MEM_READONLY_P (x);
750 res->volatil |= MEM_VOLATILE_P (x);
751 }
752
753 mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
754 return;
755
756 case SUBREG:
757 if (in_dest)
758 {
759 if (!REG_P (SUBREG_REG (x)))
760 mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
761 else
762 {
763 unsigned int regno = subreg_regno (x);
764 unsigned int last_regno = regno + subreg_nregs (x);
765
766 gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
767 for (r = regno; r < last_regno; r++)
768 SET_HARD_REG_BIT (res->regs, r);
769 }
770 }
771 return;
772
773 case REG:
774 if (in_dest)
775 {
776 gcc_assert (HARD_REGISTER_P (x));
777 add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
778 }
779 return;
780
781 case UNSPEC_VOLATILE:
782 case ASM_INPUT:
783 /* Traditional asm's are always volatile. */
784 res->volatil = 1;
785 return;
786
787 case TRAP_IF:
788 res->volatil = 1;
789 break;
790
791 case ASM_OPERANDS:
792 res->volatil |= MEM_VOLATILE_P (x);
793
794 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
795 	 We cannot just fall through here since then we would be confused
796 	 by the ASM_INPUT rtx inside ASM_OPERANDS, which, unlike its normal
797 	 usage, does not indicate a traditional asm.  */
798
799 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
800 mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
801 MARK_SRC_DEST);
802 return;
803
804 default:
805 break;
806 }
807
808 /* Process each sub-expression and flag what it needs. */
809 format_ptr = GET_RTX_FORMAT (code);
810 for (i = 0; i < GET_RTX_LENGTH (code); i++)
811 switch (*format_ptr++)
812 {
813 case 'e':
814 mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
815 break;
816
817 case 'E':
818 for (j = 0; j < XVECLEN (x, i); j++)
819 mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
820 break;
821 }
822 }
823 \f
824 /* Return TRUE if INSN is a return, possibly with a filled delay slot. */
825
826 static bool
827 return_insn_p (const_rtx insn)
828 {
829 if (JUMP_P (insn) && GET_CODE (PATTERN (insn)) == RETURN)
830 return true;
831
832 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
833 return return_insn_p (XVECEXP (PATTERN (insn), 0, 0));
834
835 return false;
836 }
837
838 /* Set the resources that are live at TARGET.
839
840 If TARGET is zero, we refer to the end of the current function and can
841 return our precomputed value.
842
843 Otherwise, we try to find out what is live by consulting the basic block
844 information. This is tricky, because we must consider the actions of
845 reload and jump optimization, which occur after the basic block information
846 has been computed.
847
848 Accordingly, we proceed as follows:
849
850 We find the previous BARRIER and look at all immediately following labels
851 (with no intervening active insns) to see if any of them start a basic
852 block. If we hit the start of the function first, we use block 0.
853
854 Once we have found a basic block and a corresponding first insn, we can
855 accurately compute the live status from basic_block_live_regs and
856 reg_renumber. (By starting at a label following a BARRIER, we are immune
857 to actions taken by reload and jump.) Then we scan all insns between
858 that point and our target. For each CLOBBER (or for call-clobbered regs
859 when we pass a CALL_INSN), mark the appropriate registers as dead.  For
860 a SET, mark them as live.
861
862 We have to be careful when using REG_DEAD notes because they are not
863 updated by such things as find_equiv_reg. So keep track of registers
864 marked as dead that haven't been assigned to, and mark them dead at the
865 next CODE_LABEL since reload and jump won't propagate values across labels.
866
867 If we cannot find the start of a basic block (should be a very rare
868 case, if it can happen at all), mark everything as potentially live.
869
870 Next, scan forward from TARGET looking for things set or clobbered
871 before they are used. These are not live.
872
873 Because we can be called many times on the same target, save our results
874 in a hash table indexed by INSN_UID. This is only done if the function
875 init_resource_info () was invoked before we are called. */
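/* A hedged sketch of the intended usage (the real caller is the delay-slot
   scheduler in reorg.c; the exact arguments there may differ):

     struct resources needed;

     CLEAR_RESOURCE (&needed);
     mark_target_live_regs (get_insns (), target_label, &needed);

   where `target_label' stands for whatever insn the caller wants liveness
   information at.  */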
876
877 void
878 mark_target_live_regs (rtx insns, rtx target, struct resources *res)
879 {
880 int b = -1;
881 unsigned int i;
882 struct target_info *tinfo = NULL;
883 rtx insn;
884 rtx jump_insn = 0;
885 rtx jump_target;
886 HARD_REG_SET scratch;
887 struct resources set, needed;
888
889 /* Handle end of function. */
890 if (target == 0)
891 {
892 *res = end_of_function_needs;
893 return;
894 }
895
896 /* Handle return insn. */
897 else if (return_insn_p (target))
898 {
899 *res = end_of_function_needs;
900 mark_referenced_resources (target, res, 0);
901 return;
902 }
903
904 /* We have to assume memory is needed, but the CC isn't. */
905 res->memory = 1;
906 res->volatil = res->unch_memory = 0;
907 res->cc = 0;
908
909 /* See if we have computed this value already. */
910 if (target_hash_table != NULL)
911 {
912 for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
913 tinfo; tinfo = tinfo->next)
914 if (tinfo->uid == INSN_UID (target))
915 break;
916
917 /* Start by getting the basic block number. If we have saved
918 information, we can get it from there unless the insn at the
919 start of the basic block has been deleted. */
920 if (tinfo && tinfo->block != -1
921 && ! INSN_DELETED_P (BB_HEAD (BASIC_BLOCK (tinfo->block))))
922 b = tinfo->block;
923 }
924
925 if (b == -1)
926 b = find_basic_block (target, MAX_DELAY_SLOT_LIVE_SEARCH);
927
928 if (target_hash_table != NULL)
929 {
930 if (tinfo)
931 {
932 /* If the information is up-to-date, use it. Otherwise, we will
933 update it below. */
934 if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
935 {
936 COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
937 return;
938 }
939 }
940 else
941 {
942 /* Allocate a place to put our results and chain it into the
943 hash table. */
944 tinfo = XNEW (struct target_info);
945 tinfo->uid = INSN_UID (target);
946 tinfo->block = b;
947 tinfo->next
948 = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
949 target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
950 }
951 }
952
953 CLEAR_HARD_REG_SET (pending_dead_regs);
954
955 /* If we found a basic block, get the live registers from it and update
956 them with anything set or killed between its start and the insn before
957 TARGET. Otherwise, we must assume everything is live. */
958 if (b != -1)
959 {
960 regset regs_live = DF_LR_IN (BASIC_BLOCK (b));
961 rtx start_insn, stop_insn;
962
963 /* Compute hard regs live at start of block -- this is the real hard regs
964 marked live, plus live pseudo regs that have been renumbered to
965 hard regs. */
966
967 REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);
968
969 /* Get starting and ending insn, handling the case where each might
970 be a SEQUENCE. */
971 start_insn = (b == ENTRY_BLOCK_PTR->next_bb->index ?
972 insns : BB_HEAD (BASIC_BLOCK (b)));
973 stop_insn = target;
974
975 if (NONJUMP_INSN_P (start_insn)
976 && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
977 start_insn = XVECEXP (PATTERN (start_insn), 0, 0);
978
979 if (NONJUMP_INSN_P (stop_insn)
980 && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
981 stop_insn = next_insn (PREV_INSN (stop_insn));
982
983 for (insn = start_insn; insn != stop_insn;
984 insn = next_insn_no_annul (insn))
985 {
986 rtx link;
987 rtx real_insn = insn;
988 enum rtx_code code = GET_CODE (insn);
989
990 /* If this insn is from the target of a branch, it isn't going to
991 be used in the sequel. If it is used in both cases, this
992 test will not be true. */
993 if ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
994 && INSN_FROM_TARGET_P (insn))
995 continue;
996
997 /* If this insn is a USE made by update_block, we care about the
998 underlying insn. */
999 if (code == INSN && GET_CODE (PATTERN (insn)) == USE
1000 && INSN_P (XEXP (PATTERN (insn), 0)))
1001 real_insn = XEXP (PATTERN (insn), 0);
1002
1003 if (CALL_P (real_insn))
1004 {
1005 /* CALL clobbers all call-used regs that aren't fixed except
1006 sp, ap, and fp. Do this before setting the result of the
1007 call live. */
1008 AND_COMPL_HARD_REG_SET (current_live_regs,
1009 regs_invalidated_by_call);
1010
1011 /* A CALL_INSN sets any global register live, since it may
1012 have been modified by the call. */
1013 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1014 if (global_regs[i])
1015 SET_HARD_REG_BIT (current_live_regs, i);
1016 }
1017
1018 /* Mark anything killed in an insn to be deadened at the next
1019 label. Ignore USE insns; the only REG_DEAD notes will be for
1020 parameters. But they might be early. A CALL_INSN will usually
1021 clobber registers used for parameters. It isn't worth bothering
1022 with the unlikely case when it won't. */
1023 if ((NONJUMP_INSN_P (real_insn)
1024 && GET_CODE (PATTERN (real_insn)) != USE
1025 && GET_CODE (PATTERN (real_insn)) != CLOBBER)
1026 || JUMP_P (real_insn)
1027 || CALL_P (real_insn))
1028 {
1029 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
1030 if (REG_NOTE_KIND (link) == REG_DEAD
1031 && REG_P (XEXP (link, 0))
1032 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
1033 add_to_hard_reg_set (&pending_dead_regs,
1034 GET_MODE (XEXP (link, 0)),
1035 REGNO (XEXP (link, 0)));
1036
1037 note_stores (PATTERN (real_insn), update_live_status, NULL);
1038
1039 /* If any registers were unused after this insn, kill them.
1040 These notes will always be accurate. */
1041 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
1042 if (REG_NOTE_KIND (link) == REG_UNUSED
1043 && REG_P (XEXP (link, 0))
1044 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
1045 remove_from_hard_reg_set (&current_live_regs,
1046 GET_MODE (XEXP (link, 0)),
1047 REGNO (XEXP (link, 0)));
1048 }
1049
1050 else if (LABEL_P (real_insn))
1051 {
1052 /* A label clobbers the pending dead registers since neither
1053 reload nor jump will propagate a value across a label. */
1054 AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
1055 CLEAR_HARD_REG_SET (pending_dead_regs);
1056 }
1057
1058 /* The beginning of the epilogue corresponds to the end of the
1059 RTL chain when there are no epilogue insns. Certain resources
1060 are implicitly required at that point. */
1061 else if (NOTE_P (real_insn)
1062 && NOTE_KIND (real_insn) == NOTE_INSN_EPILOGUE_BEG)
1063 IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
1064 }
1065
1066 COPY_HARD_REG_SET (res->regs, current_live_regs);
1067 if (tinfo != NULL)
1068 {
1069 tinfo->block = b;
1070 tinfo->bb_tick = bb_ticks[b];
1071 }
1072 }
1073 else
1074 /* We didn't find the start of a basic block. Assume everything
1075 in use. This should happen only extremely rarely. */
1076 SET_HARD_REG_SET (res->regs);
1077
1078 CLEAR_RESOURCE (&set);
1079 CLEAR_RESOURCE (&needed);
1080
1081 jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
1082 set, needed);
1083
1084 /* If we hit an unconditional branch, we have another way of finding out
1085 what is live: we can see what is live at the branch target and include
1086 anything used but not set before the branch. We add the live
1087 resources found using the test below to those found until now. */
1088
1089 if (jump_insn)
1090 {
1091 struct resources new_resources;
1092 rtx stop_insn = next_active_insn (jump_insn);
1093
1094 mark_target_live_regs (insns, next_active_insn (jump_target),
1095 &new_resources);
1096 CLEAR_RESOURCE (&set);
1097 CLEAR_RESOURCE (&needed);
1098
1099 /* Include JUMP_INSN in the needed registers. */
1100 for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
1101 {
1102 mark_referenced_resources (insn, &needed, 1);
1103
1104 COPY_HARD_REG_SET (scratch, needed.regs);
1105 AND_COMPL_HARD_REG_SET (scratch, set.regs);
1106 IOR_HARD_REG_SET (new_resources.regs, scratch);
1107
1108 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
1109 }
1110
1111 IOR_HARD_REG_SET (res->regs, new_resources.regs);
1112 }
1113
1114 if (tinfo != NULL)
1115 {
1116 COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
1117 }
1118 }
1119 \f
1120 /* Initialize the resources required by mark_target_live_regs ().
1121 This should be invoked before the first call to mark_target_live_regs. */
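/* A rough sketch of the expected calling sequence (the delay-slot pass in
   reorg.c is the real user; this is illustrative only):

     init_resource_info (epilogue_insn);
     ... repeated calls to mark_target_live_regs, incr_ticks_for_insn
         and clear_hashed_info_for_insn while filling delay slots ...
     free_resource_info ();
*/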
1122
1123 void
1124 init_resource_info (rtx epilogue_insn)
1125 {
1126 int i;
1127
1128 /* Indicate what resources are required to be valid at the end of the current
1129 function. The condition code never is and memory always is. If the
1130 frame pointer is needed, it is and so is the stack pointer unless
1131 EXIT_IGNORE_STACK is nonzero. If the frame pointer is not needed, the
1132 stack pointer is. Registers used to return the function value are
1133 needed. Registers holding global variables are needed. */
1134
1135 end_of_function_needs.cc = 0;
1136 end_of_function_needs.memory = 1;
1137 end_of_function_needs.unch_memory = 0;
1138 CLEAR_HARD_REG_SET (end_of_function_needs.regs);
1139
1140 if (frame_pointer_needed)
1141 {
1142 SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
1143 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1144 SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
1145 #endif
1146 if (! EXIT_IGNORE_STACK
1147 || current_function_sp_is_unchanging)
1148 SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
1149 }
1150 else
1151 SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
1152
1153 if (current_function_return_rtx != 0)
1154 mark_referenced_resources (current_function_return_rtx,
1155 &end_of_function_needs, 1);
1156
1157 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1158 if (global_regs[i]
1159 #ifdef EPILOGUE_USES
1160 || EPILOGUE_USES (i)
1161 #endif
1162 )
1163 SET_HARD_REG_BIT (end_of_function_needs.regs, i);
1164
1165 /* The registers required to be live at the end of the function are
1166 represented in the flow information as being dead just prior to
1167 reaching the end of the function. For example, the return of a value
1168 might be represented by a USE of the return register immediately
1169 followed by an unconditional jump to the return label where the
1170 return label is the end of the RTL chain. The end of the RTL chain
1171 is then taken to mean that the return register is live.
1172
1173 This sequence is no longer maintained when epilogue instructions are
1174 added to the RTL chain. To reconstruct the original meaning, the
1175 start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
1176 point where these registers become live (start_of_epilogue_needs).
1177 If epilogue instructions are present, the registers set by those
1178 instructions won't have been processed by flow. Thus, those
1179 registers are additionally required at the end of the RTL chain
1180 (end_of_function_needs). */
1181
1182 start_of_epilogue_needs = end_of_function_needs;
1183
1184 while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
1185 {
1186 mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
1187 MARK_SRC_DEST_CALL);
1188 if (return_insn_p (epilogue_insn))
1189 break;
1190 }
1191
1192 /* Allocate and initialize the tables used by mark_target_live_regs. */
1193 target_hash_table = XCNEWVEC (struct target_info *, TARGET_HASH_PRIME);
1194 bb_ticks = XCNEWVEC (int, last_basic_block);
1195 }
1196 \f
1197 /* Free up the resources allocated to mark_target_live_regs (). This
1198 should be invoked after the last call to mark_target_live_regs (). */
1199
1200 void
1201 free_resource_info (void)
1202 {
1203 if (target_hash_table != NULL)
1204 {
1205 int i;
1206
1207 for (i = 0; i < TARGET_HASH_PRIME; ++i)
1208 {
1209 struct target_info *ti = target_hash_table[i];
1210
1211 while (ti)
1212 {
1213 struct target_info *next = ti->next;
1214 free (ti);
1215 ti = next;
1216 }
1217 }
1218
1219 free (target_hash_table);
1220 target_hash_table = NULL;
1221 }
1222
1223 if (bb_ticks != NULL)
1224 {
1225 free (bb_ticks);
1226 bb_ticks = NULL;
1227 }
1228 }
1229 \f
1230 /* Clear any hashed information that we have stored for INSN. */
1231
1232 void
1233 clear_hashed_info_for_insn (rtx insn)
1234 {
1235 struct target_info *tinfo;
1236
1237 if (target_hash_table != NULL)
1238 {
1239 for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
1240 tinfo; tinfo = tinfo->next)
1241 if (tinfo->uid == INSN_UID (insn))
1242 break;
1243
1244 if (tinfo)
1245 tinfo->block = -1;
1246 }
1247 }
1248 \f
1249 /* Increment the tick count for the basic block that contains INSN. */
1250
1251 void
1252 incr_ticks_for_insn (rtx insn)
1253 {
1254 int b = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH);
1255
1256 if (b != -1)
1257 bb_ticks[b]++;
1258 }
1259 \f
1260 /* Add TRIAL to the set of resources used at the end of the current
1261 function. */
1262 void
1263 mark_end_of_function_resources (rtx trial, int include_delayed_effects)
1264 {
1265 mark_referenced_resources (trial, &end_of_function_needs,
1266 include_delayed_effects);
1267 }