1 /* Perform instruction reorganizations for delay slot filling.
2 Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
5 Hacked by Michael Tiemann (tiemann@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 02111-1307, USA. */
23
24 /* Instruction reorganization pass.
25
26 This pass runs after register allocation and final jump
27 optimization. It should be the last pass to run before peephole.
28 It serves primarily to fill delay slots of insns, typically branch
29 and call insns. Other insns typically involve more complicated
30 interactions of data dependencies and resource constraints, and
31 are better handled by scheduling before register allocation (by the
32 function `schedule_insns').
33
34 The Branch Penalty is the number of extra cycles that are needed to
35 execute a branch insn. On an ideal machine, branches take a single
36 cycle, and the Branch Penalty is 0. Several RISC machines approach
37 branch delays differently:
38
39 The MIPS has a single branch delay slot. Most insns
40 (except other branches) can be used to fill this slot. When the
41 slot is filled, two insns execute in two cycles, reducing the
42 branch penalty to zero.
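      As an illustration (a hypothetical MIPS-style sequence, not taken
      from real compiler output), an insn from before the branch can be
      moved into the slot when it neither feeds the branch condition nor
      needs to be skipped:

          (before filling)            (after filling)
          addu  $4,$4,1               beq   $2,$0,L2
          beq   $2,$0,L2              addu  $4,$4,1     # delay slot
          nop                         ...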
43
44 The SPARC always has a branch delay slot, but its effects can be
45 annulled when the branch is not taken. This means that, failing to
46 find other sources of insns, we can hoist an insn from the branch
47 target that would only be safe to execute knowing that the branch
48 is taken.
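      As an illustrative SPARC-style sketch, the annul (",a") bit squashes
      the slot insn on the fall-through path, so an insn hoisted from the
      branch target remains safe:

          be,a   L1                 ! slot annulled when not taken
          add    %o1, 1, %o1        ! first insn of L1, hoisted here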
49
50 The HP-PA always has a branch delay slot. For unconditional branches
51 its effects can be annulled when the branch is taken. The effects
52 of the delay slot in a conditional branch can be nullified for forward
53 taken branches, or for untaken backward branches. This means
54 we can hoist insns from the fall-through path for forward branches or
55 steal insns from the target of backward branches.
56
57 The TMS320C3x and C4x have three branch delay slots. When the three
58 slots are filled, the branch penalty is zero. Most insns can fill the
59 delay slots except jump insns.
60
61 Three techniques for filling delay slots have been implemented so far:
62
63 (1) `fill_simple_delay_slots' is the simplest, most efficient way
64 to fill delay slots. This pass first looks for insns which come
65 from before the branch and which are safe to execute after the
66 branch. Then it searches after the insn requiring delay slots or,
67 in the case of a branch, for insns that are after the point at
68 which the branch merges into the fallthrough code, if such a point
69 exists. When such insns are found, the branch penalty decreases
70 and no code expansion takes place.
71
72 (2) `fill_eager_delay_slots' is more complicated: it is used for
73 scheduling conditional jumps, or for scheduling jumps which cannot
74 be filled using (1). A machine need not have annulled jumps to use
75 this strategy, but it helps (by keeping more options open).
76 `fill_eager_delay_slots' tries to guess the direction the branch
77 will go; if it guesses right 100% of the time, it can reduce the
78 branch penalty as much as `fill_simple_delay_slots' does. If it
79 guesses wrong 100% of the time, it might as well schedule nops. When
80 `fill_eager_delay_slots' takes insns from the fall-through path of
81 the jump, usually there is no code expansion; when it takes insns
82 from the branch target, there is code expansion if it is not the
83 only way to reach that target.
84
85 (3) `relax_delay_slots' uses a set of rules to simplify code that
86 has been reorganized by (1) and (2). It finds cases where
87 conditional test can be eliminated, jumps can be threaded, extra
88 insns can be eliminated, etc. It is the job of (1) and (2) to do a
89 good job of scheduling locally; `relax_delay_slots' takes care of
90 making the various individual schedules work well together. It is
91 especially tuned to handle the control flow interactions of branch
92 insns. It does nothing for insns with delay slots that do not
93 branch.
94
95 On machines that use CC0, we are very conservative. We will not make
96 a copy of an insn involving CC0 since we want to maintain a 1-1
97 correspondence between the insn that sets CC0 and the insn that uses it. The insns are
98 allowed to be separated by placing an insn that sets CC0 (but not an insn
99 that uses CC0; we could do this, but it doesn't seem worthwhile) in a
100 delay slot. In that case, we point each insn at the other with REG_CC_USER
101 and REG_CC_SETTER notes. Note that these restrictions affect very few
102 machines because most RISC machines with delay slots will not use CC0
103 (the RT is the only known exception at this point).
104
105 Not yet implemented:
106
107 The Acorn RISC Machine can conditionally execute most insns, so
108 it is profitable to move single insns into a position to execute
109 based on the condition code of the previous insn.
110
111 The HP-PA can conditionally nullify insns, providing a similar
112 effect to the ARM, differing mostly in which insn is "in charge". */
113
114 #include "config.h"
115 #include "system.h"
116 #include "coretypes.h"
117 #include "tm.h"
118 #include "toplev.h"
119 #include "rtl.h"
120 #include "tm_p.h"
121 #include "expr.h"
122 #include "function.h"
123 #include "insn-config.h"
124 #include "conditions.h"
125 #include "hard-reg-set.h"
126 #include "basic-block.h"
127 #include "regs.h"
128 #include "recog.h"
129 #include "flags.h"
130 #include "output.h"
131 #include "obstack.h"
132 #include "insn-attr.h"
133 #include "resource.h"
134 #include "except.h"
135 #include "params.h"
136
137 #ifdef DELAY_SLOTS
138
139 #ifndef ANNUL_IFTRUE_SLOTS
140 #define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
141 #endif
142 #ifndef ANNUL_IFFALSE_SLOTS
143 #define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
144 #endif
145
146 /* Insns which have delay slots that have not yet been filled. */
147
148 static struct obstack unfilled_slots_obstack;
149 static rtx *unfilled_firstobj;
150
151 /* Define macros to refer to the first and last slot containing unfilled
152 insns. These are used because the list may move and its address
153 should be recomputed at each use. */
154
155 #define unfilled_slots_base \
156 ((rtx *) obstack_base (&unfilled_slots_obstack))
157
158 #define unfilled_slots_next \
159 ((rtx *) obstack_next_free (&unfilled_slots_obstack))
160
161 /* Points to the label before the end of the function. */
162 static rtx end_of_function_label;
163
164 /* Mapping between INSN_UID's and position in the code since INSN_UID's do
165 not always monotonically increase. */
166 static int *uid_to_ruid;
167
168 /* Highest valid index in `uid_to_ruid'. */
169 static int max_uid;
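/* For illustration only, a minimal sketch of how such a mapping can be
   built (the actual initialization is done in this pass's entry point;
   this is not a quote of that code):

     int i;
     rtx insn;

     for (i = 0, insn = get_insns (); insn; i++, insn = NEXT_INSN (insn))
       uid_to_ruid[INSN_UID (insn)] = i;
*/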
170
171 static int stop_search_p (rtx, int);
172 static int resource_conflicts_p (struct resources *, struct resources *);
173 static int insn_references_resource_p (rtx, struct resources *, int);
174 static int insn_sets_resource_p (rtx, struct resources *, int);
175 static rtx find_end_label (void);
176 static rtx emit_delay_sequence (rtx, rtx, int);
177 static rtx add_to_delay_list (rtx, rtx);
178 static rtx delete_from_delay_slot (rtx);
179 static void delete_scheduled_jump (rtx);
180 static void note_delay_statistics (int, int);
181 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
182 static rtx optimize_skip (rtx);
183 #endif
184 static int get_jump_flags (rtx, rtx);
185 static int rare_destination (rtx);
186 static int mostly_true_jump (rtx, rtx);
187 static rtx get_branch_condition (rtx, rtx);
188 static int condition_dominates_p (rtx, rtx);
189 static int redirect_with_delay_slots_safe_p (rtx, rtx, rtx);
190 static int redirect_with_delay_list_safe_p (rtx, rtx, rtx);
191 static int check_annul_list_true_false (int, rtx);
192 static rtx steal_delay_list_from_target (rtx, rtx, rtx, rtx,
193 struct resources *,
194 struct resources *,
195 struct resources *,
196 int, int *, int *, rtx *);
197 static rtx steal_delay_list_from_fallthrough (rtx, rtx, rtx, rtx,
198 struct resources *,
199 struct resources *,
200 struct resources *,
201 int, int *, int *);
202 static void try_merge_delay_insns (rtx, rtx);
203 static rtx redundant_insn (rtx, rtx, rtx);
204 static int own_thread_p (rtx, rtx, int);
205 static void update_block (rtx, rtx);
206 static int reorg_redirect_jump (rtx, rtx);
207 static void update_reg_dead_notes (rtx, rtx);
208 static void fix_reg_dead_note (rtx, rtx);
209 static void update_reg_unused_notes (rtx, rtx);
210 static void fill_simple_delay_slots (int);
211 static rtx fill_slots_from_thread (rtx, rtx, rtx, rtx, int, int, int, int,
212 int *, rtx);
213 static void fill_eager_delay_slots (void);
214 static void relax_delay_slots (rtx);
215 #ifdef HAVE_return
216 static void make_return_insns (rtx);
217 #endif
218 \f
219 /* Return TRUE if this insn should stop the search for insns to fill delay
220 slots. LABELS_P indicates that labels should terminate the search.
221 In all cases, jumps terminate the search. */
222
223 static int
224 stop_search_p (rtx insn, int labels_p)
225 {
226 if (insn == 0)
227 return 1;
228
229 /* If the insn can throw an exception that is caught within the function,
230 it may effectively perform a jump from the viewpoint of the function.
231 Therefore treat it like a jump. */
232 if (can_throw_internal (insn))
233 return 1;
234
235 switch (GET_CODE (insn))
236 {
237 case NOTE:
238 case CALL_INSN:
239 return 0;
240
241 case CODE_LABEL:
242 return labels_p;
243
244 case JUMP_INSN:
245 case BARRIER:
246 return 1;
247
248 case INSN:
249 /* OK unless it contains a delay slot or is an `asm' insn of some type.
250 We don't know anything about these. */
251 return (GET_CODE (PATTERN (insn)) == SEQUENCE
252 || GET_CODE (PATTERN (insn)) == ASM_INPUT
253 || asm_noperands (PATTERN (insn)) >= 0);
254
255 default:
256 abort ();
257 }
258 }
259 \f
260 /* Return TRUE if any resources are marked in both RES1 and RES2 or if either
261 resource set contains a volatile memory reference. Otherwise, return FALSE. */
262
263 static int
264 resource_conflicts_p (struct resources *res1, struct resources *res2)
265 {
266 if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
267 || (res1->unch_memory && res2->unch_memory)
268 || res1->volatil || res2->volatil)
269 return 1;
270
271 #ifdef HARD_REG_SET
272 return (res1->regs & res2->regs) != HARD_CONST (0);
273 #else
274 {
275 int i;
276
277 for (i = 0; i < HARD_REG_SET_LONGS; i++)
278 if ((res1->regs[i] & res2->regs[i]) != 0)
279 return 1;
280 return 0;
281 }
282 #endif
283 }
284
285 /* Return TRUE if any resource marked in RES, a `struct resources', is
286 referenced by INSN. If INCLUDE_DELAYED_EFFECTS is set, also return TRUE
287 if the routine called by INSN uses those resources.
288
289 We compute this by computing all the resources referenced by INSN and
290 seeing if this conflicts with RES. It might be faster to directly check
291 ourselves, and this is the way it used to work, but it means duplicating
292 a large block of complex code. */
293
294 static int
295 insn_references_resource_p (rtx insn, struct resources *res,
296 int include_delayed_effects)
297 {
298 struct resources insn_res;
299
300 CLEAR_RESOURCE (&insn_res);
301 mark_referenced_resources (insn, &insn_res, include_delayed_effects);
302 return resource_conflicts_p (&insn_res, res);
303 }
304
305 /* Return TRUE if INSN modifies resources that are marked in RES.
306 INCLUDE_DELAYED_EFFECTS is set if the actions of that routine should be
307 included. CC0 is only modified if it is explicitly set; see comments
308 in front of mark_set_resources for details. */
309
310 static int
311 insn_sets_resource_p (rtx insn, struct resources *res,
312 int include_delayed_effects)
313 {
314 struct resources insn_sets;
315
316 CLEAR_RESOURCE (&insn_sets);
317 mark_set_resources (insn, &insn_sets, 0, include_delayed_effects);
318 return resource_conflicts_p (&insn_sets, res);
319 }
320 \f
321 /* Find a label at the end of the function or before a RETURN. If there is
322 none, make one. */
323
324 static rtx
325 find_end_label (void)
326 {
327 rtx insn;
328
329 /* If we found one previously, return it. */
330 if (end_of_function_label)
331 return end_of_function_label;
332
333 /* Otherwise, see if there is a label at the end of the function. If there
334 is, it must be that RETURN insns aren't needed, so that is our return
335 label and we don't have to do anything else. */
336
337 insn = get_last_insn ();
338 while (GET_CODE (insn) == NOTE
339 || (GET_CODE (insn) == INSN
340 && (GET_CODE (PATTERN (insn)) == USE
341 || GET_CODE (PATTERN (insn)) == CLOBBER)))
342 insn = PREV_INSN (insn);
343
344 /* When a target threads its epilogue we might already have a
345 suitable return insn. If so, put a label before it for the
346 end_of_function_label. */
347 if (GET_CODE (insn) == BARRIER
348 && GET_CODE (PREV_INSN (insn)) == JUMP_INSN
349 && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
350 {
351 rtx temp = PREV_INSN (PREV_INSN (insn));
352 end_of_function_label = gen_label_rtx ();
353 LABEL_NUSES (end_of_function_label) = 0;
354
355 /* Put the label before any USE insns that may precede the RETURN insn. */
356 while (GET_CODE (temp) == USE)
357 temp = PREV_INSN (temp);
358
359 emit_label_after (end_of_function_label, temp);
360 }
361
362 else if (GET_CODE (insn) == CODE_LABEL)
363 end_of_function_label = insn;
364 else
365 {
366 end_of_function_label = gen_label_rtx ();
367 LABEL_NUSES (end_of_function_label) = 0;
368 /* If the basic block reorder pass moves the return insn to
369 some other place, try to locate it again and put our
370 end_of_function_label there. */
371 while (insn && ! (GET_CODE (insn) == JUMP_INSN
372 && (GET_CODE (PATTERN (insn)) == RETURN)))
373 insn = PREV_INSN (insn);
374 if (insn)
375 {
376 insn = PREV_INSN (insn);
377
378 /* Put the label before any USE insns that may precede the
379 RETURN insn. */
380 while (GET_CODE (insn) == USE)
381 insn = PREV_INSN (insn);
382
383 emit_label_after (end_of_function_label, insn);
384 }
385 else
386 {
387 /* Otherwise, make a new label and emit a RETURN and BARRIER,
388 if needed. */
389 emit_label (end_of_function_label);
390 #ifdef HAVE_return
391 if (HAVE_return)
392 {
393 /* The return we make may have delay slots too. */
394 rtx insn = gen_return ();
395 insn = emit_jump_insn (insn);
396 emit_barrier ();
397 if (num_delay_slots (insn) > 0)
398 obstack_ptr_grow (&unfilled_slots_obstack, insn);
399 }
400 #endif
401 }
402 }
403
404 /* Show one additional use for this label so it won't go away until
405 we are done. */
406 ++LABEL_NUSES (end_of_function_label);
407
408 return end_of_function_label;
409 }
410 \f
411 /* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
412 the pattern of INSN with the SEQUENCE.
413
414 Chain the insns so that NEXT_INSN of each insn in the sequence points to
415 the next and NEXT_INSN of the last insn in the sequence points to
416 the first insn after the sequence. Similarly for PREV_INSN. This makes
417 it easier to scan all insns.
418
419 Returns the SEQUENCE that replaces INSN. */
420
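/* For example (shapes abbreviated, purely illustrative), a branch with one
   filled slot is represented by a single insn whose pattern is a SEQUENCE:

     (insn (sequence [
        (jump_insn ...)     ;; the insn that needed the delay slot
        (insn ...)          ;; the insn filling the slot
      ]))
*/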
421 static rtx
422 emit_delay_sequence (rtx insn, rtx list, int length)
423 {
424 int i = 1;
425 rtx li;
426 int had_barrier = 0;
427
428 /* Allocate the rtvec to hold the insns and the SEQUENCE. */
429 rtvec seqv = rtvec_alloc (length + 1);
430 rtx seq = gen_rtx_SEQUENCE (VOIDmode, seqv);
431 rtx seq_insn = make_insn_raw (seq);
432 rtx first = get_insns ();
433 rtx last = get_last_insn ();
434
435 /* Make a copy of the insn having delay slots. */
436 rtx delay_insn = copy_rtx (insn);
437
438 /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
439 confuse further processing. Update LAST in case it was the last insn.
440 We will put the BARRIER back in later. */
441 if (NEXT_INSN (insn) && GET_CODE (NEXT_INSN (insn)) == BARRIER)
442 {
443 delete_related_insns (NEXT_INSN (insn));
444 last = get_last_insn ();
445 had_barrier = 1;
446 }
447
448 /* Splice our SEQUENCE into the insn stream where INSN used to be. */
449 NEXT_INSN (seq_insn) = NEXT_INSN (insn);
450 PREV_INSN (seq_insn) = PREV_INSN (insn);
451
452 if (insn != last)
453 PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;
454
455 if (insn != first)
456 NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;
457
458 /* Note the calls to set_new_first_and_last_insn must occur after
459 SEQ_INSN has been completely spliced into the insn stream.
460
461 Otherwise CUR_INSN_UID will get set to an incorrect value because
462 set_new_first_and_last_insn will not find SEQ_INSN in the chain. */
463 if (insn == last)
464 set_new_first_and_last_insn (first, seq_insn);
465
466 if (insn == first)
467 set_new_first_and_last_insn (seq_insn, last);
468
469 /* Build our SEQUENCE and rebuild the insn chain. */
470 XVECEXP (seq, 0, 0) = delay_insn;
471 INSN_DELETED_P (delay_insn) = 0;
472 PREV_INSN (delay_insn) = PREV_INSN (seq_insn);
473
474 for (li = list; li; li = XEXP (li, 1), i++)
475 {
476 rtx tem = XEXP (li, 0);
477 rtx note, next;
478
479 /* Show that this copy of the insn isn't deleted. */
480 INSN_DELETED_P (tem) = 0;
481
482 XVECEXP (seq, 0, i) = tem;
483 PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
484 NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;
485
486 /* The SPARC assembler, for instance, emits a warning when debug info
487 is output into the delay slot. */
488 if (INSN_LOCATOR (tem) && !INSN_LOCATOR (seq_insn))
489 INSN_LOCATOR (seq_insn) = INSN_LOCATOR (tem);
490 INSN_LOCATOR (tem) = 0;
491
492 for (note = REG_NOTES (tem); note; note = next)
493 {
494 next = XEXP (note, 1);
495 switch (REG_NOTE_KIND (note))
496 {
497 case REG_DEAD:
498 /* Remove any REG_DEAD notes because we can't rely on them now
499 that the insn has been moved. */
500 remove_note (tem, note);
501 break;
502
503 case REG_LABEL:
504 /* Keep the label reference count up to date. */
505 if (GET_CODE (XEXP (note, 0)) == CODE_LABEL)
506 LABEL_NUSES (XEXP (note, 0)) ++;
507 break;
508
509 default:
510 break;
511 }
512 }
513 }
514
515 NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);
516
517 /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
518 last insn in that SEQUENCE to point to us. Similarly for the first
519 insn in the following insn if it is a SEQUENCE. */
520
521 if (PREV_INSN (seq_insn) && GET_CODE (PREV_INSN (seq_insn)) == INSN
522 && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
523 NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
524 XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
525 = seq_insn;
526
527 if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == INSN
528 && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
529 PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;
530
531 /* If there used to be a BARRIER, put it back. */
532 if (had_barrier)
533 emit_barrier_after (seq_insn);
534
535 if (i != length + 1)
536 abort ();
537
538 return seq_insn;
539 }
540
541 /* Add INSN to DELAY_LIST and return the head of the new list. The list must
542 be in the order in which the insns are to be executed. */
543
544 static rtx
545 add_to_delay_list (rtx insn, rtx delay_list)
546 {
547 /* If we have an empty list, just make a new list element. If
548 INSN has its block number recorded, clear it since we may
549 be moving the insn to a new block. */
550
551 if (delay_list == 0)
552 {
553 clear_hashed_info_for_insn (insn);
554 return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
555 }
556
557 /* Otherwise this must be an INSN_LIST. Add INSN to the end of the
558 list. */
559 XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));
560
561 return delay_list;
562 }
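/* Usage sketch (illustrative only; FIRST and SECOND stand for hypothetical
   insns):

     delay_list = add_to_delay_list (first, NULL_RTX);
     delay_list = add_to_delay_list (second, delay_list);

   builds a list in which FIRST executes before SECOND.  */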
563 \f
564 /* Delete INSN from the delay slot of the insn that it is in, which may
565 produce an insn with no delay slots. Return the new insn. */
566
567 static rtx
568 delete_from_delay_slot (rtx insn)
569 {
570 rtx trial, seq_insn, seq, prev;
571 rtx delay_list = 0;
572 int i;
573 int had_barrier = 0;
574
575 /* We first must find the insn containing the SEQUENCE with INSN in its
576 delay slot. Do this by finding an insn, TRIAL, where
577 PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL. */
578
579 for (trial = insn;
580 PREV_INSN (NEXT_INSN (trial)) == trial;
581 trial = NEXT_INSN (trial))
582 ;
583
584 seq_insn = PREV_INSN (NEXT_INSN (trial));
585 seq = PATTERN (seq_insn);
586
587 if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == BARRIER)
588 had_barrier = 1;
589
590 /* Create a delay list consisting of all the insns other than the one
591 we are deleting (unless we were the only one). */
592 if (XVECLEN (seq, 0) > 2)
593 for (i = 1; i < XVECLEN (seq, 0); i++)
594 if (XVECEXP (seq, 0, i) != insn)
595 delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);
596
597 /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
598 list, and rebuild the delay list if non-empty. */
599 prev = PREV_INSN (seq_insn);
600 trial = XVECEXP (seq, 0, 0);
601 delete_related_insns (seq_insn);
602 add_insn_after (trial, prev);
603
604 /* If there was a barrier after the old SEQUENCE, re-emit it. */
605 if (had_barrier)
606 emit_barrier_after (trial);
607
608 /* If there are any delay insns, re-emit them. Otherwise clear the
609 annul flag. */
610 if (delay_list)
611 trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);
612 else if (GET_CODE (trial) == JUMP_INSN
613 || GET_CODE (trial) == CALL_INSN
614 || GET_CODE (trial) == INSN)
615 INSN_ANNULLED_BRANCH_P (trial) = 0;
616
617 INSN_FROM_TARGET_P (insn) = 0;
618
619 /* Show we need to fill this insn again. */
620 obstack_ptr_grow (&unfilled_slots_obstack, trial);
621
622 return trial;
623 }
624 \f
625 /* Delete INSN, a JUMP_INSN. If it is a conditional jump, we must track down
626 the insn that sets CC0 for it and delete it too. */
627
628 static void
629 delete_scheduled_jump (rtx insn)
630 {
631 /* Delete the insn that sets cc0 for us. On machines without cc0, we could
632 delete the insn that sets the condition code, but it is hard to find it.
633 Since this case is rare anyway, don't bother trying; there would likely
634 be other insns that became dead anyway, which we wouldn't know to
635 delete. */
636
637 #ifdef HAVE_cc0
638 if (reg_mentioned_p (cc0_rtx, insn))
639 {
640 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
641
642 /* If a reg-note was found, it points to an insn to set CC0. This
643 insn is in the delay list of some other insn. So delete it from
644 the delay list it was in. */
645 if (note)
646 {
647 if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
648 && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
649 delete_from_delay_slot (XEXP (note, 0));
650 }
651 else
652 {
653 /* The insn setting CC0 is our previous insn, but it may be in
654 a delay slot. It will be the last insn in the delay slot, if
655 it is. */
656 rtx trial = previous_insn (insn);
657 if (GET_CODE (trial) == NOTE)
658 trial = prev_nonnote_insn (trial);
659 if (sets_cc0_p (PATTERN (trial)) != 1
660 || FIND_REG_INC_NOTE (trial, NULL_RTX))
661 return;
662 if (PREV_INSN (NEXT_INSN (trial)) == trial)
663 delete_related_insns (trial);
664 else
665 delete_from_delay_slot (trial);
666 }
667 }
668 #endif
669
670 delete_related_insns (insn);
671 }
672 \f
673 /* Counters for delay-slot filling. */
674
675 #define NUM_REORG_FUNCTIONS 2
676 #define MAX_DELAY_HISTOGRAM 3
677 #define MAX_REORG_PASSES 2
678
679 static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];
680
681 static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];
682
683 static int reorg_pass_number;
684
685 static void
686 note_delay_statistics (int slots_filled, int index)
687 {
688 num_insns_needing_delays[index][reorg_pass_number]++;
689 if (slots_filled > MAX_DELAY_HISTOGRAM)
690 slots_filled = MAX_DELAY_HISTOGRAM;
691 num_filled_delays[index][slots_filled][reorg_pass_number]++;
692 }
693 \f
694 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
695
696 /* Optimize the following cases:
697
698 1. When a conditional branch skips over only one instruction,
699 use an annulling branch and put that insn in the delay slot.
700 Use either a branch that annuls when the condition is true, or
701 invert the test with a branch that annuls when the condition is
702 false. This saves insns, since otherwise we must copy an insn
703 from the L1 target.
704
705 (orig) (skip) (otherwise)
706 Bcc.n L1 Bcc',a L1 Bcc,a L1'
707 insn insn insn2
708 L1: L1: L1:
709 insn2 insn2 insn2
710 insn3 insn3 L1':
711 insn3
712
713 2. When a conditional branch skips over only one instruction,
714 and after that, it unconditionally branches somewhere else,
715 perform a similar optimization. This saves executing the
716 second branch in the case where the inverted condition is true.
717
718 Bcc.n L1 Bcc',a L2
719 insn insn
720 L1: L1:
721 Bra L2 Bra L2
722
723 INSN is a JUMP_INSN.
724
725 This should be expanded to skip over N insns, where N is the number
726 of delay slots required. */
727
728 static rtx
729 optimize_skip (rtx insn)
730 {
731 rtx trial = next_nonnote_insn (insn);
732 rtx next_trial = next_active_insn (trial);
733 rtx delay_list = 0;
734 rtx target_label;
735 int flags;
736
737 flags = get_jump_flags (insn, JUMP_LABEL (insn));
738
739 if (trial == 0
740 || GET_CODE (trial) != INSN
741 || GET_CODE (PATTERN (trial)) == SEQUENCE
742 || recog_memoized (trial) < 0
743 || (! eligible_for_annul_false (insn, 0, trial, flags)
744 && ! eligible_for_annul_true (insn, 0, trial, flags))
745 || can_throw_internal (trial))
746 return 0;
747
748 /* There are two cases where we are just executing one insn (we assume
749 here that a branch requires only one insn; this should be generalized
750 at some point): Where the branch goes around a single insn or where
751 we have one insn followed by a branch to the same label we branch to.
752 In both of these cases, inverting the jump and annulling the delay
753 slot give the same effect in fewer insns. */
754 if ((next_trial == next_active_insn (JUMP_LABEL (insn))
755 && ! (next_trial == 0 && current_function_epilogue_delay_list != 0))
756 || (next_trial != 0
757 && GET_CODE (next_trial) == JUMP_INSN
758 && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
759 && (simplejump_p (next_trial)
760 || GET_CODE (PATTERN (next_trial)) == RETURN)))
761 {
762 if (eligible_for_annul_false (insn, 0, trial, flags))
763 {
764 if (invert_jump (insn, JUMP_LABEL (insn), 1))
765 INSN_FROM_TARGET_P (trial) = 1;
766 else if (! eligible_for_annul_true (insn, 0, trial, flags))
767 return 0;
768 }
769
770 delay_list = add_to_delay_list (trial, NULL_RTX);
771 next_trial = next_active_insn (trial);
772 update_block (trial, trial);
773 delete_related_insns (trial);
774
775 /* Also, if we are targeting an unconditional
776 branch, thread our jump to the target of that branch. Don't
777 change this into a RETURN here, because it may not accept what
778 we have in the delay slot. We'll fix this up later. */
779 if (next_trial && GET_CODE (next_trial) == JUMP_INSN
780 && (simplejump_p (next_trial)
781 || GET_CODE (PATTERN (next_trial)) == RETURN))
782 {
783 target_label = JUMP_LABEL (next_trial);
784 if (target_label == 0)
785 target_label = find_end_label ();
786
787 /* Recompute the flags based on TARGET_LABEL since threading
788 the jump to TARGET_LABEL may change the direction of the
789 jump (which may change the circumstances in which the
790 delay slot is nullified). */
791 flags = get_jump_flags (insn, target_label);
792 if (eligible_for_annul_true (insn, 0, trial, flags))
793 reorg_redirect_jump (insn, target_label);
794 }
795
796 INSN_ANNULLED_BRANCH_P (insn) = 1;
797 }
798
799 return delay_list;
800 }
801 #endif
802 \f
803 /* Encode and return branch direction and prediction information for
804 INSN assuming it will jump to LABEL.
805
806 Unconditional branches return no direction information and
807 are predicted as very likely taken. */
808
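/* For example, a conditional jump to a label later in the code that
   mostly_true_jump rates as 1 gets (ATTR_FLAG_forward | ATTR_FLAG_likely).  */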
809 static int
810 get_jump_flags (rtx insn, rtx label)
811 {
812 int flags;
813
814 /* get_jump_flags can be passed any insn with delay slots; these may
815 be INSNs, CALL_INSNs, or JUMP_INSNs. Only JUMP_INSNs have branch
816 direction information, and only if they are conditional jumps.
817
818 If LABEL is zero, then there is no way to determine the branch
819 direction. */
820 if (GET_CODE (insn) == JUMP_INSN
821 && (condjump_p (insn) || condjump_in_parallel_p (insn))
822 && INSN_UID (insn) <= max_uid
823 && label != 0
824 && INSN_UID (label) <= max_uid)
825 flags
826 = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
827 ? ATTR_FLAG_forward : ATTR_FLAG_backward;
828 /* No valid direction information. */
829 else
830 flags = 0;
831
832 /* If INSN is a conditional branch, call mostly_true_jump to
833 determine the branch prediction.
834
835 Unconditional branches are predicted as very likely taken. */
836 if (GET_CODE (insn) == JUMP_INSN
837 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
838 {
839 int prediction;
840
841 prediction = mostly_true_jump (insn, get_branch_condition (insn, label));
842 switch (prediction)
843 {
844 case 2:
845 flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
846 break;
847 case 1:
848 flags |= ATTR_FLAG_likely;
849 break;
850 case 0:
851 flags |= ATTR_FLAG_unlikely;
852 break;
853 case -1:
854 flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);
855 break;
856
857 default:
858 abort ();
859 }
860 }
861 else
862 flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
863
864 return flags;
865 }
866
867 /* Return 1 if INSN is a destination that will be branched to rarely (the
868 return point of a function); return 2 if INSN will be branched to very
869 rarely (a call to a function that doesn't return). Otherwise,
870 return 0. */
871
872 static int
873 rare_destination (rtx insn)
874 {
875 int jump_count = 0;
876 rtx next;
877
878 for (; insn; insn = next)
879 {
880 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
881 insn = XVECEXP (PATTERN (insn), 0, 0);
882
883 next = NEXT_INSN (insn);
884
885 switch (GET_CODE (insn))
886 {
887 case CODE_LABEL:
888 return 0;
889 case BARRIER:
890 /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN. We
891 don't scan past JUMP_INSNs, so any barrier we find here must
892 have been after a CALL_INSN and hence mean the call doesn't
893 return. */
894 return 2;
895 case JUMP_INSN:
896 if (GET_CODE (PATTERN (insn)) == RETURN)
897 return 1;
898 else if (simplejump_p (insn)
899 && jump_count++ < 10)
900 next = JUMP_LABEL (insn);
901 else
902 return 0;
903
904 default:
905 break;
906 }
907 }
908
909 /* If we got here it means we hit the end of the function. So this
910 is an unlikely destination. */
911
912 return 1;
913 }
914
915 /* Return truth value of the statement that this branch
916 is mostly taken. If we think that the branch is extremely likely
917 to be taken, we return 2. If the branch is slightly more likely to be
918 taken, return 1. If the branch is slightly less likely to be taken,
919 return 0, and if the branch is highly unlikely to be taken, return -1.
920
921 CONDITION, if nonzero, is the condition that JUMP_INSN is testing. */
922
923 static int
924 mostly_true_jump (rtx jump_insn, rtx condition)
925 {
926 rtx target_label = JUMP_LABEL (jump_insn);
927 rtx insn, note;
928 int rare_dest = rare_destination (target_label);
929 int rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));
930
931 /* If branch probabilities are available, then use that number since it
932 always gives a correct answer. */
933 note = find_reg_note (jump_insn, REG_BR_PROB, 0);
934 if (note)
935 {
936 int prob = INTVAL (XEXP (note, 0));
937
938 if (prob >= REG_BR_PROB_BASE * 9 / 10)
939 return 2;
940 else if (prob >= REG_BR_PROB_BASE / 2)
941 return 1;
942 else if (prob >= REG_BR_PROB_BASE / 10)
943 return 0;
944 else
945 return -1;
946 }
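  /* For instance, with REG_BR_PROB_BASE of 10000, a note value of 9500
     yields 2, 6000 yields 1, 3000 yields 0, and 500 yields -1.  */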
947
948 /* ??? Ought to use estimate_probability instead. */
949
950 /* If this is a branch outside a loop, it is highly unlikely. */
951 if (GET_CODE (PATTERN (jump_insn)) == SET
952 && GET_CODE (SET_SRC (PATTERN (jump_insn))) == IF_THEN_ELSE
953 && ((GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 1)) == LABEL_REF
954 && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 1)))
955 || (GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 2)) == LABEL_REF
956 && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 2)))))
957 return -1;
958
959 if (target_label)
960 {
961 /* If this is the test of a loop, it is very likely true. We scan
962 backwards from the target label. If we find a NOTE_INSN_LOOP_BEG
963 before the next real insn, we assume the branch is to the top of
964 the loop. */
965 for (insn = PREV_INSN (target_label);
966 insn && GET_CODE (insn) == NOTE;
967 insn = PREV_INSN (insn))
968 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
969 return 2;
970
971 /* If this is a jump to the test of a loop, it is likely true. We scan
972 forwards from the target label. If we find a NOTE_INSN_LOOP_VTOP
973 before the next real insn, we assume the branch is to the loop branch
974 test. */
975 for (insn = NEXT_INSN (target_label);
976 insn && GET_CODE (insn) == NOTE;
977 insn = NEXT_INSN (insn))
978 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP)
979 return 1;
980 }
981
982 /* Look at the relative rarities of the fallthrough and destination. If
983 they differ, we can predict the branch that way. */
984
985 switch (rare_fallthrough - rare_dest)
986 {
987 case -2:
988 return -1;
989 case -1:
990 return 0;
991 case 0:
992 break;
993 case 1:
994 return 1;
995 case 2:
996 return 2;
997 }
998
999 /* If we couldn't figure out what this jump was, assume it won't be
1000 taken. This should be rare. */
1001 if (condition == 0)
1002 return 0;
1003
1004 /* EQ tests are usually false and NE tests are usually true. Also,
1005 most quantities are positive, so we can make the appropriate guesses
1006 about signed comparisons against zero. */
1007 switch (GET_CODE (condition))
1008 {
1009 case CONST_INT:
1010 /* Unconditional branch. */
1011 return 1;
1012 case EQ:
1013 return 0;
1014 case NE:
1015 return 1;
1016 case LE:
1017 case LT:
1018 if (XEXP (condition, 1) == const0_rtx)
1019 return 0;
1020 break;
1021 case GE:
1022 case GT:
1023 if (XEXP (condition, 1) == const0_rtx)
1024 return 1;
1025 break;
1026
1027 default:
1028 break;
1029 }
1030
1031 /* Predict that backward branches are usually taken and forward branches
1032 usually are not. If we don't know whether this branch is forward or
1033 backward, assume it will be taken, since most branches are. */
1034 return (target_label == 0 || INSN_UID (jump_insn) > max_uid
1035 || INSN_UID (target_label) > max_uid
1036 || (uid_to_ruid[INSN_UID (jump_insn)]
1037 > uid_to_ruid[INSN_UID (target_label)]));
1038 }
1039
1040 /* Return the condition under which INSN will branch to TARGET. If TARGET
1041 is zero, return the condition under which INSN will return. If INSN is
1042 an unconditional branch, return const_true_rtx. If INSN isn't a simple
1043 type of jump, or it doesn't go to TARGET, return 0. */
1044
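/* For example, given a conditional jump whose pattern is

     (set (pc) (if_then_else (eq (reg 1) (const_int 0))
                             (label_ref <TARGET>)
                             (pc)))

   this returns (eq (reg 1) (const_int 0)).  */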
1045 static rtx
1046 get_branch_condition (rtx insn, rtx target)
1047 {
1048 rtx pat = PATTERN (insn);
1049 rtx src;
1050
1051 if (condjump_in_parallel_p (insn))
1052 pat = XVECEXP (pat, 0, 0);
1053
1054 if (GET_CODE (pat) == RETURN)
1055 return target == 0 ? const_true_rtx : 0;
1056
1057 else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
1058 return 0;
1059
1060 src = SET_SRC (pat);
1061 if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
1062 return const_true_rtx;
1063
1064 else if (GET_CODE (src) == IF_THEN_ELSE
1065 && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
1066 || (GET_CODE (XEXP (src, 1)) == LABEL_REF
1067 && XEXP (XEXP (src, 1), 0) == target))
1068 && XEXP (src, 2) == pc_rtx)
1069 return XEXP (src, 0);
1070
1071 else if (GET_CODE (src) == IF_THEN_ELSE
1072 && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
1073 || (GET_CODE (XEXP (src, 2)) == LABEL_REF
1074 && XEXP (XEXP (src, 2), 0) == target))
1075 && XEXP (src, 1) == pc_rtx)
1076 {
1077 enum rtx_code rev;
1078 rev = reversed_comparison_code (XEXP (src, 0), insn);
1079 if (rev != UNKNOWN)
1080 return gen_rtx_fmt_ee (rev, GET_MODE (XEXP (src, 0)),
1081 XEXP (XEXP (src, 0), 0),
1082 XEXP (XEXP (src, 0), 1));
1083 }
1084
1085 return 0;
1086 }
1087
1088 /* Return nonzero if CONDITION is more strict than the condition of
1089 INSN, i.e., if INSN will always branch if CONDITION is true. */
1090
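/* For instance, with identical operands, (eq (reg 1) (const_int 0))
   dominates (le (reg 1) (const_int 0)): whenever EQ holds, LE holds too,
   so the dominated branch would also be taken.  */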
1091 static int
1092 condition_dominates_p (rtx condition, rtx insn)
1093 {
1094 rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
1095 enum rtx_code code = GET_CODE (condition);
1096 enum rtx_code other_code;
1097
1098 if (rtx_equal_p (condition, other_condition)
1099 || other_condition == const_true_rtx)
1100 return 1;
1101
1102 else if (condition == const_true_rtx || other_condition == 0)
1103 return 0;
1104
1105 other_code = GET_CODE (other_condition);
1106 if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
1107 || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
1108 || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
1109 return 0;
1110
1111 return comparison_dominates_p (code, other_code);
1112 }
1113
1114 /* Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
1115 any insns already in the delay slot of JUMP. */
1116
1117 static int
1118 redirect_with_delay_slots_safe_p (rtx jump, rtx newlabel, rtx seq)
1119 {
1120 int flags, i;
1121 rtx pat = PATTERN (seq);
1122
1123 /* Make sure all the delay slots of this jump would still
1124 be valid after threading the jump. If they are still
1125 valid, then return nonzero. */
1126
1127 flags = get_jump_flags (jump, newlabel);
1128 for (i = 1; i < XVECLEN (pat, 0); i++)
1129 if (! (
1130 #ifdef ANNUL_IFFALSE_SLOTS
1131 (INSN_ANNULLED_BRANCH_P (jump)
1132 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
1133 ? eligible_for_annul_false (jump, i - 1,
1134 XVECEXP (pat, 0, i), flags) :
1135 #endif
1136 #ifdef ANNUL_IFTRUE_SLOTS
1137 (INSN_ANNULLED_BRANCH_P (jump)
1138 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
1139 ? eligible_for_annul_true (jump, i - 1,
1140 XVECEXP (pat, 0, i), flags) :
1141 #endif
1142 eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
1143 break;
1144
1145 return (i == XVECLEN (pat, 0));
1146 }
1147
1148 /* Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
1149 any insns we wish to place in the delay slot of JUMP. */
1150
1151 static int
1152 redirect_with_delay_list_safe_p (rtx jump, rtx newlabel, rtx delay_list)
1153 {
1154 int flags, i;
1155 rtx li;
1156
1157 /* Make sure all the insns in DELAY_LIST would still be
1158 valid after threading the jump. If they are still
1159 valid, then return nonzero. */
1160
1161 flags = get_jump_flags (jump, newlabel);
1162 for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
1163 if (! (
1164 #ifdef ANNUL_IFFALSE_SLOTS
1165 (INSN_ANNULLED_BRANCH_P (jump)
1166 && INSN_FROM_TARGET_P (XEXP (li, 0)))
1167 ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
1168 #endif
1169 #ifdef ANNUL_IFTRUE_SLOTS
1170 (INSN_ANNULLED_BRANCH_P (jump)
1171 && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
1172 ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
1173 #endif
1174 eligible_for_delay (jump, i, XEXP (li, 0), flags)))
1175 break;
1176
1177 return (li == NULL);
1178 }
1179
1180 /* DELAY_LIST is a list of insns that have already been placed into delay
1181 slots. See if all of them have the same annulling status as ANNUL_TRUE_P.
1182 If not, return 0; otherwise return 1. */
1183
1184 static int
1185 check_annul_list_true_false (int annul_true_p, rtx delay_list)
1186 {
1187 rtx temp;
1188
1189 if (delay_list)
1190 {
1191 for (temp = delay_list; temp; temp = XEXP (temp, 1))
1192 {
1193 rtx trial = XEXP (temp, 0);
1194
1195 if ((annul_true_p && INSN_FROM_TARGET_P (trial))
1196 || (!annul_true_p && !INSN_FROM_TARGET_P (trial)))
1197 return 0;
1198 }
1199 }
1200
1201 return 1;
1202 }
1203 \f
1204 /* INSN branches to an insn whose pattern SEQ is a SEQUENCE. Given that
1205 the condition tested by INSN is CONDITION and the resources shown in
1206 OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
1207 from SEQ's delay list, in addition to whatever insns it may execute
1208 (in DELAY_LIST). SETS and NEEDED denote resources already set and
1209 needed while searching for delay slot insns. Return the concatenated
1210 delay list if possible, otherwise, return 0.
1211
1212 SLOTS_TO_FILL is the total number of slots required by INSN, and
1213 PSLOTS_FILLED points to the number filled so far (also the number of
1214 insns in DELAY_LIST). It is updated with the number that have been
1215 filled from the SEQUENCE, if any.
1216
1217 PANNUL_P points to a nonzero value if we already know that we need
1218 to annul INSN. If this routine determines that annulling is needed,
1219 it may set that value nonzero.
1220
1221 PNEW_THREAD points to a location that is to receive the place at which
1222 execution should continue. */
1223
1224 static rtx
1225 steal_delay_list_from_target (rtx insn, rtx condition, rtx seq,
1226 rtx delay_list, struct resources *sets,
1227 struct resources *needed,
1228 struct resources *other_needed,
1229 int slots_to_fill, int *pslots_filled,
1230 int *pannul_p, rtx *pnew_thread)
1231 {
1232 rtx temp;
1233 int slots_remaining = slots_to_fill - *pslots_filled;
1234 int total_slots_filled = *pslots_filled;
1235 rtx new_delay_list = 0;
1236 int must_annul = *pannul_p;
1237 int used_annul = 0;
1238 int i;
1239 struct resources cc_set;
1240
1241 /* We can't do anything if there are more delay slots in SEQ than we
1242 can handle, or if we don't know that it will be a taken branch.
1243 We know that it will be a taken branch if it is either an unconditional
1244 branch or a conditional branch with a stricter branch condition.
1245
1246 Also, exit if the branch has more than one set, since then it is computing
1247 other results that can't be ignored, e.g. the HPPA mov&branch instruction.
1248 ??? It may be possible to move other sets into INSN in addition to
1249 moving the instructions in the delay slots.
1250
1251 We cannot steal the delay list if one of the instructions in the
1252 current delay_list modifies the condition codes and the jump in the
1253 sequence is a conditional jump. In that case we cannot change the
1254 direction of the jump, because the condition codes would affect the
1255 direction of the jump in the sequence. */
1256
1257 CLEAR_RESOURCE (&cc_set);
1258 for (temp = delay_list; temp; temp = XEXP (temp, 1))
1259 {
1260 rtx trial = XEXP (temp, 0);
1261
1262 mark_set_resources (trial, &cc_set, 0, MARK_SRC_DEST_CALL);
1263 if (insn_references_resource_p (XVECEXP (seq , 0, 0), &cc_set, 0))
1264 return delay_list;
1265 }
1266
1267 if (XVECLEN (seq, 0) - 1 > slots_remaining
1268 || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
1269 || ! single_set (XVECEXP (seq, 0, 0)))
1270 return delay_list;
1271
1272 #ifdef MD_CAN_REDIRECT_BRANCH
1273 /* On some targets, branches with delay slots can have a limited
1274 displacement. Give the back end a chance to tell us we can't do
1275 this. */
1276 if (! MD_CAN_REDIRECT_BRANCH (insn, XVECEXP (seq, 0, 0)))
1277 return delay_list;
1278 #endif
1279
1280 for (i = 1; i < XVECLEN (seq, 0); i++)
1281 {
1282 rtx trial = XVECEXP (seq, 0, i);
1283 int flags;
1284
1285 if (insn_references_resource_p (trial, sets, 0)
1286 || insn_sets_resource_p (trial, needed, 0)
1287 || insn_sets_resource_p (trial, sets, 0)
1288 #ifdef HAVE_cc0
1289 /* If TRIAL sets CC0, we can't copy it, so we can't steal this
1290 delay list. */
1291 || find_reg_note (trial, REG_CC_USER, NULL_RTX)
1292 #endif
1293 /* If TRIAL is from the fallthrough code of an annulled branch insn
1294 in SEQ, we cannot use it. */
1295 || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
1296 && ! INSN_FROM_TARGET_P (trial)))
1297 return delay_list;
1298
1299 /* If this insn was already done (usually in a previous delay slot),
1300 pretend we put it in our delay slot. */
1301 if (redundant_insn (trial, insn, new_delay_list))
1302 continue;
1303
1304 /* We will end up re-vectoring this branch, so compute flags
1305 based on jumping to the new label. */
1306 flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));
1307
1308 if (! must_annul
1309 && ((condition == const_true_rtx
1310 || (! insn_sets_resource_p (trial, other_needed, 0)
1311 && ! may_trap_p (PATTERN (trial)))))
1312 ? eligible_for_delay (insn, total_slots_filled, trial, flags)
1313 : (must_annul || (delay_list == NULL && new_delay_list == NULL))
1314 && (must_annul = 1,
1315 check_annul_list_true_false (0, delay_list)
1316 && check_annul_list_true_false (0, new_delay_list)
1317 && eligible_for_annul_false (insn, total_slots_filled,
1318 trial, flags)))
1319 {
1320 if (must_annul)
1321 used_annul = 1;
1322 temp = copy_rtx (trial);
1323 INSN_FROM_TARGET_P (temp) = 1;
1324 new_delay_list = add_to_delay_list (temp, new_delay_list);
1325 total_slots_filled++;
1326
1327 if (--slots_remaining == 0)
1328 break;
1329 }
1330 else
1331 return delay_list;
1332 }
1333
1334 /* Show the place to which we will be branching. */
1335 *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));
1336
1337 /* Add any new insns to the delay list and update the count of the
1338 number of slots filled. */
1339 *pslots_filled = total_slots_filled;
1340 if (used_annul)
1341 *pannul_p = 1;
1342
1343 if (delay_list == 0)
1344 return new_delay_list;
1345
1346 for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
1347 delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);
1348
1349 return delay_list;
1350 }
1351 \f
1352 /* Similar to steal_delay_list_from_target except that SEQ is on the
1353 fallthrough path of INSN. Here we only do something if the delay insn
1354 of SEQ is an unconditional branch. In that case we steal its delay slot
1355 for INSN since unconditional branches are much easier to fill. */
1356
1357 static rtx
1358 steal_delay_list_from_fallthrough (rtx insn, rtx condition, rtx seq,
1359 rtx delay_list, struct resources *sets,
1360 struct resources *needed,
1361 struct resources *other_needed,
1362 int slots_to_fill, int *pslots_filled,
1363 int *pannul_p)
1364 {
1365 int i;
1366 int flags;
1367 int must_annul = *pannul_p;
1368 int used_annul = 0;
1369
1370 flags = get_jump_flags (insn, JUMP_LABEL (insn));
1371
1372 /* We can't do anything if SEQ's delay insn isn't an
1373 unconditional branch. */
1374
1375 if (! simplejump_p (XVECEXP (seq, 0, 0))
1376 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)
1377 return delay_list;
1378
1379 for (i = 1; i < XVECLEN (seq, 0); i++)
1380 {
1381 rtx trial = XVECEXP (seq, 0, i);
1382
1383 /* If TRIAL sets CC0, stealing it will move it too far from the use
1384 of CC0. */
1385 if (insn_references_resource_p (trial, sets, 0)
1386 || insn_sets_resource_p (trial, needed, 0)
1387 || insn_sets_resource_p (trial, sets, 0)
1388 #ifdef HAVE_cc0
1389 || sets_cc0_p (PATTERN (trial))
1390 #endif
1391 )
1392
1393 break;
1394
1395 /* If this insn was already done, we don't need it. */
1396 if (redundant_insn (trial, insn, delay_list))
1397 {
1398 delete_from_delay_slot (trial);
1399 continue;
1400 }
1401
1402 if (! must_annul
1403 && ((condition == const_true_rtx
1404 || (! insn_sets_resource_p (trial, other_needed, 0)
1405 && ! may_trap_p (PATTERN (trial)))))
1406 ? eligible_for_delay (insn, *pslots_filled, trial, flags)
1407 : (must_annul || delay_list == NULL) && (must_annul = 1,
1408 check_annul_list_true_false (1, delay_list)
1409 && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
1410 {
1411 if (must_annul)
1412 used_annul = 1;
1413 delete_from_delay_slot (trial);
1414 delay_list = add_to_delay_list (trial, delay_list);
1415
1416 if (++(*pslots_filled) == slots_to_fill)
1417 break;
1418 }
1419 else
1420 break;
1421 }
1422
1423 if (used_annul)
1424 *pannul_p = 1;
1425 return delay_list;
1426 }
1427 \f
1428 /* Try merging insns starting at THREAD which match exactly the insns in
1429 INSN's delay list.
1430
1431 If all insns were matched and the insn was previously annulling, the
1432 annul bit will be cleared.
1433
1434 For each insn that is merged, if the branch is or will be non-annulling,
1435 we delete the merged insn. */
1436
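/* For example (illustrative), if INSN's single delay slot holds a copy of
   an increment and THREAD begins with that same increment, the insn at
   THREAD is deleted in the non-annulling case and the slot insn is marked
   as no longer coming only from the target.  */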
1437 static void
1438 try_merge_delay_insns (rtx insn, rtx thread)
1439 {
1440 rtx trial, next_trial;
1441 rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
1442 int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
1443 int slot_number = 1;
1444 int num_slots = XVECLEN (PATTERN (insn), 0);
1445 rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1446 struct resources set, needed;
1447 rtx merged_insns = 0;
1448 int i;
1449 int flags;
1450
1451 flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));
1452
1453 CLEAR_RESOURCE (&needed);
1454 CLEAR_RESOURCE (&set);
1455
1456 /* If this is not an annulling branch, take into account anything needed in
1457 INSN's delay slot. This prevents two increments from being incorrectly
1458 folded into one. If we are annulling, this would be the correct
1459 thing to do. (The alternative, looking at things set in NEXT_TO_MATCH
1460 will essentially disable this optimization. This method is somewhat of
1461 a kludge, but I don't see a better way.) */
1462 if (! annul_p)
1463 for (i = 1 ; i < num_slots; i++)
1464 if (XVECEXP (PATTERN (insn), 0, i))
1465 mark_referenced_resources (XVECEXP (PATTERN (insn), 0, i), &needed, 1);
1466
1467 for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
1468 {
1469 rtx pat = PATTERN (trial);
1470 rtx oldtrial = trial;
1471
1472 next_trial = next_nonnote_insn (trial);
1473
1474 /* TRIAL must be a CALL_INSN or INSN. Skip USE and CLOBBER. */
1475 if (GET_CODE (trial) == INSN
1476 && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
1477 continue;
1478
1479 if (GET_CODE (next_to_match) == GET_CODE (trial)
1480 #ifdef HAVE_cc0
1481 /* We can't share an insn that sets cc0. */
1482 && ! sets_cc0_p (pat)
1483 #endif
1484 && ! insn_references_resource_p (trial, &set, 1)
1485 && ! insn_sets_resource_p (trial, &set, 1)
1486 && ! insn_sets_resource_p (trial, &needed, 1)
1487 && (trial = try_split (pat, trial, 0)) != 0
1488 /* Update next_trial, in case try_split succeeded. */
1489 && (next_trial = next_nonnote_insn (trial))
1490 /* Likewise THREAD. */
1491 && (thread = oldtrial == thread ? trial : thread)
1492 && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
1493 /* Have to test this condition if annul condition is different
1494 from (and less restrictive than) non-annulling one. */
1495 && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
1496 {
1497
1498 if (! annul_p)
1499 {
1500 update_block (trial, thread);
1501 if (trial == thread)
1502 thread = next_active_insn (thread);
1503
1504 delete_related_insns (trial);
1505 INSN_FROM_TARGET_P (next_to_match) = 0;
1506 }
1507 else
1508 merged_insns = gen_rtx_INSN_LIST (VOIDmode, trial, merged_insns);
1509
1510 if (++slot_number == num_slots)
1511 break;
1512
1513 next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1514 }
1515
1516 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
1517 mark_referenced_resources (trial, &needed, 1);
1518 }
1519
1520 /* See if we stopped on a filled insn. If we did, try to see if its
1521 delay slots match. */
1522 if (slot_number != num_slots
1523 && trial && GET_CODE (trial) == INSN
1524 && GET_CODE (PATTERN (trial)) == SEQUENCE
1525 && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
1526 {
1527 rtx pat = PATTERN (trial);
1528 rtx filled_insn = XVECEXP (pat, 0, 0);
1529
1530 /* Account for resources set/needed by the filled insn. */
1531 mark_set_resources (filled_insn, &set, 0, MARK_SRC_DEST_CALL);
1532 mark_referenced_resources (filled_insn, &needed, 1);
1533
1534 for (i = 1; i < XVECLEN (pat, 0); i++)
1535 {
1536 rtx dtrial = XVECEXP (pat, 0, i);
1537
1538 if (! insn_references_resource_p (dtrial, &set, 1)
1539 && ! insn_sets_resource_p (dtrial, &set, 1)
1540 && ! insn_sets_resource_p (dtrial, &needed, 1)
1541 #ifdef HAVE_cc0
1542 && ! sets_cc0_p (PATTERN (dtrial))
1543 #endif
1544 && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
1545 && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
1546 {
1547 if (! annul_p)
1548 {
1549 rtx new;
1550
1551 update_block (dtrial, thread);
1552 new = delete_from_delay_slot (dtrial);
1553 if (INSN_DELETED_P (thread))
1554 thread = new;
1555 INSN_FROM_TARGET_P (next_to_match) = 0;
1556 }
1557 else
1558 merged_insns = gen_rtx_INSN_LIST (SImode, dtrial,
1559 merged_insns);
1560
1561 if (++slot_number == num_slots)
1562 break;
1563
1564 next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1565 }
1566 else
1567 {
1568 /* Keep track of the set/referenced resources for the delay
1569 slots of any trial insns we encounter. */
1570 mark_set_resources (dtrial, &set, 0, MARK_SRC_DEST_CALL);
1571 mark_referenced_resources (dtrial, &needed, 1);
1572 }
1573 }
1574 }
1575
1576 /* If all insns in the delay slot have been matched and we were previously
1577 annulling the branch, we need not do so any more. In that case delete all the
1578 merged insns. Also clear the INSN_FROM_TARGET_P bit of each insn in
1579 the delay list so that we know that it isn't only being used at the
1580 target. */
1581 if (slot_number == num_slots && annul_p)
1582 {
1583 for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
1584 {
1585 if (GET_MODE (merged_insns) == SImode)
1586 {
1587 rtx new;
1588
1589 update_block (XEXP (merged_insns, 0), thread);
1590 new = delete_from_delay_slot (XEXP (merged_insns, 0));
1591 if (INSN_DELETED_P (thread))
1592 thread = new;
1593 }
1594 else
1595 {
1596 update_block (XEXP (merged_insns, 0), thread);
1597 delete_related_insns (XEXP (merged_insns, 0));
1598 }
1599 }
1600
1601 INSN_ANNULLED_BRANCH_P (delay_insn) = 0;
1602
1603 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1604 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
1605 }
1606 }
1607 \f
1608 /* See if INSN is redundant with an insn in front of TARGET. Often this
1609 is called when INSN is a candidate for a delay slot of TARGET.
1610 DELAY_LIST is a list of insns that will be placed in delay slots of TARGET in front
1611 of INSN. Often INSN will be redundant with an insn in a delay slot of
1612 some previous insn. This happens when we have a series of branches to the
1613 same label; in that case the first insn at the target might want to go
1614 into each of the delay slots.
1615
1616 If we are not careful, this routine can take up a significant fraction
1617 of the total compilation time (4%), but only wins rarely. Hence we
1618 speed this routine up by making two passes. The first pass goes back
1619 until it hits a label and sees if it finds an insn with an identical
1620 pattern. Only in this (relatively rare) event does it check for
1621 data conflicts.
1622
1623 We do not split insns we encounter. This could cause us not to find a
1624 redundant insn, but the cost of splitting seems greater than the possible
1625 gain in rare cases. */
1626
1627 static rtx
1628 redundant_insn (rtx insn, rtx target, rtx delay_list)
1629 {
1630 rtx target_main = target;
1631 rtx ipat = PATTERN (insn);
1632 rtx trial, pat;
1633 struct resources needed, set;
1634 int i;
1635 unsigned insns_to_search;
1636
1637 /* If INSN has any REG_UNUSED notes, it can't match anything since we
1638 are allowed to not actually assign to such a register. */
1639 if (find_reg_note (insn, REG_UNUSED, NULL_RTX) != 0)
1640 return 0;
1641
1642 /* Scan backwards looking for a match. */
1643 for (trial = PREV_INSN (target),
1644 insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
1645 trial && insns_to_search > 0;
1646 trial = PREV_INSN (trial), --insns_to_search)
1647 {
1648 if (GET_CODE (trial) == CODE_LABEL)
1649 return 0;
1650
1651 if (! INSN_P (trial))
1652 continue;
1653
1654 pat = PATTERN (trial);
1655 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1656 continue;
1657
1658 if (GET_CODE (pat) == SEQUENCE)
1659 {
1660 /* Stop for a CALL and its delay slots because it is difficult to
1661 track its resource needs correctly. */
1662 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
1663 return 0;
1664
1665 /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
1666 slots because it is difficult to track its resource needs
1667 correctly. */
1668
1669 #ifdef INSN_SETS_ARE_DELAYED
1670 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1671 return 0;
1672 #endif
1673
1674 #ifdef INSN_REFERENCES_ARE_DELAYED
1675 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1676 return 0;
1677 #endif
1678
1679 /* See if any of the insns in the delay slot match, updating
1680 resource requirements as we go. */
1681 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
1682 if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
1683 && rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat)
1684 && ! find_reg_note (XVECEXP (pat, 0, i), REG_UNUSED, NULL_RTX))
1685 break;
1686
1687 /* If we found a match, exit this loop early. */
1688 if (i > 0)
1689 break;
1690 }
1691
1692 else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat)
1693 && ! find_reg_note (trial, REG_UNUSED, NULL_RTX))
1694 break;
1695 }
1696
1697 /* If we didn't find an insn that matches, return 0. */
1698 if (trial == 0)
1699 return 0;
1700
1701 /* See what resources this insn sets and needs. If they overlap, or
1702 if this insn references CC0, it can't be redundant. */
1703
1704 CLEAR_RESOURCE (&needed);
1705 CLEAR_RESOURCE (&set);
1706 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
1707 mark_referenced_resources (insn, &needed, 1);
1708
1709 /* If TARGET is a SEQUENCE, get the main insn. */
1710 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
1711 target_main = XVECEXP (PATTERN (target), 0, 0);
1712
1713 if (resource_conflicts_p (&needed, &set)
1714 #ifdef HAVE_cc0
1715 || reg_mentioned_p (cc0_rtx, ipat)
1716 #endif
1717 /* The insn requiring the delay may not set anything needed or set by
1718 INSN. */
1719 || insn_sets_resource_p (target_main, &needed, 1)
1720 || insn_sets_resource_p (target_main, &set, 1))
1721 return 0;
1722
1723 /* Insns we pass may not set either NEEDED or SET, so merge them for
1724 simpler tests. */
1725 needed.memory |= set.memory;
1726 needed.unch_memory |= set.unch_memory;
1727 IOR_HARD_REG_SET (needed.regs, set.regs);
1728
1729 /* This insn isn't redundant if it conflicts with an insn that either is
1730 or will be in a delay slot of TARGET. */
1731
1732 while (delay_list)
1733 {
1734 if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, 1))
1735 return 0;
1736 delay_list = XEXP (delay_list, 1);
1737 }
1738
1739 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
1740 for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
1741 if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
1742 return 0;
1743
1744 /* Scan backwards until we reach a label or an insn that uses something
1745 INSN sets or sets something INSN uses or sets. */
1746
1747 for (trial = PREV_INSN (target),
1748 insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
1749 trial && GET_CODE (trial) != CODE_LABEL && insns_to_search > 0;
1750 trial = PREV_INSN (trial), --insns_to_search)
1751 {
1752 if (GET_CODE (trial) != INSN && GET_CODE (trial) != CALL_INSN
1753 && GET_CODE (trial) != JUMP_INSN)
1754 continue;
1755
1756 pat = PATTERN (trial);
1757 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1758 continue;
1759
1760 if (GET_CODE (pat) == SEQUENCE)
1761 {
1762 /* If this is a CALL_INSN with its delay slots, it is hard to track
1763 the resource needs properly, so give up. */
1764 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
1765 return 0;
1766
1767 /* If this is an INSN or JUMP_INSN with delayed effects, it
1768 is hard to track the resource needs properly, so give up. */
1769
1770 #ifdef INSN_SETS_ARE_DELAYED
1771 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1772 return 0;
1773 #endif
1774
1775 #ifdef INSN_REFERENCES_ARE_DELAYED
1776 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1777 return 0;
1778 #endif
1779
1780 /* See if any of the insns in the delay slot match, updating
1781 resource requirements as we go. */
1782 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
1783 {
1784 rtx candidate = XVECEXP (pat, 0, i);
1785
1786 /* If an insn will be annulled if the branch is false, it isn't
1787 considered as a possible duplicate insn. */
1788 if (rtx_equal_p (PATTERN (candidate), ipat)
1789 && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
1790 && INSN_FROM_TARGET_P (candidate)))
1791 {
1792 /* Show that this insn will be used in the sequel. */
1793 INSN_FROM_TARGET_P (candidate) = 0;
1794 return candidate;
1795 }
1796
1797 /* Unless this is an annulled insn from the target of a branch,
1798 we must stop if it sets anything needed or set by INSN. */
1799 if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
1800 || ! INSN_FROM_TARGET_P (candidate))
1801 && insn_sets_resource_p (candidate, &needed, 1))
1802 return 0;
1803 }
1804
1805 /* If the insn requiring the delay slot conflicts with INSN, we
1806 must stop. */
1807 if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
1808 return 0;
1809 }
1810 else
1811 {
1812 /* See if TRIAL is the same as INSN. */
1813 pat = PATTERN (trial);
1814 if (rtx_equal_p (pat, ipat))
1815 return trial;
1816
1817 /* Can't go any further if TRIAL conflicts with INSN. */
1818 if (insn_sets_resource_p (trial, &needed, 1))
1819 return 0;
1820 }
1821 }
1822
1823 return 0;
1824 }
1825 \f
1826 /* Return 1 if THREAD can only be executed in one way. If LABEL is nonzero,
1827 it is the target of the branch insn being scanned. If ALLOW_FALLTHROUGH
1828 is nonzero, we are allowed to fall into this thread; otherwise, we are
1829 not.
1830
1831 If LABEL is used more than once or we pass a label other than LABEL before
1832 finding an active insn, we do not own this thread. */
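
/* Illustrative sketch (hypothetical code): for

           b   L1
           ...
           (barrier)
       L1: i1            ; LABEL_NUSES (L1) == 1
           i2

   the branch owns the thread at L1: no other path reaches i1, so i1
   may be removed into the branch's delay slot.  With a second user of
   L1, or an intervening label, removing i1 would break other paths.  */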
1833
1834 static int
1835 own_thread_p (rtx thread, rtx label, int allow_fallthrough)
1836 {
1837 rtx active_insn;
1838 rtx insn;
1839
1840 /* We don't own the function end. */
1841 if (thread == 0)
1842 return 0;
1843
1844 /* Get the first active insn, or THREAD, if it is an active insn. */
1845 active_insn = next_active_insn (PREV_INSN (thread));
1846
1847 for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
1848 if (GET_CODE (insn) == CODE_LABEL
1849 && (insn != label || LABEL_NUSES (insn) != 1))
1850 return 0;
1851
1852 if (allow_fallthrough)
1853 return 1;
1854
1855 /* Ensure that we reach a BARRIER before any insn or label. */
1856 for (insn = prev_nonnote_insn (thread);
1857 insn == 0 || GET_CODE (insn) != BARRIER;
1858 insn = prev_nonnote_insn (insn))
1859 if (insn == 0
1860 || GET_CODE (insn) == CODE_LABEL
1861 || (GET_CODE (insn) == INSN
1862 && GET_CODE (PATTERN (insn)) != USE
1863 && GET_CODE (PATTERN (insn)) != CLOBBER))
1864 return 0;
1865
1866 return 1;
1867 }
1868 \f
1869 /* Called when INSN is being moved from a location near the target of a jump.
1870 We leave a marker of the form (use (INSN)) immediately in front
1871 of WHERE for mark_target_live_regs. These markers will be deleted when
1872 reorg finishes.
1873
1874 We used to try to update the live status of registers if WHERE is at
1875 the start of a basic block, but that can't work since we may remove a
1876 BARRIER in relax_delay_slots. */
1877
1878 static void
1879 update_block (rtx insn, rtx where)
1880 {
1881 /* Ignore if this was in a delay slot and it came from the target of
1882 a branch. */
1883 if (INSN_FROM_TARGET_P (insn))
1884 return;
1885
1886 emit_insn_before (gen_rtx_USE (VOIDmode, insn), where);
1887
1888 /* INSN might be making a value live in a block where it didn't use to
1889 be. So recompute liveness information for this block. */
1890
1891 incr_ticks_for_insn (insn);
1892 }
1893
1894 /* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
1895 the basic block containing the jump. */
1896
1897 static int
1898 reorg_redirect_jump (rtx jump, rtx nlabel)
1899 {
1900 incr_ticks_for_insn (jump);
1901 return redirect_jump (jump, nlabel, 1);
1902 }
1903
1904 /* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
1905 We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
1906 that reference values used in INSN. If we find one, then we move the
1907 REG_DEAD note to INSN.
1908
1909 This is needed to handle the case where a later insn (after INSN) has a
1910 REG_DEAD note for a register used by INSN, and this later insn subsequently
1911 gets moved before a CODE_LABEL because it is a redundant insn. In this
1912 case, mark_target_live_regs may be confused into thinking the register
1913 is dead because it sees a REG_DEAD note immediately before a CODE_LABEL. */
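
/* Illustrative sketch (hypothetical registers):

       INSN:   r3 = r2 + 1                 ; uses r2
       ...
       P:      r4 = r0  with (REG_DEAD r2)

   The note migrates from P to INSN; otherwise, if P is later placed
   just before a CODE_LABEL as a redundant insn, mark_target_live_regs
   could wrongly conclude that r2 is dead at that label.  */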
1914
1915 static void
1916 update_reg_dead_notes (rtx insn, rtx delayed_insn)
1917 {
1918 rtx p, link, next;
1919
1920 for (p = next_nonnote_insn (insn); p != delayed_insn;
1921 p = next_nonnote_insn (p))
1922 for (link = REG_NOTES (p); link; link = next)
1923 {
1924 next = XEXP (link, 1);
1925
1926 if (REG_NOTE_KIND (link) != REG_DEAD
1927 || !REG_P (XEXP (link, 0)))
1928 continue;
1929
1930 if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
1931 {
1932 /* Move the REG_DEAD note from P to INSN. */
1933 remove_note (p, link);
1934 XEXP (link, 1) = REG_NOTES (insn);
1935 REG_NOTES (insn) = link;
1936 }
1937 }
1938 }
1939
1940 /* Called when an insn redundant with start_insn is deleted. If there
1941 is a REG_DEAD note for the target of start_insn between start_insn
1942 and stop_insn, then the REG_DEAD note needs to be deleted since the
1943 value no longer dies there.
1944
1945 If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
1946 confused into thinking the register is dead. */
1947
1948 static void
1949 fix_reg_dead_note (rtx start_insn, rtx stop_insn)
1950 {
1951 rtx p, link, next;
1952
1953 for (p = next_nonnote_insn (start_insn); p != stop_insn;
1954 p = next_nonnote_insn (p))
1955 for (link = REG_NOTES (p); link; link = next)
1956 {
1957 next = XEXP (link, 1);
1958
1959 if (REG_NOTE_KIND (link) != REG_DEAD
1960 || !REG_P (XEXP (link, 0)))
1961 continue;
1962
1963 if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
1964 {
1965 remove_note (p, link);
1966 return;
1967 }
1968 }
1969 }
1970
1971 /* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
1972
1973 This handles the case of udivmodXi4 instructions which optimize their
1974 output depending on whether any REG_UNUSED notes are present.
1975 We must make sure that INSN calculates as many results as REDUNDANT_INSN
1976 does. */
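
/* Illustrative sketch, using a udivmod-style insn that yields both a
   quotient and a remainder (hypothetical registers):

       INSN:            r4 = r2 / r3   with (REG_UNUSED r5)
       REDUNDANT_INSN:  r4 = r2 / r3,  r5 = r2 % r3

   Since INSN now stands in for REDUNDANT_INSN, it may not optimize
   away the remainder, so its REG_UNUSED note for r5 is removed.  */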
1977
1978 static void
1979 update_reg_unused_notes (rtx insn, rtx redundant_insn)
1980 {
1981 rtx link, next;
1982
1983 for (link = REG_NOTES (insn); link; link = next)
1984 {
1985 next = XEXP (link, 1);
1986
1987 if (REG_NOTE_KIND (link) != REG_UNUSED
1988 || !REG_P (XEXP (link, 0)))
1989 continue;
1990
1991 if (! find_regno_note (redundant_insn, REG_UNUSED,
1992 REGNO (XEXP (link, 0))))
1993 remove_note (insn, link);
1994 }
1995 }
1996 \f
1997 /* Scan a function looking for insns that need a delay slot and find insns to
1998 put into the delay slot.
1999
2000 NON_JUMPS_P is nonzero if we are to only try to fill non-jump insns (such
2001 as calls). We do these first since we don't want jump insns (that are
2002 easier to fill) to get the only insns that could be used for non-jump insns.
2003 When it is zero, only try to fill JUMP_INSNs.
2004
2005 When slots are filled in this manner, the insns (including the
2006 delay_insn) are put together in a SEQUENCE rtx. In this fashion,
2007 it is possible to tell whether a delay slot has really been filled
2008 or not. `final' knows how to deal with this, by communicating
2009 through FINAL_SEQUENCE. */
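
/* Schematically (details vary by target), a filled slot becomes

       (insn (sequence [(call_insn ...)      ; insn needing the slot
                        (insn ...)]))        ; insn filling the slot

   while an unfilled slot simply stays an ordinary insn.  */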
2010
2011 static void
2012 fill_simple_delay_slots (int non_jumps_p)
2013 {
2014 rtx insn, pat, trial, next_trial;
2015 int i;
2016 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2017 struct resources needed, set;
2018 int slots_to_fill, slots_filled;
2019 rtx delay_list;
2020
2021 for (i = 0; i < num_unfilled_slots; i++)
2022 {
2023 int flags;
2024 /* Get the next insn to fill. If it has already had any slots assigned,
2025 we can't do anything with it. Maybe we'll improve this later. */
2026
2027 insn = unfilled_slots_base[i];
2028 if (insn == 0
2029 || INSN_DELETED_P (insn)
2030 || (GET_CODE (insn) == INSN
2031 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2032 || (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
2033 || (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
2034 continue;
2035
2036 /* It may have been that this insn used to need delay slots, but
2037 now doesn't; ignore in that case. This can happen, for example,
2038 on the HP PA RISC, where the number of delay slots depends on
2039 what insns are nearby. */
2040 slots_to_fill = num_delay_slots (insn);
2041
2042 /* Some machine descriptions have defined instructions to have
2043 delay slots only in certain circumstances which may depend on
2044 nearby insns (which change due to reorg's actions).
2045
2046 For example, the PA port normally has delay slots for unconditional
2047 jumps.
2048
2049 However, the PA port claims such jumps do not have a delay slot
2050 if they are immediate successors of certain CALL_INSNs. This
2051 allows the port to favor filling the delay slot of the call with
2052 the unconditional jump. */
2053 if (slots_to_fill == 0)
2054 continue;
2055
2056 /* This insn needs, or can use, some delay slots. SLOTS_TO_FILL
2057 says how many. After initialization, first try optimizing
2058
2059 call _foo call _foo
2060 nop add %o7,.-L1,%o7
2061 b,a L1
2062 nop
2063
2064 If this case applies, the delay slot of the call is filled with
2065 the unconditional jump. This is done first to avoid having the
2066 delay slot of the call filled in the backward scan. Also, since
2067 the unconditional jump is likely to also have a delay slot, that
2068 insn must exist when it is subsequently scanned.
2069
2070 This is tried on each insn with delay slots as some machines
2071 have insns which perform calls, but are not represented as
2072 CALL_INSNs. */
2073
2074 slots_filled = 0;
2075 delay_list = 0;
2076
2077 if (GET_CODE (insn) == JUMP_INSN)
2078 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2079 else
2080 flags = get_jump_flags (insn, NULL_RTX);
2081
2082 if ((trial = next_active_insn (insn))
2083 && GET_CODE (trial) == JUMP_INSN
2084 && simplejump_p (trial)
2085 && eligible_for_delay (insn, slots_filled, trial, flags)
2086 && no_labels_between_p (insn, trial)
2087 && ! can_throw_internal (trial))
2088 {
2089 rtx *tmp;
2090 slots_filled++;
2091 delay_list = add_to_delay_list (trial, delay_list);
2092
2093 /* TRIAL may have had its delay slot filled, then unfilled. When
2094 the delay slot is unfilled, TRIAL is placed back on the unfilled
2095 slots obstack. Unfortunately, it is placed on the end of the
2096 obstack, not in its original location. Therefore, we must search
2097 from entry i + 1 to the end of the unfilled slots obstack to
2098 try and find TRIAL. */
2099 tmp = &unfilled_slots_base[i + 1];
2100 while (*tmp != trial && tmp != unfilled_slots_next)
2101 tmp++;
2102
2103 /* Remove the unconditional jump from consideration for delay slot
2104 filling and unthread it. */
2105 if (*tmp == trial)
2106 *tmp = 0;
2107 {
2108 rtx next = NEXT_INSN (trial);
2109 rtx prev = PREV_INSN (trial);
2110 if (prev)
2111 NEXT_INSN (prev) = next;
2112 if (next)
2113 PREV_INSN (next) = prev;
2114 }
2115 }
2116
2117 /* Now, scan backwards from the insn to search for a potential
2118 delay-slot candidate. Stop searching when a label or jump is hit.
2119
2120 For each candidate, if it is to go into the delay slot (moved
2121 forward in execution sequence), it must not need or set any resources
2122 that were set by later insns and must not set any resources that
2123 are needed for those insns.
2124
2125 The delay slot insn itself sets resources unless it is a call
2126 (in which case the called routine, not the insn itself, is doing
2127 the setting). */
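
      /* Illustrative sketch (hypothetical registers):

             r2 = r3 + 1      ; candidate found by the backward scan
             call _foo        ; insn needing a delay slot

         The add may move into the call's slot provided nothing between
         the two insns, and nothing the call itself needs or sets,
         conflicts with r2 or r3.  */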
2128
2129 if (slots_filled < slots_to_fill)
2130 {
2131 CLEAR_RESOURCE (&needed);
2132 CLEAR_RESOURCE (&set);
2133 mark_set_resources (insn, &set, 0, MARK_SRC_DEST);
2134 mark_referenced_resources (insn, &needed, 0);
2135
2136 for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
2137 trial = next_trial)
2138 {
2139 next_trial = prev_nonnote_insn (trial);
2140
2141 /* This must be an INSN or CALL_INSN. */
2142 pat = PATTERN (trial);
2143
2144 /* USE and CLOBBER at this level are just for flow; ignore them. */
2145 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2146 continue;
2147
2148 /* Check for resource conflict first, to avoid unnecessary
2149 splitting. */
2150 if (! insn_references_resource_p (trial, &set, 1)
2151 && ! insn_sets_resource_p (trial, &set, 1)
2152 && ! insn_sets_resource_p (trial, &needed, 1)
2153 #ifdef HAVE_cc0
2154 /* Can't separate set of cc0 from its use. */
2155 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2156 #endif
2157 && ! can_throw_internal (trial))
2158 {
2159 trial = try_split (pat, trial, 1);
2160 next_trial = prev_nonnote_insn (trial);
2161 if (eligible_for_delay (insn, slots_filled, trial, flags))
2162 {
2163 /* In this case, we are searching backward, so if we
2164 find insns to put on the delay list, we want
2165 to put them at the head, rather than the
2166 tail, of the list. */
2167
2168 update_reg_dead_notes (trial, insn);
2169 delay_list = gen_rtx_INSN_LIST (VOIDmode,
2170 trial, delay_list);
2171 update_block (trial, trial);
2172 delete_related_insns (trial);
2173 if (slots_to_fill == ++slots_filled)
2174 break;
2175 continue;
2176 }
2177 }
2178
2179 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2180 mark_referenced_resources (trial, &needed, 1);
2181 }
2182 }
2183
2184 /* If all needed slots haven't been filled, we come here. */
2185
2186 /* Try to optimize case of jumping around a single insn. */
2187 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
2188 if (slots_filled != slots_to_fill
2189 && delay_list == 0
2190 && GET_CODE (insn) == JUMP_INSN
2191 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
2192 {
2193 delay_list = optimize_skip (insn);
2194 if (delay_list)
2195 slots_filled += 1;
2196 }
2197 #endif
2198
2199 /* Try to get insns from beyond the insn needing the delay slot.
2200 These insns can neither set nor reference resources set in insns being
2201 skipped, cannot set resources in the insn being skipped, and, if this
2202 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
2203 call might not return).
2204
2205 There used to be code which continued past the target label if
2206 we saw all uses of the target label. This code did not work,
2207 because it failed to account for some instructions which were
2208 both annulled and marked as from the target. This can happen as a
2209 result of optimize_skip. Since this code was redundant with
2210 fill_eager_delay_slots anyway, it was just deleted. */
2211
2212 if (slots_filled != slots_to_fill
2213 /* If this instruction could throw an exception which is
2214 caught in the same function, then it's not safe to fill
2215 the delay slot with an instruction from beyond this
2216 point. For example, consider:
2217
2218 int i = 2;
2219
2220 try {
2221 f();
2222 i = 3;
2223 } catch (...) {}
2224
2225 return i;
2226
2227 Even though `i' is a local variable, we must be sure not
2228 to put `i = 3' in the delay slot if `f' might throw an
2229 exception.
2230
2231 Presumably, we should also check to see if we could get
2232 back to this function via `setjmp'. */
2233 && ! can_throw_internal (insn)
2234 && (GET_CODE (insn) != JUMP_INSN
2235 || ((condjump_p (insn) || condjump_in_parallel_p (insn))
2236 && ! simplejump_p (insn)
2237 && JUMP_LABEL (insn) != 0)))
2238 {
2239 /* Invariant: If insn is a JUMP_INSN, the insn's jump
2240 label. Otherwise, zero. */
2241 rtx target = 0;
2242 int maybe_never = 0;
2243 rtx pat, trial_delay;
2244
2245 CLEAR_RESOURCE (&needed);
2246 CLEAR_RESOURCE (&set);
2247
2248 if (GET_CODE (insn) == CALL_INSN)
2249 {
2250 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
2251 mark_referenced_resources (insn, &needed, 1);
2252 maybe_never = 1;
2253 }
2254 else
2255 {
2256 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
2257 mark_referenced_resources (insn, &needed, 1);
2258 if (GET_CODE (insn) == JUMP_INSN)
2259 target = JUMP_LABEL (insn);
2260 }
2261
2262 if (target == 0)
2263 for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
2264 {
2265 next_trial = next_nonnote_insn (trial);
2266
2267 if (GET_CODE (trial) == CODE_LABEL
2268 || GET_CODE (trial) == BARRIER)
2269 break;
2270
2271 /* We must have an INSN, JUMP_INSN, or CALL_INSN. */
2272 pat = PATTERN (trial);
2273
2274 /* Stand-alone USE and CLOBBER are just for flow. */
2275 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2276 continue;
2277
2278 /* If this already has filled delay slots, get the insn needing
2279 the delay slots. */
2280 if (GET_CODE (pat) == SEQUENCE)
2281 trial_delay = XVECEXP (pat, 0, 0);
2282 else
2283 trial_delay = trial;
2284
2285 /* Stop our search when seeing an unconditional jump. */
2286 if (GET_CODE (trial_delay) == JUMP_INSN)
2287 break;
2288
2289 /* See if we have a resource problem before we try to
2290 split. */
2291 if (GET_CODE (pat) != SEQUENCE
2292 && ! insn_references_resource_p (trial, &set, 1)
2293 && ! insn_sets_resource_p (trial, &set, 1)
2294 && ! insn_sets_resource_p (trial, &needed, 1)
2295 #ifdef HAVE_cc0
2296 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2297 #endif
2298 && ! (maybe_never && may_trap_p (pat))
2299 && (trial = try_split (pat, trial, 0))
2300 && eligible_for_delay (insn, slots_filled, trial, flags)
2301 && ! can_throw_internal(trial))
2302 {
2303 next_trial = next_nonnote_insn (trial);
2304 delay_list = add_to_delay_list (trial, delay_list);
2305
2306 #ifdef HAVE_cc0
2307 if (reg_mentioned_p (cc0_rtx, pat))
2308 link_cc0_insns (trial);
2309 #endif
2310
2311 delete_related_insns (trial);
2312 if (slots_to_fill == ++slots_filled)
2313 break;
2314 continue;
2315 }
2316
2317 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2318 mark_referenced_resources (trial, &needed, 1);
2319
2320 /* Ensure we don't put insns between the setting of cc and the
2321 comparison by moving a setting of cc into an earlier delay
2322 slot since these insns could clobber the condition code. */
2323 set.cc = 1;
2324
2325 /* If this is a call or jump, we might not get here. */
2326 if (GET_CODE (trial_delay) == CALL_INSN
2327 || GET_CODE (trial_delay) == JUMP_INSN)
2328 maybe_never = 1;
2329 }
2330
2331 /* If there are slots left to fill and our search was stopped by an
2332 unconditional branch, try the insn at the branch target. We can
2333 redirect the branch if it works.
2334
2335 Don't do this if the insn at the branch target is a branch. */
2336 if (slots_to_fill != slots_filled
2337 && trial
2338 && GET_CODE (trial) == JUMP_INSN
2339 && simplejump_p (trial)
2340 && (target == 0 || JUMP_LABEL (trial) == target)
2341 && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
2342 && ! (GET_CODE (next_trial) == INSN
2343 && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
2344 && GET_CODE (next_trial) != JUMP_INSN
2345 && ! insn_references_resource_p (next_trial, &set, 1)
2346 && ! insn_sets_resource_p (next_trial, &set, 1)
2347 && ! insn_sets_resource_p (next_trial, &needed, 1)
2348 #ifdef HAVE_cc0
2349 && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
2350 #endif
2351 && ! (maybe_never && may_trap_p (PATTERN (next_trial)))
2352 && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
2353 && eligible_for_delay (insn, slots_filled, next_trial, flags)
2354 && ! can_throw_internal (trial))
2355 {
2356 /* See comment in relax_delay_slots about necessity of using
2357 next_real_insn here. */
2358 rtx new_label = next_real_insn (next_trial);
2359
2360 if (new_label != 0)
2361 new_label = get_label_before (new_label);
2362 else
2363 new_label = find_end_label ();
2364
2365 delay_list
2366 = add_to_delay_list (copy_rtx (next_trial), delay_list);
2367 slots_filled++;
2368 reorg_redirect_jump (trial, new_label);
2369
2370 /* If we merged because we both jumped to the same place,
2371 redirect the original insn also. */
2372 if (target)
2373 reorg_redirect_jump (insn, new_label);
2374 }
2375 }
2376
2377 /* If this is an unconditional jump, then try to get insns from the
2378 target of the jump. */
2379 if (GET_CODE (insn) == JUMP_INSN
2380 && simplejump_p (insn)
2381 && slots_filled != slots_to_fill)
2382 delay_list
2383 = fill_slots_from_thread (insn, const_true_rtx,
2384 next_active_insn (JUMP_LABEL (insn)),
2385 NULL, 1, 1,
2386 own_thread_p (JUMP_LABEL (insn),
2387 JUMP_LABEL (insn), 0),
2388 slots_to_fill, &slots_filled,
2389 delay_list);
2390
2391 if (delay_list)
2392 unfilled_slots_base[i]
2393 = emit_delay_sequence (insn, delay_list, slots_filled);
2394
2395 if (slots_to_fill == slots_filled)
2396 unfilled_slots_base[i] = 0;
2397
2398 note_delay_statistics (slots_filled, 0);
2399 }
2400
2401 #ifdef DELAY_SLOTS_FOR_EPILOGUE
2402 /* See if the epilogue needs any delay slots. Try to fill them if so.
2403 The only thing we can do is scan backwards from the end of the
2404 function. If we did this in a previous pass, it is incorrect to do it
2405 again. */
2406 if (current_function_epilogue_delay_list)
2407 return;
2408
2409 slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
2410 if (slots_to_fill == 0)
2411 return;
2412
2413 slots_filled = 0;
2414 CLEAR_RESOURCE (&set);
2415
2416 /* The frame pointer and stack pointer are needed at the beginning of
2417 the epilogue, so instructions setting them cannot be put in the
2418 epilogue delay slot. However, everything else needed at function
2419 end is safe, so we don't want to use end_of_function_needs here. */
2420 CLEAR_RESOURCE (&needed);
2421 if (frame_pointer_needed)
2422 {
2423 SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
2424 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2425 SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
2426 #endif
2427 if (! EXIT_IGNORE_STACK
2428 || current_function_sp_is_unchanging)
2429 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2430 }
2431 else
2432 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2433
2434 #ifdef EPILOGUE_USES
2435 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2436 {
2437 if (EPILOGUE_USES (i))
2438 SET_HARD_REG_BIT (needed.regs, i);
2439 }
2440 #endif
2441
2442 for (trial = get_last_insn (); ! stop_search_p (trial, 1);
2443 trial = PREV_INSN (trial))
2444 {
2445 if (GET_CODE (trial) == NOTE)
2446 continue;
2447 pat = PATTERN (trial);
2448 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2449 continue;
2450
2451 if (! insn_references_resource_p (trial, &set, 1)
2452 && ! insn_sets_resource_p (trial, &needed, 1)
2453 && ! insn_sets_resource_p (trial, &set, 1)
2454 #ifdef HAVE_cc0
2455 /* Don't want to mess with cc0 here. */
2456 && ! reg_mentioned_p (cc0_rtx, pat)
2457 #endif
2458 && ! can_throw_internal (trial))
2459 {
2460 trial = try_split (pat, trial, 1);
2461 if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
2462 {
2463 /* Here as well we are searching backward, so put the
2464 insns we find on the head of the list. */
2465
2466 current_function_epilogue_delay_list
2467 = gen_rtx_INSN_LIST (VOIDmode, trial,
2468 current_function_epilogue_delay_list);
2469 mark_end_of_function_resources (trial, 1);
2470 update_block (trial, trial);
2471 delete_related_insns (trial);
2472
2473 /* Clear deleted bit so final.c will output the insn. */
2474 INSN_DELETED_P (trial) = 0;
2475
2476 if (slots_to_fill == ++slots_filled)
2477 break;
2478 continue;
2479 }
2480 }
2481
2482 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2483 mark_referenced_resources (trial, &needed, 1);
2484 }
2485
2486 note_delay_statistics (slots_filled, 0);
2487 #endif
2488 }
2489 \f
2490 /* Try to find insns to place in delay slots.
2491
2492 INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
2493 or is an unconditional branch if CONDITION is const_true_rtx.
2494 *PSLOTS_FILLED is updated with the number of slots that we have filled.
2495
2496 THREAD is a flow-of-control thread: either the insns to be executed if
2497 the branch is true or those if it is false; THREAD_IF_TRUE says which.
2498
2499 OPPOSITE_THREAD is the thread in the opposite direction. It is used
2500 to see if any potential delay slot insns set things needed there.
2501
2502 LIKELY is nonzero if it is extremely likely that the branch will be
2503 taken and THREAD_IF_TRUE is set. This is used for the branch at the
2504 end of a loop that branches back up to the top.
2505
2506 OWN_THREAD is true if we are the only user of the
2507 thread. I.e., it is the fallthrough code of our jump or the target of the
2508 jump when we are the only jump going there.
2509
2510 If OWN_THREAD is false, it must be the "true" thread of a jump. In that
2511 case, we can only take insns from the head of the thread for our delay
2512 slot. We then adjust the jump to point after the insns we have taken. */
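
/* Illustrative sketch of the two threads (hypothetical code):

           beq r1, L1       ; INSN
           i2               ; false (fallthrough) thread begins here
           ...
       L1: i3               ; true (target) thread begins here

   With THREAD_IF_TRUE nonzero, THREAD starts at i3 and OPPOSITE_THREAD
   at i2; with it zero, the roles are swapped.  */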
2513
2514 static rtx
2515 fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
2516 rtx opposite_thread, int likely, int thread_if_true,
2517 int own_thread, int slots_to_fill,
2518 int *pslots_filled, rtx delay_list)
2519 {
2520 rtx new_thread;
2521 struct resources opposite_needed, set, needed;
2522 rtx trial;
2523 int lose = 0;
2524 int must_annul = 0;
2525 int flags;
2526
2527 /* Validate our arguments. */
2528 if ((condition == const_true_rtx && ! thread_if_true)
2529 || (! own_thread && ! thread_if_true))
2530 abort ();
2531
2532 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2533
2534 /* If our thread is the end of subroutine, we can't get any delay
2535 insns from that. */
2536 if (thread == 0)
2537 return delay_list;
2538
2539 /* If this is an unconditional branch, nothing is needed at the
2540 opposite thread. Otherwise, compute what is needed there. */
2541 if (condition == const_true_rtx)
2542 CLEAR_RESOURCE (&opposite_needed);
2543 else
2544 mark_target_live_regs (get_insns (), opposite_thread, &opposite_needed);
2545
2546 /* If the insn at THREAD can be split, do it here to avoid having to
2547 update THREAD and NEW_THREAD if it is done in the loop below. Also
2548 initialize NEW_THREAD. */
2549
2550 new_thread = thread = try_split (PATTERN (thread), thread, 0);
2551
2552 /* Scan insns at THREAD. We are looking for an insn that can be removed
2553 from THREAD (it neither sets nor references resources that were set
2554 ahead of it and it doesn't set anything needed by the insns ahead of
2555 it) and that either can be placed in an annulling insn or isn't
2556 needed at OPPOSITE_THREAD. */
2557
2558 CLEAR_RESOURCE (&needed);
2559 CLEAR_RESOURCE (&set);
2560
2561 /* If we do not own this thread, we must stop as soon as we find
2562 something that we can't put in a delay slot, since all we can do
2563 is branch into THREAD at a later point. Therefore, labels stop
2564 the search if this is not the `true' thread. */
2565
2566 for (trial = thread;
2567 ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
2568 trial = next_nonnote_insn (trial))
2569 {
2570 rtx pat, old_trial;
2571
2572 /* If we have passed a label, we no longer own this thread. */
2573 if (GET_CODE (trial) == CODE_LABEL)
2574 {
2575 own_thread = 0;
2576 continue;
2577 }
2578
2579 pat = PATTERN (trial);
2580 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2581 continue;
2582
2583 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
2584 don't separate or copy insns that set and use CC0. */
2585 if (! insn_references_resource_p (trial, &set, 1)
2586 && ! insn_sets_resource_p (trial, &set, 1)
2587 && ! insn_sets_resource_p (trial, &needed, 1)
2588 #ifdef HAVE_cc0
2589 && ! (reg_mentioned_p (cc0_rtx, pat)
2590 && (! own_thread || ! sets_cc0_p (pat)))
2591 #endif
2592 && ! can_throw_internal (trial))
2593 {
2594 rtx prior_insn;
2595
2596 /* If TRIAL is redundant with some insn before INSN, we don't
2597 actually need to add it to the delay list; we can merely pretend
2598 we did. */
2599 if ((prior_insn = redundant_insn (trial, insn, delay_list)))
2600 {
2601 fix_reg_dead_note (prior_insn, insn);
2602 if (own_thread)
2603 {
2604 update_block (trial, thread);
2605 if (trial == thread)
2606 {
2607 thread = next_active_insn (thread);
2608 if (new_thread == trial)
2609 new_thread = thread;
2610 }
2611
2612 delete_related_insns (trial);
2613 }
2614 else
2615 {
2616 update_reg_unused_notes (prior_insn, trial);
2617 new_thread = next_active_insn (trial);
2618 }
2619
2620 continue;
2621 }
2622
2623 /* There are two ways we can win: If TRIAL doesn't set anything
2624 needed at the opposite thread and can't trap, or if it can
2625 go into an annulled delay slot. */
2626 if (!must_annul
2627 && (condition == const_true_rtx
2628 || (! insn_sets_resource_p (trial, &opposite_needed, 1)
2629 && ! may_trap_p (pat))))
2630 {
2631 old_trial = trial;
2632 trial = try_split (pat, trial, 0);
2633 if (new_thread == old_trial)
2634 new_thread = trial;
2635 if (thread == old_trial)
2636 thread = trial;
2637 pat = PATTERN (trial);
2638 if (eligible_for_delay (insn, *pslots_filled, trial, flags))
2639 goto winner;
2640 }
2641 else if (0
2642 #ifdef ANNUL_IFTRUE_SLOTS
2643 || ! thread_if_true
2644 #endif
2645 #ifdef ANNUL_IFFALSE_SLOTS
2646 || thread_if_true
2647 #endif
2648 )
2649 {
2650 old_trial = trial;
2651 trial = try_split (pat, trial, 0);
2652 if (new_thread == old_trial)
2653 new_thread = trial;
2654 if (thread == old_trial)
2655 thread = trial;
2656 pat = PATTERN (trial);
2657 if ((must_annul || delay_list == NULL) && (thread_if_true
2658 ? check_annul_list_true_false (0, delay_list)
2659 && eligible_for_annul_false (insn, *pslots_filled, trial, flags)
2660 : check_annul_list_true_false (1, delay_list)
2661 && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
2662 {
2663 rtx temp;
2664
2665 must_annul = 1;
2666 winner:
2667
2668 #ifdef HAVE_cc0
2669 if (reg_mentioned_p (cc0_rtx, pat))
2670 link_cc0_insns (trial);
2671 #endif
2672
2673 /* If we own this thread, delete the insn. If this is the
2674 destination of a branch, show that a basic block status
2675 may have been updated. In any case, mark the new
2676 starting point of this thread. */
2677 if (own_thread)
2678 {
2679 rtx note;
2680
2681 update_block (trial, thread);
2682 if (trial == thread)
2683 {
2684 thread = next_active_insn (thread);
2685 if (new_thread == trial)
2686 new_thread = thread;
2687 }
2688
2689 /* We are moving this insn, not deleting it. We must
2690 temporarily increment the use count on any referenced
2691 label lest it be deleted by delete_related_insns. */
2692 note = find_reg_note (trial, REG_LABEL, 0);
2693 /* REG_LABEL could be NOTE_INSN_DELETED_LABEL too. */
2694 if (note && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2695 LABEL_NUSES (XEXP (note, 0))++;
2696
2697 delete_related_insns (trial);
2698
2699 if (note && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2700 LABEL_NUSES (XEXP (note, 0))--;
2701 }
2702 else
2703 new_thread = next_active_insn (trial);
2704
2705 temp = own_thread ? trial : copy_rtx (trial);
2706 if (thread_if_true)
2707 INSN_FROM_TARGET_P (temp) = 1;
2708
2709 delay_list = add_to_delay_list (temp, delay_list);
2710
2711 if (slots_to_fill == ++(*pslots_filled))
2712 {
2713 /* Even though we have filled all the slots, we
2714 may be branching to a location that has a
2715 redundant insn. Skip any if so. */
2716 while (new_thread && ! own_thread
2717 && ! insn_sets_resource_p (new_thread, &set, 1)
2718 && ! insn_sets_resource_p (new_thread, &needed, 1)
2719 && ! insn_references_resource_p (new_thread,
2720 &set, 1)
2721 && (prior_insn
2722 = redundant_insn (new_thread, insn,
2723 delay_list)))
2724 {
2725 /* We know we do not own the thread, so no need
2726 to call update_block and delete_insn. */
2727 fix_reg_dead_note (prior_insn, insn);
2728 update_reg_unused_notes (prior_insn, new_thread);
2729 new_thread = next_active_insn (new_thread);
2730 }
2731 break;
2732 }
2733
2734 continue;
2735 }
2736 }
2737 }
2738
2739 /* This insn can't go into a delay slot. */
2740 lose = 1;
2741 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2742 mark_referenced_resources (trial, &needed, 1);
2743
2744 /* Ensure we don't put insns between the setting of cc and the comparison
2745 by moving a setting of cc into an earlier delay slot since these insns
2746 could clobber the condition code. */
2747 set.cc = 1;
2748
2749 /* If this insn is a register-register copy and the next insn has
2750 a use of our destination, change it to use our source. That way,
2751 it will become a candidate for our delay slot the next time
2752 through this loop. This case occurs commonly in loops that
2753 scan a list.
2754
2755 We could check for more complex cases than those tested below,
2756 but it doesn't seem worth it. It might also be a good idea to try
2757 to swap the two insns. That might do better.
2758
2759 We can't do this if the next insn modifies our destination, because
2760 that would make the replacement into the insn invalid. We also can't
2761 do this if it modifies our source, because it might be an earlyclobber
2762 operand. This latter test also prevents updating the contents of
2763 a PRE_INC. We also can't do this if there's overlap of source and
2764 destination. Overlap may happen for larger-than-register-size modes. */
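
      /* Illustrative sketch (hypothetical registers):

             TRIAL:  r2 = r5         ; reg-reg copy, sets r2
             NEXT:   r3 = r2 + 1     ; conflicts: it reads r2

         Rewritten as  r3 = r5 + 1,  NEXT no longer depends on the copy
         and may fit the delay slot on a later iteration.  */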
2765
2766 if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
2767 && REG_P (SET_SRC (pat))
2768 && REG_P (SET_DEST (pat))
2769 && !reg_overlap_mentioned_p (SET_DEST (pat), SET_SRC (pat)))
2770 {
2771 rtx next = next_nonnote_insn (trial);
2772
2773 if (next && GET_CODE (next) == INSN
2774 && GET_CODE (PATTERN (next)) != USE
2775 && ! reg_set_p (SET_DEST (pat), next)
2776 && ! reg_set_p (SET_SRC (pat), next)
2777 && reg_referenced_p (SET_DEST (pat), PATTERN (next))
2778 && ! modified_in_p (SET_DEST (pat), next))
2779 validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
2780 }
2781 }
2782
2783 /* If we stopped on a branch insn that has delay slots, see if we can
2784 steal some of the insns in those slots. */
2785 if (trial && GET_CODE (trial) == INSN
2786 && GET_CODE (PATTERN (trial)) == SEQUENCE
2787 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
2788 {
2789 /* If this is the `true' thread, we will want to follow the jump,
2790 so we can only do this if we have taken everything up to here. */
2791 if (thread_if_true && trial == new_thread)
2792 {
2793 delay_list
2794 = steal_delay_list_from_target (insn, condition, PATTERN (trial),
2795 delay_list, &set, &needed,
2796 &opposite_needed, slots_to_fill,
2797 pslots_filled, &must_annul,
2798 &new_thread);
2799 /* If we owned the thread and are told that it branched
2800 elsewhere, make sure we own the thread at the new location. */
2801 if (own_thread && trial != new_thread)
2802 own_thread = own_thread_p (new_thread, new_thread, 0);
2803 }
2804 else if (! thread_if_true)
2805 delay_list
2806 = steal_delay_list_from_fallthrough (insn, condition,
2807 PATTERN (trial),
2808 delay_list, &set, &needed,
2809 &opposite_needed, slots_to_fill,
2810 pslots_filled, &must_annul);
2811 }
2812
2813 /* If we haven't found anything for this delay slot and it is very
2814 likely that the branch will be taken, see if the insn at our target
2815 increments or decrements a register with an increment that does not
2816 depend on the destination register. If so, try to place the opposite
2817 arithmetic insn after the jump insn and put the arithmetic insn in the
2818 delay slot. If we can't do this, return. */
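
  /* Illustrative sketch (hypothetical code): if the branch is almost
     always taken and its target begins with

         L1: r4 = r4 + 8

     we may place  r4 = r4 + 8  in the delay slot and emit a
     compensating  r4 = r4 - 8  after the branch, so the rare
     fallthrough path still sees the correct value of r4.  */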
2819 if (delay_list == 0 && likely && new_thread
2820 && GET_CODE (new_thread) == INSN
2821 && GET_CODE (PATTERN (new_thread)) != ASM_INPUT
2822 && asm_noperands (PATTERN (new_thread)) < 0)
2823 {
2824 rtx pat = PATTERN (new_thread);
2825 rtx dest;
2826 rtx src;
2827
2828 trial = new_thread;
2829 pat = PATTERN (trial);
2830
2831 if (GET_CODE (trial) != INSN
2832 || GET_CODE (pat) != SET
2833 || ! eligible_for_delay (insn, 0, trial, flags)
2834 || can_throw_internal (trial))
2835 return 0;
2836
2837 dest = SET_DEST (pat), src = SET_SRC (pat);
2838 if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
2839 && rtx_equal_p (XEXP (src, 0), dest)
2840 && ! reg_overlap_mentioned_p (dest, XEXP (src, 1))
2841 && ! side_effects_p (pat))
2842 {
2843 rtx other = XEXP (src, 1);
2844 rtx new_arith;
2845 rtx ninsn;
2846
2847 /* If this is a constant adjustment, use the same code with
2848 the negated constant. Otherwise, reverse the sense of the
2849 arithmetic. */
2850 if (GET_CODE (other) == CONST_INT)
2851 new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
2852 negate_rtx (GET_MODE (src), other));
2853 else
2854 new_arith = gen_rtx_fmt_ee (GET_CODE (src) == PLUS ? MINUS : PLUS,
2855 GET_MODE (src), dest, other);
2856
2857 ninsn = emit_insn_after (gen_rtx_SET (VOIDmode, dest, new_arith),
2858 insn);
2859
2860 if (recog_memoized (ninsn) < 0
2861 || (extract_insn (ninsn), ! constrain_operands (1)))
2862 {
2863 delete_related_insns (ninsn);
2864 return 0;
2865 }
2866
2867 if (own_thread)
2868 {
2869 update_block (trial, thread);
2870 if (trial == thread)
2871 {
2872 thread = next_active_insn (thread);
2873 if (new_thread == trial)
2874 new_thread = thread;
2875 }
2876 delete_related_insns (trial);
2877 }
2878 else
2879 new_thread = next_active_insn (trial);
2880
2881 ninsn = own_thread ? trial : copy_rtx (trial);
2882 if (thread_if_true)
2883 INSN_FROM_TARGET_P (ninsn) = 1;
2884
2885 delay_list = add_to_delay_list (ninsn, NULL_RTX);
2886 (*pslots_filled)++;
2887 }
2888 }
2889
2890 if (delay_list && must_annul)
2891 INSN_ANNULLED_BRANCH_P (insn) = 1;
2892
2893 /* If we are to branch into the middle of this thread, find an appropriate
2894 label or make a new one if none, and redirect INSN to it. If we hit the
2895 end of the function, use the end-of-function label. */
2896 if (new_thread != thread)
2897 {
2898 rtx label;
2899
2900 if (! thread_if_true)
2901 abort ();
2902
2903 if (new_thread && GET_CODE (new_thread) == JUMP_INSN
2904 && (simplejump_p (new_thread)
2905 || GET_CODE (PATTERN (new_thread)) == RETURN)
2906 && redirect_with_delay_list_safe_p (insn,
2907 JUMP_LABEL (new_thread),
2908 delay_list))
2909 new_thread = follow_jumps (JUMP_LABEL (new_thread));
2910
2911 if (new_thread == 0)
2912 label = find_end_label ();
2913 else if (GET_CODE (new_thread) == CODE_LABEL)
2914 label = new_thread;
2915 else
2916 label = get_label_before (new_thread);
2917
2918 reorg_redirect_jump (insn, label);
2919 }
2920
2921 return delay_list;
2922 }
2923 \f
2924 /* Make another attempt to find insns to place in delay slots.
2925
2926 We previously looked for insns located in front of the delay insn
2927 and, for non-jump delay insns, located behind the delay insn.
2928
2929 Here we only try to schedule jump insns, moving insns from either
2930 the target or the following insns into the delay slot. If annulling is
2931 supported, we will likely do this. Otherwise, we can do this only
2932 if safe. */
2933
2934 static void
2935 fill_eager_delay_slots (void)
2936 {
2937 rtx insn;
2938 int i;
2939 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2940
2941 for (i = 0; i < num_unfilled_slots; i++)
2942 {
2943 rtx condition;
2944 rtx target_label, insn_at_target, fallthrough_insn;
2945 rtx delay_list = 0;
2946 int own_target;
2947 int own_fallthrough;
2948 int prediction, slots_to_fill, slots_filled;
2949
2950 insn = unfilled_slots_base[i];
2951 if (insn == 0
2952 || INSN_DELETED_P (insn)
2953 || GET_CODE (insn) != JUMP_INSN
2954 || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
2955 continue;
2956
2957 slots_to_fill = num_delay_slots (insn);
2958 /* Some machine descriptions have defined instructions to have
2959 delay slots only in certain circumstances which may depend on
2960 nearby insns (which change due to reorg's actions).
2961
2962 For example, the PA port normally has delay slots for unconditional
2963 jumps.
2964
2965 However, the PA port claims such jumps do not have a delay slot
2966 if they are immediate successors of certain CALL_INSNs. This
2967 allows the port to favor filling the delay slot of the call with
2968 the unconditional jump. */
2969 if (slots_to_fill == 0)
2970 continue;
2971
2972 slots_filled = 0;
2973 target_label = JUMP_LABEL (insn);
2974 condition = get_branch_condition (insn, target_label);
2975
2976 if (condition == 0)
2977 continue;
2978
2979 /* Get the next active fallthrough and target insns and see if we own
2980 them. Then see whether the branch is likely true. We don't need
2981 to do a lot of this for unconditional branches. */
2982
2983 insn_at_target = next_active_insn (target_label);
2984 own_target = own_thread_p (target_label, target_label, 0);
2985
2986 if (condition == const_true_rtx)
2987 {
2988 own_fallthrough = 0;
2989 fallthrough_insn = 0;
2990 prediction = 2;
2991 }
2992 else
2993 {
2994 fallthrough_insn = next_active_insn (insn);
2995 own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
2996 prediction = mostly_true_jump (insn, condition);
2997 }
2998
2999 /* If this insn is expected to branch, first try to get insns from our
3000 target, then our fallthrough insns. If it is not expected to branch,
3001 try the other order. */
3002
3003 if (prediction > 0)
3004 {
3005 delay_list
3006 = fill_slots_from_thread (insn, condition, insn_at_target,
3007 fallthrough_insn, prediction == 2, 1,
3008 own_target,
3009 slots_to_fill, &slots_filled, delay_list);
3010
3011 if (delay_list == 0 && own_fallthrough)
3012 {
3013 /* Even though we didn't find anything for delay slots,
3014 we might have found a redundant insn which we deleted
3015 from the thread that was filled. So we have to recompute
3016 the next insn at the target. */
3017 target_label = JUMP_LABEL (insn);
3018 insn_at_target = next_active_insn (target_label);
3019
3020 delay_list
3021 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3022 insn_at_target, 0, 0,
3023 own_fallthrough,
3024 slots_to_fill, &slots_filled,
3025 delay_list);
3026 }
3027 }
3028 else
3029 {
3030 if (own_fallthrough)
3031 delay_list
3032 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3033 insn_at_target, 0, 0,
3034 own_fallthrough,
3035 slots_to_fill, &slots_filled,
3036 delay_list);
3037
3038 if (delay_list == 0)
3039 delay_list
3040 = fill_slots_from_thread (insn, condition, insn_at_target,
3041 next_active_insn (insn), 0, 1,
3042 own_target,
3043 slots_to_fill, &slots_filled,
3044 delay_list);
3045 }
3046
3047 if (delay_list)
3048 unfilled_slots_base[i]
3049 = emit_delay_sequence (insn, delay_list, slots_filled);
3050
3051 if (slots_to_fill == slots_filled)
3052 unfilled_slots_base[i] = 0;
3053
3054 note_delay_statistics (slots_filled, 1);
3055 }
3056 }
3057 \f
3058 /* Once we have tried two ways to fill a delay slot, make a pass over the
3059 code to try to improve the results and to do such things as more jump
3060 threading. */
3061
3062 static void
3063 relax_delay_slots (rtx first)
3064 {
3065 rtx insn, next, pat;
3066 rtx trial, delay_insn, target_label;
3067
3068 /* Look at every JUMP_INSN and see if we can improve it. */
3069 for (insn = first; insn; insn = next)
3070 {
3071 rtx other;
3072
3073 next = next_active_insn (insn);
3074
3075 /* If this is a jump insn, see if it now jumps to a jump, jumps to
3076 the next insn, or jumps to a label that is not the last of a
3077 group of consecutive labels. */
3078 if (GET_CODE (insn) == JUMP_INSN
3079 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3080 && (target_label = JUMP_LABEL (insn)) != 0)
3081 {
3082 target_label = skip_consecutive_labels (follow_jumps (target_label));
3083 if (target_label == 0)
3084 target_label = find_end_label ();
3085
3086 if (next_active_insn (target_label) == next
3087 && ! condjump_in_parallel_p (insn))
3088 {
3089 delete_jump (insn);
3090 continue;
3091 }
3092
3093 if (target_label != JUMP_LABEL (insn))
3094 reorg_redirect_jump (insn, target_label);
3095
3096 /* See if this jump branches around an unconditional jump.
3097 If so, invert this jump and point it to the target of the
3098 second jump. */
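
          /* Illustrative sketch (hypothetical labels):

                 beq L1                   bne L2
                 b   L2       ==>         ...
             L1: ...                  L1: ...

             The conditional branch is inverted to target L2 and the
             unconditional jump is deleted.  */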
3099 if (next && GET_CODE (next) == JUMP_INSN
3100 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3101 && next_active_insn (target_label) == next_active_insn (next)
3102 && no_labels_between_p (insn, next))
3103 {
3104 rtx label = JUMP_LABEL (next);
3105
3106 /* Be careful how we do this to avoid deleting code or
3107 labels that are momentarily dead. See similar optimization
3108 in jump.c.
3109
3110 We also need to ensure we properly handle the case when
3111 invert_jump fails. */
3112
3113 ++LABEL_NUSES (target_label);
3114 if (label)
3115 ++LABEL_NUSES (label);
3116
3117 if (invert_jump (insn, label, 1))
3118 {
3119 delete_related_insns (next);
3120 next = insn;
3121 }
3122
3123 if (label)
3124 --LABEL_NUSES (label);
3125
3126 if (--LABEL_NUSES (target_label) == 0)
3127 delete_related_insns (target_label);
3128
3129 continue;
3130 }
3131 }
3132
3133 /* If this is an unconditional jump and the previous insn is a
3134 conditional jump, try reversing the condition of the previous
3135 insn and swapping our targets. The next pass might be able to
3136 fill the slots.
3137
3138 Don't do this if we expect the conditional branch to be true, because
3139 we would then be making the more common case longer. */
3140
3141 if (GET_CODE (insn) == JUMP_INSN
3142 && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
3143 && (other = prev_active_insn (insn)) != 0
3144 && (condjump_p (other) || condjump_in_parallel_p (other))
3145 && no_labels_between_p (other, insn)
3146 && 0 > mostly_true_jump (other,
3147 get_branch_condition (other,
3148 JUMP_LABEL (other))))
3149 {
3150 rtx other_target = JUMP_LABEL (other);
3151 target_label = JUMP_LABEL (insn);
3152
3153 if (invert_jump (other, target_label, 0))
3154 reorg_redirect_jump (insn, other_target);
3155 }
3156
3157 /* Now look only at cases where we have filled a delay slot. */
3158 if (GET_CODE (insn) != INSN
3159 || GET_CODE (PATTERN (insn)) != SEQUENCE)
3160 continue;
3161
3162 pat = PATTERN (insn);
3163 delay_insn = XVECEXP (pat, 0, 0);
3164
3165 /* See if the first insn in the delay slot is redundant with some
3166 previous insn. Remove it from the delay slot if so; then set up
3167 to reprocess this insn. */
3168 if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
3169 {
3170 delete_from_delay_slot (XVECEXP (pat, 0, 1));
3171 next = prev_active_insn (next);
3172 continue;
3173 }
3174
3175 /* See if we have a RETURN insn with a filled delay slot followed
3176 by a RETURN insn with an unfilled delay slot. If so, we can delete
3177 the first RETURN (but not its delay insn). This gives the same
3178 effect in fewer instructions.
3179
3180 Only do so if optimizing for size since this results in slower, but
3181 smaller code. */
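
      /* Illustrative sketch:

             (sequence [return ; insn I])   ; filled RETURN
             return                         ; unfilled RETURN

         becomes  I  followed by the second RETURN: the same effect in
         fewer instructions, at the cost of the first slot's fill.  */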
3182 if (optimize_size
3183 && GET_CODE (PATTERN (delay_insn)) == RETURN
3184 && next
3185 && GET_CODE (next) == JUMP_INSN
3186 && GET_CODE (PATTERN (next)) == RETURN)
3187 {
3188 rtx after;
3189 int i;
3190
3191 /* Delete the RETURN and just execute the delay list insns.
3192
3193 We do this by deleting the INSN containing the SEQUENCE, then
3194 re-emitting the insns separately, and then deleting the RETURN.
3195 This allows the count of the jump target to be properly
3196 decremented. */
3197
3198 /* Clear the from target bit, since these insns are no longer
3199 in delay slots. */
3200 for (i = 0; i < XVECLEN (pat, 0); i++)
3201 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3202
3203 trial = PREV_INSN (insn);
3204 delete_related_insns (insn);
3205 if (GET_CODE (pat) != SEQUENCE)
3206 abort ();
3207 after = trial;
3208 for (i = 0; i < XVECLEN (pat, 0); i++)
3209 {
3210 rtx this_insn = XVECEXP (pat, 0, i);
3211 add_insn_after (this_insn, after);
3212 after = this_insn;
3213 }
3214 delete_scheduled_jump (delay_insn);
3215 continue;
3216 }
3217
3218 /* Now look only at the cases where we have a filled JUMP_INSN. */
3219 if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
3220 || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
3221 || condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
3222 continue;
3223
3224 target_label = JUMP_LABEL (delay_insn);
3225
3226 if (target_label)
3227 {
3228 /* If this jump goes to another unconditional jump, thread it, but
3229 don't convert a jump into a RETURN here. */
3230 trial = skip_consecutive_labels (follow_jumps (target_label));
3231 if (trial == 0)
3232 trial = find_end_label ();
3233
3234 if (trial != target_label
3235 && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
3236 {
3237 reorg_redirect_jump (delay_insn, trial);
3238 target_label = trial;
3239 }
3240
3241 /* If the first insn at TARGET_LABEL is redundant with a previous
3242 insn, redirect the jump to the following insn and process it again. */
3243 trial = next_active_insn (target_label);
3244 if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
3245 && redundant_insn (trial, insn, 0)
3246 && ! can_throw_internal (trial))
3247 {
3248 rtx tmp;
3249
3250 /* Figure out where to emit the special USE insn so we don't
3251 later incorrectly compute register live/death info. */
3252 tmp = next_active_insn (trial);
3253 if (tmp == 0)
3254 tmp = find_end_label ();
3255
3256 /* Insert the special USE insn and update dataflow info. */
3257 update_block (trial, tmp);
3258
3259 /* Now emit a label before the special USE insn, and
3260 redirect our jump to the new label. */
3261 target_label = get_label_before (PREV_INSN (tmp));
3262 reorg_redirect_jump (delay_insn, target_label);
3263 next = insn;
3264 continue;
3265 }
3266
3267 /* Similarly, if it is an unconditional jump with one insn in its
3268 delay list and that insn is redundant, thread the jump. */
3269 if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
3270 && XVECLEN (PATTERN (trial), 0) == 2
3271 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
3272 && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
3273 || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
3274 && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
3275 {
3276 target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
3277 if (target_label == 0)
3278 target_label = find_end_label ();
3279
3280 if (redirect_with_delay_slots_safe_p (delay_insn, target_label,
3281 insn))
3282 {
3283 reorg_redirect_jump (delay_insn, target_label);
3284 next = insn;
3285 continue;
3286 }
3287 }
3288 }
3289
3290 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3291 && prev_active_insn (target_label) == insn
3292 && ! condjump_in_parallel_p (delay_insn)
3293 #ifdef HAVE_cc0
3294 /* If the last insn in the delay slot sets CC0 for some insn,
3295 various code assumes that it is in a delay slot. We could
3296 put it back where it belonged and delete the register notes,
3297 but it doesn't seem worthwhile in this uncommon case. */
3298 && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
3299 REG_CC_USER, NULL_RTX)
3300 #endif
3301 )
3302 {
3303 rtx after;
3304 int i;
3305
3306 /* All this insn does is execute its delay list and jump to the
3307 following insn. So delete the jump and just execute the delay
3308 list insns.
3309
3310 We do this by deleting the INSN containing the SEQUENCE, then
3311 re-emitting the insns separately, and then deleting the jump.
3312 This allows the count of the jump target to be properly
3313 decremented. */
3314
3315 /* Clear the from target bit, since these insns are no longer
3316 in delay slots. */
3317 for (i = 0; i < XVECLEN (pat, 0); i++)
3318 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3319
3320 trial = PREV_INSN (insn);
3321 delete_related_insns (insn);
3322 if (GET_CODE (pat) != SEQUENCE)
3323 abort ();
3324 after = trial;
3325 for (i = 0; i < XVECLEN (pat, 0); i++)
3326 {
3327 rtx this_insn = XVECEXP (pat, 0, i);
3328 add_insn_after (this_insn, after);
3329 after = this_insn;
3330 }
3331 delete_scheduled_jump (delay_insn);
3332 continue;
3333 }

      /* See if this is an unconditional jump around a single insn which is
	 identical to the one in its delay slot.  In this case, we can just
	 delete the branch and the insn in its delay slot.  */
      if (next && GET_CODE (next) == INSN
	  && prev_label (next_active_insn (next)) == target_label
	  && simplejump_p (insn)
	  && XVECLEN (pat, 0) == 2
	  && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
	{
	  delete_related_insns (insn);
	  continue;
	}
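
      /* For example (hypothetical pseudo-RTL), in

		(sequence [jump L1 ; (set r4 r5)])
		(set r4 r5)
	    L1: ...

	 the branch skips an insn identical to its own delay slot insn, so
	 the branch and its slot can simply be deleted, leaving the
	 fall-through copy of  (set r4 r5)  to execute.  */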

      /* See if this jump (with its delay slots) branches around another
	 jump (without delay slots).  If so, invert this jump and point
	 it to the target of the second jump.  We cannot do this for
	 annulled jumps, though.  Again, don't convert a jump to a RETURN
	 here.  */
      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
	  && next && GET_CODE (next) == JUMP_INSN
	  && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
	  && next_active_insn (target_label) == next_active_insn (next)
	  && no_labels_between_p (insn, next))
	{
	  rtx label = JUMP_LABEL (next);
	  rtx old_label = JUMP_LABEL (delay_insn);

	  if (label == 0)
	    label = find_end_label ();

	  /* find_end_label can generate a new label.  Check this first.  */
	  if (no_labels_between_p (insn, next)
	      && redirect_with_delay_slots_safe_p (delay_insn, label, insn))
	    {
	      /* Be careful how we do this to avoid deleting code or labels
		 that are momentarily dead.  See similar optimization in
		 jump.c.  */
	      if (old_label)
		++LABEL_NUSES (old_label);

	      if (invert_jump (delay_insn, label, 1))
		{
		  int i;

		  /* Must update the INSN_FROM_TARGET_P bits now that
		     the branch is reversed, so that mark_target_live_regs
		     will handle the delay slot insn correctly.  */
		  for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
		    {
		      rtx slot = XVECEXP (PATTERN (insn), 0, i);
		      INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
		    }

		  delete_related_insns (next);
		  next = insn;
		}

	      if (old_label && --LABEL_NUSES (old_label) == 0)
		delete_related_insns (old_label);
	      continue;
	    }
	}
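
      /* A sketch of the inversion above (hypothetical pseudo-RTL):

		(sequence [branch-if-eq L1 ; (set r4 r5)])
		jump L2
	    L1: ...

	 becomes

		(sequence [branch-if-ne L2 ; (set r4 r5)])
	    L1: ...

	 The INSN_FROM_TARGET_P bits on the delay slot insns are flipped
	 because the taken and fall-through paths have swapped roles.  */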

      /* If we own the thread opposite the way this insn branches, see if we
	 can merge its delay slots with following insns.  */
      if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
	  && own_thread_p (NEXT_INSN (insn), 0, 1))
	try_merge_delay_insns (insn, next);
      else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
	       && own_thread_p (target_label, target_label, 0))
	try_merge_delay_insns (insn, next_active_insn (target_label));

      /* If we get here, we haven't deleted INSN.  But we may have deleted
	 NEXT, so recompute it.  */
      next = next_active_insn (insn);
    }
}
\f
#ifdef HAVE_return

/* Look for filled jumps to the end of function label.  We can try to convert
   them into RETURN insns if the insns in the delay slot are valid for the
   RETURN as well.  */
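
/* An illustrative sketch (hypothetical pseudo-RTL): a filled jump to the
   end-of-function label,

       (sequence [jump END_OF_FUNCTION_LABEL ; (set r4 r5)])

   can become a filled RETURN,

       (sequence [return ; (set r4 r5)])

   provided the slot insn is also valid in a RETURN's delay slot on the
   target machine.  */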

static void
make_return_insns (rtx first)
{
  rtx insn, jump_insn, pat;
  rtx real_return_label = end_of_function_label;
  int slots, i;

#ifdef DELAY_SLOTS_FOR_EPILOGUE
  /* If a previous pass filled delay slots in the epilogue, things get a
     bit more complicated, as those filler insns would generally (without
     data flow analysis) have to be executed after any existing branch
     delay slot filler insns.  It is also unknown whether such a
     transformation would actually be profitable.  Note that the existing
     code only cares for branches with (some) filled delay slots.  */
  if (current_function_epilogue_delay_list != NULL)
    return;
#endif
  /* See if there is a RETURN insn in the function other than the one we
     made for END_OF_FUNCTION_LABEL.  If so, arrange for anything we can't
     convert into a RETURN to jump to it instead.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
      {
	real_return_label = get_label_before (insn);
	break;
      }

  /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
     was equal to END_OF_FUNCTION_LABEL.  */
  LABEL_NUSES (real_return_label)++;

  /* Clear the list of insns to fill so we can use it.  */
  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int flags;

      /* Only look at filled JUMP_INSNs that go to the end of function
	 label.  */
      if (GET_CODE (insn) != INSN
	  || GET_CODE (PATTERN (insn)) != SEQUENCE
	  || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
	  || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
	continue;

      pat = PATTERN (insn);
      jump_insn = XVECEXP (pat, 0, 0);

      /* If we can't make the jump into a RETURN, try to redirect it to the
	 best RETURN and go on to the next insn.  */
      if (! reorg_redirect_jump (jump_insn, NULL_RTX))
	{
	  /* Make sure redirecting the jump will not invalidate the delay
	     slot insns.  */
	  if (redirect_with_delay_slots_safe_p (jump_insn,
						real_return_label,
						insn))
	    reorg_redirect_jump (jump_insn, real_return_label);
	  continue;
	}

      /* See if this RETURN can accept the insns currently in its delay
	 slot.  It can if it has at least as many slots and the contents
	 of each are valid.  */

      flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
      slots = num_delay_slots (jump_insn);
      if (slots >= XVECLEN (pat, 0) - 1)
	{
	  for (i = 1; i < XVECLEN (pat, 0); i++)
	    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
		   (INSN_ANNULLED_BRANCH_P (jump_insn)
		    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
		   ? eligible_for_annul_false (jump_insn, i - 1,
					       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
		   (INSN_ANNULLED_BRANCH_P (jump_insn)
		    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
		   ? eligible_for_annul_true (jump_insn, i - 1,
					      XVECEXP (pat, 0, i), flags) :
#endif
		   eligible_for_delay (jump_insn, i - 1,
				       XVECEXP (pat, 0, i), flags)))
	      break;
	}
      else
	i = 0;
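
      /* Each slot insn was checked with the eligibility test matching how
	 it would be annulled.  If any test failed, or the jump had too few
	 slots (the  i = 0  case above), I is left unequal to
	 XVECLEN (pat, 0), which the test below uses to detect failure.  */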

      if (i == XVECLEN (pat, 0))
	continue;

      /* We have to do something with this insn.  If it is an unconditional
	 RETURN, delete the SEQUENCE and output the individual insns,
	 followed by the RETURN.  Then set things up so we try to find
	 insns for its delay slots, if it needs some.  */
      if (GET_CODE (PATTERN (jump_insn)) == RETURN)
	{
	  rtx prev = PREV_INSN (insn);

	  delete_related_insns (insn);
	  for (i = 1; i < XVECLEN (pat, 0); i++)
	    prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);

	  insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
	  emit_barrier_after (insn);

	  if (slots)
	    obstack_ptr_grow (&unfilled_slots_obstack, insn);
	}
      else
	/* It is probably more efficient to keep this with its current
	   delay slot as a branch to a RETURN.  */
	reorg_redirect_jump (jump_insn, real_return_label);
    }

  /* Now delete REAL_RETURN_LABEL if we never used it.  Then try to fill any
     new delay slots we have created.  */
  if (--LABEL_NUSES (real_return_label) == 0)
    delete_related_insns (real_return_label);

  fill_simple_delay_slots (1);
  fill_simple_delay_slots (0);
}
#endif
\f
/* Try to find insns to place in delay slots.  */

void
dbr_schedule (rtx first, FILE *file)
{
  rtx insn, next, epilogue_insn = 0;
  int i;
#if 0
  int old_flag_no_peephole = flag_no_peephole;

  /* Execute `final' once in prescan mode to delete any insns that won't be
     used.  Don't let final try to do any peephole optimization--it will
     ruin dataflow information for this pass.  */

  flag_no_peephole = 1;
  final (first, 0, NO_DEBUG, 1, 1);
  flag_no_peephole = old_flag_no_peephole;
#endif

  /* If the current function has no insns other than the prologue and
     epilogue, then do not try to fill any delay slots.  */
  if (n_basic_blocks == 0)
    return;

  /* Find the highest INSN_UID and allocate and initialize our map from
     INSN_UID's to position in code.  */
  for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_UID (insn) > max_uid)
	max_uid = INSN_UID (insn);
      if (GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
	epilogue_insn = insn;
    }

  uid_to_ruid = xmalloc ((max_uid + 1) * sizeof (int));
  for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
    uid_to_ruid[INSN_UID (insn)] = i;
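
  /* uid_to_ruid now maps each insn's UID to its ordinal position in the
     insn stream, so later code can compare the order of two insns in
     constant time:  uid_to_ruid[INSN_UID (a)] < uid_to_ruid[INSN_UID (b)]
     means A comes before B.  */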

  /* Initialize the list of insns that need filling.  */
  if (unfilled_firstobj == 0)
    {
      gcc_obstack_init (&unfilled_slots_obstack);
      unfilled_firstobj = obstack_alloc (&unfilled_slots_obstack, 0);
    }

  for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
    {
      rtx target;

      INSN_ANNULLED_BRANCH_P (insn) = 0;
      INSN_FROM_TARGET_P (insn) = 0;

      /* Skip vector tables.  We can't get attributes for them.  */
      if (GET_CODE (insn) == JUMP_INSN
	  && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
	continue;

      if (num_delay_slots (insn) > 0)
	obstack_ptr_grow (&unfilled_slots_obstack, insn);

      /* Ensure all jumps go to the last of a set of consecutive labels.  */
      if (GET_CODE (insn) == JUMP_INSN
	  && (condjump_p (insn) || condjump_in_parallel_p (insn))
	  && JUMP_LABEL (insn) != 0
	  && ((target = skip_consecutive_labels (JUMP_LABEL (insn)))
	      != JUMP_LABEL (insn)))
	redirect_jump (insn, target, 1);
    }

  init_resource_info (epilogue_insn);

  /* Show we haven't computed an end-of-function label yet.  */
  end_of_function_label = 0;

  /* Initialize the statistics for this function.  */
  memset (num_insns_needing_delays, 0, sizeof num_insns_needing_delays);
  memset (num_filled_delays, 0, sizeof num_filled_delays);

  /* Now do the delay slot filling.  Try everything twice in case earlier
     changes make more slots fillable.  */

  for (reorg_pass_number = 0;
       reorg_pass_number < MAX_REORG_PASSES;
       reorg_pass_number++)
    {
      fill_simple_delay_slots (1);
      fill_simple_delay_slots (0);
      fill_eager_delay_slots ();
      relax_delay_slots (first);
    }
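
  /* For instance, relax_delay_slots may delete a branch or thread it past
     other insns, which can make a slot fillable on the second pass that
     was not fillable on the first.  */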

  /* Delete any USE insns made by update_block; subsequent passes don't need
     them or know how to deal with them.  */
  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
	  && INSN_P (XEXP (PATTERN (insn), 0)))
	next = delete_related_insns (insn);
    }

  /* If we made an end of function label, indicate that it is now
     safe to delete it by undoing our prior adjustment to LABEL_NUSES.
     If it is now unused, delete it.  */
  if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
    delete_related_insns (end_of_function_label);

#ifdef HAVE_return
  if (HAVE_return && end_of_function_label != 0)
    make_return_insns (first);
#endif

  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  /* It is not clear why the line below is needed, but it does seem to be.  */
  unfilled_firstobj = obstack_alloc (&unfilled_slots_obstack, 0);

  if (file)
    {
      int i, j, need_comma;
      int total_delay_slots[MAX_DELAY_HISTOGRAM + 1];
      int total_annul_slots[MAX_DELAY_HISTOGRAM + 1];

      for (reorg_pass_number = 0;
	   reorg_pass_number < MAX_REORG_PASSES;
	   reorg_pass_number++)
	{
	  fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
	  for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
	    {
	      need_comma = 0;
	      fprintf (file, ";; Reorg function #%d\n", i);

	      fprintf (file, ";; %d insns needing delay slots\n;; ",
		       num_insns_needing_delays[i][reorg_pass_number]);

	      for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
		if (num_filled_delays[i][j][reorg_pass_number])
		  {
		    if (need_comma)
		      fprintf (file, ", ");
		    need_comma = 1;
		    fprintf (file, "%d got %d delays",
			     num_filled_delays[i][j][reorg_pass_number], j);
		  }
	      fprintf (file, "\n");
	    }
	}
      memset (total_delay_slots, 0, sizeof total_delay_slots);
      memset (total_annul_slots, 0, sizeof total_annul_slots);
      for (insn = first; insn; insn = NEXT_INSN (insn))
	{
	  if (! INSN_DELETED_P (insn)
	      && GET_CODE (insn) == INSN
	      && GET_CODE (PATTERN (insn)) != USE
	      && GET_CODE (PATTERN (insn)) != CLOBBER)
	    {
	      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
		{
		  j = XVECLEN (PATTERN (insn), 0) - 1;
		  if (j > MAX_DELAY_HISTOGRAM)
		    j = MAX_DELAY_HISTOGRAM;
		  if (INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (insn), 0, 0)))
		    total_annul_slots[j]++;
		  else
		    total_delay_slots[j]++;
		}
	      else if (num_delay_slots (insn) > 0)
		total_delay_slots[0]++;
	    }
	}
      fprintf (file, ";; Reorg totals: ");
      need_comma = 0;
      for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
	{
	  if (total_delay_slots[j])
	    {
	      if (need_comma)
		fprintf (file, ", ");
	      need_comma = 1;
	      fprintf (file, "%d got %d delays", total_delay_slots[j], j);
	    }
	}
      fprintf (file, "\n");
#if defined (ANNUL_IFTRUE_SLOTS) || defined (ANNUL_IFFALSE_SLOTS)
      fprintf (file, ";; Reorg annuls: ");
      need_comma = 0;
      for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
	{
	  if (total_annul_slots[j])
	    {
	      if (need_comma)
		fprintf (file, ", ");
	      need_comma = 1;
	      fprintf (file, "%d got %d delays", total_annul_slots[j], j);
	    }
	}
      fprintf (file, "\n");
#endif
      fprintf (file, "\n");
    }
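
  /* For illustration only, the totals line printed above might read

	 ;; Reorg totals: 12 got 0 delays, 7 got 1 delays

     where the counts here are invented; the format follows directly from
     the fprintf calls above.  */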

  /* For all JUMP insns, fill in branch prediction notes, so that during
     assembler output a target can set branch prediction bits in the code.
     We have to do this now, since up to this point the destinations of
     jumps can still be moved around and changed, but beyond this point
     they cannot.  */
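
  /* The note attached below has the form
	 (expr_list:REG_BR_PRED (const_int <flags>) <rest of REG_NOTES>)
     where <flags> holds the value computed by get_jump_flags for the
     jump and its target.  */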
  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int pred_flags;

      if (GET_CODE (insn) == INSN)
	{
	  rtx pat = PATTERN (insn);

	  if (GET_CODE (pat) == SEQUENCE)
	    insn = XVECEXP (pat, 0, 0);
	}
      if (GET_CODE (insn) != JUMP_INSN)
	continue;

      pred_flags = get_jump_flags (insn, JUMP_LABEL (insn));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_BR_PRED,
					    GEN_INT (pred_flags),
					    REG_NOTES (insn));
    }
  free_resource_info ();
  free (uid_to_ruid);
#ifdef DELAY_SLOTS_FOR_EPILOGUE
  /* The SPARC assembler, for instance, emits a warning when debug info is
     output into a delay slot.  */
  {
    rtx link;

    for (link = current_function_epilogue_delay_list;
	 link;
	 link = XEXP (link, 1))
      INSN_LOCATOR (XEXP (link, 0)) = 0;
  }
#endif
}
#endif /* DELAY_SLOTS */