/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This is the pathetic reminder of old fame of the jump-optimization pass
   of the compiler.  Now it contains basically a set of utility functions
   to operate with jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.

   The subroutines redirect_jump and invert_jump are used
   from other passes as well.  */
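
/* For illustration (a hypothetical fragment, not from any real insn
   stream): a label used by exactly one jump looks like

       L1:                                     ;; CODE_LABEL, LABEL_NUSES (L1) == 1
         ...
         (jump_insn (set (pc) (label_ref L1))) ;; JUMP_LABEL == L1

   Deleting the jump drops LABEL_NUSES (L1) to zero, at which point the
   label itself is dead and can be removed too.  */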

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "real.h"
#include "except.h"
#include "diagnostic.h"
#include "toplev.h"
#include "reload.h"
#include "predict.h"
#include "timevar.h"

/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

static rtx next_nonnote_insn_in_loop (rtx);
static void init_label_info (rtx);
static void mark_all_labels (rtx);
static int duplicate_loop_exit_test (rtx);
static void delete_computation (rtx);
static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
static int redirect_exp (rtx, rtx, rtx);
static void invert_exp_1 (rtx);
static int invert_exp (rtx);
static int returnjump_p_1 (rtx *, void *);
static void delete_prior_computation (rtx, rtx);
\f
/* Alternate entry into the jump optimizer.  This entry point only rebuilds
   the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
   instructions.  */
void
rebuild_jump_labels (rtx f)
{
  rtx insn;

  timevar_push (TV_REBUILD_JUMP);
  init_label_info (f);
  mark_all_labels (f);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
      LABEL_NUSES (XEXP (insn, 0))++;
  timevar_pop (TV_REBUILD_JUMP);
}
\f
/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
   non-fallthru insn.  This is not generally true, as multiple barriers
   may have crept in, or the BARRIER may be separated from the last
   real insn by one or more NOTEs.

   This simple pass moves barriers and removes duplicates so that the
   old code is happy.  */
void
cleanup_barriers (void)
{
  rtx insn, next, prev;
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == BARRIER)
        {
          prev = prev_nonnote_insn (insn);
          if (GET_CODE (prev) == BARRIER)
            delete_barrier (insn);
          else if (prev != PREV_INSN (insn))
            reorder_insns (insn, insn, prev);
        }
    }
}
\f
/* Return the next insn after INSN that is not a NOTE and is in the loop,
   i.e. return NULL_RTX when there is no such insn before NOTE_INSN_LOOP_END.
   This routine does not look inside SEQUENCEs.  */

static rtx
next_nonnote_insn_in_loop (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || GET_CODE (insn) != NOTE)
        break;
      if (GET_CODE (insn) == NOTE
          && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
        return NULL_RTX;
    }

  return insn;
}

void
copy_loop_headers (rtx f)
{
  rtx insn, next;
  /* Now iterate optimizing jumps until nothing changes over one pass.  */
  for (insn = f; insn; insn = next)
    {
      rtx temp, temp1;

      next = NEXT_INSN (insn);

      /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
         jump.  Try to optimize by duplicating the loop exit test if so.
         This is only safe immediately after regscan, because it uses
         the values of regno_first_uid and regno_last_uid.  */
      if (GET_CODE (insn) == NOTE
          && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
          && (temp1 = next_nonnote_insn_in_loop (insn)) != 0
          && any_uncondjump_p (temp1) && onlyjump_p (temp1))
        {
          temp = PREV_INSN (insn);
          if (duplicate_loop_exit_test (insn))
            {
              next = NEXT_INSN (temp);
            }
        }
    }
}

void
purge_line_number_notes (rtx f)
{
  rtx last_note = 0;
  rtx insn;
  /* Delete extraneous line number notes.
     Note that two consecutive notes for different lines are not really
     extraneous.  There should be some indication where that line belonged,
     even if it became empty.  */

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
        if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
          /* Any previous line note was for the prologue; gdb wants a new
             note after the prologue even if it is for the same line.  */
          last_note = NULL_RTX;
        else if (NOTE_LINE_NUMBER (insn) >= 0)
          {
            /* Delete this note if it is identical to previous note.  */
            if (last_note
                && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
                && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
              {
                delete_related_insns (insn);
                continue;
              }

            last_note = insn;
          }
      }
}
\f
/* Initialize LABEL_NUSES and JUMP_LABEL fields.  Delete any REG_LABEL
   notes whose labels don't occur in the insn any more.  */
static void
init_label_info (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == CODE_LABEL)
      LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
    else if (GET_CODE (insn) == JUMP_INSN)
      JUMP_LABEL (insn) = 0;
    else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
      {
        rtx note, next;

        for (note = REG_NOTES (insn); note; note = next)
          {
            next = XEXP (note, 1);
            if (REG_NOTE_KIND (note) == REG_LABEL
                && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
              remove_note (insn, note);
          }
      }
}

/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.  */

static void
mark_all_labels (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        if (GET_CODE (insn) == CALL_INSN
            && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
          {
            mark_all_labels (XEXP (PATTERN (insn), 0));
            mark_all_labels (XEXP (PATTERN (insn), 1));
            mark_all_labels (XEXP (PATTERN (insn), 2));

            /* Canonicalize the tail recursion label attached to the
               CALL_PLACEHOLDER insn.  */
            if (XEXP (PATTERN (insn), 3))
              {
                rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
                                                   XEXP (PATTERN (insn), 3));
                mark_jump_label (label_ref, insn, 0);
                XEXP (PATTERN (insn), 3) = XEXP (label_ref, 0);
              }

            continue;
          }

        mark_jump_label (PATTERN (insn), insn, 0);
        if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
          {
            /* When we know the LABEL_REF contained in a REG used in
               an indirect jump, we'll have a REG_LABEL note so that
               flow can tell where it's going.  */
            if (JUMP_LABEL (insn) == 0)
              {
                rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
                if (label_note)
                  {
                    /* Put a LABEL_REF around the REG_LABEL note, so
                       that we can canonicalize it.  */
                    rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
                                                       XEXP (label_note, 0));

                    mark_jump_label (label_ref, insn, 0);
                    XEXP (label_note, 0) = XEXP (label_ref, 0);
                    JUMP_LABEL (insn) = XEXP (label_note, 0);
                  }
              }
          }
      }
}

/* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
   jump.  Assume that this unconditional jump is to the exit test code.  If
   the code is sufficiently simple, make a copy of it before LOOP_START,
   followed by a jump to the exit of the loop.  Then delete the unconditional
   jump after LOOP_START.

   Return 1 if we made the change, else 0.

   This is only safe immediately after a regscan pass because it uses the
   values of regno_first_uid and regno_last_uid.  */
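
/* Schematically (an illustrative sketch, not real RTL), this turns

       NOTE_INSN_LOOP_BEG
         jump exit_test
       body:
         ...
       exit_test:
         if (cond) jump body
       NOTE_INSN_LOOP_END

   into a loop whose exit test also runs once before the loop is
   entered, so the unconditional jump to the test can be deleted and
   each iteration ends in a single conditional branch.  */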

static int
duplicate_loop_exit_test (rtx loop_start)
{
  rtx insn, set, reg, p, link;
  rtx copy = 0, first_copy = 0;
  int num_insns = 0;
  rtx exitcode
    = NEXT_INSN (JUMP_LABEL (next_nonnote_insn_in_loop (loop_start)));
  rtx lastexit;
  int max_reg = max_reg_num ();
  rtx *reg_map = 0;
  rtx loop_pre_header_label;

  /* Scan the exit code.  We do not perform this optimization if any insn:

         is a CALL_INSN
         is a CODE_LABEL
         has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
         is a NOTE_INSN_LOOP_BEG because this means we have a nested loop

     We also do not do this if we find an insn with ASM_OPERANDS.  While
     this restriction should not be necessary, copying an insn with
     ASM_OPERANDS can confuse asm_noperands in some cases.

     Also, don't do this if the exit code is more than 20 insns.  */

  for (insn = exitcode;
       insn
       && ! (GET_CODE (insn) == NOTE
             && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
       insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        case CODE_LABEL:
        case CALL_INSN:
          return 0;
        case NOTE:

          if (optimize < 2
              && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
                  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
            /* If we were to duplicate this code, we would not move
               the BLOCK notes, and so debugging the moved code would
               be difficult.  Thus, we only move the code with -O2 or
               higher.  */
            return 0;

          break;
        case JUMP_INSN:
        case INSN:
          if (++num_insns > 20
              || find_reg_note (insn, REG_RETVAL, NULL_RTX)
              || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
            return 0;
          break;
        default:
          break;
        }
    }

  /* Unless INSN is zero, we can do the optimization.  */
  if (insn == 0)
    return 0;

  lastexit = insn;

  /* See if any insn sets a register only used in the loop exit code and
     not a user variable.  If so, replace it with a new register.  */
  for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN
        && (set = single_set (insn)) != 0
        && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
            || (GET_CODE (reg) == SUBREG
                && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
        && REGNO (reg) >= FIRST_PSEUDO_REGISTER
        && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
      {
        for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
          if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
            break;

        if (p != lastexit)
          {
            /* We can do the replacement.  Allocate reg_map if this is the
               first replacement we found.  */
            if (reg_map == 0)
              reg_map = xcalloc (max_reg, sizeof (rtx));

            REG_LOOP_TEST_P (reg) = 1;

            reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
          }
      }
  loop_pre_header_label = gen_label_rtx ();

  /* Now copy each insn.  */
  for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        case BARRIER:
          copy = emit_barrier_before (loop_start);
          break;
        case NOTE:
          /* Only copy line-number notes.  */
          if (NOTE_LINE_NUMBER (insn) >= 0)
            {
              copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
              NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
            }
          break;

        case INSN:
          copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
          if (reg_map)
            replace_regs (PATTERN (copy), reg_map, max_reg, 1);

          mark_jump_label (PATTERN (copy), copy, 0);
          INSN_LOCATOR (copy) = INSN_LOCATOR (insn);

          /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
             make them.  */
          for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
            if (REG_NOTE_KIND (link) != REG_LABEL)
              {
                if (GET_CODE (link) == EXPR_LIST)
                  REG_NOTES (copy)
                    = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
                                                      XEXP (link, 0),
                                                      REG_NOTES (copy)));
                else
                  REG_NOTES (copy)
                    = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
                                                      XEXP (link, 0),
                                                      REG_NOTES (copy)));
              }

          if (reg_map && REG_NOTES (copy))
            replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
          break;

        case JUMP_INSN:
          copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
                                        loop_start);
          INSN_LOCATOR (copy) = INSN_LOCATOR (insn);
          if (reg_map)
            replace_regs (PATTERN (copy), reg_map, max_reg, 1);
          mark_jump_label (PATTERN (copy), copy, 0);
          if (REG_NOTES (insn))
            {
              REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
              if (reg_map)
                replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
            }

          /* Predict conditional jumps that keep the loop looping as taken.
             Other jumps are probably exit conditions, so predict
             them as untaken.  */
          if (any_condjump_p (copy))
            {
              rtx label = JUMP_LABEL (copy);
              if (label)
                {
                  /* The jump_insn after loop_start should be followed
                     by a barrier and the loopback label.  */
                  if (prev_nonnote_insn (label)
                      && (prev_nonnote_insn (prev_nonnote_insn (label))
                          == next_nonnote_insn (loop_start)))
                    {
                      predict_insn_def (copy, PRED_LOOP_HEADER, TAKEN);
                      /* To keep the pre-header, we need to redirect all loop
                         entrances before the LOOP_BEG note.  */
                      redirect_jump (copy, loop_pre_header_label, 0);
                    }
                  else
                    predict_insn_def (copy, PRED_LOOP_HEADER, NOT_TAKEN);
                }
            }
          break;

        default:
          abort ();
        }

      /* Record the first insn we copied.  We need it so that we can
         scan the copied insns for new pseudo registers.  */
      if (! first_copy)
        first_copy = copy;
    }

  /* Now clean up by emitting a jump to the end label and deleting the jump
     at the start of the loop.  */
  if (! copy || GET_CODE (copy) != BARRIER)
    {
      copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
                                    loop_start);

      /* Record the first insn we copied.  We need it so that we can
         scan the copied insns for new pseudo registers.  This may not
         be strictly necessary since we should have copied at least one
         insn above.  But I am going to be safe.  */
      if (! first_copy)
        first_copy = copy;

      mark_jump_label (PATTERN (copy), copy, 0);
      emit_barrier_before (loop_start);
    }

  emit_label_before (loop_pre_header_label, loop_start);

  /* Now scan from the first insn we copied to the last insn we copied
     (copy) for new pseudo registers.  Do this after the code to jump to
     the end label since that might create a new pseudo too.  */
  reg_scan_update (first_copy, copy, max_reg);

  /* Mark the exit code as the virtual top of the converted loop.  */
  emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);

  delete_related_insns (next_nonnote_insn (loop_start));

  /* Clean up.  */
  if (reg_map)
    free (reg_map);

  return 1;
}
\f
/* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
   loop-end notes between START and END out before START.  START and END
   may be such notes.  Returns the values of the new starting and ending
   insns, which may be different if the original ones were such notes.
   Return true if there were only such notes and no real instructions.  */

bool
squeeze_notes (rtx* startp, rtx* endp)
{
  rtx start = *startp;
  rtx end = *endp;

  rtx insn;
  rtx next;
  rtx last = NULL;
  rtx past_end = NEXT_INSN (end);

  for (insn = start; insn != past_end; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE
          && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
              || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
              || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
              || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
              || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
              || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
        {
          if (insn == start)
            start = next;
          else
            {
              rtx prev = PREV_INSN (insn);
              PREV_INSN (insn) = PREV_INSN (start);
              NEXT_INSN (insn) = start;
              NEXT_INSN (PREV_INSN (insn)) = insn;
              PREV_INSN (NEXT_INSN (insn)) = insn;
              NEXT_INSN (prev) = next;
              PREV_INSN (next) = prev;
            }
        }
      else
        last = insn;
    }

  /* There were no real instructions.  */
  if (start == past_end)
    return true;

  end = last;

  *startp = start;
  *endp = end;
  return false;
}
\f
/* Return the label before INSN, or put a new label there.  */

rtx
get_label_before (rtx insn)
{
  rtx label;

  /* Find an existing label at this point
     or make a new one if there is none.  */
  label = prev_nonnote_insn (insn);

  if (label == 0 || GET_CODE (label) != CODE_LABEL)
    {
      rtx prev = PREV_INSN (insn);

      label = gen_label_rtx ();
      emit_label_after (label, prev);
      LABEL_NUSES (label) = 0;
    }
  return label;
}

/* Return the label after INSN, or put a new label there.  */

rtx
get_label_after (rtx insn)
{
  rtx label;

  /* Find an existing label at this point
     or make a new one if there is none.  */
  label = next_nonnote_insn (insn);

  if (label == 0 || GET_CODE (label) != CODE_LABEL)
    {
      label = gen_label_rtx ();
      emit_label_after (label, insn);
      LABEL_NUSES (label) = 0;
    }
  return label;
}
\f
/* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
   of reversed comparison if it is possible to do so.  Otherwise return
   UNKNOWN.  UNKNOWN may also be returned if we have a CC_MODE comparison
   and don't know whether its source is a floating-point or an integer
   comparison.  The machine description should define the REVERSIBLE_CC_MODE
   and REVERSE_CONDITION macros to help this function avoid overhead in
   these cases.  */
enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, rtx arg0, rtx arg1, rtx insn)
{
  enum machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if the machine description supplies us a way to reverse the
     comparison.  Give it priority over everything else to allow the
     machine description to do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    {
#ifdef REVERSE_CONDITION
      return REVERSE_CONDITION (code, mode);
#endif
      return reverse_condition (code);
    }

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for floating
         point.  Similarly the unsigned comparisons are never used for
         floating point, so we can reverse them in the default way.  */
      return reverse_condition (code);
    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* Once we see an unordered comparison, we can be sure to be
         dealing with floating point, so we don't need any more tests.  */
      return reverse_condition_maybe_unordered (code);
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have a safe way to reverse these yet.  */
      return UNKNOWN;
    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
    {
      rtx prev;
      /* Try to search for the comparison to determine the real mode.
         This code is expensive, but with a sane machine description it
         will never be used, since REVERSIBLE_CC_MODE will return true
         in all cases.  */
      if (! insn)
        return UNKNOWN;

      for (prev = prev_nonnote_insn (insn);
           prev != 0 && GET_CODE (prev) != CODE_LABEL;
           prev = prev_nonnote_insn (prev))
        {
          rtx set = set_of (arg0, prev);
          if (set && GET_CODE (set) == SET
              && rtx_equal_p (SET_DEST (set), arg0))
            {
              rtx src = SET_SRC (set);

              if (GET_CODE (src) == COMPARE)
                {
                  rtx comparison = src;
                  arg0 = XEXP (src, 0);
                  mode = GET_MODE (arg0);
                  if (mode == VOIDmode)
                    mode = GET_MODE (XEXP (comparison, 1));
                  break;
                }
              /* We can get past reg-reg moves.  This may be useful for
                 modeling i387 comparisons that first move flag registers
                 around.  */
              if (REG_P (src))
                {
                  arg0 = src;
                  continue;
                }
            }
          /* If the register is clobbered in some way we don't
             understand, give up.  */
          if (set)
            return UNKNOWN;
        }
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (GET_CODE (arg0) == CONST_INT
      || (GET_MODE (arg0) != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_CC
          && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}
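
/* For example (illustrative): for an integer comparison,
   reversed_comparison_code_parts (GT, a, b, insn) yields LE, whereas
   for an IEEE floating-point comparison whose mode honors NaNs it
   yields UNKNOWN, because !(a > b) is not equivalent to (a <= b) when
   an operand is a NaN.  */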

/* A wrapper around the previous function to take COMPARISON as an rtx
   expression.  This simplifies many callers.  */
enum rtx_code
reversed_comparison_code (rtx comparison, rtx insn)
{
  if (!COMPARISON_P (comparison))
    return UNKNOWN;
  return reversed_comparison_code_parts (GET_CODE (comparison),
                                         XEXP (comparison, 0),
                                         XEXP (comparison, 1), insn);
}
\f
/* Given an rtx-code for a comparison, return the code for the negated
   comparison.  If no such code exists, return UNKNOWN.

   WATCH OUT!  reverse_condition is not safe to use on a jump that might
   be acting on the results of an IEEE floating point comparison, because
   of the special treatment of non-signaling NaNs in comparisons.
   Use reversed_comparison_code instead.  */

enum rtx_code
reverse_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return LE;
    case GE:
      return LT;
    case LT:
      return GE;
    case LE:
      return GT;
    case GTU:
      return LEU;
    case GEU:
      return LTU;
    case LTU:
      return GEU;
    case LEU:
      return GTU;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;

    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
    case UNEQ:
    case LTGT:
      return UNKNOWN;

    default:
      abort ();
    }
}
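
/* For instance, reverse_condition (GT) is LE: !(a > b) and (a <= b)
   agree on integers but differ under IEEE semantics when a NaN is
   involved.  The unordered-aware variant below maps GT to UNLE
   instead, which is why IEEE-sensitive callers must go through
   reversed_comparison_code.  */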

/* Similar, but we're allowed to generate unordered comparisons, which
   makes it safe for IEEE floating-point.  Of course, we have to recognize
   that the target will support them too...  */

enum rtx_code
reverse_condition_maybe_unordered (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return UNLE;
    case GE:
      return UNLT;
    case LT:
      return UNGE;
    case LE:
      return UNGT;
    case LTGT:
      return UNEQ;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;
    case UNLT:
      return GE;
    case UNLE:
      return GT;
    case UNGT:
      return LE;
    case UNGE:
      return LT;
    case UNEQ:
      return LTGT;

    default:
      abort ();
    }
}

/* Similar, but return the code when two operands of a comparison are swapped.
   This IS safe for IEEE floating-point.  */

enum rtx_code
swap_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case LTGT:
      return code;

    case GT:
      return LT;
    case GE:
      return LE;
    case LT:
      return GT;
    case LE:
      return GE;
    case GTU:
      return LTU;
    case GEU:
      return LEU;
    case LTU:
      return GTU;
    case LEU:
      return GEU;
    case UNLT:
      return UNGT;
    case UNLE:
      return UNGE;
    case UNGT:
      return UNLT;
    case UNGE:
      return UNLE;

    default:
      abort ();
    }
}
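
/* E.g. swap_condition (GT) is LT, since (GT a b) and (LT b a) are
   equivalent.  Unlike reversal, swapping only renames the operands, so
   it remains correct when a NaN may appear: both orderings are
   unordered in exactly the same cases.  */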

/* Given a comparison CODE, return the corresponding unsigned comparison.
   If CODE is an equality comparison or already an unsigned comparison,
   CODE is returned.  */

enum rtx_code
unsigned_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GTU:
    case GEU:
    case LTU:
    case LEU:
      return code;

    case GT:
      return GTU;
    case GE:
      return GEU;
    case LT:
      return LTU;
    case LE:
      return LEU;

    default:
      abort ();
    }
}

/* Similarly, return the signed version of a comparison.  */

enum rtx_code
signed_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GT:
    case GE:
    case LT:
    case LE:
      return code;

    case GTU:
      return GT;
    case GEU:
      return GE;
    case LTU:
      return LT;
    case LEU:
      return LE;

    default:
      abort ();
    }
}
\f
/* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
   truth of CODE1 implies the truth of CODE2.  */

int
comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
{
  /* UNKNOWN comparison codes can happen as a result of trying to reverse
     comparison codes.
     They can't match anything, so we have to reject them here.  */
  if (code1 == UNKNOWN || code2 == UNKNOWN)
    return 0;

  if (code1 == code2)
    return 1;

  switch (code1)
    {
    case UNEQ:
      if (code2 == UNLE || code2 == UNGE)
        return 1;
      break;

    case EQ:
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
          || code2 == ORDERED)
        return 1;
      break;

    case UNLT:
      if (code2 == UNLE || code2 == NE)
        return 1;
      break;

    case LT:
      if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
        return 1;
      break;

    case UNGT:
      if (code2 == UNGE || code2 == NE)
        return 1;
      break;

    case GT:
      if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
        return 1;
      break;

    case GE:
    case LE:
      if (code2 == ORDERED)
        return 1;
      break;

    case LTGT:
      if (code2 == NE || code2 == ORDERED)
        return 1;
      break;

    case LTU:
      if (code2 == LEU || code2 == NE)
        return 1;
      break;

    case GTU:
      if (code2 == GEU || code2 == NE)
        return 1;
      break;

    case UNORDERED:
      if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
          || code2 == UNGE || code2 == UNGT)
        return 1;
      break;

    default:
      break;
    }

  return 0;
}
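
/* For example, comparison_dominates_p (GT, NE) is 1, since a > b
   implies a != b; comparison_dominates_p (LE, LT) is 0, since a == b
   satisfies LE but not LT.  */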
\f
/* Return 1 if INSN is an unconditional jump and nothing else.  */

int
simplejump_p (rtx insn)
{
  return (GET_CODE (insn) == JUMP_INSN
          && GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_DEST (PATTERN (insn))) == PC
          && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
}

/* Return nonzero if INSN is a (possibly) conditional jump
   and nothing more.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_p (rtx insn)
{
  rtx x = PATTERN (insn);

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != PC)
    return 0;

  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return 1;
  else
    return (GET_CODE (x) == IF_THEN_ELSE
            && ((GET_CODE (XEXP (x, 2)) == PC
                 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
                     || GET_CODE (XEXP (x, 1)) == RETURN))
                || (GET_CODE (XEXP (x, 1)) == PC
                    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
                        || GET_CODE (XEXP (x, 2)) == RETURN))));

  return 0;
}
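
/* The forms accepted above are, schematically,

       (set (pc) (label_ref L))

   for a direct jump, and

       (set (pc) (if_then_else (COND) (label_ref L) (pc)))

   for a conditional jump, with LABEL_REF or RETURN allowed in either
   arm of the IF_THEN_ELSE.  */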

/* Return nonzero if INSN is a (possibly) conditional jump inside a
   PARALLEL.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_in_parallel_p (rtx insn)
{
  rtx x = PATTERN (insn);

  if (GET_CODE (x) != PARALLEL)
    return 0;
  else
    x = XVECEXP (x, 0, 0);

  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  if (XEXP (SET_SRC (x), 2) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
          || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
          || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
    return 1;
  return 0;
}

/* Return set of PC, otherwise NULL.  */

rtx
pc_set (rtx insn)
{
  rtx pat;
  if (GET_CODE (insn) != JUMP_INSN)
    return NULL_RTX;
  pat = PATTERN (insn);

  /* The set is allowed to appear either as the insn pattern or
     the first set in a PARALLEL.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
    return pat;

  return NULL_RTX;
}
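
/* Schematically, for a pattern such as

       (parallel [(set (pc) (label_ref L))
                  (clobber (reg:SI flags))])

   pc_set returns the inner (set (pc) (label_ref L)) expression.  */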

/* Return true when insn is an unconditional direct jump,
   possibly bundled inside a PARALLEL.  */

int
any_uncondjump_p (rtx insn)
{
  rtx x = pc_set (insn);
  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != LABEL_REF)
    return 0;
  return 1;
}

/* Return true when insn is a conditional jump.  This function works for
   instructions containing PC sets in PARALLELs.  The instruction may have
   various other effects, so before removing the jump you must verify
   onlyjump_p.

   Note that unlike condjump_p it returns false for unconditional jumps.  */

int
any_condjump_p (rtx insn)
{
  rtx x = pc_set (insn);
  enum rtx_code a, b;

  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;

  a = GET_CODE (XEXP (SET_SRC (x), 1));
  b = GET_CODE (XEXP (SET_SRC (x), 2));

  return ((b == PC && (a == LABEL_REF || a == RETURN))
          || (a == PC && (b == LABEL_REF || b == RETURN)));
}

/* Return the label of a conditional jump.  */

rtx
condjump_label (rtx insn)
{
  rtx x = pc_set (insn);

  if (!x)
    return NULL_RTX;
  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return x;
  if (GET_CODE (x) != IF_THEN_ELSE)
    return NULL_RTX;
  if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
    return XEXP (x, 1);
  if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    return XEXP (x, 2);
  return NULL_RTX;
}

/* Return true if INSN is a (possibly conditional) return insn.  */

static int
returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *loc;

  return x && (GET_CODE (x) == RETURN
               || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
}

int
returnjump_p (rtx insn)
{
  if (GET_CODE (insn) != JUMP_INSN)
    return 0;
  return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
}

/* Return true if INSN is a jump that only transfers control and
   nothing more.  */

int
onlyjump_p (rtx insn)
{
  rtx set;

  if (GET_CODE (insn) != JUMP_INSN)
    return 0;

  set = single_set (insn);
  if (set == NULL)
    return 0;
  if (GET_CODE (SET_DEST (set)) != PC)
    return 0;
  if (side_effects_p (SET_SRC (set)))
    return 0;

  return 1;
}

#ifdef HAVE_cc0

/* Return nonzero if X is an RTX that only sets the condition codes
   and has no side effects.  */

int
only_sets_cc0_p (rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  return sets_cc0_p (x) == 1 && ! side_effects_p (x);
}

/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  */

int
sets_cc0_p (rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        {
          if (GET_CODE (XVECEXP (x, 0, i)) == SET
              && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
            sets_cc0 = 1;
          else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
            other_things = 1;
        }
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
}
#endif
\f
/* Follow any unconditional jump at LABEL;
   return the ultimate label reached by any such chain of jumps.
   If LABEL is not followed by a jump, return LABEL.
   If the chain loops or we can't find the end, return LABEL,
   since that tells the caller to avoid changing the insn.

   If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
   a USE or CLOBBER.  */

rtx
follow_jumps (rtx label)
{
  rtx insn;
  rtx next;
  rtx value = label;
  int depth;

  for (depth = 0;
       (depth < 10
        && (insn = next_active_insn (value)) != 0
        && GET_CODE (insn) == JUMP_INSN
        && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
             && onlyjump_p (insn))
            || GET_CODE (PATTERN (insn)) == RETURN)
        && (next = NEXT_INSN (insn))
        && GET_CODE (next) == BARRIER);
       depth++)
    {
      /* Don't chain through the insn that jumps into a loop
         from outside the loop,
         since that would create multiple loop entry jumps
         and prevent loop optimization.  */
      rtx tem;
      if (!reload_completed)
        for (tem = value; tem != insn; tem = NEXT_INSN (tem))
          if (GET_CODE (tem) == NOTE
              && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
                  /* ??? Optional.  Disables some optimizations, but makes
                     gcov output more accurate with -O.  */
                  || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
            return value;

      /* If we have found a cycle, make the insn jump to itself.  */
      if (JUMP_LABEL (insn) == label)
        return label;

      tem = next_active_insn (JUMP_LABEL (insn));
      if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
                  || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
        break;

      value = JUMP_LABEL (insn);
    }
  if (depth == 10)
    return label;
  return value;
}
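
/* To illustrate with a hypothetical chain: given

       L1: jump L2
       L2: jump L3
       L3: ...real code...

   follow_jumps (L1) returns L3, so a jump to L1 can be retargeted
   directly to L3.  The depth limit of 10 also bounds the work done on
   degenerate cyclic chains.  */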

\f
/* Find all CODE_LABELs referred to in X, and increment their use counts.
   If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
   in INSN, then store one of them in JUMP_LABEL (INSN).
   If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
   referenced in INSN, add a REG_LABEL note containing that label to INSN.
   Also, when there are consecutive labels, canonicalize on the last of them.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  */

void
mark_jump_label (rtx x, rtx insn, int in_mem)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case PC:
    case CC0:
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      in_mem = 1;
      break;

    case SYMBOL_REF:
      if (!in_mem)
        return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
        mark_jump_label (get_pool_constant (x), insn, in_mem);
      break;

    case LABEL_REF:
      {
        rtx label = XEXP (x, 0);

        /* Ignore remaining references to unreachable labels that
           have been deleted.  */
        if (GET_CODE (label) == NOTE
            && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
          break;

        if (GET_CODE (label) != CODE_LABEL)
          abort ();

        /* Ignore references to labels of containing functions.  */
        if (LABEL_REF_NONLOCAL_P (x))
          break;

        XEXP (x, 0) = label;
        if (! insn || ! INSN_DELETED_P (insn))
          ++LABEL_NUSES (label);

        if (insn)
          {
            if (GET_CODE (insn) == JUMP_INSN)
              JUMP_LABEL (insn) = label;
            else
              {
                /* Add a REG_LABEL note for LABEL unless there already
                   is one.  All uses of a label, except for labels
                   that are the targets of jumps, must have a
                   REG_LABEL note.  */
                if (! find_reg_note (insn, REG_LABEL, label))
                  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
                                                        REG_NOTES (insn));
              }
          }
        return;
      }

    /* Do walk the labels in a vector, but not the first operand of an
       ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
        {
          int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

          for (i = 0; i < XVECLEN (x, eltnum); i++)
            mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
        }
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_jump_label (XEXP (x, i), insn, in_mem);
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
        }
    }
}

/* If all INSN does is set the pc, delete it,
   and delete the insn that set the condition codes for it
   if that's what the previous thing was.  */

void
delete_jump (rtx insn)
{
  rtx set = single_set (insn);

  if (set && GET_CODE (SET_DEST (set)) == PC)
    delete_computation (insn);
}

/* Verify INSN is a BARRIER and delete it.  */

void
delete_barrier (rtx insn)
{
  if (GET_CODE (insn) != BARRIER)
    abort ();

  delete_insn (insn);
}

/* Recursively delete prior insns that compute the value (used only by INSN
   which the caller is deleting) stored in the register mentioned by NOTE
   which is a REG_DEAD note associated with INSN.  */

static void
delete_prior_computation (rtx note, rtx insn)
{
  rtx our_prev;
  rtx reg = XEXP (note, 0);

  for (our_prev = prev_nonnote_insn (insn);
       our_prev && (GET_CODE (our_prev) == INSN
                    || GET_CODE (our_prev) == CALL_INSN);
       our_prev = prev_nonnote_insn (our_prev))
    {
      rtx pat = PATTERN (our_prev);

      /* If we reach a CALL which is not calling a const function
         or the callee pops the arguments, then give up.  */
      if (GET_CODE (our_prev) == CALL_INSN
          && (! CONST_OR_PURE_CALL_P (our_prev)
              || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
        break;

      /* If we reach a SEQUENCE, it is too complex to try to
         do anything with it, so give up.  We can be run during
         and after reorg, so SEQUENCE rtl can legitimately show
         up here.  */
      if (GET_CODE (pat) == SEQUENCE)
        break;

      if (GET_CODE (pat) == USE
          && GET_CODE (XEXP (pat, 0)) == INSN)
        /* reorg creates USEs that look like this.  We leave them
           alone because reorg needs them for its own purposes.  */
        break;

      if (reg_set_p (reg, pat))
        {
          if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
            break;

          if (GET_CODE (pat) == PARALLEL)
            {
              /* If we find a SET of something else, we can't
                 delete the insn.  */

              int i;

              for (i = 0; i < XVECLEN (pat, 0); i++)
                {
                  rtx part = XVECEXP (pat, 0, i);

                  if (GET_CODE (part) == SET
                      && SET_DEST (part) != reg)
                    break;
                }

              if (i == XVECLEN (pat, 0))
                delete_computation (our_prev);
            }
          else if (GET_CODE (pat) == SET
                   && GET_CODE (SET_DEST (pat)) == REG)
            {
              int dest_regno = REGNO (SET_DEST (pat));
              int dest_endregno
                = (dest_regno
                   + (dest_regno < FIRST_PSEUDO_REGISTER
                      ? hard_regno_nregs[dest_regno]
                                        [GET_MODE (SET_DEST (pat))] : 1));
              int regno = REGNO (reg);
              int endregno
                = (regno
                   + (regno < FIRST_PSEUDO_REGISTER
                      ? hard_regno_nregs[regno][GET_MODE (reg)] : 1));

              if (dest_regno >= regno
                  && dest_endregno <= endregno)
                delete_computation (our_prev);

              /* We may have a multi-word hard register and some, but not
                 all, of the words of the register are needed in subsequent
                 insns.  Write REG_UNUSED notes for those parts that were not
                 needed.  */
              else if (dest_regno <= regno
                       && dest_endregno >= endregno)
                {
                  int i;

                  REG_NOTES (our_prev)
                    = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
                                         REG_NOTES (our_prev));

                  for (i = dest_regno; i < dest_endregno; i++)
                    if (! find_regno_note (our_prev, REG_UNUSED, i))
                      break;

                  if (i == dest_endregno)
                    delete_computation (our_prev);
                }
            }

          break;
        }

      /* If PAT references the register that dies here, it is an
         additional use.  Hence any prior SET isn't dead.  However, this
         insn becomes the new place for the REG_DEAD note.  */
      if (reg_overlap_mentioned_p (reg, pat))
        {
          XEXP (note, 1) = REG_NOTES (our_prev);
          REG_NOTES (our_prev) = note;
          break;
        }
    }
}

/* Delete INSN and recursively delete insns that compute values used only
   by INSN.  This uses the REG_DEAD notes computed during flow analysis.
   If we are running before flow.c, we need do nothing since flow.c will
   delete dead code.  We also can't know if the registers being used are
   dead or not at this point.

   Otherwise, look at all our REG_DEAD notes.  If a previous insn does
   nothing other than set a register that dies in this insn, we can delete
   that insn as well.

   On machines with CC0, if CC0 is used in this insn, we may be able to
   delete the insn that set it.  */

static void
delete_computation (rtx insn)
{
  rtx note, next;

#ifdef HAVE_cc0
  if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
    {
      rtx prev = prev_nonnote_insn (insn);
      /* We assume that at this stage
         CC's are always set explicitly
         and always immediately before the jump that
         will use them.  So if the previous insn
         exists to set the CC's, delete it
         (unless it performs auto-increments, etc.).  */
      if (prev && GET_CODE (prev) == INSN
          && sets_cc0_p (PATTERN (prev)))
        {
          if (sets_cc0_p (PATTERN (prev)) > 0
              && ! side_effects_p (PATTERN (prev)))
            delete_computation (prev);
          else
            /* Otherwise, show that cc0 won't be used.  */
            REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
                                                  cc0_rtx, REG_NOTES (prev));
        }
    }
#endif

  for (note = REG_NOTES (insn); note; note = next)
    {
      next = XEXP (note, 1);

      if (REG_NOTE_KIND (note) != REG_DEAD
          /* Verify that the REG_NOTE is legitimate.  */
          || GET_CODE (XEXP (note, 0)) != REG)
        continue;

      delete_prior_computation (note, insn);
    }

  delete_related_insns (insn);
}
\f
/* Delete insn INSN from the chain of insns, update label ref counts,
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Use of this function is deprecated.  Use delete_insn instead, and a
   subsequent cfg_cleanup pass to delete unreachable code if needed.  */

rtx
delete_related_insns (rtx insn)
{
  int was_code_label = (GET_CODE (insn) == CODE_LABEL);
  rtx note;
  rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);

  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && GET_CODE (next) == BARRIER)
    delete_insn (next);

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

      if (LABEL_NUSES (lab) == 0)
        {
          /* This can delete NEXT or PREV,
             either directly if NEXT is JUMP_LABEL (INSN),
             or indirectly through more levels of jumps.  */
          delete_related_insns (lab);

          /* I feel a little doubtful about this loop,
             but I see no clean and sure alternative way
             to find the first insn after INSN that is not now deleted.
             I hope this works.  */
          while (next && INSN_DELETED_P (next))
            next = NEXT_INSN (next);
          return next;
        }
      else if (tablejump_p (insn, NULL, &lab_next))
        {
          /* If we're deleting the tablejump, delete the dispatch table.
             We may not be able to kill the label immediately preceding
             just yet, as it might be referenced in code leading up to
             the tablejump.  */
          delete_related_insns (lab_next);
        }
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (GET_CODE (insn) == JUMP_INSN
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
          || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      for (i = 0; i < len; i++)
        if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
          delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
        next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note.  */
  if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if (REG_NOTE_KIND (note) == REG_LABEL
          /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
          && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
        if (LABEL_NUSES (XEXP (note, 0)) == 0)
          delete_related_insns (XEXP (note, 0));

  while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
          || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && GET_CODE (prev) == BARRIER)
    {
      enum rtx_code code;
      while (next)
        {
          code = GET_CODE (next);
          if (code == NOTE
              && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
            next = NEXT_INSN (next);
          /* Keep going past other deleted labels to delete what follows.  */
          else if (code == CODE_LABEL && INSN_DELETED_P (next))
            next = NEXT_INSN (next);
          else if (code == BARRIER || INSN_P (next))
            /* Note: if this deletes a jump, it can cause more
               deletion of unreachable code, after a different label.
               As long as the value from this recursive call is correct,
               this invocation functions correctly.  */
            next = delete_related_insns (next);
          else
            break;
        }
    }

  return next;
}
\f
/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.  */

void
delete_for_peephole (rtx from, rtx to)
{
  rtx insn = from;

  while (1)
    {
      rtx next = NEXT_INSN (insn);
      rtx prev = PREV_INSN (insn);

      if (GET_CODE (insn) != NOTE)
        {
          INSN_DELETED_P (insn) = 1;

          /* Patch this insn out of the chain.  */
          /* We don't do this all at once, because we
             must preserve all NOTEs.  */
          if (prev)
            NEXT_INSN (prev) = next;

          if (next)
            PREV_INSN (next) = prev;
        }

      if (insn == to)
        break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}
\f
/* We have determined that AVOIDED_INSN is never reached, and are
   about to delete it.  If the insn chain between AVOIDED_INSN and
   FINISH contains more than one line from the current function, and
   contains at least one operation, print a warning if the user asked
   for it.  If FINISH is NULL, look between AVOIDED_INSN and a LABEL.

   CSE and inlining can duplicate insns, so it's possible to get
   spurious warnings from this.  */

void
never_reached_warning (rtx avoided_insn, rtx finish)
{
  rtx insn;
  rtx a_line_note = NULL;
  int two_avoided_lines = 0, contains_insn = 0, reached_end = 0;

  if (!warn_notreached)
    return;

  /* Back up to the first of any NOTEs preceding avoided_insn; flow passes
     us the head of a block, a NOTE_INSN_BASIC_BLOCK, which often follows
     the line note.  */
  insn = avoided_insn;
  while (1)
    {
      rtx prev = PREV_INSN (insn);
      if (prev == NULL_RTX
          || GET_CODE (prev) != NOTE)
        break;
      insn = prev;
    }

  /* Scan forwards, looking at LINE_NUMBER notes, until we hit a LABEL
     in case FINISH is NULL, otherwise until we run out of insns.  */

  for (; insn != NULL; insn = NEXT_INSN (insn))
    {
      if ((finish == NULL && GET_CODE (insn) == CODE_LABEL)
          || GET_CODE (insn) == BARRIER)
        break;

      if (GET_CODE (insn) == NOTE          /* A line number note?  */
          && NOTE_LINE_NUMBER (insn) >= 0)
        {
          if (a_line_note == NULL)
            a_line_note = insn;
          else
            two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
                                  != NOTE_LINE_NUMBER (insn));
        }
      else if (INSN_P (insn))
        {
          if (reached_end)
            break;
          contains_insn = 1;
        }

      if (insn == finish)
        reached_end = 1;
    }
  if (two_avoided_lines && contains_insn)
    {
      location_t locus;
      locus.file = NOTE_SOURCE_FILE (a_line_note);
      locus.line = NOTE_LINE_NUMBER (a_line_note);
      warning ("%Hwill never be executed", &locus);
    }
}
\f
/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group.  */

static void
redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
        {
          rtx n;
          if (nlabel)
            n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
          else
            n = gen_rtx_RETURN (VOIDmode);

          validate_change (insn, loc, n, 1);
          return;
        }
    }
  else if (code == RETURN && olabel == 0)
    {
      x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
      if (loc == &PATTERN (insn))
        x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    {
      validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
        }
    }
}

/* Similar, but apply the change group and report success or failure.  */

static int
redirect_exp (rtx olabel, rtx nlabel, rtx insn)
{
  rtx *loc;

  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    loc = &XVECEXP (PATTERN (insn), 0, 0);
  else
    loc = &PATTERN (insn);

  redirect_exp_1 (loc, olabel, nlabel, insn);
  if (num_validated_changes () == 0)
    return 0;

  return apply_change_group ();
}

/* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
   the modifications into the change group.  Return false if we did
   not see how to do that.  */

int
redirect_jump_1 (rtx jump, rtx nlabel)
{
  int ochanges = num_validated_changes ();
  rtx *loc;

  if (GET_CODE (PATTERN (jump)) == PARALLEL)
    loc = &XVECEXP (PATTERN (jump), 0, 0);
  else
    loc = &PATTERN (jump);

  redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
  return num_validated_changes () > ochanges;
}

/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
   jump target label is unused as a result, it and the code following
   it may be deleted.

   If NLABEL is zero, we are to turn the jump into a (possibly conditional)
   RETURN insn.

   The return value will be 1 if the change was made, 0 if it wasn't
   (this can only occur for NLABEL == 0).  */

int
redirect_jump (rtx jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);
  rtx note;

  if (nlabel == olabel)
    return 1;

  if (! redirect_exp (olabel, nlabel, jump))
    return 0;

  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      if (nlabel && olabel)
        {
          rtx dest = XEXP (note, 0);

          if (GET_CODE (dest) == IF_THEN_ELSE)
            {
              if (GET_CODE (XEXP (dest, 1)) == LABEL_REF
                  && XEXP (XEXP (dest, 1), 0) == olabel)
                XEXP (XEXP (dest, 1), 0) = nlabel;
              if (GET_CODE (XEXP (dest, 2)) == LABEL_REF
                  && XEXP (XEXP (dest, 2), 0) == olabel)
                XEXP (XEXP (dest, 2), 0) = nlabel;
            }
          else
            remove_note (jump, note);
        }
      else
        remove_note (jump, note);
    }

  /* If we're eliding the jump over exception cleanups at the end of a
     function, move the function end note so that -Wreturn-type works.  */
  if (olabel && nlabel
      && NEXT_INSN (olabel)
      && GET_CODE (NEXT_INSN (olabel)) == NOTE
      && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
    emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);

  if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);

  return 1;
}
2057
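/* Illustrative sketch (hypothetical caller): retarget a jump, deleting
   the old label if this was its last reference.  Passing a null NLABEL
   instead asks for the jump to become a (possibly conditional) RETURN.
   JUMP and NEW_LABEL are placeholders.  */
#if 0
if (redirect_jump (jump, new_label, 1))
  {
    /* JUMP now targets NEW_LABEL; JUMP_LABEL, LABEL_NUSES and any
       REG_EQUAL note have been updated already.  */
  }
#endif
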
2058 /* Invert the jump condition of the jump insn INSN. Accrue the
2059 modifications into the change group. */
2060
2061 static void
2062 invert_exp_1 (rtx insn)
2063 {
2064 RTX_CODE code;
2065 rtx x = pc_set (insn);
2066
2067 if (!x)
2068 abort ();
2069 x = SET_SRC (x);
2070
2071 code = GET_CODE (x);
2072
2073 if (code == IF_THEN_ELSE)
2074 {
2075 rtx comp = XEXP (x, 0);
2076 rtx tem;
2077 enum rtx_code reversed_code;
2078
2079 /* We can do this in two ways: The preferable way is to reverse the
2080 comparison code, which cannot always be done (some floating-point
2081 comparisons have no reversed form). Otherwise, swap the THEN-part
2082 and ELSE-part of the IF_THEN_ELSE. If we can't do either, fail. */
2083
2084 reversed_code = reversed_comparison_code (comp, insn);
2085
2086 if (reversed_code != UNKNOWN)
2087 {
2088 validate_change (insn, &XEXP (x, 0),
2089 gen_rtx_fmt_ee (reversed_code,
2090 GET_MODE (comp), XEXP (comp, 0),
2091 XEXP (comp, 1)),
2092 1);
2093 return;
2094 }
2095
2096 tem = XEXP (x, 1);
2097 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
2098 validate_change (insn, &XEXP (x, 2), tem, 1);
2099 }
2100 else
2101 abort ();
2102 }
2103
2104 /* Invert the jump condition of conditional jump insn, INSN.
2105
2106 Return 1 if we can do so, 0 if we cannot find a way to do so that
2107 matches a pattern. */
2108
2109 static int
2110 invert_exp (rtx insn)
2111 {
2112 invert_exp_1 (insn);
2113 if (num_validated_changes () == 0)
2114 return 0;
2115
2116 return apply_change_group ();
2117 }
2118
2119 /* Invert the condition of the jump JUMP, and make it jump to label
2120 NLABEL instead of where it jumps now. Accrue changes into the
2121 change group. Return false if we didn't see how to perform the
2122 inversion and redirection. */
2123
2124 int
2125 invert_jump_1 (rtx jump, rtx nlabel)
2126 {
2127 int ochanges;
2128
2129 ochanges = num_validated_changes ();
2130 invert_exp_1 (jump);
2131 if (num_validated_changes () == ochanges)
2132 return 0;
2133
2134 return redirect_jump_1 (jump, nlabel);
2135 }
2136
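/* Illustrative sketch (hypothetical caller): like redirect_jump_1,
   invert_jump_1 only accrues changes into the change group; the caller
   must commit them with apply_change_group and do the JUMP_LABEL and
   LABEL_NUSES bookkeeping afterwards.  JUMP and NLABEL are placeholders.  */
#if 0
if (invert_jump_1 (jump, nlabel) && apply_change_group ())
  {
    JUMP_LABEL (jump) = nlabel;
    ++LABEL_NUSES (nlabel);
  }
#endif
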
2137 /* Invert the condition of the jump JUMP, and make it jump to label
2138 NLABEL instead of where it jumps now. Return true if successful. */
2139
2140 int
2141 invert_jump (rtx jump, rtx nlabel, int delete_unused)
2142 {
2143 /* We have to either invert the condition and change the label or
2144 do neither. Either operation could fail. We first try to invert
2145 the jump. If that succeeds, we try changing the label. If that fails,
2146 we invert the jump back to what it was. */
2147
2148 if (! invert_exp (jump))
2149 return 0;
2150
2151 if (redirect_jump (jump, nlabel, delete_unused))
2152 {
2153 /* Remove REG_EQUAL note if we have one. */
2154 rtx note = find_reg_note (jump, REG_EQUAL, NULL_RTX);
2155 if (note)
2156 remove_note (jump, note);
2157
2158 invert_br_probabilities (jump);
2159
2160 return 1;
2161 }
2162
2163 if (! invert_exp (jump))
2164 /* This should just be putting it back the way it was. */
2165 abort ();
2166
2167 return 0;
2168 }
2169
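/* Illustrative sketch (hypothetical caller): turn a jump of the form
   "if (cond) goto A" that falls through to B into "if (!cond) goto B".
   JUMP and LABEL_B are placeholders.  */
#if 0
if (! invert_jump (jump, label_b, 0))
  {
    /* The condition could not be reversed in a recognizable way;
       JUMP is unchanged.  */
  }
#endif
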
2170 \f
2171 /* Like rtx_equal_p except that it considers two REGs as equal
2172 if they renumber to the same value and considers two commutative
2173 operations to be the same if the order of the operands has been
2174 reversed.
2175
2176 ??? Addition is not commutative on the PA due to the weird implicit
2177 space register selection rules for memory addresses. Therefore, we
2178 don't consider a + b == b + a.
2179
2180 We could/should make this test a little tighter. Possibly only
2181 disabling it on the PA via some backend macro or only disabling this
2182 case when the PLUS is inside a MEM. */
2183
2184 int
2185 rtx_renumbered_equal_p (rtx x, rtx y)
2186 {
2187 int i;
2188 enum rtx_code code = GET_CODE (x);
2189 const char *fmt;
2190
2191 if (x == y)
2192 return 1;
2193
2194 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
2195 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
2196 && GET_CODE (SUBREG_REG (y)) == REG)))
2197 {
2198 int reg_x = -1, reg_y = -1;
2199 int byte_x = 0, byte_y = 0;
2200
2201 if (GET_MODE (x) != GET_MODE (y))
2202 return 0;
2203
2204 /* If we haven't done any renumbering, don't
2205 make any assumptions. */
2206 if (reg_renumber == 0)
2207 return rtx_equal_p (x, y);
2208
2209 if (code == SUBREG)
2210 {
2211 reg_x = REGNO (SUBREG_REG (x));
2212 byte_x = SUBREG_BYTE (x);
2213
2214 if (reg_renumber[reg_x] >= 0)
2215 {
2216 reg_x = subreg_regno_offset (reg_renumber[reg_x],
2217 GET_MODE (SUBREG_REG (x)),
2218 byte_x,
2219 GET_MODE (x));
2220 byte_x = 0;
2221 }
2222 }
2223 else
2224 {
2225 reg_x = REGNO (x);
2226 if (reg_renumber[reg_x] >= 0)
2227 reg_x = reg_renumber[reg_x];
2228 }
2229
2230 if (GET_CODE (y) == SUBREG)
2231 {
2232 reg_y = REGNO (SUBREG_REG (y));
2233 byte_y = SUBREG_BYTE (y);
2234
2235 if (reg_renumber[reg_y] >= 0)
2236 {
2237 reg_y = subreg_regno_offset (reg_renumber[reg_y],
2238 GET_MODE (SUBREG_REG (y)),
2239 byte_y,
2240 GET_MODE (y));
2241 byte_y = 0;
2242 }
2243 }
2244 else
2245 {
2246 reg_y = REGNO (y);
2247 if (reg_renumber[reg_y] >= 0)
2248 reg_y = reg_renumber[reg_y];
2249 }
2250
2251 return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
2252 }
2253
2254 /* Now we have disposed of all the cases
2255 in which different rtx codes can match. */
2256 if (code != GET_CODE (y))
2257 return 0;
2258
2259 switch (code)
2260 {
2261 case PC:
2262 case CC0:
2263 case ADDR_VEC:
2264 case ADDR_DIFF_VEC:
2265 case CONST_INT:
2266 return 0;
2267
2268 case LABEL_REF:
2269 /* We can't assume nonlocal labels have their following insns yet. */
2270 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
2271 return XEXP (x, 0) == XEXP (y, 0);
2272
2273 /* Two label-refs are equivalent if they point at labels
2274 in the same position in the instruction stream. */
2275 return (next_real_insn (XEXP (x, 0))
2276 == next_real_insn (XEXP (y, 0)));
2277
2278 case SYMBOL_REF:
2279 return XSTR (x, 0) == XSTR (y, 0);
2280
2281 case CODE_LABEL:
2282 /* If we didn't match EQ equality above, they aren't the same. */
2283 return 0;
2284
2285 default:
2286 break;
2287 }
2288
2289 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2290
2291 if (GET_MODE (x) != GET_MODE (y))
2292 return 0;
2293
2294 /* For commutative operations, the RTXs match if the operands match in
2295 either order. Also handle the simple binary and unary cases without a loop.
2296
2297 ??? Don't consider PLUS a commutative operator; see comments above. */
2298 if (COMMUTATIVE_P (x) && code != PLUS)
2299 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2300 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
2301 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
2302 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
2303 else if (NON_COMMUTATIVE_P (x))
2304 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2305 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
2306 else if (UNARY_P (x))
2307 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
2308
2309 /* Compare the elements. If any pair of corresponding elements
2310 fails to match, return 0 for the whole thing. */
2311
2312 fmt = GET_RTX_FORMAT (code);
2313 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2314 {
2315 int j;
2316 switch (fmt[i])
2317 {
2318 case 'w':
2319 if (XWINT (x, i) != XWINT (y, i))
2320 return 0;
2321 break;
2322
2323 case 'i':
2324 if (XINT (x, i) != XINT (y, i))
2325 return 0;
2326 break;
2327
2328 case 't':
2329 if (XTREE (x, i) != XTREE (y, i))
2330 return 0;
2331 break;
2332
2333 case 's':
2334 if (strcmp (XSTR (x, i), XSTR (y, i)))
2335 return 0;
2336 break;
2337
2338 case 'e':
2339 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
2340 return 0;
2341 break;
2342
2343 case 'u':
2344 if (XEXP (x, i) != XEXP (y, i))
2345 return 0;
2346 /* Fall through. */
2347 case '0':
2348 break;
2349
2350 case 'E':
2351 if (XVECLEN (x, i) != XVECLEN (y, i))
2352 return 0;
2353 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2354 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
2355 return 0;
2356 break;
2357
2358 default:
2359 abort ();
2360 }
2361 }
2362 return 1;
2363 }
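
/* Illustrative sketch (hypothetical caller): deciding whether two insns
   compute the same value once pseudo registers are mapped through
   reg_renumber, in the spirit of cross-jumping.  I1 and I2 are
   placeholder insns.  */
#if 0
if (GET_CODE (i1) == GET_CODE (i2)
    && rtx_renumbered_equal_p (PATTERN (i1), PATTERN (i2)))
  {
    /* The two instructions are interchangeable after renumbering.  */
  }
#endif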
2364 \f
2365 /* If X is a hard register or equivalent to one or a subregister of one,
2366 return the hard register number. If X is a pseudo register that was not
2367 assigned a hard register, return the pseudo register number. Otherwise,
2368 return -1. Any rtx is valid for X. */
2369
2370 int
2371 true_regnum (rtx x)
2372 {
2373 if (GET_CODE (x) == REG)
2374 {
2375 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
2376 return reg_renumber[REGNO (x)];
2377 return REGNO (x);
2378 }
2379 if (GET_CODE (x) == SUBREG)
2380 {
2381 int base = true_regnum (SUBREG_REG (x));
2382 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
2383 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
2384 GET_MODE (SUBREG_REG (x)),
2385 SUBREG_BYTE (x), GET_MODE (x));
2386 }
2387 return -1;
2388 }
2389
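/* Illustrative sketch (hypothetical caller): test whether operand OP
   ended up in a hard register after register allocation.  OP is a
   placeholder rtx.  */
#if 0
int regno = true_regnum (op);
if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
  {
    /* OP is, or renumbers to, hard register REGNO, including the
       SUBREG offset adjustment where applicable.  */
  }
#endif
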
2390 /* Return the register number of REG, or of the inner register if REG is a SUBREG. */
2391 unsigned int
2392 reg_or_subregno (rtx reg)
2393 {
2394 if (REG_P (reg))
2395 return REGNO (reg);
2396 if (GET_CODE (reg) == SUBREG)
2397 return REGNO (SUBREG_REG (reg));
2398 abort ();
2399 }
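
/* Illustrative sketch (hypothetical caller): unlike true_regnum,
   reg_or_subregno looks through a SUBREG without applying any byte
   offset, so it suits contexts where only the inner register number
   is wanted.  OP is a placeholder rtx.  */
#if 0
unsigned int regno = reg_or_subregno (op);
#endif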