]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/jump.c
Update mainline egcs to gcc2 snapshot 971021.
[thirdparty/gcc.git] / gcc / jump.c
CommitLineData
15a63be1 1/* Optimize jump instructions, for GNU compiler.
35d9eabb 2 Copyright (C) 1987, 88, 89, 91-96, 1997 Free Software Foundation, Inc.
15a63be1
RK
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
a35311b0
RK
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
15a63be1
RK
20
21
22/* This is the jump-optimization pass of the compiler.
23 It is run two or three times: once before cse, sometimes once after cse,
24 and once after reload (before final).
25
26 jump_optimize deletes unreachable code and labels that are not used.
27 It also deletes jumps that jump to the following insn,
28 and simplifies jumps around unconditional jumps and jumps
29 to unconditional jumps.
30
31 Each CODE_LABEL has a count of the times it is used
32 stored in the LABEL_NUSES internal field, and each JUMP_INSN
33 has one label that it refers to stored in the
34 JUMP_LABEL internal field. With this we can detect labels that
35 become unused because of the deletion of all the jumps that
36 formerly used them. The JUMP_LABEL info is sometimes looked
37 at by later passes.
38
39 Optionally, cross-jumping can be done. Currently it is done
40 only the last time (when after reload and before final).
41 In fact, the code for cross-jumping now assumes that register
42 allocation has been done, since it uses `rtx_renumbered_equal_p'.
43
44 Jump optimization is done after cse when cse's constant-propagation
45 causes jumps to become unconditional or to be deleted.
46
47 Unreachable loops are not detected here, because the labels
48 have references and the insns appear reachable from the labels.
49 find_basic_blocks in flow.c finds and deletes such loops.
50
51 The subroutines delete_insn, redirect_jump, and invert_jump are used
52 from other passes as well. */
53
54#include "config.h"
e9a25f70 55#include <stdio.h>
15a63be1
RK
56#include "rtl.h"
57#include "flags.h"
58#include "hard-reg-set.h"
59#include "regs.h"
15a63be1
RK
60#include "insn-config.h"
61#include "insn-flags.h"
e9a25f70 62#include "recog.h"
3c86a619 63#include "expr.h"
15a63be1 64#include "real.h"
6adb4e3a 65#include "except.h"
15a63be1
RK
66
67/* ??? Eventually must record somehow the labels used by jumps
68 from nested functions. */
69/* Pre-record the next or previous real insn for each label?
70 No, this pass is very fast anyway. */
71/* Condense consecutive labels?
72 This would make life analysis faster, maybe. */
73/* Optimize jump y; x: ... y: jumpif... x?
74 Don't know if it is worth bothering with. */
75/* Optimize two cases of conditional jump to conditional jump?
76 This can never delete any instruction or make anything dead,
77 or even change what is live at any point.
78 So perhaps let combiner do it. */
79
80/* Vector indexed by uid.
81 For each CODE_LABEL, index by its uid to get first unconditional jump
82 that jumps to the label.
83 For each JUMP_INSN, index by its uid to get the next unconditional jump
84 that jumps to the same label.
85 Element 0 is the start of a chain of all return insns.
 86 (It is safe to use element 0 because insn uid 0 is not used.) */
87
88static rtx *jump_chain;
89
90/* List of labels referred to from initializers.
91 These can never be deleted. */
92rtx forced_labels;
93
94/* Maximum index in jump_chain. */
95
96static int max_jump_chain;
97
98/* Set nonzero by jump_optimize if control can fall through
99 to the end of the function. */
100int can_reach_end;
101
102/* Indicates whether death notes are significant in cross jump analysis.
 103 Normally they are not significant, because if A and B jump to C,
104 and R dies in A, it must die in B. But this might not be true after
105 stack register conversion, and we must compare death notes in that
0f41302f 106 case. */
15a63be1
RK
107
108static int cross_jump_death_matters = 0;
109
8cd2aff2
RK
110static int duplicate_loop_exit_test PROTO((rtx));
111static void find_cross_jump PROTO((rtx, rtx, int, rtx *, rtx *));
112static void do_cross_jump PROTO((rtx, rtx, rtx));
113static int jump_back_p PROTO((rtx, rtx));
114static int tension_vector_labels PROTO((rtx, int));
115static void mark_jump_label PROTO((rtx, rtx, int));
116static void delete_computation PROTO((rtx));
117static void delete_from_jump_chain PROTO((rtx));
118static int delete_labelref_insn PROTO((rtx, rtx, int));
119static void redirect_tablejump PROTO((rtx, rtx));
15a63be1
RK
120\f
121/* Delete no-op jumps and optimize jumps to jumps
122 and jumps around jumps.
123 Delete unused labels and unreachable code.
124
125 If CROSS_JUMP is 1, detect matching code
126 before a jump and its destination and unify them.
127 If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.
128
129 If NOOP_MOVES is nonzero, delete no-op move insns.
130
131 If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
132 after regscan, and it is safe to use regno_first_uid and regno_last_uid.
133
134 If `optimize' is zero, don't change any code,
135 just determine whether control drops off the end of the function.
136 This case occurs when we have -W and not -O.
137 It works because `delete_insn' checks the value of `optimize'
138 and refrains from actually deleting when that is 0. */
139
140void
141jump_optimize (f, cross_jump, noop_moves, after_regscan)
142 rtx f;
143 int cross_jump;
144 int noop_moves;
145 int after_regscan;
146{
484c3924 147 register rtx insn, next, note;
15a63be1
RK
148 int changed;
149 int first = 1;
150 int max_uid = 0;
151 rtx last_insn;
152
153 cross_jump_death_matters = (cross_jump == 2);
154
484c3924
RK
155 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
156 notes whose labels don't occur in the insn any more. */
15a63be1
RK
157
158 for (insn = f; insn; insn = NEXT_INSN (insn))
159 {
160 if (GET_CODE (insn) == CODE_LABEL)
161 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
162 else if (GET_CODE (insn) == JUMP_INSN)
163 JUMP_LABEL (insn) = 0;
484c3924
RK
164 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
165 for (note = REG_NOTES (insn); note; note = next)
166 {
167 next = XEXP (note, 1);
168 if (REG_NOTE_KIND (note) == REG_LABEL
169 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
170 remove_note (insn, note);
171 }
172
15a63be1
RK
173 if (INSN_UID (insn) > max_uid)
174 max_uid = INSN_UID (insn);
175 }
176
177 max_uid++;
178
179 /* Delete insns following barriers, up to next label. */
180
181 for (insn = f; insn;)
182 {
183 if (GET_CODE (insn) == BARRIER)
184 {
185 insn = NEXT_INSN (insn);
186 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
187 {
188 if (GET_CODE (insn) == NOTE
189 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
190 insn = NEXT_INSN (insn);
191 else
192 insn = delete_insn (insn);
193 }
194 /* INSN is now the code_label. */
195 }
196 else
197 insn = NEXT_INSN (insn);
198 }
199
200 /* Leave some extra room for labels and duplicate exit test insns
201 we make. */
202 max_jump_chain = max_uid * 14 / 10;
203 jump_chain = (rtx *) alloca (max_jump_chain * sizeof (rtx));
4c9a05bc 204 bzero ((char *) jump_chain, max_jump_chain * sizeof (rtx));
15a63be1
RK
205
206 /* Mark the label each jump jumps to.
207 Combine consecutive labels, and count uses of labels.
208
209 For each label, make a chain (using `jump_chain')
210 of all the *unconditional* jumps that jump to it;
211 also make a chain of all returns. */
212
213 for (insn = f; insn; insn = NEXT_INSN (insn))
8cd2aff2 214 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
15a63be1
RK
215 && ! INSN_DELETED_P (insn))
216 {
217 mark_jump_label (PATTERN (insn), insn, cross_jump);
218 if (GET_CODE (insn) == JUMP_INSN)
219 {
220 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
221 {
222 jump_chain[INSN_UID (insn)]
223 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
224 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
225 }
226 if (GET_CODE (PATTERN (insn)) == RETURN)
227 {
228 jump_chain[INSN_UID (insn)] = jump_chain[0];
229 jump_chain[0] = insn;
230 }
231 }
232 }
233
234 /* Keep track of labels used from static data;
235 they cannot ever be deleted. */
236
237 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
238 LABEL_NUSES (XEXP (insn, 0))++;
239
6adb4e3a
MS
240 check_exception_handler_labels ();
241
242 /* Keep track of labels used for marking handlers for exception
243 regions; they cannot usually be deleted. */
244
245 for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
246 LABEL_NUSES (XEXP (insn, 0))++;
247
248 exception_optimize ();
249
15a63be1
RK
250 /* Delete all labels already not referenced.
251 Also find the last insn. */
252
253 last_insn = 0;
254 for (insn = f; insn; )
255 {
256 if (GET_CODE (insn) == CODE_LABEL && LABEL_NUSES (insn) == 0)
257 insn = delete_insn (insn);
258 else
259 {
260 last_insn = insn;
261 insn = NEXT_INSN (insn);
262 }
263 }
264
265 if (!optimize)
266 {
267 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
268 If so record that this function can drop off the end. */
269
270 insn = last_insn;
271 {
272 int n_labels = 1;
273 while (insn
274 /* One label can follow the end-note: the return label. */
275 && ((GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
276 /* Ordinary insns can follow it if returning a structure. */
277 || GET_CODE (insn) == INSN
278 /* If machine uses explicit RETURN insns, no epilogue,
279 then one of them follows the note. */
280 || (GET_CODE (insn) == JUMP_INSN
281 && GET_CODE (PATTERN (insn)) == RETURN)
60374599
DE
282 /* A barrier can follow the return insn. */
283 || GET_CODE (insn) == BARRIER
15a63be1
RK
284 /* Other kinds of notes can follow also. */
285 || (GET_CODE (insn) == NOTE
286 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)))
287 insn = PREV_INSN (insn);
288 }
289
290 /* Report if control can fall through at the end of the function. */
291 if (insn && GET_CODE (insn) == NOTE
292 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END
293 && ! INSN_DELETED_P (insn))
294 can_reach_end = 1;
295
296 /* Zero the "deleted" flag of all the "deleted" insns. */
297 for (insn = f; insn; insn = NEXT_INSN (insn))
298 INSN_DELETED_P (insn) = 0;
299 return;
300 }
301
302#ifdef HAVE_return
303 if (HAVE_return)
304 {
305 /* If we fall through to the epilogue, see if we can insert a RETURN insn
306 in front of it. If the machine allows it at this point (we might be
307 after reload for a leaf routine), it will improve optimization for it
308 to be there. */
309 insn = get_last_insn ();
310 while (insn && GET_CODE (insn) == NOTE)
311 insn = PREV_INSN (insn);
312
313 if (insn && GET_CODE (insn) != BARRIER)
314 {
315 emit_jump_insn (gen_return ());
316 emit_barrier ();
317 }
318 }
319#endif
320
321 if (noop_moves)
322 for (insn = f; insn; )
323 {
2156dfe3 324 next = NEXT_INSN (insn);
15a63be1
RK
325
326 if (GET_CODE (insn) == INSN)
327 {
328 register rtx body = PATTERN (insn);
329
330/* Combine stack_adjusts with following push_insns. */
331#ifdef PUSH_ROUNDING
332 if (GET_CODE (body) == SET
333 && SET_DEST (body) == stack_pointer_rtx
334 && GET_CODE (SET_SRC (body)) == PLUS
335 && XEXP (SET_SRC (body), 0) == stack_pointer_rtx
336 && GET_CODE (XEXP (SET_SRC (body), 1)) == CONST_INT
337 && INTVAL (XEXP (SET_SRC (body), 1)) > 0)
338 {
339 rtx p;
340 rtx stack_adjust_insn = insn;
341 int stack_adjust_amount = INTVAL (XEXP (SET_SRC (body), 1));
342 int total_pushed = 0;
343 int pushes = 0;
344
345 /* Find all successive push insns. */
346 p = insn;
347 /* Don't convert more than three pushes;
348 that starts adding too many displaced addresses
349 and the whole thing starts becoming a losing
350 proposition. */
351 while (pushes < 3)
352 {
353 rtx pbody, dest;
354 p = next_nonnote_insn (p);
355 if (p == 0 || GET_CODE (p) != INSN)
356 break;
357 pbody = PATTERN (p);
358 if (GET_CODE (pbody) != SET)
359 break;
360 dest = SET_DEST (pbody);
361 /* Allow a no-op move between the adjust and the push. */
362 if (GET_CODE (dest) == REG
363 && GET_CODE (SET_SRC (pbody)) == REG
364 && REGNO (dest) == REGNO (SET_SRC (pbody)))
365 continue;
366 if (! (GET_CODE (dest) == MEM
367 && GET_CODE (XEXP (dest, 0)) == POST_INC
368 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
369 break;
370 pushes++;
1ad4c71a 371 if (total_pushed + GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)))
15a63be1
RK
372 > stack_adjust_amount)
373 break;
1ad4c71a 374 total_pushed += GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
15a63be1
RK
375 }
376
377 /* Discard the amount pushed from the stack adjust;
378 maybe eliminate it entirely. */
379 if (total_pushed >= stack_adjust_amount)
380 {
344a8220 381 delete_computation (stack_adjust_insn);
15a63be1
RK
382 total_pushed = stack_adjust_amount;
383 }
384 else
385 XEXP (SET_SRC (PATTERN (stack_adjust_insn)), 1)
5f4f0e22 386 = GEN_INT (stack_adjust_amount - total_pushed);
15a63be1
RK
387
388 /* Change the appropriate push insns to ordinary stores. */
389 p = insn;
390 while (total_pushed > 0)
391 {
392 rtx pbody, dest;
393 p = next_nonnote_insn (p);
394 if (GET_CODE (p) != INSN)
395 break;
396 pbody = PATTERN (p);
397 if (GET_CODE (pbody) == SET)
398 break;
399 dest = SET_DEST (pbody);
400 if (! (GET_CODE (dest) == MEM
401 && GET_CODE (XEXP (dest, 0)) == POST_INC
402 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
403 break;
1ad4c71a 404 total_pushed -= GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
15a63be1
RK
405 /* If this push doesn't fully fit in the space
406 of the stack adjust that we deleted,
407 make another stack adjust here for what we
408 didn't use up. There should be peepholes
409 to recognize the resulting sequence of insns. */
410 if (total_pushed < 0)
411 {
412 emit_insn_before (gen_add2_insn (stack_pointer_rtx,
5f4f0e22 413 GEN_INT (- total_pushed)),
15a63be1
RK
414 p);
415 break;
416 }
417 XEXP (dest, 0)
418 = plus_constant (stack_pointer_rtx, total_pushed);
419 }
420 }
421#endif
422
423 /* Detect and delete no-op move instructions
424 resulting from not allocating a parameter in a register. */
425
426 if (GET_CODE (body) == SET
427 && (SET_DEST (body) == SET_SRC (body)
428 || (GET_CODE (SET_DEST (body)) == MEM
429 && GET_CODE (SET_SRC (body)) == MEM
430 && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
431 && ! (GET_CODE (SET_DEST (body)) == MEM
432 && MEM_VOLATILE_P (SET_DEST (body)))
433 && ! (GET_CODE (SET_SRC (body)) == MEM
434 && MEM_VOLATILE_P (SET_SRC (body))))
344a8220 435 delete_computation (insn);
15a63be1
RK
436
437 /* Detect and ignore no-op move instructions
438 resulting from smart or fortuitous register allocation. */
439
440 else if (GET_CODE (body) == SET)
441 {
442 int sreg = true_regnum (SET_SRC (body));
443 int dreg = true_regnum (SET_DEST (body));
444
445 if (sreg == dreg && sreg >= 0)
446 delete_insn (insn);
447 else if (sreg >= 0 && dreg >= 0)
448 {
449 rtx trial;
5f4f0e22
CH
450 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
451 sreg, NULL_PTR, dreg,
15a63be1
RK
452 GET_MODE (SET_SRC (body)));
453
db3cf6fb
MS
454 if (tem != 0
455 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
15a63be1
RK
456 {
457 /* DREG may have been the target of a REG_DEAD note in
458 the insn which makes INSN redundant. If so, reorg
459 would still think it is dead. So search for such a
460 note and delete it if we find it. */
59df2b2b
RK
461 if (! find_regno_note (insn, REG_UNUSED, dreg))
462 for (trial = prev_nonnote_insn (insn);
463 trial && GET_CODE (trial) != CODE_LABEL;
464 trial = prev_nonnote_insn (trial))
465 if (find_regno_note (trial, REG_DEAD, dreg))
466 {
467 remove_death (dreg, trial);
468 break;
469 }
470#ifdef PRESERVE_DEATH_INFO_REGNO_P
471 /* Deleting insn could lose a death-note for SREG
472 so don't do it if final needs accurate
473 death-notes. */
474 if (PRESERVE_DEATH_INFO_REGNO_P (sreg)
475 && (trial = find_regno_note (insn, REG_DEAD, sreg)))
476 {
477 /* Change this into a USE so that we won't emit
478 code for it, but still can keep the note. */
479 PATTERN (insn)
480 = gen_rtx (USE, VOIDmode, XEXP (trial, 0));
d8db8192 481 INSN_CODE (insn) = -1;
59df2b2b
RK
482 /* Remove all reg notes but the REG_DEAD one. */
483 REG_NOTES (insn) = trial;
484 XEXP (trial, 1) = NULL_RTX;
485 }
486 else
487#endif
15a63be1
RK
488 delete_insn (insn);
489 }
490 }
491 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
5f4f0e22
CH
492 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
493 NULL_PTR, 0,
494 GET_MODE (SET_DEST (body))))
15a63be1
RK
495 {
496 /* This handles the case where we have two consecutive
497 assignments of the same constant to pseudos that didn't
498 get a hard reg. Each SET from the constant will be
499 converted into a SET of the spill register and an
500 output reload will be made following it. This produces
501 two loads of the same constant into the same spill
502 register. */
503
504 rtx in_insn = insn;
505
506 /* Look back for a death note for the first reg.
507 If there is one, it is no longer accurate. */
508 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
509 {
510 if ((GET_CODE (in_insn) == INSN
511 || GET_CODE (in_insn) == JUMP_INSN)
512 && find_regno_note (in_insn, REG_DEAD, dreg))
513 {
514 remove_death (dreg, in_insn);
515 break;
516 }
517 in_insn = PREV_INSN (in_insn);
518 }
519
520 /* Delete the second load of the value. */
521 delete_insn (insn);
522 }
523 }
524 else if (GET_CODE (body) == PARALLEL)
525 {
526 /* If each part is a set between two identical registers or
0f41302f 527 a USE or CLOBBER, delete the insn. */
15a63be1
RK
528 int i, sreg, dreg;
529 rtx tem;
530
531 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
532 {
533 tem = XVECEXP (body, 0, i);
534 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
535 continue;
536
537 if (GET_CODE (tem) != SET
538 || (sreg = true_regnum (SET_SRC (tem))) < 0
539 || (dreg = true_regnum (SET_DEST (tem))) < 0
540 || dreg != sreg)
541 break;
542 }
543
544 if (i < 0)
545 delete_insn (insn);
546 }
15a63be1 547 /* Also delete insns to store bit fields if they are no-ops. */
f76b9db2
ILT
548 /* Not worth the hair to detect this in the big-endian case. */
549 else if (! BYTES_BIG_ENDIAN
550 && GET_CODE (body) == SET
15a63be1
RK
551 && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
552 && XEXP (SET_DEST (body), 2) == const0_rtx
553 && XEXP (SET_DEST (body), 0) == SET_SRC (body)
554 && ! (GET_CODE (SET_SRC (body)) == MEM
555 && MEM_VOLATILE_P (SET_SRC (body))))
556 delete_insn (insn);
15a63be1
RK
557 }
558 insn = next;
559 }
560
2156dfe3
RK
561 /* If we haven't yet gotten to reload and we have just run regscan,
562 delete any insn that sets a register that isn't used elsewhere.
563 This helps some of the optimizations below by having less insns
564 being jumped around. */
565
566 if (! reload_completed && after_regscan)
567 for (insn = f; insn; insn = next)
568 {
569 rtx set = single_set (insn);
570
571 next = NEXT_INSN (insn);
572
573 if (set && GET_CODE (SET_DEST (set)) == REG
574 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
b1f21e0a 575 && REGNO_FIRST_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
b803fb99
RS
576 /* We use regno_last_note_uid so as not to delete the setting
577 of a reg that's used in notes. A subsequent optimization
578 might arrange to use that reg for real. */
b1f21e0a 579 && REGNO_LAST_NOTE_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
d008e26c
RK
580 && ! side_effects_p (SET_SRC (set))
581 && ! find_reg_note (insn, REG_RETVAL, 0))
2156dfe3
RK
582 delete_insn (insn);
583 }
584
15a63be1
RK
585 /* Now iterate optimizing jumps until nothing changes over one pass. */
586 changed = 1;
587 while (changed)
588 {
15a63be1
RK
589 changed = 0;
590
591 for (insn = f; insn; insn = next)
592 {
593 rtx reallabelprev;
2156dfe3 594 rtx temp, temp1, temp2, temp3, temp4, temp5, temp6;
15a63be1 595 rtx nlabel;
3915de94 596 int this_is_simplejump, this_is_condjump, reversep;
3480bb98 597 int this_is_condjump_in_parallel;
15a63be1
RK
598#if 0
599 /* If NOT the first iteration, if this is the last jump pass
600 (just before final), do the special peephole optimizations.
601 Avoiding the first iteration gives ordinary jump opts
602 a chance to work before peephole opts. */
603
604 if (reload_completed && !first && !flag_no_peephole)
605 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN)
606 peephole (insn);
607#endif
608
609 /* That could have deleted some insns after INSN, so check now
610 what the following insn is. */
611
612 next = NEXT_INSN (insn);
613
614 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
615 jump. Try to optimize by duplicating the loop exit test if so.
616 This is only safe immediately after regscan, because it uses
617 the values of regno_first_uid and regno_last_uid. */
618 if (after_regscan && GET_CODE (insn) == NOTE
619 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
620 && (temp1 = next_nonnote_insn (insn)) != 0
621 && simplejump_p (temp1))
622 {
623 temp = PREV_INSN (insn);
624 if (duplicate_loop_exit_test (insn))
625 {
626 changed = 1;
627 next = NEXT_INSN (temp);
628 continue;
629 }
630 }
631
632 if (GET_CODE (insn) != JUMP_INSN)
633 continue;
634
635 this_is_simplejump = simplejump_p (insn);
636 this_is_condjump = condjump_p (insn);
3480bb98 637 this_is_condjump_in_parallel = condjump_in_parallel_p (insn);
15a63be1
RK
638
639 /* Tension the labels in dispatch tables. */
640
641 if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
642 changed |= tension_vector_labels (PATTERN (insn), 0);
643 if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
644 changed |= tension_vector_labels (PATTERN (insn), 1);
645
646 /* If a dispatch table always goes to the same place,
647 get rid of it and replace the insn that uses it. */
648
649 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
650 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
651 {
652 int i;
653 rtx pat = PATTERN (insn);
654 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
655 int len = XVECLEN (pat, diff_vec_p);
656 rtx dispatch = prev_real_insn (insn);
657
658 for (i = 0; i < len; i++)
659 if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
660 != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
661 break;
662 if (i == len
0546e268 663 && dispatch != 0
15a63be1
RK
664 && GET_CODE (dispatch) == JUMP_INSN
665 && JUMP_LABEL (dispatch) != 0
666 /* Don't mess with a casesi insn. */
667 && !(GET_CODE (PATTERN (dispatch)) == SET
668 && (GET_CODE (SET_SRC (PATTERN (dispatch)))
669 == IF_THEN_ELSE))
670 && next_real_insn (JUMP_LABEL (dispatch)) == insn)
671 {
672 redirect_tablejump (dispatch,
673 XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
674 changed = 1;
675 }
676 }
677
678 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
679
680 /* If a jump references the end of the function, try to turn
681 it into a RETURN insn, possibly a conditional one. */
682 if (JUMP_LABEL (insn)
8e318904
JL
683 && (next_active_insn (JUMP_LABEL (insn)) == 0
684 || GET_CODE (PATTERN (next_active_insn (JUMP_LABEL (insn))))
685 == RETURN))
5f4f0e22 686 changed |= redirect_jump (insn, NULL_RTX);
15a63be1
RK
687
688 /* Detect jump to following insn. */
689 if (reallabelprev == insn && condjump_p (insn))
690 {
cd423ead 691 next = next_real_insn (JUMP_LABEL (insn));
15a63be1
RK
692 delete_jump (insn);
693 changed = 1;
694 continue;
695 }
696
d45cf215 697 /* If we have an unconditional jump preceded by a USE, try to put
15a63be1
RK
698 the USE before the target and jump there. This simplifies many
699 of the optimizations below since we don't have to worry about
700 dealing with these USE insns. We only do this if the label
701 being branch to already has the identical USE or if code
702 never falls through to that label. */
703
704 if (this_is_simplejump
705 && (temp = prev_nonnote_insn (insn)) != 0
706 && GET_CODE (temp) == INSN && GET_CODE (PATTERN (temp)) == USE
707 && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
708 && (GET_CODE (temp1) == BARRIER
709 || (GET_CODE (temp1) == INSN
9740123d
JW
710 && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
711 /* Don't do this optimization if we have a loop containing only
712 the USE instruction, and the loop start label has a usage
713 count of 1. This is because we will redo this optimization
 714 every time through the outer loop, and jump opt will never
715 exit. */
716 && ! ((temp2 = prev_nonnote_insn (temp)) != 0
717 && temp2 == JUMP_LABEL (insn)
718 && LABEL_NUSES (temp2) == 1))
15a63be1
RK
719 {
720 if (GET_CODE (temp1) == BARRIER)
721 {
2156dfe3 722 emit_insn_after (PATTERN (temp), temp1);
15a63be1
RK
723 temp1 = NEXT_INSN (temp1);
724 }
15a63be1 725
2156dfe3 726 delete_insn (temp);
15a63be1
RK
727 redirect_jump (insn, get_label_before (temp1));
728 reallabelprev = prev_real_insn (temp1);
729 changed = 1;
730 }
731
732 /* Simplify if (...) x = a; else x = b; by converting it
733 to x = b; if (...) x = a;
734 if B is sufficiently simple, the test doesn't involve X,
735 and nothing in the test modifies B or X.
736
737 If we have small register classes, we also can't do this if X
738 is a hard register.
739
740 If the "x = b;" insn has any REG_NOTES, we don't do this because
741 of the possibility that we are running after CSE and there is a
742 REG_EQUAL note that is only valid if the branch has already been
743 taken. If we move the insn with the REG_EQUAL note, we may
744 fold the comparison to always be false in a later CSE pass.
745 (We could also delete the REG_NOTES when moving the insn, but it
746 seems simpler to not move it.) An exception is that we can move
747 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
748 value is the same as "b".
749
750 INSN is the branch over the `else' part.
751
752 We set:
753
d45cf215 754 TEMP to the jump insn preceding "x = a;"
15a63be1
RK
755 TEMP1 to X
756 TEMP2 to the insn that sets "x = b;"
2156dfe3
RK
757 TEMP3 to the insn that sets "x = a;"
758 TEMP4 to the set of "x = b"; */
15a63be1
RK
759
760 if (this_is_simplejump
761 && (temp3 = prev_active_insn (insn)) != 0
762 && GET_CODE (temp3) == INSN
2156dfe3
RK
763 && (temp4 = single_set (temp3)) != 0
764 && GET_CODE (temp1 = SET_DEST (temp4)) == REG
f95182a4
ILT
765 && (! SMALL_REGISTER_CLASSES
766 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
15a63be1
RK
767 && (temp2 = next_active_insn (insn)) != 0
768 && GET_CODE (temp2) == INSN
2156dfe3
RK
769 && (temp4 = single_set (temp2)) != 0
770 && rtx_equal_p (SET_DEST (temp4), temp1)
771 && (GET_CODE (SET_SRC (temp4)) == REG
772 || GET_CODE (SET_SRC (temp4)) == SUBREG
773 || CONSTANT_P (SET_SRC (temp4)))
15a63be1
RK
774 && (REG_NOTES (temp2) == 0
775 || ((REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUAL
776 || REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUIV)
777 && XEXP (REG_NOTES (temp2), 1) == 0
778 && rtx_equal_p (XEXP (REG_NOTES (temp2), 0),
2156dfe3 779 SET_SRC (temp4))))
15a63be1
RK
780 && (temp = prev_active_insn (temp3)) != 0
781 && condjump_p (temp) && ! simplejump_p (temp)
782 /* TEMP must skip over the "x = a;" insn */
783 && prev_real_insn (JUMP_LABEL (temp)) == insn
784 && no_labels_between_p (insn, JUMP_LABEL (temp))
785 /* There must be no other entries to the "x = b;" insn. */
786 && no_labels_between_p (JUMP_LABEL (temp), temp2)
787 /* INSN must either branch to the insn after TEMP2 or the insn
788 after TEMP2 must branch to the same place as INSN. */
789 && (reallabelprev == temp2
2156dfe3
RK
790 || ((temp5 = next_active_insn (temp2)) != 0
791 && simplejump_p (temp5)
792 && JUMP_LABEL (temp5) == JUMP_LABEL (insn))))
15a63be1
RK
793 {
794 /* The test expression, X, may be a complicated test with
795 multiple branches. See if we can find all the uses of
796 the label that TEMP branches to without hitting a CALL_INSN
797 or a jump to somewhere else. */
798 rtx target = JUMP_LABEL (temp);
799 int nuses = LABEL_NUSES (target);
800 rtx p, q;
801
802 /* Set P to the first jump insn that goes around "x = a;". */
803 for (p = temp; nuses && p; p = prev_nonnote_insn (p))
804 {
805 if (GET_CODE (p) == JUMP_INSN)
806 {
807 if (condjump_p (p) && ! simplejump_p (p)
808 && JUMP_LABEL (p) == target)
809 {
810 nuses--;
811 if (nuses == 0)
812 break;
813 }
814 else
815 break;
816 }
817 else if (GET_CODE (p) == CALL_INSN)
818 break;
819 }
820
821#ifdef HAVE_cc0
822 /* We cannot insert anything between a set of cc and its use
823 so if P uses cc0, we must back up to the previous insn. */
824 q = prev_nonnote_insn (p);
825 if (q && GET_RTX_CLASS (GET_CODE (q)) == 'i'
826 && sets_cc0_p (PATTERN (q)))
827 p = q;
828#endif
829
830 if (p)
831 p = PREV_INSN (p);
832
833 /* If we found all the uses and there was no data conflict, we
834 can move the assignment unless we can branch into the middle
835 from somewhere. */
836 if (nuses == 0 && p
837 && no_labels_between_p (p, insn)
838 && ! reg_referenced_between_p (temp1, p, NEXT_INSN (temp3))
839 && ! reg_set_between_p (temp1, p, temp3)
2156dfe3
RK
840 && (GET_CODE (SET_SRC (temp4)) == CONST_INT
841 || ! reg_set_between_p (SET_SRC (temp4), p, temp2)))
15a63be1 842 {
2156dfe3
RK
843 emit_insn_after_with_line_notes (PATTERN (temp2), p, temp2);
844 delete_insn (temp2);
15a63be1
RK
845
846 /* Set NEXT to an insn that we know won't go away. */
847 next = next_active_insn (insn);
848
849 /* Delete the jump around the set. Note that we must do
850 this before we redirect the test jumps so that it won't
851 delete the code immediately following the assignment
852 we moved (which might be a jump). */
853
854 delete_insn (insn);
855
856 /* We either have two consecutive labels or a jump to
857 a jump, so adjust all the JUMP_INSNs to branch to where
858 INSN branches to. */
859 for (p = NEXT_INSN (p); p != next; p = NEXT_INSN (p))
860 if (GET_CODE (p) == JUMP_INSN)
861 redirect_jump (p, target);
862
863 changed = 1;
864 continue;
865 }
866 }
867
66bd9361
JW
868 /* Simplify if (...) { x = a; goto l; } x = b; by converting it
869 to x = a; if (...) goto l; x = b;
870 if A is sufficiently simple, the test doesn't involve X,
871 and nothing in the test modifies A or X.
872
873 If we have small register classes, we also can't do this if X
874 is a hard register.
875
876 If the "x = a;" insn has any REG_NOTES, we don't do this because
877 of the possibility that we are running after CSE and there is a
878 REG_EQUAL note that is only valid if the branch has already been
879 taken. If we move the insn with the REG_EQUAL note, we may
880 fold the comparison to always be false in a later CSE pass.
881 (We could also delete the REG_NOTES when moving the insn, but it
882 seems simpler to not move it.) An exception is that we can move
883 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
884 value is the same as "a".
885
886 INSN is the goto.
887
888 We set:
889
890 TEMP to the jump insn preceding "x = a;"
891 TEMP1 to X
892 TEMP2 to the insn that sets "x = b;"
893 TEMP3 to the insn that sets "x = a;"
894 TEMP4 to the set of "x = a"; */
895
896 if (this_is_simplejump
897 && (temp2 = next_active_insn (insn)) != 0
898 && GET_CODE (temp2) == INSN
899 && (temp4 = single_set (temp2)) != 0
900 && GET_CODE (temp1 = SET_DEST (temp4)) == REG
f95182a4
ILT
901 && (! SMALL_REGISTER_CLASSES
902 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
66bd9361
JW
903 && (temp3 = prev_active_insn (insn)) != 0
904 && GET_CODE (temp3) == INSN
905 && (temp4 = single_set (temp3)) != 0
906 && rtx_equal_p (SET_DEST (temp4), temp1)
907 && (GET_CODE (SET_SRC (temp4)) == REG
908 || GET_CODE (SET_SRC (temp4)) == SUBREG
909 || CONSTANT_P (SET_SRC (temp4)))
910 && (REG_NOTES (temp3) == 0
911 || ((REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUAL
912 || REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUIV)
913 && XEXP (REG_NOTES (temp3), 1) == 0
914 && rtx_equal_p (XEXP (REG_NOTES (temp3), 0),
915 SET_SRC (temp4))))
916 && (temp = prev_active_insn (temp3)) != 0
917 && condjump_p (temp) && ! simplejump_p (temp)
918 /* TEMP must skip over the "x = a;" insn */
919 && prev_real_insn (JUMP_LABEL (temp)) == insn
920 && no_labels_between_p (temp, insn))
921 {
922 rtx prev_label = JUMP_LABEL (temp);
923 rtx insert_after = prev_nonnote_insn (temp);
924
925#ifdef HAVE_cc0
926 /* We cannot insert anything between a set of cc and its use. */
927 if (insert_after && GET_RTX_CLASS (GET_CODE (insert_after)) == 'i'
928 && sets_cc0_p (PATTERN (insert_after)))
929 insert_after = prev_nonnote_insn (insert_after);
930#endif
931 ++LABEL_NUSES (prev_label);
932
933 if (insert_after
934 && no_labels_between_p (insert_after, temp)
7a7233ff 935 && ! reg_referenced_between_p (temp1, insert_after, temp3)
04bd0246
DE
936 && ! reg_referenced_between_p (temp1, temp3,
937 NEXT_INSN (temp2))
66bd9361
JW
938 && ! reg_set_between_p (temp1, insert_after, temp)
939 && (GET_CODE (SET_SRC (temp4)) == CONST_INT
940 || ! reg_set_between_p (SET_SRC (temp4),
941 insert_after, temp))
942 && invert_jump (temp, JUMP_LABEL (insn)))
943 {
944 emit_insn_after_with_line_notes (PATTERN (temp3),
945 insert_after, temp3);
946 delete_insn (temp3);
947 delete_insn (insn);
948 /* Set NEXT to an insn that we know won't go away. */
949 next = temp2;
950 changed = 1;
951 }
952 if (prev_label && --LABEL_NUSES (prev_label) == 0)
953 delete_insn (prev_label);
954 if (changed)
955 continue;
956 }
957
2156dfe3
RK
958#ifndef HAVE_cc0
959 /* If we have if (...) x = exp; and branches are expensive,
960 EXP is a single insn, does not have any side effects, cannot
961 trap, and is not too costly, convert this to
962 t = exp; if (...) x = t;
963
964 Don't do this when we have CC0 because it is unlikely to help
965 and we'd need to worry about where to place the new insn and
966 the potential for conflicts. We also can't do this when we have
967 notes on the insn for the same reason as above.
968
969 We set:
970
971 TEMP to the "x = exp;" insn.
972 TEMP1 to the single set in the "x = exp; insn.
973 TEMP2 to "x". */
974
975 if (! reload_completed
976 && this_is_condjump && ! this_is_simplejump
977 && BRANCH_COST >= 3
978 && (temp = next_nonnote_insn (insn)) != 0
a8fc41af 979 && GET_CODE (temp) == INSN
2156dfe3
RK
980 && REG_NOTES (temp) == 0
981 && (reallabelprev == temp
982 || ((temp2 = next_active_insn (temp)) != 0
983 && simplejump_p (temp2)
984 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
985 && (temp1 = single_set (temp)) != 0
986 && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
987 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
f95182a4
ILT
988 && (! SMALL_REGISTER_CLASSES
989 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
2156dfe3
RK
990 && GET_CODE (SET_SRC (temp1)) != REG
991 && GET_CODE (SET_SRC (temp1)) != SUBREG
992 && GET_CODE (SET_SRC (temp1)) != CONST_INT
993 && ! side_effects_p (SET_SRC (temp1))
994 && ! may_trap_p (SET_SRC (temp1))
97fa962f 995 && rtx_cost (SET_SRC (temp1), SET) < 10)
2156dfe3
RK
996 {
997 rtx new = gen_reg_rtx (GET_MODE (temp2));
998
999 if (validate_change (temp, &SET_DEST (temp1), new, 0))
1000 {
1001 next = emit_insn_after (gen_move_insn (temp2, new), insn);
1002 emit_insn_after_with_line_notes (PATTERN (temp),
1003 PREV_INSN (insn), temp);
1004 delete_insn (temp);
5954c8a8 1005 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
2156dfe3
RK
1006 }
1007 }
1008
1009 /* Similarly, if it takes two insns to compute EXP but they
1010 have the same destination. Here TEMP3 will be the second
1011 insn and TEMP4 the SET from that insn. */
1012
1013 if (! reload_completed
1014 && this_is_condjump && ! this_is_simplejump
1015 && BRANCH_COST >= 4
1016 && (temp = next_nonnote_insn (insn)) != 0
a8fc41af 1017 && GET_CODE (temp) == INSN
2156dfe3
RK
1018 && REG_NOTES (temp) == 0
1019 && (temp3 = next_nonnote_insn (temp)) != 0
a8fc41af 1020 && GET_CODE (temp3) == INSN
2156dfe3
RK
1021 && REG_NOTES (temp3) == 0
1022 && (reallabelprev == temp3
1023 || ((temp2 = next_active_insn (temp3)) != 0
1024 && simplejump_p (temp2)
1025 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
1026 && (temp1 = single_set (temp)) != 0
1027 && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
1028 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
f95182a4
ILT
1029 && (! SMALL_REGISTER_CLASSES
1030 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
2156dfe3
RK
1031 && ! side_effects_p (SET_SRC (temp1))
1032 && ! may_trap_p (SET_SRC (temp1))
97fa962f 1033 && rtx_cost (SET_SRC (temp1), SET) < 10
2156dfe3
RK
1034 && (temp4 = single_set (temp3)) != 0
1035 && rtx_equal_p (SET_DEST (temp4), temp2)
1036 && ! side_effects_p (SET_SRC (temp4))
1037 && ! may_trap_p (SET_SRC (temp4))
97fa962f 1038 && rtx_cost (SET_SRC (temp4), SET) < 10)
2156dfe3
RK
1039 {
1040 rtx new = gen_reg_rtx (GET_MODE (temp2));
1041
1042 if (validate_change (temp, &SET_DEST (temp1), new, 0))
1043 {
1044 next = emit_insn_after (gen_move_insn (temp2, new), insn);
1045 emit_insn_after_with_line_notes (PATTERN (temp),
1046 PREV_INSN (insn), temp);
1047 emit_insn_after_with_line_notes
1048 (replace_rtx (PATTERN (temp3), temp2, new),
1049 PREV_INSN (insn), temp3);
1050 delete_insn (temp);
1051 delete_insn (temp3);
5954c8a8 1052 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
2156dfe3
RK
1053 }
1054 }
1055
1056 /* Finally, handle the case where two insns are used to
1057 compute EXP but a temporary register is used. Here we must
0f41302f 1058 ensure that the temporary register is not used anywhere else. */
2156dfe3
RK
1059
1060 if (! reload_completed
1061 && after_regscan
1062 && this_is_condjump && ! this_is_simplejump
1063 && BRANCH_COST >= 4
1064 && (temp = next_nonnote_insn (insn)) != 0
a8fc41af 1065 && GET_CODE (temp) == INSN
2156dfe3
RK
1066 && REG_NOTES (temp) == 0
1067 && (temp3 = next_nonnote_insn (temp)) != 0
a8fc41af 1068 && GET_CODE (temp3) == INSN
2156dfe3
RK
1069 && REG_NOTES (temp3) == 0
1070 && (reallabelprev == temp3
1071 || ((temp2 = next_active_insn (temp3)) != 0
1072 && simplejump_p (temp2)
1073 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
1074 && (temp1 = single_set (temp)) != 0
c03c4711
RK
1075 && (temp5 = SET_DEST (temp1),
1076 (GET_CODE (temp5) == REG
1077 || (GET_CODE (temp5) == SUBREG
1078 && (temp5 = SUBREG_REG (temp5),
1079 GET_CODE (temp5) == REG))))
2156dfe3 1080 && REGNO (temp5) >= FIRST_PSEUDO_REGISTER
b1f21e0a
MM
1081 && REGNO_FIRST_UID (REGNO (temp5)) == INSN_UID (temp)
1082 && REGNO_LAST_UID (REGNO (temp5)) == INSN_UID (temp3)
2156dfe3
RK
1083 && ! side_effects_p (SET_SRC (temp1))
1084 && ! may_trap_p (SET_SRC (temp1))
97fa962f 1085 && rtx_cost (SET_SRC (temp1), SET) < 10
2156dfe3
RK
1086 && (temp4 = single_set (temp3)) != 0
1087 && (temp2 = SET_DEST (temp4), GET_CODE (temp2) == REG)
1088 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
f95182a4
ILT
1089 && (! SMALL_REGISTER_CLASSES
1090 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
2156dfe3
RK
1091 && rtx_equal_p (SET_DEST (temp4), temp2)
1092 && ! side_effects_p (SET_SRC (temp4))
1093 && ! may_trap_p (SET_SRC (temp4))
97fa962f 1094 && rtx_cost (SET_SRC (temp4), SET) < 10)
2156dfe3
RK
1095 {
1096 rtx new = gen_reg_rtx (GET_MODE (temp2));
1097
1098 if (validate_change (temp3, &SET_DEST (temp4), new, 0))
1099 {
1100 next = emit_insn_after (gen_move_insn (temp2, new), insn);
1101 emit_insn_after_with_line_notes (PATTERN (temp),
1102 PREV_INSN (insn), temp);
1103 emit_insn_after_with_line_notes (PATTERN (temp3),
1104 PREV_INSN (insn), temp3);
1105 delete_insn (temp);
1106 delete_insn (temp3);
5954c8a8 1107 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
2156dfe3
RK
1108 }
1109 }
1110#endif /* HAVE_cc0 */
1111
7209f1f9
DE
1112 /* Try to use a conditional move (if the target has them), or a
1113 store-flag insn. The general case is:
1114
1115 1) x = a; if (...) x = b; and
1116 2) if (...) x = b;
1117
1118 If the jump would be faster, the machine should not have defined
1119 the movcc or scc insns!. These cases are often made by the
3915de94 1120 previous optimization.
15a63be1 1121
7209f1f9
DE
1122 The second case is treated as x = x; if (...) x = b;.
1123
15a63be1
RK
1124 INSN here is the jump around the store. We set:
1125
1126 TEMP to the "x = b;" insn.
1127 TEMP1 to X.
7209f1f9 1128 TEMP2 to B.
15a63be1
RK
1129 TEMP3 to A (X in the second case).
1130 TEMP4 to the condition being tested.
1131 TEMP5 to the earliest insn used to find the condition. */
1132
1133 if (/* We can't do this after reload has completed. */
1134 ! reload_completed
1135 && this_is_condjump && ! this_is_simplejump
1136 /* Set TEMP to the "x = b;" insn. */
1137 && (temp = next_nonnote_insn (insn)) != 0
1138 && GET_CODE (temp) == INSN
1139 && GET_CODE (PATTERN (temp)) == SET
1140 && GET_CODE (temp1 = SET_DEST (PATTERN (temp))) == REG
f95182a4
ILT
1141 && (! SMALL_REGISTER_CLASSES
1142 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
15a63be1 1143 && (GET_CODE (temp2 = SET_SRC (PATTERN (temp))) == REG
a73f9fc9 1144 || GET_CODE (temp2) == SUBREG
7209f1f9 1145 /* ??? How about floating point constants? */
15a63be1 1146 || GET_CODE (temp2) == CONST_INT)
2156dfe3
RK
1147 /* Allow either form, but prefer the former if both apply.
1148 There is no point in using the old value of TEMP1 if
1149 it is a register, since cse will alias them. It can
1150 lose if the old value were a hard register since CSE
1151 won't replace hard registers. */
7209f1f9
DE
1152 && (((temp3 = reg_set_last (temp1, insn)) != 0)
1153 /* Make the latter case look like x = x; if (...) x = b; */
1154 || (temp3 = temp1, 1))
15a63be1
RK
1155 /* INSN must either branch to the insn after TEMP or the insn
1156 after TEMP must branch to the same place as INSN. */
1157 && (reallabelprev == temp
1158 || ((temp4 = next_active_insn (temp)) != 0
1159 && simplejump_p (temp4)
1160 && JUMP_LABEL (temp4) == JUMP_LABEL (insn)))
1161 && (temp4 = get_condition (insn, &temp5)) != 0
7124e1e5
RS
1162 /* We must be comparing objects whose modes imply the size.
1163 We could handle BLKmode if (1) emit_store_flag could
1164 and (2) we could find the size reliably. */
1165 && GET_MODE (XEXP (temp4, 0)) != BLKmode
04d23d7c
RK
1166 /* Even if branches are cheap, the store_flag optimization
1167 can win when the operation to be performed can be
1168 expressed directly. */
2156dfe3
RK
1169#ifdef HAVE_cc0
1170 /* If the previous insn sets CC0 and something else, we can't
1171 do this since we are going to delete that insn. */
1172
1173 && ! ((temp6 = prev_nonnote_insn (insn)) != 0
1174 && GET_CODE (temp6) == INSN
01ca1b91
RK
1175 && (sets_cc0_p (PATTERN (temp6)) == -1
1176 || (sets_cc0_p (PATTERN (temp6)) == 1
1177 && FIND_REG_INC_NOTE (temp6, NULL_RTX))))
2156dfe3
RK
1178#endif
1179 )
15a63be1 1180 {
7209f1f9
DE
1181#ifdef HAVE_conditional_move
1182 /* First try a conditional move. */
1183 {
1184 enum rtx_code code = GET_CODE (temp4);
1185 rtx var = temp1;
1186 rtx cond0, cond1, aval, bval;
1187 rtx target;
1188
1189 /* Copy the compared variables into cond0 and cond1, so that
1190 any side effects performed in or after the old comparison,
1191 will not affect our compare which will come later. */
1192 /* ??? Is it possible to just use the comparison in the jump
1193 insn? After all, we're going to delete it. We'd have
1194 to modify emit_conditional_move to take a comparison rtx
1195 instead or write a new function. */
1196 cond0 = gen_reg_rtx (GET_MODE (XEXP (temp4, 0)));
1197 /* We want the target to be able to simplify comparisons with
1198 zero (and maybe other constants as well), so don't create
1199 pseudos for them. There's no need to either. */
1200 if (GET_CODE (XEXP (temp4, 1)) == CONST_INT
1201 || GET_CODE (XEXP (temp4, 1)) == CONST_DOUBLE)
1202 cond1 = XEXP (temp4, 1);
1203 else
1204 cond1 = gen_reg_rtx (GET_MODE (XEXP (temp4, 1)));
1205
1206 aval = temp3;
1207 bval = temp2;
1208
1209 start_sequence ();
1210 target = emit_conditional_move (var, code,
1211 cond0, cond1, VOIDmode,
1212 aval, bval, GET_MODE (var),
1213 (code == LTU || code == GEU
1214 || code == LEU || code == GTU));
1215
1216 if (target)
1217 {
1218 rtx seq1,seq2;
1219
1220 /* Save the conditional move sequence but don't emit it
1221 yet. On some machines, like the alpha, it is possible
1222 that temp5 == insn, so next generate the sequence that
1223 saves the compared values and then emit both
1224 sequences ensuring seq1 occurs before seq2. */
1225 seq2 = get_insns ();
1226 end_sequence ();
1227
1228 /* Now that we can't fail, generate the copy insns that
1229 preserve the compared values. */
1230 start_sequence ();
1231 emit_move_insn (cond0, XEXP (temp4, 0));
1232 if (cond1 != XEXP (temp4, 1))
1233 emit_move_insn (cond1, XEXP (temp4, 1));
1234 seq1 = get_insns ();
1235 end_sequence ();
1236
1237 emit_insns_before (seq1, temp5);
777e434c
RK
1238 /* Insert conditional move after insn, to be sure that
1239 the jump and a possible compare won't be separated */
1240 emit_insns_after (seq2, insn);
7209f1f9
DE
1241
1242 /* ??? We can also delete the insn that sets X to A.
1243 Flow will do it too though. */
1244 delete_insn (temp);
1245 next = NEXT_INSN (insn);
1246 delete_jump (insn);
1247 changed = 1;
1248 continue;
1249 }
1250 else
2156dfe3 1251 end_sequence ();
7209f1f9
DE
1252 }
1253#endif
2156dfe3 1254
7209f1f9
DE
1255 /* That didn't work, try a store-flag insn.
1256
1257 We further divide the cases into:
1258
1259 1) x = a; if (...) x = b; and either A or B is zero,
1260 2) if (...) x = 0; and jumps are expensive,
1261 3) x = a; if (...) x = b; and A and B are constants where all
1262 the set bits in A are also set in B and jumps are expensive,
1263 4) x = a; if (...) x = b; and A and B non-zero, and jumps are
1264 more expensive, and
1265 5) if (...) x = b; if jumps are even more expensive. */
1266
1267 if (GET_MODE_CLASS (GET_MODE (temp1)) == MODE_INT
1268 && ((GET_CODE (temp3) == CONST_INT)
1269 /* Make the latter case look like
1270 x = x; if (...) x = 0; */
1271 || (temp3 = temp1,
1272 ((BRANCH_COST >= 2
1273 && temp2 == const0_rtx)
1274 || BRANCH_COST >= 3)))
1275 /* If B is zero, OK; if A is zero, can only do (1) if we
1276 can reverse the condition. See if (3) applies possibly
1277 by reversing the condition. Prefer reversing to (4) when
1278 branches are very expensive. */
04d23d7c
RK
1279 && (((BRANCH_COST >= 2
1280 || STORE_FLAG_VALUE == -1
1281 || (STORE_FLAG_VALUE == 1
1282 /* Check that the mask is a power of two,
1283 so that it can probably be generated
1284 with a shift. */
1285 && exact_log2 (INTVAL (temp3)) >= 0))
1286 && (reversep = 0, temp2 == const0_rtx))
1287 || ((BRANCH_COST >= 2
1288 || STORE_FLAG_VALUE == -1
1289 || (STORE_FLAG_VALUE == 1
1290 && exact_log2 (INTVAL (temp2)) >= 0))
1291 && temp3 == const0_rtx
7209f1f9
DE
1292 && (reversep = can_reverse_comparison_p (temp4, insn)))
1293 || (BRANCH_COST >= 2
1294 && GET_CODE (temp2) == CONST_INT
1295 && GET_CODE (temp3) == CONST_INT
1296 && ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp2)
1297 || ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp3)
1298 && (reversep = can_reverse_comparison_p (temp4,
1299 insn)))))
1300 || BRANCH_COST >= 3)
1301 )
1302 {
1303 enum rtx_code code = GET_CODE (temp4);
1304 rtx uval, cval, var = temp1;
1305 int normalizep;
1306 rtx target;
1307
1308 /* If necessary, reverse the condition. */
1309 if (reversep)
1310 code = reverse_condition (code), uval = temp2, cval = temp3;
1311 else
1312 uval = temp3, cval = temp2;
1313
1314 /* If CVAL is non-zero, normalize to -1. Otherwise, if UVAL
1315 is the constant 1, it is best to just compute the result
1316 directly. If UVAL is constant and STORE_FLAG_VALUE
1317 includes all of its bits, it is best to compute the flag
1318 value unnormalized and `and' it with UVAL. Otherwise,
1319 normalize to -1 and `and' with UVAL. */
1320 normalizep = (cval != const0_rtx ? -1
1321 : (uval == const1_rtx ? 1
1322 : (GET_CODE (uval) == CONST_INT
1323 && (INTVAL (uval) & ~STORE_FLAG_VALUE) == 0)
1324 ? 0 : -1));
1325
1326 /* We will be putting the store-flag insn immediately in
1327 front of the comparison that was originally being done,
1328 so we know all the variables in TEMP4 will be valid.
1329 However, this might be in front of the assignment of
1330 A to VAR. If it is, it would clobber the store-flag
1331 we will be emitting.
1332
1333 Therefore, emit into a temporary which will be copied to
1334 VAR immediately after TEMP. */
2156dfe3
RK
1335
1336 start_sequence ();
7209f1f9
DE
1337 target = emit_store_flag (gen_reg_rtx (GET_MODE (var)), code,
1338 XEXP (temp4, 0), XEXP (temp4, 1),
1339 VOIDmode,
1340 (code == LTU || code == LEU
1341 || code == GEU || code == GTU),
1342 normalizep);
1343 if (target)
3915de94 1344 {
7209f1f9
DE
1345 rtx seq;
1346 rtx before = insn;
3915de94 1347
7209f1f9
DE
1348 seq = get_insns ();
1349 end_sequence ();
3915de94 1350
7209f1f9
DE
1351 /* Put the store-flag insns in front of the first insn
1352 used to compute the condition to ensure that we
1353 use the same values of them as the current
1354 comparison. However, the remainder of the insns we
1355 generate will be placed directly in front of the
1356 jump insn, in case any of the pseudos we use
1357 are modified earlier. */
3915de94 1358
7209f1f9 1359 emit_insns_before (seq, temp5);
3915de94 1360
7209f1f9
DE
1361 start_sequence ();
1362
1363 /* Both CVAL and UVAL are non-zero. */
1364 if (cval != const0_rtx && uval != const0_rtx)
c71ebae3 1365 {
7209f1f9
DE
1366 rtx tem1, tem2;
1367
1368 tem1 = expand_and (uval, target, NULL_RTX);
1369 if (GET_CODE (cval) == CONST_INT
1370 && GET_CODE (uval) == CONST_INT
1371 && (INTVAL (cval) & INTVAL (uval)) == INTVAL (cval))
1372 tem2 = cval;
1373 else
1374 {
1375 tem2 = expand_unop (GET_MODE (var), one_cmpl_optab,
1376 target, NULL_RTX, 0);
1377 tem2 = expand_and (cval, tem2,
1378 (GET_CODE (tem2) == REG
1379 ? tem2 : 0));
1380 }
1381
1382 /* If we usually make new pseudos, do so here. This
1383 turns out to help machines that have conditional
1384 move insns. */
1385 /* ??? Conditional moves have already been handled.
1386 This may be obsolete. */
1387
1388 if (flag_expensive_optimizations)
1389 target = 0;
1390
1391 target = expand_binop (GET_MODE (var), ior_optab,
1392 tem1, tem2, target,
1393 1, OPTAB_WIDEN);
c71ebae3 1394 }
7209f1f9
DE
1395 else if (normalizep != 1)
1396 {
1397 /* We know that either CVAL or UVAL is zero. If
1398 UVAL is zero, negate TARGET and `and' with CVAL.
1399 Otherwise, `and' with UVAL. */
1400 if (uval == const0_rtx)
1401 {
1402 target = expand_unop (GET_MODE (var), one_cmpl_optab,
1403 target, NULL_RTX, 0);
1404 uval = cval;
1405 }
c71ebae3 1406
7209f1f9
DE
1407 target = expand_and (uval, target,
1408 (GET_CODE (target) == REG
1409 && ! preserve_subexpressions_p ()
1410 ? target : NULL_RTX));
1411 }
3915de94 1412
7209f1f9
DE
1413 emit_move_insn (var, target);
1414 seq = get_insns ();
1415 end_sequence ();
01ca1b91 1416#ifdef HAVE_cc0
7209f1f9
DE
1417 /* If INSN uses CC0, we must not separate it from the
1418 insn that sets cc0. */
1419 if (reg_mentioned_p (cc0_rtx, PATTERN (before)))
1420 before = prev_nonnote_insn (before);
01ca1b91 1421#endif
7209f1f9 1422 emit_insns_before (seq, before);
01ca1b91 1423
7209f1f9
DE
1424 delete_insn (temp);
1425 next = NEXT_INSN (insn);
1426 delete_jump (insn);
1427 changed = 1;
1428 continue;
1429 }
1430 else
1431 end_sequence ();
15a63be1 1432 }
15a63be1
RK
1433 }
1434
1435 /* If branches are expensive, convert
1436 if (foo) bar++; to bar += (foo != 0);
1437 and similarly for "bar--;"
1438
1439 INSN is the conditional branch around the arithmetic. We set:
1440
1441 TEMP is the arithmetic insn.
6dc42e49 1442 TEMP1 is the SET doing the arithmetic.
15a63be1
RK
1443 TEMP2 is the operand being incremented or decremented.
1444 TEMP3 to the condition being tested.
1445 TEMP4 to the earliest insn used to find the condition. */
1446
a8d916d3 1447 if ((BRANCH_COST >= 2
48f16828 1448#ifdef HAVE_incscc
a8d916d3 1449 || HAVE_incscc
48f16828
RK
1450#endif
1451#ifdef HAVE_decscc
d3907945 1452 || HAVE_decscc
a8d916d3
JL
1453#endif
1454 )
15a63be1
RK
1455 && ! reload_completed
1456 && this_is_condjump && ! this_is_simplejump
1457 && (temp = next_nonnote_insn (insn)) != 0
1458 && (temp1 = single_set (temp)) != 0
1459 && (temp2 = SET_DEST (temp1),
1460 GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT)
1461 && GET_CODE (SET_SRC (temp1)) == PLUS
1462 && (XEXP (SET_SRC (temp1), 1) == const1_rtx
1463 || XEXP (SET_SRC (temp1), 1) == constm1_rtx)
1464 && rtx_equal_p (temp2, XEXP (SET_SRC (temp1), 0))
8743ede9
RK
1465 && ! side_effects_p (temp2)
1466 && ! may_trap_p (temp2)
15a63be1
RK
1467 /* INSN must either branch to the insn after TEMP or the insn
1468 after TEMP must branch to the same place as INSN. */
1469 && (reallabelprev == temp
1470 || ((temp3 = next_active_insn (temp)) != 0
1471 && simplejump_p (temp3)
1472 && JUMP_LABEL (temp3) == JUMP_LABEL (insn)))
1473 && (temp3 = get_condition (insn, &temp4)) != 0
7124e1e5
RS
1474 /* We must be comparing objects whose modes imply the size.
1475 We could handle BLKmode if (1) emit_store_flag could
1476 and (2) we could find the size reliably. */
1477 && GET_MODE (XEXP (temp3, 0)) != BLKmode
15a63be1
RK
1478 && can_reverse_comparison_p (temp3, insn))
1479 {
626d0d1d 1480 rtx temp6, target = 0, seq, init_insn = 0, init = temp2;
15a63be1
RK
1481 enum rtx_code code = reverse_condition (GET_CODE (temp3));
1482
1483 start_sequence ();
1484
626d0d1d
RK
1485 /* It must be the case that TEMP2 is not modified in the range
1486 [TEMP4, INSN). The one exception we make is if the insn
1487 before INSN sets TEMP2 to something which is also unchanged
1488 in that range. In that case, we can move the initialization
1489 into our sequence. */
1490
1491 if ((temp5 = prev_active_insn (insn)) != 0
19f5ce60 1492 && no_labels_between_p (temp5, insn)
626d0d1d
RK
1493 && GET_CODE (temp5) == INSN
1494 && (temp6 = single_set (temp5)) != 0
1495 && rtx_equal_p (temp2, SET_DEST (temp6))
1496 && (CONSTANT_P (SET_SRC (temp6))
1497 || GET_CODE (SET_SRC (temp6)) == REG
1498 || GET_CODE (SET_SRC (temp6)) == SUBREG))
1499 {
1500 emit_insn (PATTERN (temp5));
1501 init_insn = temp5;
1502 init = SET_SRC (temp6);
1503 }
1504
1505 if (CONSTANT_P (init)
1506 || ! reg_set_between_p (init, PREV_INSN (temp4), insn))
1507 target = emit_store_flag (gen_reg_rtx (GET_MODE (temp2)), code,
1508 XEXP (temp3, 0), XEXP (temp3, 1),
1509 VOIDmode,
1510 (code == LTU || code == LEU
1511 || code == GTU || code == GEU), 1);
15a63be1
RK
1512
1513 /* If we can do the store-flag, do the addition or
1514 subtraction. */
1515
1516 if (target)
1517 target = expand_binop (GET_MODE (temp2),
1518 (XEXP (SET_SRC (temp1), 1) == const1_rtx
1519 ? add_optab : sub_optab),
58f066d1 1520 temp2, target, temp2, 0, OPTAB_WIDEN);
15a63be1
RK
1521
1522 if (target != 0)
1523 {
1524 /* Put the result back in temp2 in case it isn't already.
1525 Then replace the jump, possible a CC0-setting insn in
1526 front of the jump, and TEMP, with the sequence we have
1527 made. */
1528
1529 if (target != temp2)
1530 emit_move_insn (temp2, target);
1531
1532 seq = get_insns ();
1533 end_sequence ();
1534
1535 emit_insns_before (seq, temp4);
1536 delete_insn (temp);
626d0d1d
RK
1537
1538 if (init_insn)
1539 delete_insn (init_insn);
1540
15a63be1
RK
1541 next = NEXT_INSN (insn);
1542#ifdef HAVE_cc0
1543 delete_insn (prev_nonnote_insn (insn));
1544#endif
1545 delete_insn (insn);
1546 changed = 1;
1547 continue;
1548 }
1549 else
1550 end_sequence ();
1551 }
1552
1553 /* Simplify if (...) x = 1; else {...} if (x) ...
1554 We recognize this case scanning backwards as well.
1555
1556 TEMP is the assignment to x;
1557 TEMP1 is the label at the head of the second if. */
1558 /* ?? This should call get_condition to find the values being
1559 compared, instead of looking for a COMPARE insn when HAVE_cc0
1560 is not defined. This would allow it to work on the m88k. */
1561 /* ?? This optimization is only safe before cse is run if HAVE_cc0
1562 is not defined and the condition is tested by a separate compare
1563 insn. This is because the code below assumes that the result
1564 of the compare dies in the following branch.
1565
1566 Not only that, but there might be other insns between the
1567 compare and branch whose results are live. Those insns need
1568 to be executed.
1569
1570 A way to fix this is to move the insns at JUMP_LABEL (insn)
1571 to before INSN. If we are running before flow, they will
1572 be deleted if they aren't needed. But this doesn't work
1573 well after flow.
1574
1575 This is really a special-case of jump threading, anyway. The
1576 right thing to do is to replace this and jump threading with
1577 much simpler code in cse.
1578
1579 This code has been turned off in the non-cc0 case in the
1580 meantime. */
1581
1582#ifdef HAVE_cc0
1583 else if (this_is_simplejump
1584 /* Safe to skip USE and CLOBBER insns here
1585 since they will not be deleted. */
1586 && (temp = prev_active_insn (insn))
1587 && no_labels_between_p (temp, insn)
1588 && GET_CODE (temp) == INSN
1589 && GET_CODE (PATTERN (temp)) == SET
1590 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1591 && CONSTANT_P (SET_SRC (PATTERN (temp)))
1592 && (temp1 = next_active_insn (JUMP_LABEL (insn)))
1593 /* If we find that the next value tested is `x'
1594 (TEMP1 is the insn where this happens), win. */
1595 && GET_CODE (temp1) == INSN
1596 && GET_CODE (PATTERN (temp1)) == SET
1597#ifdef HAVE_cc0
1598 /* Does temp1 `tst' the value of x? */
1599 && SET_SRC (PATTERN (temp1)) == SET_DEST (PATTERN (temp))
1600 && SET_DEST (PATTERN (temp1)) == cc0_rtx
1601 && (temp1 = next_nonnote_insn (temp1))
1602#else
1603 /* Does temp1 compare the value of x against zero? */
1604 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1605 && XEXP (SET_SRC (PATTERN (temp1)), 1) == const0_rtx
1606 && (XEXP (SET_SRC (PATTERN (temp1)), 0)
1607 == SET_DEST (PATTERN (temp)))
1608 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1609 && (temp1 = find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1610#endif
1611 && condjump_p (temp1))
1612 {
1613 /* Get the if_then_else from the condjump. */
1614 rtx choice = SET_SRC (PATTERN (temp1));
1615 if (GET_CODE (choice) == IF_THEN_ELSE)
1616 {
1617 enum rtx_code code = GET_CODE (XEXP (choice, 0));
1618 rtx val = SET_SRC (PATTERN (temp));
1619 rtx cond
1620 = simplify_relational_operation (code, GET_MODE (SET_DEST (PATTERN (temp))),
1621 val, const0_rtx);
1622 rtx ultimate;
1623
1624 if (cond == const_true_rtx)
1625 ultimate = XEXP (choice, 1);
1626 else if (cond == const0_rtx)
1627 ultimate = XEXP (choice, 2);
1628 else
1629 ultimate = 0;
1630
1631 if (ultimate == pc_rtx)
1632 ultimate = get_label_after (temp1);
1633 else if (ultimate && GET_CODE (ultimate) != RETURN)
1634 ultimate = XEXP (ultimate, 0);
1635
9e390837 1636 if (ultimate && JUMP_LABEL(insn) != ultimate)
15a63be1
RK
1637 changed |= redirect_jump (insn, ultimate);
1638 }
1639 }
1640#endif
1641
1642#if 0
1643 /* @@ This needs a bit of work before it will be right.
1644
1645 Any type of comparison can be accepted for the first and
1646 second compare. When rewriting the first jump, we must
1647 compute the what conditions can reach label3, and use the
1648 appropriate code. We can not simply reverse/swap the code
1649 of the first jump. In some cases, the second jump must be
1650 rewritten also.
1651
1652 For example,
1653 < == converts to > ==
1654 < != converts to == >
1655 etc.
1656
1657 If the code is written to only accept an '==' test for the second
1658 compare, then all that needs to be done is to swap the condition
1659 of the first branch.
1660
1661 It is questionable whether we want this optimization anyways,
1662 since if the user wrote code like this because he/she knew that
6dc42e49 1663 the jump to label1 is taken most of the time, then rewriting
15a63be1
RK
1664 this gives slower code. */
1665 /* @@ This should call get_condition to find the values being
1666 compared, instead of looking for a COMPARE insn when HAVE_cc0
1667 is not defined. This would allow it to work on the m88k. */
1668 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1669 is not defined and the condition is tested by a separate compare
1670 insn. This is because the code below assumes that the result
1671 of the compare dies in the following branch. */
1672
1673 /* Simplify test a ~= b
1674 condjump label1;
1675 test a == b
1676 condjump label2;
1677 jump label3;
1678 label1:
1679
1680 rewriting as
1681 test a ~~= b
1682 condjump label3
1683 test a == b
1684 condjump label2
1685 label1:
1686
1687 where ~= is an inequality, e.g. >, and ~~= is the swapped
1688 inequality, e.g. <.
1689
1690 We recognize this case scanning backwards.
1691
1692 TEMP is the conditional jump to `label2';
1693 TEMP1 is the test for `a == b';
1694 TEMP2 is the conditional jump to `label1';
1695 TEMP3 is the test for `a ~= b'. */
1696 else if (this_is_simplejump
1697 && (temp = prev_active_insn (insn))
1698 && no_labels_between_p (temp, insn)
1699 && condjump_p (temp)
1700 && (temp1 = prev_active_insn (temp))
1701 && no_labels_between_p (temp1, temp)
1702 && GET_CODE (temp1) == INSN
1703 && GET_CODE (PATTERN (temp1)) == SET
1704#ifdef HAVE_cc0
1705 && sets_cc0_p (PATTERN (temp1)) == 1
1706#else
1707 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1708 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1709 && (temp == find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1710#endif
1711 && (temp2 = prev_active_insn (temp1))
1712 && no_labels_between_p (temp2, temp1)
1713 && condjump_p (temp2)
1714 && JUMP_LABEL (temp2) == next_nonnote_insn (NEXT_INSN (insn))
1715 && (temp3 = prev_active_insn (temp2))
1716 && no_labels_between_p (temp3, temp2)
1717 && GET_CODE (PATTERN (temp3)) == SET
1718 && rtx_equal_p (SET_DEST (PATTERN (temp3)),
1719 SET_DEST (PATTERN (temp1)))
1720 && rtx_equal_p (SET_SRC (PATTERN (temp1)),
1721 SET_SRC (PATTERN (temp3)))
1722 && ! inequality_comparisons_p (PATTERN (temp))
1723 && inequality_comparisons_p (PATTERN (temp2)))
1724 {
1725 rtx fallthrough_label = JUMP_LABEL (temp2);
1726
1727 ++LABEL_NUSES (fallthrough_label);
1728 if (swap_jump (temp2, JUMP_LABEL (insn)))
1729 {
1730 delete_insn (insn);
1731 changed = 1;
1732 }
1733
1734 if (--LABEL_NUSES (fallthrough_label) == 0)
1735 delete_insn (fallthrough_label);
1736 }
1737#endif
1738 /* Simplify if (...) {... x = 1;} if (x) ...
1739
1740 We recognize this case backwards.
1741
1742 TEMP is the test of `x';
1743 TEMP1 is the assignment to `x' at the end of the
1744 previous statement. */
1745 /* @@ This should call get_condition to find the values being
1746 compared, instead of looking for a COMPARE insn when HAVE_cc0
1747 is not defined. This would allow it to work on the m88k. */
1748 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1749 is not defined and the condition is tested by a separate compare
1750 insn. This is because the code below assumes that the result
1751 of the compare dies in the following branch. */
2d20b9df
RS
1752
1753 /* ??? This has to be turned off. The problem is that the
1754 unconditional jump might indirectly end up branching to the
1755 label between TEMP1 and TEMP. We can't detect this, in general,
1756 since it may become a jump to there after further optimizations.
1757 If that jump is done, it will be deleted, so we will retry
1758 this optimization in the next pass, thus an infinite loop.
1759
1760 The present code prevents this by putting the jump after the
1761 label, but this is not logically correct. */
1762#if 0
15a63be1
RK
1763 else if (this_is_condjump
1764 /* Safe to skip USE and CLOBBER insns here
1765 since they will not be deleted. */
1766 && (temp = prev_active_insn (insn))
1767 && no_labels_between_p (temp, insn)
1768 && GET_CODE (temp) == INSN
1769 && GET_CODE (PATTERN (temp)) == SET
1770#ifdef HAVE_cc0
1771 && sets_cc0_p (PATTERN (temp)) == 1
1772 && GET_CODE (SET_SRC (PATTERN (temp))) == REG
1773#else
1774 /* Temp must be a compare insn, we can not accept a register
1775 to register move here, since it may not be simply a
1776 tst insn. */
1777 && GET_CODE (SET_SRC (PATTERN (temp))) == COMPARE
1778 && XEXP (SET_SRC (PATTERN (temp)), 1) == const0_rtx
1779 && GET_CODE (XEXP (SET_SRC (PATTERN (temp)), 0)) == REG
1780 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1781 && insn == find_next_ref (SET_DEST (PATTERN (temp)), temp)
1782#endif
1783 /* May skip USE or CLOBBER insns here
1784 for checking for opportunity, since we
1785 take care of them later. */
1786 && (temp1 = prev_active_insn (temp))
1787 && GET_CODE (temp1) == INSN
1788 && GET_CODE (PATTERN (temp1)) == SET
1789#ifdef HAVE_cc0
1790 && SET_SRC (PATTERN (temp)) == SET_DEST (PATTERN (temp1))
1791#else
1792 && (XEXP (SET_SRC (PATTERN (temp)), 0)
1793 == SET_DEST (PATTERN (temp1)))
1794#endif
1795 && CONSTANT_P (SET_SRC (PATTERN (temp1)))
1796 /* If this isn't true, cse will do the job. */
1797 && ! no_labels_between_p (temp1, temp))
1798 {
1799 /* Get the if_then_else from the condjump. */
1800 rtx choice = SET_SRC (PATTERN (insn));
1801 if (GET_CODE (choice) == IF_THEN_ELSE
1802 && (GET_CODE (XEXP (choice, 0)) == EQ
1803 || GET_CODE (XEXP (choice, 0)) == NE))
1804 {
1805 int want_nonzero = (GET_CODE (XEXP (choice, 0)) == NE);
1806 rtx last_insn;
1807 rtx ultimate;
1808 rtx p;
1809
1810 /* Get the place that condjump will jump to
1811 if it is reached from here. */
1812 if ((SET_SRC (PATTERN (temp1)) != const0_rtx)
1813 == want_nonzero)
1814 ultimate = XEXP (choice, 1);
1815 else
1816 ultimate = XEXP (choice, 2);
1817 /* Get it as a CODE_LABEL. */
1818 if (ultimate == pc_rtx)
1819 ultimate = get_label_after (insn);
1820 else
1821 /* Get the label out of the LABEL_REF. */
1822 ultimate = XEXP (ultimate, 0);
1823
2d20b9df
RS
1824 /* Insert the jump immediately before TEMP, specifically
1825 after the label that is between TEMP1 and TEMP. */
1826 last_insn = PREV_INSN (temp);
15a63be1
RK
1827
1828 /* If we would be branching to the next insn, the jump
1829 would immediately be deleted and the re-inserted in
1830 a subsequent pass over the code. So don't do anything
1831 in that case. */
1832 if (next_active_insn (last_insn)
1833 != next_active_insn (ultimate))
1834 {
1835 emit_barrier_after (last_insn);
1836 p = emit_jump_insn_after (gen_jump (ultimate),
1837 last_insn);
1838 JUMP_LABEL (p) = ultimate;
1839 ++LABEL_NUSES (ultimate);
1840 if (INSN_UID (ultimate) < max_jump_chain
1841 && INSN_CODE (p) < max_jump_chain)
1842 {
1843 jump_chain[INSN_UID (p)]
1844 = jump_chain[INSN_UID (ultimate)];
1845 jump_chain[INSN_UID (ultimate)] = p;
1846 }
1847 changed = 1;
1848 continue;
1849 }
1850 }
1851 }
2d20b9df 1852#endif
15a63be1
RK
1853 /* Detect a conditional jump going to the same place
1854 as an immediately following unconditional jump. */
1855 else if (this_is_condjump
1856 && (temp = next_active_insn (insn)) != 0
1857 && simplejump_p (temp)
1858 && (next_active_insn (JUMP_LABEL (insn))
1859 == next_active_insn (JUMP_LABEL (temp))))
1860 {
f6a6a1b3
DE
1861 rtx tem = temp;
1862
1863 /* ??? Optional. Disables some optimizations, but makes
1864 gcov output more accurate with -O. */
1865 if (flag_test_coverage && !reload_completed)
1866 for (tem = insn; tem != temp; tem = NEXT_INSN (tem))
1867 if (GET_CODE (tem) == NOTE && NOTE_LINE_NUMBER (tem) > 0)
1868 break;
1869
1870 if (tem == temp)
1871 {
1872 delete_jump (insn);
1873 changed = 1;
1874 continue;
1875 }
15a63be1
RK
1876 }
1877 /* Detect a conditional jump jumping over an unconditional jump. */
1878
3480bb98
JL
1879 else if ((this_is_condjump || this_is_condjump_in_parallel)
1880 && ! this_is_simplejump
15a63be1
RK
1881 && reallabelprev != 0
1882 && GET_CODE (reallabelprev) == JUMP_INSN
1883 && prev_active_insn (reallabelprev) == insn
1884 && no_labels_between_p (insn, reallabelprev)
1885 && simplejump_p (reallabelprev))
1886 {
1887 /* When we invert the unconditional jump, we will be
1888 decrementing the usage count of its old label.
1889 Make sure that we don't delete it now because that
1890 might cause the following code to be deleted. */
1891 rtx prev_uses = prev_nonnote_insn (reallabelprev);
1892 rtx prev_label = JUMP_LABEL (insn);
1893
e26a82e4
JW
1894 if (prev_label)
1895 ++LABEL_NUSES (prev_label);
15a63be1
RK
1896
1897 if (invert_jump (insn, JUMP_LABEL (reallabelprev)))
1898 {
1899 /* It is very likely that if there are USE insns before
1900 this jump, they hold REG_DEAD notes. These REG_DEAD
1901 notes are no longer valid due to this optimization,
1902 and will cause the life-analysis that following passes
1903 (notably delayed-branch scheduling) to think that
1904 these registers are dead when they are not.
1905
1906 To prevent this trouble, we just remove the USE insns
1907 from the insn chain. */
1908
1909 while (prev_uses && GET_CODE (prev_uses) == INSN
1910 && GET_CODE (PATTERN (prev_uses)) == USE)
1911 {
1912 rtx useless = prev_uses;
1913 prev_uses = prev_nonnote_insn (prev_uses);
1914 delete_insn (useless);
1915 }
1916
1917 delete_insn (reallabelprev);
1918 next = insn;
1919 changed = 1;
1920 }
1921
1922 /* We can now safely delete the label if it is unreferenced
1923 since the delete_insn above has deleted the BARRIER. */
e26a82e4 1924 if (prev_label && --LABEL_NUSES (prev_label) == 0)
15a63be1
RK
1925 delete_insn (prev_label);
1926 continue;
1927 }
1928 else
1929 {
1930 /* Detect a jump to a jump. */
1931
1932 nlabel = follow_jumps (JUMP_LABEL (insn));
1933 if (nlabel != JUMP_LABEL (insn)
1934 && redirect_jump (insn, nlabel))
1935 {
1936 changed = 1;
1937 next = insn;
1938 }
1939
1940 /* Look for if (foo) bar; else break; */
1941 /* The insns look like this:
1942 insn = condjump label1;
1943 ...range1 (some insns)...
1944 jump label2;
1945 label1:
1946 ...range2 (some insns)...
1947 jump somewhere unconditionally
1948 label2: */
1949 {
1950 rtx label1 = next_label (insn);
1951 rtx range1end = label1 ? prev_active_insn (label1) : 0;
1952 /* Don't do this optimization on the first round, so that
1953 jump-around-a-jump gets simplified before we ask here
1954 whether a jump is unconditional.
1955
1956 Also don't do it when we are called after reload since
1957 it will confuse reorg. */
1958 if (! first
1959 && (reload_completed ? ! flag_delayed_branch : 1)
1960 /* Make sure INSN is something we can invert. */
1961 && condjump_p (insn)
1962 && label1 != 0
1963 && JUMP_LABEL (insn) == label1
1964 && LABEL_NUSES (label1) == 1
1965 && GET_CODE (range1end) == JUMP_INSN
1966 && simplejump_p (range1end))
1967 {
1968 rtx label2 = next_label (label1);
1969 rtx range2end = label2 ? prev_active_insn (label2) : 0;
1970 if (range1end != range2end
1971 && JUMP_LABEL (range1end) == label2
1972 && GET_CODE (range2end) == JUMP_INSN
1973 && GET_CODE (NEXT_INSN (range2end)) == BARRIER
1974 /* Invert the jump condition, so we
1975 still execute the same insns in each case. */
1976 && invert_jump (insn, label1))
1977 {
1978 rtx range1beg = next_active_insn (insn);
1979 rtx range2beg = next_active_insn (label1);
1980 rtx range1after, range2after;
1981 rtx range1before, range2before;
f0e1b9a9 1982 rtx rangenext;
15a63be1 1983
1f109a14
JW
1984 /* Include in each range any notes before it, to be
1985 sure that we get the line number note if any, even
1986 if there are other notes here. */
0e690bdb 1987 while (PREV_INSN (range1beg)
1f109a14 1988 && GET_CODE (PREV_INSN (range1beg)) == NOTE)
0e690bdb
RS
1989 range1beg = PREV_INSN (range1beg);
1990
1991 while (PREV_INSN (range2beg)
1f109a14 1992 && GET_CODE (PREV_INSN (range2beg)) == NOTE)
0e690bdb
RS
1993 range2beg = PREV_INSN (range2beg);
1994
15a63be1
RK
1995 /* Don't move NOTEs for blocks or loops; shift them
1996 outside the ranges, where they'll stay put. */
915f619f
JW
1997 range1beg = squeeze_notes (range1beg, range1end);
1998 range2beg = squeeze_notes (range2beg, range2end);
15a63be1
RK
1999
2000 /* Get current surrounds of the 2 ranges. */
2001 range1before = PREV_INSN (range1beg);
2002 range2before = PREV_INSN (range2beg);
2003 range1after = NEXT_INSN (range1end);
2004 range2after = NEXT_INSN (range2end);
2005
2006 /* Splice range2 where range1 was. */
2007 NEXT_INSN (range1before) = range2beg;
2008 PREV_INSN (range2beg) = range1before;
2009 NEXT_INSN (range2end) = range1after;
2010 PREV_INSN (range1after) = range2end;
2011 /* Splice range1 where range2 was. */
2012 NEXT_INSN (range2before) = range1beg;
2013 PREV_INSN (range1beg) = range2before;
2014 NEXT_INSN (range1end) = range2after;
2015 PREV_INSN (range2after) = range1end;
f0e1b9a9
RE
2016
2017 /* Check for a loop end note between the end of
2018 range2, and the next code label. If there is one,
2019 then what we have really seen is
2020 if (foo) break; end_of_loop;
2021 and moved the break sequence outside the loop.
2022 We must move the LOOP_END note to where the
2023 loop really ends now, or we will confuse loop
ca188f16
JW
2024 optimization. Stop if we find a LOOP_BEG note
2025 first, since we don't want to move the LOOP_END
2026 note in that case. */
f0e1b9a9
RE
2027 for (;range2after != label2; range2after = rangenext)
2028 {
2029 rangenext = NEXT_INSN (range2after);
ca188f16 2030 if (GET_CODE (range2after) == NOTE)
f0e1b9a9 2031 {
ca188f16
JW
2032 if (NOTE_LINE_NUMBER (range2after)
2033 == NOTE_INSN_LOOP_END)
2034 {
2035 NEXT_INSN (PREV_INSN (range2after))
2036 = rangenext;
2037 PREV_INSN (rangenext)
2038 = PREV_INSN (range2after);
2039 PREV_INSN (range2after)
2040 = PREV_INSN (range1beg);
2041 NEXT_INSN (range2after) = range1beg;
2042 NEXT_INSN (PREV_INSN (range1beg))
2043 = range2after;
2044 PREV_INSN (range1beg) = range2after;
2045 }
2046 else if (NOTE_LINE_NUMBER (range2after)
2047 == NOTE_INSN_LOOP_BEG)
2048 break;
f0e1b9a9
RE
2049 }
2050 }
15a63be1
RK
2051 changed = 1;
2052 continue;
2053 }
2054 }
2055 }
2056
2057 /* Now that the jump has been tensioned,
2058 try cross jumping: check for identical code
0f41302f 2059 before the jump and before its target label. */
15a63be1
RK
2060
2061 /* First, cross jumping of conditional jumps: */
2062
2063 if (cross_jump && condjump_p (insn))
2064 {
2065 rtx newjpos, newlpos;
2066 rtx x = prev_real_insn (JUMP_LABEL (insn));
2067
2068 /* A conditional jump may be crossjumped
2069 only if the place it jumps to follows
2070 an opposing jump that comes back here. */
2071
2072 if (x != 0 && ! jump_back_p (x, insn))
2073 /* We have no opposing jump;
2074 cannot cross jump this insn. */
2075 x = 0;
2076
2077 newjpos = 0;
2078 /* TARGET is nonzero if it is ok to cross jump
2079 to code before TARGET. If so, see if matches. */
2080 if (x != 0)
2081 find_cross_jump (insn, x, 2,
2082 &newjpos, &newlpos);
2083
2084 if (newjpos != 0)
2085 {
2086 do_cross_jump (insn, newjpos, newlpos);
2087 /* Make the old conditional jump
2088 into an unconditional one. */
2089 SET_SRC (PATTERN (insn))
2090 = gen_rtx (LABEL_REF, VOIDmode, JUMP_LABEL (insn));
2091 INSN_CODE (insn) = -1;
2092 emit_barrier_after (insn);
2093 /* Add to jump_chain unless this is a new label
0f41302f 2094 whose UID is too large. */
15a63be1
RK
2095 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
2096 {
2097 jump_chain[INSN_UID (insn)]
2098 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2099 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
2100 }
2101 changed = 1;
2102 next = insn;
2103 }
2104 }
2105
2106 /* Cross jumping of unconditional jumps:
2107 a few differences. */
2108
2109 if (cross_jump && simplejump_p (insn))
2110 {
2111 rtx newjpos, newlpos;
2112 rtx target;
2113
2114 newjpos = 0;
2115
2116 /* TARGET is nonzero if it is ok to cross jump
2117 to code before TARGET. If so, see if matches. */
2118 find_cross_jump (insn, JUMP_LABEL (insn), 1,
2119 &newjpos, &newlpos);
2120
2121 /* If cannot cross jump to code before the label,
2122 see if we can cross jump to another jump to
2123 the same label. */
2124 /* Try each other jump to this label. */
2125 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
2126 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2127 target != 0 && newjpos == 0;
2128 target = jump_chain[INSN_UID (target)])
2129 if (target != insn
2130 && JUMP_LABEL (target) == JUMP_LABEL (insn)
2131 /* Ignore TARGET if it's deleted. */
2132 && ! INSN_DELETED_P (target))
2133 find_cross_jump (insn, target, 2,
2134 &newjpos, &newlpos);
2135
2136 if (newjpos != 0)
2137 {
2138 do_cross_jump (insn, newjpos, newlpos);
2139 changed = 1;
2140 next = insn;
2141 }
2142 }
2143
2144 /* This code was dead in the previous jump.c! */
2145 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
2146 {
2147 /* Return insns all "jump to the same place"
2148 so we can cross-jump between any two of them. */
2149
2150 rtx newjpos, newlpos, target;
2151
2152 newjpos = 0;
2153
2154 /* If cannot cross jump to code before the label,
2155 see if we can cross jump to another jump to
2156 the same label. */
2157 /* Try each other jump to this label. */
2158 for (target = jump_chain[0];
2159 target != 0 && newjpos == 0;
2160 target = jump_chain[INSN_UID (target)])
2161 if (target != insn
2162 && ! INSN_DELETED_P (target)
2163 && GET_CODE (PATTERN (target)) == RETURN)
2164 find_cross_jump (insn, target, 2,
2165 &newjpos, &newlpos);
2166
2167 if (newjpos != 0)
2168 {
2169 do_cross_jump (insn, newjpos, newlpos);
2170 changed = 1;
2171 next = insn;
2172 }
2173 }
2174 }
2175 }
2176
2177 first = 0;
2178 }
2179
2180 /* Delete extraneous line number notes.
2181 Note that two consecutive notes for different lines are not really
2182 extraneous. There should be some indication where that line belonged,
2183 even if it became empty. */
2184
2185 {
2186 rtx last_note = 0;
2187
2188 for (insn = f; insn; insn = NEXT_INSN (insn))
2189 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0)
2190 {
2191 /* Delete this note if it is identical to previous note. */
2192 if (last_note
2193 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
2194 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
2195 {
2196 delete_insn (insn);
2197 continue;
2198 }
2199
2200 last_note = insn;
2201 }
2202 }
2203
683e6ccd
RK
2204#ifdef HAVE_return
2205 if (HAVE_return)
2206 {
2207 /* If we fall through to the epilogue, see if we can insert a RETURN insn
2208 in front of it. If the machine allows it at this point (we might be
2209 after reload for a leaf routine), it will improve optimization for it
2210 to be there. We do this both here and at the start of this pass since
2211 the RETURN might have been deleted by some of our optimizations. */
2212 insn = get_last_insn ();
2213 while (insn && GET_CODE (insn) == NOTE)
2214 insn = PREV_INSN (insn);
2215
2216 if (insn && GET_CODE (insn) != BARRIER)
2217 {
2218 emit_jump_insn (gen_return ());
2219 emit_barrier ();
2220 }
2221 }
2222#endif
2223
15a63be1
RK
2224 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
2225 If so, delete it, and record that this function can drop off the end. */
2226
2227 insn = last_insn;
2228 {
2229 int n_labels = 1;
2230 while (insn
2231 /* One label can follow the end-note: the return label. */
2232 && ((GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
2233 /* Ordinary insns can follow it if returning a structure. */
2234 || GET_CODE (insn) == INSN
2235 /* If machine uses explicit RETURN insns, no epilogue,
2236 then one of them follows the note. */
2237 || (GET_CODE (insn) == JUMP_INSN
2238 && GET_CODE (PATTERN (insn)) == RETURN)
60374599
DE
2239 /* A barrier can follow the return insn. */
2240 || GET_CODE (insn) == BARRIER
15a63be1
RK
2241 /* Other kinds of notes can follow also. */
2242 || (GET_CODE (insn) == NOTE
2243 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)))
2244 insn = PREV_INSN (insn);
2245 }
2246
2247 /* Report if control can fall through at the end of the function. */
2248 if (insn && GET_CODE (insn) == NOTE
2249 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END)
2250 {
2251 can_reach_end = 1;
2252 delete_insn (insn);
2253 }
2254
2255 /* Show JUMP_CHAIN no longer valid. */
2256 jump_chain = 0;
2257}
2258\f
2259/* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
2260 jump. Assume that this unconditional jump is to the exit test code. If
2261 the code is sufficiently simple, make a copy of it before INSN,
2262 followed by a jump to the exit of the loop. Then delete the unconditional
2263 jump after INSN.
2264
15a63be1
RK
2265 Return 1 if we made the change, else 0.
2266
2267 This is only safe immediately after a regscan pass because it uses the
2268 values of regno_first_uid and regno_last_uid. */
2269
2270static int
2271duplicate_loop_exit_test (loop_start)
2272 rtx loop_start;
2273{
e33477be 2274 rtx insn, set, reg, p, link;
9c066566 2275 rtx copy = 0;
15a63be1
RK
2276 int num_insns = 0;
2277 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
2278 rtx lastexit;
2279 int max_reg = max_reg_num ();
2280 rtx *reg_map = 0;
2281
2282 /* Scan the exit code. We do not perform this optimization if any insn:
2283
2284 is a CALL_INSN
2285 is a CODE_LABEL
2286 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
2287 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
2288 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
2289 are not valid
2290
2291 Also, don't do this if the exit code is more than 20 insns. */
2292
2293 for (insn = exitcode;
2294 insn
2295 && ! (GET_CODE (insn) == NOTE
2296 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
2297 insn = NEXT_INSN (insn))
2298 {
2299 switch (GET_CODE (insn))
2300 {
2301 case CODE_LABEL:
2302 case CALL_INSN:
2303 return 0;
2304 case NOTE:
fe464caf
RK
2305 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
2306 a jump immediately after the loop start that branches outside
2307 the loop but within an outer loop, near the exit test.
2308 If we copied this exit test and created a phony
2309 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
2310 before the exit test look like these could be safely moved
2311 out of the loop even if they actually may be never executed.
2312 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
2313
15a63be1
RK
2314 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2315 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
fe464caf
RK
2316 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
2317 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
15a63be1
RK
2318 return 0;
2319 break;
2320 case JUMP_INSN:
2321 case INSN:
2322 if (++num_insns > 20
5f4f0e22
CH
2323 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
2324 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
15a63be1
RK
2325 return 0;
2326 break;
e9a25f70
JL
2327 default:
2328 break;
15a63be1
RK
2329 }
2330 }
2331
2332 /* Unless INSN is zero, we can do the optimization. */
2333 if (insn == 0)
2334 return 0;
2335
2336 lastexit = insn;
2337
2338 /* See if any insn sets a register only used in the loop exit code and
2339 not a user variable. If so, replace it with a new register. */
2340 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2341 if (GET_CODE (insn) == INSN
2342 && (set = single_set (insn)) != 0
e33477be
RK
2343 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
2344 || (GET_CODE (reg) == SUBREG
2345 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
2346 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
b1f21e0a 2347 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
15a63be1
RK
2348 {
2349 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
b1f21e0a 2350 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
15a63be1
RK
2351 break;
2352
2353 if (p != lastexit)
2354 {
2355 /* We can do the replacement. Allocate reg_map if this is the
2356 first replacement we found. */
2357 if (reg_map == 0)
2358 {
2359 reg_map = (rtx *) alloca (max_reg * sizeof (rtx));
4c9a05bc 2360 bzero ((char *) reg_map, max_reg * sizeof (rtx));
15a63be1
RK
2361 }
2362
e33477be 2363 REG_LOOP_TEST_P (reg) = 1;
15a63be1 2364
e33477be 2365 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
15a63be1
RK
2366 }
2367 }
2368
2369 /* Now copy each insn. */
2370 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2371 switch (GET_CODE (insn))
2372 {
2373 case BARRIER:
2374 copy = emit_barrier_before (loop_start);
2375 break;
2376 case NOTE:
2377 /* Only copy line-number notes. */
2378 if (NOTE_LINE_NUMBER (insn) >= 0)
2379 {
2380 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
2381 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
2382 }
2383 break;
2384
2385 case INSN:
2386 copy = emit_insn_before (copy_rtx (PATTERN (insn)), loop_start);
2387 if (reg_map)
2388 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2389
2390 mark_jump_label (PATTERN (copy), copy, 0);
2391
2392 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
2393 make them. */
2394 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2395 if (REG_NOTE_KIND (link) != REG_LABEL)
2396 REG_NOTES (copy)
2397 = copy_rtx (gen_rtx (EXPR_LIST, REG_NOTE_KIND (link),
2398 XEXP (link, 0), REG_NOTES (copy)));
2399 if (reg_map && REG_NOTES (copy))
2400 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2401 break;
2402
2403 case JUMP_INSN:
2404 copy = emit_jump_insn_before (copy_rtx (PATTERN (insn)), loop_start);
2405 if (reg_map)
2406 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2407 mark_jump_label (PATTERN (copy), copy, 0);
2408 if (REG_NOTES (insn))
2409 {
2410 REG_NOTES (copy) = copy_rtx (REG_NOTES (insn));
2411 if (reg_map)
2412 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2413 }
2414
2415 /* If this is a simple jump, add it to the jump chain. */
2416
2417 if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
2418 && simplejump_p (copy))
2419 {
2420 jump_chain[INSN_UID (copy)]
2421 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2422 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2423 }
2424 break;
2425
2426 default:
2427 abort ();
2428 }
2429
2430 /* Now clean up by emitting a jump to the end label and deleting the jump
2431 at the start of the loop. */
9c066566 2432 if (! copy || GET_CODE (copy) != BARRIER)
15a63be1
RK
2433 {
2434 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
2435 loop_start);
2436 mark_jump_label (PATTERN (copy), copy, 0);
2437 if (INSN_UID (copy) < max_jump_chain
2438 && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
2439 {
2440 jump_chain[INSN_UID (copy)]
2441 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2442 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2443 }
2444 emit_barrier_before (loop_start);
2445 }
2446
15a63be1
RK
2447 /* Mark the exit code as the virtual top of the converted loop. */
2448 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
2449
cd423ead
RK
2450 delete_insn (next_nonnote_insn (loop_start));
2451
15a63be1
RK
2452 return 1;
2453}
2454\f
2455/* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
915f619f
JW
2456 loop-end notes between START and END out before START. Assume that
2457 END is not such a note. START may be such a note. Returns the value
2458 of the new starting insn, which may be different if the original start
2459 was such a note. */
15a63be1 2460
915f619f 2461rtx
15a63be1
RK
2462squeeze_notes (start, end)
2463 rtx start, end;
2464{
2465 rtx insn;
2466 rtx next;
2467
2468 for (insn = start; insn != end; insn = next)
2469 {
2470 next = NEXT_INSN (insn);
2471 if (GET_CODE (insn) == NOTE
2472 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
2473 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2474 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2475 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
2476 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
2477 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
2478 {
915f619f
JW
2479 if (insn == start)
2480 start = next;
2481 else
2482 {
2483 rtx prev = PREV_INSN (insn);
2484 PREV_INSN (insn) = PREV_INSN (start);
2485 NEXT_INSN (insn) = start;
2486 NEXT_INSN (PREV_INSN (insn)) = insn;
2487 PREV_INSN (NEXT_INSN (insn)) = insn;
2488 NEXT_INSN (prev) = next;
2489 PREV_INSN (next) = prev;
2490 }
15a63be1
RK
2491 }
2492 }
915f619f
JW
2493
2494 return start;
15a63be1
RK
2495}
2496\f
2497/* Compare the instructions before insn E1 with those before E2
2498 to find an opportunity for cross jumping.
2499 (This means detecting identical sequences of insns followed by
2500 jumps to the same place, or followed by a label and a jump
2501 to that label, and replacing one with a jump to the other.)
2502
2503 Assume E1 is a jump that jumps to label E2
2504 (that is not always true but it might as well be).
2505 Find the longest possible equivalent sequences
2506 and store the first insns of those sequences into *F1 and *F2.
2507 Store zero there if no equivalent preceding instructions are found.
2508
2509 We give up if we find a label in stream 1.
2510 Actually we could transfer that label into stream 2. */
2511
2512static void
2513find_cross_jump (e1, e2, minimum, f1, f2)
2514 rtx e1, e2;
2515 int minimum;
2516 rtx *f1, *f2;
2517{
2518 register rtx i1 = e1, i2 = e2;
2519 register rtx p1, p2;
2520 int lose = 0;
2521
2522 rtx last1 = 0, last2 = 0;
2523 rtx afterlast1 = 0, afterlast2 = 0;
2524 rtx prev1;
2525
2526 *f1 = 0;
2527 *f2 = 0;
2528
2529 while (1)
2530 {
2531 i1 = prev_nonnote_insn (i1);
2532
2533 i2 = PREV_INSN (i2);
2534 while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
2535 i2 = PREV_INSN (i2);
2536
2537 if (i1 == 0)
2538 break;
2539
2540 /* Don't allow the range of insns preceding E1 or E2
2541 to include the other (E2 or E1). */
2542 if (i2 == e1 || i1 == e2)
2543 break;
2544
2545 /* If we will get to this code by jumping, those jumps will be
2546 tensioned to go directly to the new label (before I2),
2547 so this cross-jumping won't cost extra. So reduce the minimum. */
2548 if (GET_CODE (i1) == CODE_LABEL)
2549 {
2550 --minimum;
2551 break;
2552 }
2553
2554 if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
2555 break;
2556
2557 p1 = PATTERN (i1);
2558 p2 = PATTERN (i2);
2559
4d367579
DE
2560 /* If this is a CALL_INSN, compare register usage information.
2561 If we don't check this on stack register machines, the two
2562 CALL_INSNs might be merged leaving reg-stack.c with mismatching
2563 numbers of stack registers in the same basic block.
2564 If we don't check this on machines with delay slots, a delay slot may
2565 be filled that clobbers a parameter expected by the subroutine.
47b0bb94 2566
4d367579
DE
2567 ??? We take the simple route for now and assume that if they're
2568 equal, they were constructed identically. */
2569
2570 if (GET_CODE (i1) == CALL_INSN
2571 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
2572 CALL_INSN_FUNCTION_USAGE (i2)))
2573 lose = 1;
2574
2575#ifdef STACK_REGS
15a63be1
RK
2576 /* If cross_jump_death_matters is not 0, the insn's mode
2577 indicates whether or not the insn contains any stack-like
0f41302f 2578 regs. */
15a63be1 2579
47b0bb94 2580 if (!lose && cross_jump_death_matters && GET_MODE (i1) == QImode)
15a63be1
RK
2581 {
2582 /* If register stack conversion has already been done, then
2583 death notes must also be compared before it is certain that
0f41302f 2584 the two instruction streams match. */
15a63be1
RK
2585
2586 rtx note;
2587 HARD_REG_SET i1_regset, i2_regset;
2588
2589 CLEAR_HARD_REG_SET (i1_regset);
2590 CLEAR_HARD_REG_SET (i2_regset);
2591
2592 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
2593 if (REG_NOTE_KIND (note) == REG_DEAD
2594 && STACK_REG_P (XEXP (note, 0)))
2595 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
2596
2597 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
2598 if (REG_NOTE_KIND (note) == REG_DEAD
2599 && STACK_REG_P (XEXP (note, 0)))
2600 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
2601
2602 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
2603
2604 lose = 1;
2605
2606 done:
2607 ;
2608 }
2609#endif
2610
44c389e4
JW
2611 /* Don't allow old-style asm or volatile extended asms to be accepted
2612 for cross jumping purposes. It is conceptually correct to allow
2613 them, since cross-jumping preserves the dynamic instruction order
2614 even though it is changing the static instruction order. However,
2615 if an asm is being used to emit an assembler pseudo-op, such as
2616 the MIPS `.set reorder' pseudo-op, then the static instruction order
2617 matters and it must be preserved. */
2618 if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
2619 || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
2620 || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
2621 lose = 1;
2622
2623 if (lose || GET_CODE (p1) != GET_CODE (p2)
15a63be1
RK
2624 || ! rtx_renumbered_equal_p (p1, p2))
2625 {
2626 /* The following code helps take care of G++ cleanups. */
2627 rtx equiv1;
2628 rtx equiv2;
2629
2630 if (!lose && GET_CODE (p1) == GET_CODE (p2)
5f4f0e22
CH
2631 && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
2632 || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
2633 && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
2634 || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
15a63be1
RK
2635 /* If the equivalences are not to a constant, they may
2636 reference pseudos that no longer exist, so we can't
2637 use them. */
2638 && CONSTANT_P (XEXP (equiv1, 0))
2639 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
2640 {
2641 rtx s1 = single_set (i1);
2642 rtx s2 = single_set (i2);
2643 if (s1 != 0 && s2 != 0
2644 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
2645 {
2646 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
2647 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
2648 if (! rtx_renumbered_equal_p (p1, p2))
2649 cancel_changes (0);
2650 else if (apply_change_group ())
2651 goto win;
2652 }
2653 }
2654
2655 /* Insns fail to match; cross jumping is limited to the following
2656 insns. */
2657
2658#ifdef HAVE_cc0
2659 /* Don't allow the insn after a compare to be shared by
2660 cross-jumping unless the compare is also shared.
2661 Here, if either of these non-matching insns is a compare,
2662 exclude the following insn from possible cross-jumping. */
2663 if (sets_cc0_p (p1) || sets_cc0_p (p2))
2664 last1 = afterlast1, last2 = afterlast2, ++minimum;
2665#endif
2666
2667 /* If cross-jumping here will feed a jump-around-jump
2668 optimization, this jump won't cost extra, so reduce
2669 the minimum. */
2670 if (GET_CODE (i1) == JUMP_INSN
2671 && JUMP_LABEL (i1)
2672 && prev_real_insn (JUMP_LABEL (i1)) == e1)
2673 --minimum;
2674 break;
2675 }
2676
2677 win:
2678 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
2679 {
2680 /* Ok, this insn is potentially includable in a cross-jump here. */
2681 afterlast1 = last1, afterlast2 = last2;
2682 last1 = i1, last2 = i2, --minimum;
2683 }
2684 }
2685
15a63be1
RK
2686 if (minimum <= 0 && last1 != 0 && last1 != e1)
2687 *f1 = last1, *f2 = last2;
2688}
2689
/* Redirect the jump INSN so that it and the matching instructions that
   precede it are replaced by a jump to the equivalent code ending at
   NEWLPOS.  NEWJPOS is the first of the matching insns in INSN's stream;
   NEWLPOS is the first of the matching insns in the other stream.
   The insns from NEWJPOS up to (but not including) INSN are deleted.  */

static void
do_cross_jump (insn, newjpos, newlpos)
     rtx insn, newjpos, newlpos;
{
  /* Find an existing label at this point
     or make a new one if there is none.  */
  register rtx label = get_label_before (newlpos);

  /* Make the same jump insn jump to the new point.  */
  if (GET_CODE (PATTERN (insn)) == RETURN)
    {
      /* A RETURN cannot be redirected; rewrite it as a jump to LABEL.  */
      /* Remove from jump chain of returns.  */
      delete_from_jump_chain (insn);
      /* Change the insn.  */
      PATTERN (insn) = gen_jump (label);
      INSN_CODE (insn) = -1;
      JUMP_LABEL (insn) = label;
      LABEL_NUSES (label)++;
      /* Add it to the new label's jump chain.  */
      if (INSN_UID (label) < max_jump_chain
	  && INSN_UID (insn) < max_jump_chain)
	{
	  jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
	  jump_chain[INSN_UID (label)] = insn;
	}
    }
  else
    redirect_jump (insn, label);

  /* Delete the matching insns before the jump.  Also, remove any REG_EQUAL
     or REG_EQUIV note in the NEWLPOS stream that isn't also present in
     the NEWJPOS stream.  */

  while (newjpos != insn)
    {
      rtx lnote;

      /* The surviving copy (NEWLPOS) may carry equivalence notes that were
	 only valid on the path leading to it; drop any not shared by the
	 copy being deleted.  */
      for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
	if ((REG_NOTE_KIND (lnote) == REG_EQUAL
	     || REG_NOTE_KIND (lnote) == REG_EQUIV)
	    && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
	    && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
	  remove_note (newlpos, lnote);

      delete_insn (newjpos);
      newjpos = next_real_insn (newjpos);
      newlpos = next_real_insn (newlpos);
    }
}
2739\f
2740/* Return the label before INSN, or put a new label there. */
2741
2742rtx
2743get_label_before (insn)
2744 rtx insn;
2745{
2746 rtx label;
2747
2748 /* Find an existing label at this point
2749 or make a new one if there is none. */
2750 label = prev_nonnote_insn (insn);
2751
2752 if (label == 0 || GET_CODE (label) != CODE_LABEL)
2753 {
2754 rtx prev = PREV_INSN (insn);
2755
15a63be1
RK
2756 label = gen_label_rtx ();
2757 emit_label_after (label, prev);
2758 LABEL_NUSES (label) = 0;
2759 }
2760 return label;
2761}
2762
2763/* Return the label after INSN, or put a new label there. */
2764
2765rtx
2766get_label_after (insn)
2767 rtx insn;
2768{
2769 rtx label;
2770
2771 /* Find an existing label at this point
2772 or make a new one if there is none. */
2773 label = next_nonnote_insn (insn);
2774
2775 if (label == 0 || GET_CODE (label) != CODE_LABEL)
2776 {
15a63be1
RK
2777 label = gen_label_rtx ();
2778 emit_label_after (label, insn);
2779 LABEL_NUSES (label) = 0;
2780 }
2781 return label;
2782}
2783\f
/* Return 1 if INSN is a jump that jumps to right after TARGET
   only on the condition that TARGET itself would drop through.
   Assumes that TARGET is a conditional jump.  */

static int
jump_back_p (insn, target)
     rtx insn, target;
{
  rtx cinsn, ctarget;
  enum rtx_code codei, codet;

  /* INSN must be conditional (not a simple jump), TARGET must be
     conditional, and INSN's label must land right after TARGET.  */
  if (simplejump_p (insn) || ! condjump_p (insn)
      || simplejump_p (target)
      || target != prev_real_insn (JUMP_LABEL (insn)))
    return 0;

  /* The comparison inside each insn's (if_then_else COND ...).  */
  cinsn = XEXP (SET_SRC (PATTERN (insn)), 0);
  ctarget = XEXP (SET_SRC (PATTERN (target)), 0);

  codei = GET_CODE (cinsn);
  codet = GET_CODE (ctarget);

  /* If INSN's THEN arm is (pc), the branch is taken when the condition
     is false; normalize by reversing the comparison code.  */
  if (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx)
    {
      if (! can_reverse_comparison_p (cinsn, insn))
	return 0;
      codei = reverse_condition (codei);
    }

  /* Likewise normalize TARGET if its ELSE arm is (pc).  */
  if (XEXP (SET_SRC (PATTERN (target)), 2) == pc_rtx)
    {
      if (! can_reverse_comparison_p (ctarget, target))
	return 0;
      codet = reverse_condition (codet);
    }

  /* The two jumps are "back to back" iff they test the same condition
     on the same (possibly renumbered) operands.  */
  return (codei == codet
	  && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
	  && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
}
2824\f
/* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
   return non-zero if it is safe to reverse this comparison.  It is if our
   floating-point is not IEEE, if this is an NE or EQ comparison, or if
   this is known to be an integer comparison.  */

int
can_reverse_comparison_p (comparison, insn)
     rtx comparison;
     rtx insn;
{
  rtx arg0;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
    return 0;

  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
      /* If this is an NE comparison, it is safe to reverse it to an EQ
	 comparison and vice versa, even for floating point.  If no operands
	 are NaNs, the reversal is valid.  If some operand is a NaN, EQ is
	 always false and NE is always true, so the reversal is also valid.  */
      || flag_fast_math
      || GET_CODE (comparison) == NE
      || GET_CODE (comparison) == EQ)
    return 1;

  arg0 = XEXP (comparison, 0);

  /* Make sure ARG0 is one of the actual objects being compared.  If we
     can't do this, we can't be sure the comparison can be reversed.

     Handle cc0 and a MODE_CC register.  */
  if ((GET_CODE (arg0) == REG && GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC)
#ifdef HAVE_cc0
      || arg0 == cc0_rtx
#endif
      )
    {
      /* The flags register was set by the previous insn; dig out what
	 was actually compared there.  */
      rtx prev = prev_nonnote_insn (insn);
      rtx set = single_set (prev);

      if (set == 0 || SET_DEST (set) != arg0)
	return 0;

      arg0 = SET_SRC (set);

      if (GET_CODE (arg0) == COMPARE)
	arg0 = XEXP (arg0, 0);
    }

  /* We can reverse this if ARG0 is a CONST_INT or if its mode is
     not VOIDmode and neither a MODE_CC nor MODE_FLOAT type.  */
  return (GET_CODE (arg0) == CONST_INT
	  || (GET_MODE (arg0) != VOIDmode
	      && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_CC
	      && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_FLOAT));
}
2882
2883/* Given an rtx-code for a comparison, return the code
2884 for the negated comparison.
2885 WATCH OUT! reverse_condition is not safe to use on a jump
2886 that might be acting on the results of an IEEE floating point comparison,
2887 because of the special treatment of non-signaling nans in comparisons.
2888 Use can_reverse_comparison_p to be sure. */
2889
2890enum rtx_code
2891reverse_condition (code)
2892 enum rtx_code code;
2893{
2894 switch (code)
2895 {
2896 case EQ:
2897 return NE;
2898
2899 case NE:
2900 return EQ;
2901
2902 case GT:
2903 return LE;
2904
2905 case GE:
2906 return LT;
2907
2908 case LT:
2909 return GE;
2910
2911 case LE:
2912 return GT;
2913
2914 case GTU:
2915 return LEU;
2916
2917 case GEU:
2918 return LTU;
2919
2920 case LTU:
2921 return GEU;
2922
2923 case LEU:
2924 return GTU;
2925
2926 default:
2927 abort ();
2928 return UNKNOWN;
2929 }
2930}
2931
2932/* Similar, but return the code when two operands of a comparison are swapped.
2933 This IS safe for IEEE floating-point. */
2934
2935enum rtx_code
2936swap_condition (code)
2937 enum rtx_code code;
2938{
2939 switch (code)
2940 {
2941 case EQ:
2942 case NE:
2943 return code;
2944
2945 case GT:
2946 return LT;
2947
2948 case GE:
2949 return LE;
2950
2951 case LT:
2952 return GT;
2953
2954 case LE:
2955 return GE;
2956
2957 case GTU:
2958 return LTU;
2959
2960 case GEU:
2961 return LEU;
2962
2963 case LTU:
2964 return GTU;
2965
2966 case LEU:
2967 return GEU;
2968
2969 default:
2970 abort ();
2971 return UNKNOWN;
2972 }
2973}
2974
2975/* Given a comparison CODE, return the corresponding unsigned comparison.
2976 If CODE is an equality comparison or already an unsigned comparison,
2977 CODE is returned. */
2978
2979enum rtx_code
2980unsigned_condition (code)
2981 enum rtx_code code;
2982{
2983 switch (code)
2984 {
2985 case EQ:
2986 case NE:
2987 case GTU:
2988 case GEU:
2989 case LTU:
2990 case LEU:
2991 return code;
2992
2993 case GT:
2994 return GTU;
2995
2996 case GE:
2997 return GEU;
2998
2999 case LT:
3000 return LTU;
3001
3002 case LE:
3003 return LEU;
3004
3005 default:
3006 abort ();
3007 }
3008}
3009
3010/* Similarly, return the signed version of a comparison. */
3011
3012enum rtx_code
3013signed_condition (code)
3014 enum rtx_code code;
3015{
3016 switch (code)
3017 {
3018 case EQ:
3019 case NE:
3020 case GT:
3021 case GE:
3022 case LT:
3023 case LE:
3024 return code;
3025
3026 case GTU:
3027 return GT;
3028
3029 case GEU:
3030 return GE;
3031
3032 case LTU:
3033 return LT;
3034
3035 case LEU:
3036 return LE;
3037
3038 default:
3039 abort ();
3040 }
3041}
3042\f
3043/* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
3044 truth of CODE1 implies the truth of CODE2. */
3045
3046int
3047comparison_dominates_p (code1, code2)
3048 enum rtx_code code1, code2;
3049{
3050 if (code1 == code2)
3051 return 1;
3052
3053 switch (code1)
3054 {
3055 case EQ:
3056 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU)
3057 return 1;
3058 break;
3059
3060 case LT:
b0c38416 3061 if (code2 == LE || code2 == NE)
15a63be1
RK
3062 return 1;
3063 break;
3064
3065 case GT:
b0c38416 3066 if (code2 == GE || code2 == NE)
15a63be1
RK
3067 return 1;
3068 break;
3069
3070 case LTU:
b0c38416 3071 if (code2 == LEU || code2 == NE)
15a63be1
RK
3072 return 1;
3073 break;
3074
3075 case GTU:
b0c38416 3076 if (code2 == GEU || code2 == NE)
15a63be1
RK
3077 return 1;
3078 break;
e9a25f70
JL
3079
3080 default:
3081 break;
15a63be1
RK
3082 }
3083
3084 return 0;
3085}
3086\f
3087/* Return 1 if INSN is an unconditional jump and nothing else. */
3088
3089int
3090simplejump_p (insn)
3091 rtx insn;
3092{
3093 return (GET_CODE (insn) == JUMP_INSN
3094 && GET_CODE (PATTERN (insn)) == SET
3095 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
3096 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
3097}
3098
3099/* Return nonzero if INSN is a (possibly) conditional jump
3100 and nothing more. */
3101
3102int
3103condjump_p (insn)
3104 rtx insn;
3105{
3106 register rtx x = PATTERN (insn);
3480bb98
JL
3107 if (GET_CODE (x) != SET)
3108 return 0;
3109 if (GET_CODE (SET_DEST (x)) != PC)
3110 return 0;
3111 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
3112 return 1;
3113 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3114 return 0;
3115 if (XEXP (SET_SRC (x), 2) == pc_rtx
3116 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3117 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3118 return 1;
3119 if (XEXP (SET_SRC (x), 1) == pc_rtx
3120 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3121 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3122 return 1;
3123 return 0;
3124}
3125
3126/* Return nonzero if INSN is a (possibly) conditional jump
3127 and nothing more. */
3128
3129int
3130condjump_in_parallel_p (insn)
3131 rtx insn;
3132{
3133 register rtx x = PATTERN (insn);
3134
3135 if (GET_CODE (x) != PARALLEL)
3136 return 0;
3137 else
3138 x = XVECEXP (x, 0, 0);
3139
15a63be1
RK
3140 if (GET_CODE (x) != SET)
3141 return 0;
3142 if (GET_CODE (SET_DEST (x)) != PC)
3143 return 0;
3144 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
3145 return 1;
3146 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3147 return 0;
3148 if (XEXP (SET_SRC (x), 2) == pc_rtx
3149 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3150 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3151 return 1;
3152 if (XEXP (SET_SRC (x), 1) == pc_rtx
3153 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3154 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3155 return 1;
3156 return 0;
3157}
3158
/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.
   Return 0 if X does not set cc0 at all.
   On a machine without cc0 this function aborts; callers must guard
   invocations with HAVE_cc0.  */

int
sets_cc0_p (x)
     rtx x;
{
#ifdef HAVE_cc0
  /* A lone (set (cc0) ...) qualifies directly.  */
  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      /* Scan the PARALLEL: note whether cc0 is set and whether anything
	 else is also set (USEs and CLOBBERs don't count as "other").  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  if (GET_CODE (XVECEXP (x, 0, i)) == SET
	      && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
	    sets_cc0 = 1;
	  else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
	    other_things = 1;
	}
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
#else
  abort ();
#endif
}
3191\f
/* Follow any unconditional jump at LABEL;
   return the ultimate label reached by any such chain of jumps.
   If LABEL is not followed by a jump, return LABEL.
   If the chain loops or we can't find end, return LABEL,
   since that tells caller to avoid changing the insn.

   If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
   a USE or CLOBBER.  */

rtx
follow_jumps (label)
     rtx label;
{
  register rtx insn;
  register rtx next;
  register rtx value = label;
  register int depth;

  /* Chain through at most 10 simple jumps (or returns) that are each
     followed by a BARRIER, i.e. that are truly unconditional.  */
  for (depth = 0;
       (depth < 10
	&& (insn = next_active_insn (value)) != 0
	&& GET_CODE (insn) == JUMP_INSN
	&& ((JUMP_LABEL (insn) != 0 && simplejump_p (insn))
	    || GET_CODE (PATTERN (insn)) == RETURN)
	&& (next = NEXT_INSN (insn))
	&& GET_CODE (next) == BARRIER);
       depth++)
    {
      /* Don't chain through the insn that jumps into a loop
	 from outside the loop,
	 since that would create multiple loop entry jumps
	 and prevent loop optimization.  */
      rtx tem;
      if (!reload_completed)
	for (tem = value; tem != insn; tem = NEXT_INSN (tem))
	  if (GET_CODE (tem) == NOTE
	      && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
		  /* ??? Optional.  Disables some optimizations, but makes
		     gcov output more accurate with -O.  */
		  || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
	    return value;

      /* If we have found a cycle, make the insn jump to itself.  */
      if (JUMP_LABEL (insn) == label)
	return label;

      /* Don't chain to a label that starts a dispatch table.  */
      tem = next_active_insn (JUMP_LABEL (insn));
      if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
		  || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
	break;

      value = JUMP_LABEL (insn);
    }
  /* Hitting the depth limit means we may not have found the end;
     play it safe and report no change.  */
  if (depth == 10)
    return label;
  return value;
}
3249
/* Assuming that field IDX of X is a vector of label_refs,
   replace each of them by the ultimate label reached by it
   (per follow_jumps).  Labels whose use count drops to zero
   are deleted.
   Return nonzero if a change is made.  */

static int
tension_vector_labels (x, idx)
     register rtx x;
     register int idx;
{
  int changed = 0;
  register int i;
  for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
    {
      register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
      register rtx nlabel = follow_jumps (olabel);
      if (nlabel && nlabel != olabel)
	{
	  /* Retarget this vector element and keep use counts accurate;
	     delete the old label if this was its last use.  */
	  XEXP (XVECEXP (x, idx, i), 0) = nlabel;
	  ++LABEL_NUSES (nlabel);
	  if (--LABEL_NUSES (olabel) == 0)
	    delete_insn (olabel);
	  changed = 1;
	}
    }
  return changed;
}
3277\f
/* Find all CODE_LABELs referred to in X, and increment their use counts.
   If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
   in INSN, then store one of them in JUMP_LABEL (INSN).
   If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
   referenced in INSN, add a REG_LABEL note containing that label to INSN.
   Also, when there are consecutive labels, canonicalize on the last of them.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  CROSS_JUMP tells us
   that loop-optimization is done with.

   Once reload has completed (CROSS_JUMP non-zero), we need not consider
   two labels distinct if they are separated by only USE or CLOBBER insns.  */

static void
mark_jump_label (x, insn, cross_jump)
     register rtx x;
     rtx insn;
     int cross_jump;
{
  register RTX_CODE code = GET_CODE (x);
  register int i;
  register char *fmt;

  switch (code)
    {
    /* These rtx codes can contain no label references; stop here.  */
    case PC:
    case CC0:
    case REG:
    case SUBREG:
    case CONST_INT:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      /* If this is a constant-pool reference, see if it is a label.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	mark_jump_label (get_pool_constant (XEXP (x, 0)), insn, cross_jump);
      break;

    case LABEL_REF:
      {
	rtx label = XEXP (x, 0);
	rtx olabel = label;
	rtx note;
	rtx next;

	if (GET_CODE (label) != CODE_LABEL)
	  abort ();

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	/* If there are other labels following this one,
	   replace it with the last of the consecutive labels.  */
	for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
	  {
	    if (GET_CODE (next) == CODE_LABEL)
	      label = next;
	    else if (cross_jump && GET_CODE (next) == INSN
		     && (GET_CODE (PATTERN (next)) == USE
			 || GET_CODE (PATTERN (next)) == CLOBBER))
	      /* After reload, skip over USE/CLOBBER insns between labels.  */
	      continue;
	    else if (GET_CODE (next) != NOTE)
	      break;
	    else if (! cross_jump
		     && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
			 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
			 /* ??? Optional.  Disables some optimizations, but
			    makes gcov output more accurate with -O.  */
			 || (flag_test_coverage && NOTE_LINE_NUMBER (next) > 0)))
	      break;
	  }

	/* Record the (possibly canonicalized) label use.  */
	XEXP (x, 0) = label;
	++LABEL_NUSES (label);

	if (insn)
	  {
	    if (GET_CODE (insn) == JUMP_INSN)
	      JUMP_LABEL (insn) = label;

	    /* If we've changed OLABEL and we had a REG_LABEL note
	       for it, update it as well.  */
	    else if (label != olabel
		     && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
	      XEXP (note, 0) = label;

	    /* Otherwise, add a REG_LABEL note for LABEL unless there already
	       is one.  */
	    else if (! find_reg_note (insn, REG_LABEL, label))
	      {
		rtx next = next_real_insn (label);
		/* Don't record labels that refer to dispatch tables.
		   This is not necessary, since the tablejump
		   references the same label.
		   And if we did record them, flow.c would make worse code.  */
		if (next == 0
		    || ! (GET_CODE (next) == JUMP_INSN
			  && (GET_CODE (PATTERN (next)) == ADDR_VEC
			      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC)))
		  REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_LABEL, label,
					      REG_NOTES (insn));
	      }
	  }
	return;
      }

  /* Do walk the labels in a vector, but not the first operand of an
     ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      {
	int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	/* Pass NULL_RTX as INSN so vector elements never set JUMP_LABEL.  */
	for (i = 0; i < XVECLEN (x, eltnum); i++)
	  mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, cross_jump);
      }
      return;

    default:
      break;
    }

  /* Recurse over all sub-expressions of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label (XEXP (x, i), insn, cross_jump);
      else if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    mark_jump_label (XVECEXP (x, i, j), insn, cross_jump);
	}
    }
}
3422
3423/* If all INSN does is set the pc, delete it,
3424 and delete the insn that set the condition codes for it
3425 if that's what the previous thing was. */
3426
3427void
3428delete_jump (insn)
3429 rtx insn;
3430{
3e5478ea
RK
3431 register rtx set = single_set (insn);
3432
3433 if (set && GET_CODE (SET_DEST (set)) == PC)
3434 delete_computation (insn);
3435}
3436
/* Delete INSN and recursively delete insns that compute values used only
   by INSN.  This uses the REG_DEAD notes computed during flow analysis.
   If we are running before flow.c, we need do nothing since flow.c will
   delete dead code.  We also can't know if the registers being used are
   dead or not at this point.

   Otherwise, look at all our REG_DEAD notes.  If a previous insn does
   nothing other than set a register that dies in this insn, we can delete
   that insn as well.

   On machines with CC0, if CC0 is used in this insn, we may be able to
   delete the insn that set it.  */

static void
delete_computation (insn)
     rtx insn;
{
  rtx note, next;

#ifdef HAVE_cc0
  if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
    {
      rtx prev = prev_nonnote_insn (insn);
      /* We assume that at this stage
	 CC's are always set explicitly
	 and always immediately before the jump that
	 will use them.  So if the previous insn
	 exists to set the CC's, delete it
	 (unless it performs auto-increments, etc.).  */
      if (prev && GET_CODE (prev) == INSN
	  && sets_cc0_p (PATTERN (prev)))
	{
	  /* sets_cc0_p > 0 means PREV sets cc0 and nothing else.  */
	  if (sets_cc0_p (PATTERN (prev)) > 0
	      && !FIND_REG_INC_NOTE (prev, NULL_RTX))
	    delete_computation (prev);
	  else
	    /* Otherwise, show that cc0 won't be used.  */
	    REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_UNUSED,
					cc0_rtx, REG_NOTES (prev));
	}
    }
#endif

  /* For each register that dies in INSN, try to delete the insn that
     computed its value (or move the REG_DEAD note to a prior use).  */
  for (note = REG_NOTES (insn); note; note = next)
    {
      rtx our_prev;

      next = XEXP (note, 1);

      if (REG_NOTE_KIND (note) != REG_DEAD
	  /* Verify that the REG_NOTE is legitimate.  */
	  || GET_CODE (XEXP (note, 0)) != REG)
	continue;

      /* Walk backwards through ordinary insns looking for the setter.  */
      for (our_prev = prev_nonnote_insn (insn);
	   our_prev && GET_CODE (our_prev) == INSN;
	   our_prev = prev_nonnote_insn (our_prev))
	{
	  /* If we reach a SEQUENCE, it is too complex to try to
	     do anything with it, so give up.  */
	  if (GET_CODE (PATTERN (our_prev)) == SEQUENCE)
	    break;

	  if (GET_CODE (PATTERN (our_prev)) == USE
	      && GET_CODE (XEXP (PATTERN (our_prev), 0)) == INSN)
	    /* reorg creates USEs that look like this.  We leave them
	       alone because reorg needs them for its own purposes.  */
	    break;

	  if (reg_set_p (XEXP (note, 0), PATTERN (our_prev)))
	    {
	      /* Don't delete an insn with side effects on addressing.  */
	      if (FIND_REG_INC_NOTE (our_prev, NULL_RTX))
		break;

	      if (GET_CODE (PATTERN (our_prev)) == PARALLEL)
		{
		  /* If we find a SET of something else, we can't
		     delete the insn.  */

		  int i;

		  for (i = 0; i < XVECLEN (PATTERN (our_prev), 0); i++)
		    {
		      rtx part = XVECEXP (PATTERN (our_prev), 0, i);

		      if (GET_CODE (part) == SET
			  && SET_DEST (part) != XEXP (note, 0))
			break;
		    }

		  if (i == XVECLEN (PATTERN (our_prev), 0))
		    delete_computation (our_prev);
		}
	      else if (GET_CODE (PATTERN (our_prev)) == SET
		       && SET_DEST (PATTERN (our_prev)) == XEXP (note, 0))
		delete_computation (our_prev);

	      /* Whether or not we deleted the setter, stop scanning.  */
	      break;
	    }

	  /* If OUR_PREV references the register that dies here, it is an
	     additional use.  Hence any prior SET isn't dead.  However, this
	     insn becomes the new place for the REG_DEAD note.  */
	  if (reg_overlap_mentioned_p (XEXP (note, 0),
				       PATTERN (our_prev)))
	    {
	      XEXP (note, 1) = REG_NOTES (our_prev);
	      REG_NOTES (our_prev) = note;
	      break;
	    }
	}
    }

  delete_insn (insn);
}
3552\f
/* Delete insn INSN from the chain of insns and update label ref counts.
   May delete some following insns as a consequence; may even delete
   a label elsewhere and insns that follow it.

   Returns the first insn after INSN that was not deleted.  */

rtx
delete_insn (insn)
     register rtx insn;
{
  register rtx next = NEXT_INSN (insn);
  register rtx prev = PREV_INSN (insn);
  register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
  register int dont_really_delete = 0;

  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  /* Don't delete user-declared labels.  Convert them to special NOTEs
     instead, so debugging information is not lost.  */
  if (was_code_label && LABEL_NAME (insn) != 0
      && optimize && ! dont_really_delete)
    {
      PUT_CODE (insn, NOTE);
      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
      NOTE_SOURCE_FILE (insn) = 0;
      dont_really_delete = 1;
    }
  else
    /* Mark this insn as deleted.  */
    INSN_DELETED_P (insn) = 1;

  /* If this is an unconditional jump, delete it from the jump chain.  */
  if (simplejump_p (insn))
    delete_from_jump_chain (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && GET_CODE (next) == BARRIER)
    {
      INSN_DELETED_P (next) = 1;
      next = NEXT_INSN (next);
    }

  /* Patch out INSN (and the barrier if any).  When it lives inside a
     SEQUENCE, the inner insn chain must be patched as well.  */

  if (optimize && ! dont_really_delete)
    {
      if (prev)
	{
	  NEXT_INSN (prev) = next;
	  if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
	    NEXT_INSN (XVECEXP (PATTERN (prev), 0,
				XVECLEN (PATTERN (prev), 0) - 1)) = next;
	}

      if (next)
	{
	  PREV_INSN (next) = prev;
	  if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
	    PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
	}

      if (prev && NEXT_INSN (prev) == 0)
	set_last_insn (prev);
    }

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
    if (--LABEL_NUSES (JUMP_LABEL (insn)) == 0)
      {
	/* This can delete NEXT or PREV,
	   either directly if NEXT is JUMP_LABEL (INSN),
	   or indirectly through more levels of jumps.  */
	delete_insn (JUMP_LABEL (insn));
	/* I feel a little doubtful about this loop,
	   but I see no clean and sure alternative way
	   to find the first insn after INSN that is not now deleted.
	   I hope this works.  */
	while (next && INSN_DELETED_P (next))
	  next = NEXT_INSN (next);
	return next;
      }

  /* Likewise if we're deleting a dispatch table.  */

  if (GET_CODE (insn) == JUMP_INSN
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      /* Drop the use count of every label in the table; delete any
	 label that is no longer referenced.  */
      for (i = 0; i < len; i++)
	if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
	  delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
	next = NEXT_INSN (next);
      return next;
    }

  /* Back up over deleted insns and notes to find the live predecessor.  */
  while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
	  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_insn (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && GET_CODE (prev) == BARRIER)
    {
      register RTX_CODE code;
      while (next != 0
	     && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
		 || code == NOTE || code == BARRIER
		 || (code == CODE_LABEL && INSN_DELETED_P (next))))
	{
	  if (code == NOTE
	      && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
	    next = NEXT_INSN (next);
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  else
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
	    next = delete_insn (next);
	}
    }

  return next;
}
3703
3704/* Advance from INSN till reaching something not deleted
3705 then return that. May return INSN itself. */
3706
3707rtx
3708next_nondeleted_insn (insn)
3709 rtx insn;
3710{
3711 while (INSN_DELETED_P (insn))
3712 insn = NEXT_INSN (insn);
3713 return insn;
3714}
3715\f
/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.  NOTEs in the range
   are preserved in place.  */

void
delete_for_peephole (from, to)
     register rtx from, to;
{
  register rtx insn = from;

  while (1)
    {
      register rtx next = NEXT_INSN (insn);
      register rtx prev = PREV_INSN (insn);

      if (GET_CODE (insn) != NOTE)
	{
	  INSN_DELETED_P (insn) = 1;

	  /* Patch this insn out of the chain.  */
	  /* We don't do this all at once, because we
	     must preserve all NOTEs.  */
	  if (prev)
	    NEXT_INSN (prev) = next;

	  if (next)
	    PREV_INSN (next) = prev;
	}

      if (insn == to)
	break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}
3756\f
/* Invert the condition of the jump JUMP, and make it jump
   to label NLABEL instead of where it jumps now.
   Return 1 on success, 0 if neither change could be made.  */

int
invert_jump (jump, nlabel)
     rtx jump, nlabel;
{
  /* We have to either invert the condition and change the label or
     do neither.  Either operation could fail.  We first try to invert
     the jump.  If that succeeds, we try changing the label.  If that fails,
     we invert the jump back to what it was.  */

  if (! invert_exp (PATTERN (jump), jump))
    return 0;

  if (redirect_jump (jump, nlabel))
    {
      if (flag_branch_probabilities)
	{
	  rtx note = find_reg_note (jump, REG_BR_PROB, 0);

	  /* An inverted jump means that a probability taken becomes a
	     probability not taken.  Subtract the branch probability from the
	     probability base to convert it back to a taken probability.
	     (We don't flip the probability on a branch that's never taken.)  */
	  if (note && XINT (XEXP (note, 0), 0) >= 0)
	    XINT (XEXP (note, 0), 0) = REG_BR_PROB_BASE - XINT (XEXP (note, 0), 0);
	}

      return 1;
    }

  /* Redirection failed: undo the inversion.  */
  if (! invert_exp (PATTERN (jump), jump))
    /* This should just be putting it back the way it was.  */
    abort ();

  return 0;
}
3795
/* Invert the jump condition of rtx X contained in jump insn, INSN.

   Return 1 if we can do so, 0 if we cannot find a way to do so that
   matches a pattern.  */

int
invert_exp (x, insn)
     rtx x;
     rtx insn;
{
  register RTX_CODE code;
  register int i;
  register char *fmt;

  code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      register rtx comp = XEXP (x, 0);
      register rtx tem;

      /* We can do this in two ways:  The preferable way, which can only
	 be done if this is not an integer comparison, is to reverse
	 the comparison code.  Otherwise, swap the THEN-part and ELSE-part
	 of the IF_THEN_ELSE.  If we can't do either, fail.  */

      if (can_reverse_comparison_p (comp, insn)
	  && validate_change (insn, &XEXP (x, 0),
			      gen_rtx (reverse_condition (GET_CODE (comp)),
				       GET_MODE (comp), XEXP (comp, 0),
				       XEXP (comp, 1)), 0))
	return 1;

      /* Fallback: swap the two arms as one grouped change, so either
	 both swaps take effect or neither does.  */
      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
      return apply_change_group ();
    }

  /* Recurse over sub-expressions; all must be invertible.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	if (! invert_exp (XEXP (x, i), insn))
	  return 0;
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (!invert_exp (XVECEXP (x, i, j), insn))
	      return 0;
	}
    }

  return 1;
}
3852\f
/* Make jump JUMP jump to label NLABEL instead of where it jumps now.
   If the old jump target label is unused as a result,
   it and the code following it may be deleted.

   If NLABEL is zero, we are to turn the jump into a (possibly conditional)
   RETURN insn.

   The return value will be 1 if the change was made, 0 if it wasn't (this
   can only occur for NLABEL == 0).  */

int
redirect_jump (jump, nlabel)
     rtx jump, nlabel;
{
  register rtx olabel = JUMP_LABEL (jump);

  /* Nothing to do if the jump already goes where requested.  */
  if (nlabel == olabel)
    return 1;

  if (! redirect_exp (&PATTERN (jump), olabel, nlabel, jump))
    return 0;

  /* If this is an unconditional branch, delete it from the jump_chain of
     OLABEL and add it to the jump_chain of NLABEL (assuming both labels
     have UID's in range and JUMP_CHAIN is valid).  */
  if (jump_chain && (simplejump_p (jump)
		     || GET_CODE (PATTERN (jump)) == RETURN))
    {
      /* RETURN insns (NLABEL == 0) are chained under index 0.  */
      int label_index = nlabel ? INSN_UID (nlabel) : 0;

      delete_from_jump_chain (jump);
      if (label_index < max_jump_chain
	  && INSN_UID (jump) < max_jump_chain)
	{
	  jump_chain[INSN_UID (jump)] = jump_chain[label_index];
	  jump_chain[label_index] = jump;
	}
    }

  /* Maintain the label use counts; deleting the last use of OLABEL
     lets delete_insn remove it (and possibly dead code after it).  */
  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  if (olabel && --LABEL_NUSES (olabel) == 0)
    delete_insn (olabel);

  return 1;
}
3901
3902/* Delete the instruction JUMP from any jump chain it might be on. */
3903
3904static void
3905delete_from_jump_chain (jump)
3906 rtx jump;
3907{
3908 int index;
3909 rtx olabel = JUMP_LABEL (jump);
3910
3911 /* Handle unconditional jumps. */
3912 if (jump_chain && olabel != 0
3913 && INSN_UID (olabel) < max_jump_chain
3914 && simplejump_p (jump))
3915 index = INSN_UID (olabel);
3916 /* Handle return insns. */
3917 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
3918 index = 0;
3919 else return;
3920
3921 if (jump_chain[index] == jump)
3922 jump_chain[index] = jump_chain[INSN_UID (jump)];
3923 else
3924 {
3925 rtx insn;
3926
3927 for (insn = jump_chain[index];
3928 insn != 0;
3929 insn = jump_chain[INSN_UID (insn)])
3930 if (jump_chain[INSN_UID (insn)] == jump)
3931 {
3932 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
3933 break;
3934 }
3935 }
3936}
3937
/* If NLABEL is nonzero, throughout the rtx at LOC,
   alter (LABEL_REF OLABEL) to (LABEL_REF NLABEL).  If OLABEL is
   zero, alter (RETURN) to (LABEL_REF NLABEL).

   If NLABEL is zero, alter (LABEL_REF OLABEL) to (RETURN) and check
   validity with validate_change.  Convert (set (pc) (label_ref olabel))
   to (return).

   Return 0 if we found a change we would like to make but it is invalid.
   Otherwise, return 1.  */

int
redirect_exp (loc, olabel, nlabel, insn)
     rtx *loc;
     rtx olabel, nlabel;
     rtx insn;
{
  register rtx x = *loc;
  register RTX_CODE code = GET_CODE (x);
  register int i;
  register char *fmt;

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
	{
	  if (nlabel)
	    XEXP (x, 0) = nlabel;
	  else
	    /* Turning this reference into RETURN must yield a
	       recognizable insn, so go through validate_change.  */
	    return validate_change (insn, loc, gen_rtx (RETURN, VOIDmode), 0);
	  return 1;
	}
    }
  else if (code == RETURN && olabel == 0)
    {
      /* Replace RETURN by a jump to NLABEL; when it is the whole
	 pattern, wrap the LABEL_REF in a SET of the pc.  */
      x = gen_rtx (LABEL_REF, VOIDmode, nlabel);
      if (loc == &PATTERN (insn))
	x = gen_rtx (SET, VOIDmode, pc_rtx, x);
      return validate_change (insn, loc, x, 0);
    }

  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    return validate_change (insn, loc, gen_rtx (RETURN, VOIDmode), 0);

  /* Recurse over the operands of X; fail as soon as any
     sub-expression cannot be redirected.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	if (! redirect_exp (&XEXP (x, i), olabel, nlabel, insn))
	  return 0;
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! redirect_exp (&XVECEXP (x, i, j), olabel, nlabel, insn))
	      return 0;
	}
    }

  return 1;
}
4001\f
/* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.

   If the old jump target label (before the dispatch table) becomes unused,
   it and the dispatch table may be deleted.  In that case, find the insn
   before the jump references that label and delete it and logical successors
   too.  */

static void
redirect_tablejump (jump, nlabel)
     rtx jump, nlabel;
{
  register rtx olabel = JUMP_LABEL (jump);

  /* Add this jump to the jump_chain of NLABEL.  */
  if (jump_chain && INSN_UID (nlabel) < max_jump_chain
      && INSN_UID (jump) < max_jump_chain)
    {
      jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
      jump_chain[INSN_UID (nlabel)] = jump;
    }

  /* Replace the tablejump pattern with a plain jump to NLABEL;
     clearing INSN_CODE forces re-recognition of the new pattern.  */
  PATTERN (jump) = gen_jump (nlabel);
  JUMP_LABEL (jump) = nlabel;
  ++LABEL_NUSES (nlabel);
  INSN_CODE (jump) = -1;

  if (--LABEL_NUSES (olabel) == 0)
    {
      /* That was the last use of the old label: delete the insn that
	 fed the dispatch (and its logical predecessors), then the
	 label itself (which also removes the dead dispatch table).  */
      delete_labelref_insn (jump, olabel, 0);
      delete_insn (olabel);
    }
}
4034
4035/* Find the insn referencing LABEL that is a logical predecessor of INSN.
4036 If we found one, delete it and then delete this insn if DELETE_THIS is
4037 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
4038
4039static int
4040delete_labelref_insn (insn, label, delete_this)
4041 rtx insn, label;
4042 int delete_this;
4043{
4044 int deleted = 0;
4045 rtx link;
4046
4047 if (GET_CODE (insn) != NOTE
4048 && reg_mentioned_p (label, PATTERN (insn)))
4049 {
4050 if (delete_this)
4051 {
4052 delete_insn (insn);
4053 deleted = 1;
4054 }
4055 else
4056 return 1;
4057 }
4058
4059 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
4060 if (delete_labelref_insn (XEXP (link, 0), label, 1))
4061 {
4062 if (delete_this)
4063 {
4064 delete_insn (insn);
4065 deleted = 1;
4066 }
4067 else
4068 return 1;
4069 }
4070
4071 return deleted;
4072}
4073\f
4074/* Like rtx_equal_p except that it considers two REGs as equal
4fe73cc1
RK
4075 if they renumber to the same value and considers two commutative
4076 operations to be the same if the order of the operands has been
4077 reversed. */
15a63be1
RK
4078
4079int
4080rtx_renumbered_equal_p (x, y)
4081 rtx x, y;
4082{
4083 register int i;
4084 register RTX_CODE code = GET_CODE (x);
4085 register char *fmt;
4086
4087 if (x == y)
4088 return 1;
4fe73cc1 4089
15a63be1
RK
4090 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
4091 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
4092 && GET_CODE (SUBREG_REG (y)) == REG)))
4093 {
4fe73cc1
RK
4094 int reg_x = -1, reg_y = -1;
4095 int word_x = 0, word_y = 0;
15a63be1
RK
4096
4097 if (GET_MODE (x) != GET_MODE (y))
4098 return 0;
4099
4100 /* If we haven't done any renumbering, don't
4101 make any assumptions. */
4102 if (reg_renumber == 0)
4103 return rtx_equal_p (x, y);
4104
4105 if (code == SUBREG)
4106 {
4fe73cc1
RK
4107 reg_x = REGNO (SUBREG_REG (x));
4108 word_x = SUBREG_WORD (x);
4109
4110 if (reg_renumber[reg_x] >= 0)
4111 {
4112 reg_x = reg_renumber[reg_x] + word_x;
4113 word_x = 0;
4114 }
15a63be1 4115 }
4fe73cc1 4116
15a63be1
RK
4117 else
4118 {
4fe73cc1
RK
4119 reg_x = REGNO (x);
4120 if (reg_renumber[reg_x] >= 0)
4121 reg_x = reg_renumber[reg_x];
15a63be1 4122 }
4fe73cc1 4123
15a63be1
RK
4124 if (GET_CODE (y) == SUBREG)
4125 {
4fe73cc1
RK
4126 reg_y = REGNO (SUBREG_REG (y));
4127 word_y = SUBREG_WORD (y);
4128
4129 if (reg_renumber[reg_y] >= 0)
4130 {
4131 reg_y = reg_renumber[reg_y];
4132 word_y = 0;
4133 }
15a63be1 4134 }
4fe73cc1 4135
15a63be1
RK
4136 else
4137 {
4fe73cc1
RK
4138 reg_y = REGNO (y);
4139 if (reg_renumber[reg_y] >= 0)
4140 reg_y = reg_renumber[reg_y];
15a63be1 4141 }
4fe73cc1
RK
4142
4143 return reg_x >= 0 && reg_x == reg_y && word_x == word_y;
15a63be1 4144 }
4fe73cc1 4145
15a63be1
RK
4146 /* Now we have disposed of all the cases
4147 in which different rtx codes can match. */
4148 if (code != GET_CODE (y))
4149 return 0;
4fe73cc1 4150
15a63be1
RK
4151 switch (code)
4152 {
4153 case PC:
4154 case CC0:
4155 case ADDR_VEC:
4156 case ADDR_DIFF_VEC:
4157 return 0;
4158
4159 case CONST_INT:
38b3167e 4160 return INTVAL (x) == INTVAL (y);
15a63be1
RK
4161
4162 case LABEL_REF:
705f26cf
RS
4163 /* We can't assume nonlocal labels have their following insns yet. */
4164 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
4165 return XEXP (x, 0) == XEXP (y, 0);
4fe73cc1 4166
15a63be1
RK
4167 /* Two label-refs are equivalent if they point at labels
4168 in the same position in the instruction stream. */
4169 return (next_real_insn (XEXP (x, 0))
4170 == next_real_insn (XEXP (y, 0)));
4171
4172 case SYMBOL_REF:
4173 return XSTR (x, 0) == XSTR (y, 0);
e9a25f70
JL
4174
4175 default:
4176 break;
15a63be1
RK
4177 }
4178
4179 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
4180
4181 if (GET_MODE (x) != GET_MODE (y))
4182 return 0;
4183
4fe73cc1
RK
4184 /* For commutative operations, the RTX match if the operand match in any
4185 order. Also handle the simple binary and unary cases without a loop. */
4186 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4187 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4188 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
4189 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
4190 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
4191 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4192 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4193 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
4194 else if (GET_RTX_CLASS (code) == '1')
4195 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
4196
15a63be1
RK
4197 /* Compare the elements. If any pair of corresponding elements
4198 fail to match, return 0 for the whole things. */
4199
4200 fmt = GET_RTX_FORMAT (code);
4201 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4202 {
4203 register int j;
4204 switch (fmt[i])
4205 {
5f4f0e22
CH
4206 case 'w':
4207 if (XWINT (x, i) != XWINT (y, i))
4208 return 0;
4209 break;
4210
15a63be1
RK
4211 case 'i':
4212 if (XINT (x, i) != XINT (y, i))
4213 return 0;
4214 break;
4215
4216 case 's':
4217 if (strcmp (XSTR (x, i), XSTR (y, i)))
4218 return 0;
4219 break;
4220
4221 case 'e':
4222 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
4223 return 0;
4224 break;
4225
4226 case 'u':
4227 if (XEXP (x, i) != XEXP (y, i))
4228 return 0;
4229 /* fall through. */
4230 case '0':
4231 break;
4232
4233 case 'E':
4234 if (XVECLEN (x, i) != XVECLEN (y, i))
4235 return 0;
4236 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4237 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
4238 return 0;
4239 break;
4240
4241 default:
4242 abort ();
4243 }
4244 }
4245 return 1;
4246}
4247\f
4248/* If X is a hard register or equivalent to one or a subregister of one,
4249 return the hard register number. If X is a pseudo register that was not
4250 assigned a hard register, return the pseudo register number. Otherwise,
4251 return -1. Any rtx is valid for X. */
4252
4253int
4254true_regnum (x)
4255 rtx x;
4256{
4257 if (GET_CODE (x) == REG)
4258 {
4259 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
4260 return reg_renumber[REGNO (x)];
4261 return REGNO (x);
4262 }
4263 if (GET_CODE (x) == SUBREG)
4264 {
4265 int base = true_regnum (SUBREG_REG (x));
4266 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
4267 return SUBREG_WORD (x) + base;
4268 }
4269 return -1;
4270}
4271\f
/* Optimize code of the form:

	for (x = a[i]; x; ...)
	  ...
	for (x = a[i]; x; ...)
	  ...
      foo:

   Loop optimize will change the above code into

	if (x = a[i])
	  for (;;)
	     { ...; if (! (x = ...)) break; }
	if (x = a[i])
	  for (;;)
	     { ...; if (! (x = ...)) break; }
      foo:

   In general, if the first test fails, the program can branch
   directly to `foo' and skip the second try which is doomed to fail.
   We run this after loop optimization and before flow analysis.  */

/* When comparing the insn patterns, we track the fact that different
   pseudo-register numbers may have been used in each computation.
   The following array stores an equivalence -- same_regs[I] == J means
   that pseudo register I was used in the first set of tests in a context
   where J was used in the second set.  We also count the number of such
   pending equivalences.  If nonzero, the expressions really aren't the
   same.  */

static int *same_regs;

/* Count of pending equivalences currently recorded in same_regs.  */
static int num_same_regs;

/* Track any registers modified between the target of the first jump and
   the second jump.  They never compare equal.  */

static char *modified_regs;

/* Record if memory was modified.  */

static int modified_mem;
4314
4315/* Called via note_stores on each insn between the target of the first
4316 branch and the second branch. It marks any changed registers. */
4317
4318static void
4319mark_modified_reg (dest, x)
4320 rtx dest;
4321 rtx x;
4322{
4323 int regno, i;
4324
4325 if (GET_CODE (dest) == SUBREG)
4326 dest = SUBREG_REG (dest);
4327
4328 if (GET_CODE (dest) == MEM)
4329 modified_mem = 1;
4330
4331 if (GET_CODE (dest) != REG)
4332 return;
4333
4334 regno = REGNO (dest);
4335 if (regno >= FIRST_PSEUDO_REGISTER)
4336 modified_regs[regno] = 1;
4337 else
4338 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
4339 modified_regs[regno + i] = 1;
4340}
4341
4342/* F is the first insn in the chain of insns. */
4343
4344void
aa38b201 4345thread_jumps (f, max_reg, flag_before_loop)
15a63be1
RK
4346 rtx f;
4347 int max_reg;
aa38b201 4348 int flag_before_loop;
15a63be1
RK
4349{
4350 /* Basic algorithm is to find a conditional branch,
4351 the label it may branch to, and the branch after
4352 that label. If the two branches test the same condition,
4353 walk back from both branch paths until the insn patterns
4354 differ, or code labels are hit. If we make it back to
4355 the target of the first branch, then we know that the first branch
4356 will either always succeed or always fail depending on the relative
4357 senses of the two branches. So adjust the first branch accordingly
4358 in this case. */
4359
4360 rtx label, b1, b2, t1, t2;
4361 enum rtx_code code1, code2;
4362 rtx b1op0, b1op1, b2op0, b2op1;
4363 int changed = 1;
4364 int i;
7ee8a9d5 4365 int *all_reset;
15a63be1
RK
4366
4367 /* Allocate register tables and quick-reset table. */
4368 modified_regs = (char *) alloca (max_reg * sizeof (char));
7ee8a9d5
RK
4369 same_regs = (int *) alloca (max_reg * sizeof (int));
4370 all_reset = (int *) alloca (max_reg * sizeof (int));
15a63be1
RK
4371 for (i = 0; i < max_reg; i++)
4372 all_reset[i] = -1;
4373
4374 while (changed)
4375 {
4376 changed = 0;
4377
4378 for (b1 = f; b1; b1 = NEXT_INSN (b1))
4379 {
4380 /* Get to a candidate branch insn. */
4381 if (GET_CODE (b1) != JUMP_INSN
4382 || ! condjump_p (b1) || simplejump_p (b1)
4383 || JUMP_LABEL (b1) == 0)
4384 continue;
4385
4386 bzero (modified_regs, max_reg * sizeof (char));
4387 modified_mem = 0;
4388
4c9a05bc
RK
4389 bcopy ((char *) all_reset, (char *) same_regs,
4390 max_reg * sizeof (int));
15a63be1
RK
4391 num_same_regs = 0;
4392
4393 label = JUMP_LABEL (b1);
4394
4395 /* Look for a branch after the target. Record any registers and
4396 memory modified between the target and the branch. Stop when we
4397 get to a label since we can't know what was changed there. */
4398 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
4399 {
4400 if (GET_CODE (b2) == CODE_LABEL)
4401 break;
4402
4403 else if (GET_CODE (b2) == JUMP_INSN)
4404 {
4405 /* If this is an unconditional jump and is the only use of
4406 its target label, we can follow it. */
4407 if (simplejump_p (b2)
4408 && JUMP_LABEL (b2) != 0
4409 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
4410 {
4411 b2 = JUMP_LABEL (b2);
4412 continue;
4413 }
4414 else
4415 break;
4416 }
4417
4418 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
4419 continue;
4420
4421 if (GET_CODE (b2) == CALL_INSN)
4422 {
4423 modified_mem = 1;
4424 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4425 if (call_used_regs[i] && ! fixed_regs[i]
4426 && i != STACK_POINTER_REGNUM
4427 && i != FRAME_POINTER_REGNUM
cbe23927 4428 && i != HARD_FRAME_POINTER_REGNUM
15a63be1
RK
4429 && i != ARG_POINTER_REGNUM)
4430 modified_regs[i] = 1;
4431 }
4432
4433 note_stores (PATTERN (b2), mark_modified_reg);
4434 }
4435
4436 /* Check the next candidate branch insn from the label
4437 of the first. */
4438 if (b2 == 0
4439 || GET_CODE (b2) != JUMP_INSN
4440 || b2 == b1
4441 || ! condjump_p (b2)
4442 || simplejump_p (b2))
4443 continue;
4444
4445 /* Get the comparison codes and operands, reversing the
4446 codes if appropriate. If we don't have comparison codes,
4447 we can't do anything. */
4448 b1op0 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 0);
4449 b1op1 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 1);
4450 code1 = GET_CODE (XEXP (SET_SRC (PATTERN (b1)), 0));
4451 if (XEXP (SET_SRC (PATTERN (b1)), 1) == pc_rtx)
4452 code1 = reverse_condition (code1);
4453
4454 b2op0 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 0);
4455 b2op1 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 1);
4456 code2 = GET_CODE (XEXP (SET_SRC (PATTERN (b2)), 0));
4457 if (XEXP (SET_SRC (PATTERN (b2)), 1) == pc_rtx)
4458 code2 = reverse_condition (code2);
4459
4460 /* If they test the same things and knowing that B1 branches
4461 tells us whether or not B2 branches, check if we
4462 can thread the branch. */
4463 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
4464 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
4465 && (comparison_dominates_p (code1, code2)
cc5e1642
R
4466 || (comparison_dominates_p (code1, reverse_condition (code2))
4467 && can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (b1)),
4468 0),
4469 b1))))
15a63be1
RK
4470 {
4471 t1 = prev_nonnote_insn (b1);
4472 t2 = prev_nonnote_insn (b2);
4473
4474 while (t1 != 0 && t2 != 0)
4475 {
15a63be1
RK
4476 if (t2 == label)
4477 {
4478 /* We have reached the target of the first branch.
4479 If there are no pending register equivalents,
4480 we know that this branch will either always
4481 succeed (if the senses of the two branches are
4482 the same) or always fail (if not). */
4483 rtx new_label;
4484
4485 if (num_same_regs != 0)
4486 break;
4487
4488 if (comparison_dominates_p (code1, code2))
4489 new_label = JUMP_LABEL (b2);
4490 else
4491 new_label = get_label_after (b2);
4492
aa38b201
TG
4493 if (JUMP_LABEL (b1) != new_label)
4494 {
4495 rtx prev = PREV_INSN (new_label);
4496
4497 if (flag_before_loop
4498 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
4499 {
4500 /* Don't thread to the loop label. If a loop
4501 label is reused, loop optimization will
4502 be disabled for that loop. */
4503 new_label = gen_label_rtx ();
4504 emit_label_after (new_label, PREV_INSN (prev));
4505 }
4506 changed |= redirect_jump (b1, new_label);
4507 }
15a63be1
RK
4508 break;
4509 }
4510
4511 /* If either of these is not a normal insn (it might be
4512 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
4513 have already been skipped above.) Similarly, fail
4514 if the insns are different. */
4515 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
4516 || recog_memoized (t1) != recog_memoized (t2)
4517 || ! rtx_equal_for_thread_p (PATTERN (t1),
4518 PATTERN (t2), t2))
4519 break;
4520
4521 t1 = prev_nonnote_insn (t1);
4522 t2 = prev_nonnote_insn (t2);
4523 }
4524 }
4525 }
4526 }
4527}
4528\f
/* This is like RTX_EQUAL_P except that it knows about our handling of
   possibly equivalent registers and knows to consider volatile and
   modified objects as not equal.

   YINSN is the insn containing Y.

   As a side effect, may record a pending pseudo-register equivalence in
   same_regs / num_same_regs (see their comments above).  */

int
rtx_equal_for_thread_p (x, y, yinsn)
     rtx x, y;
     rtx yinsn;
{
  register int i;
  register int j;
  register enum rtx_code code;
  register char *fmt;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal. */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For floating-point, consider everything unequal.  This is a bit
     pessimistic, but this pass would only rarely do anything for FP
     anyway.  */
  if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
      && FLOAT_MODE_P (GET_MODE (x)) && ! flag_fast_math)
    return 0;

  /* For commutative operations, the RTXs match if the operands match in
     either order.  Also handle the simple binary and unary cases without
     a loop.  */
  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
    return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
	     && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
	    || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
		&& rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
    return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
	    && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
  else if (GET_RTX_CLASS (code) == '1')
    return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);

  /* Handle special-cases first. */
  switch (code)
    {
    case REG:
      if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
	return 1;

      /* If neither is user variable or hard register, check for possible
	 equivalence.  */
      if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
	  || REGNO (x) < FIRST_PSEUDO_REGISTER
	  || REGNO (y) < FIRST_PSEUDO_REGISTER)
	return 0;

      if (same_regs[REGNO (x)] == -1)
	{
	  /* Record a new pending equivalence: pseudo X on the first
	     path corresponds to pseudo Y on the second.  */
	  same_regs[REGNO (x)] = REGNO (y);
	  num_same_regs++;

	  /* If this is the first time we are seeing a register on the `Y'
	     side, see if it is the last use.  If not, we can't thread the
	     jump, so mark it as not equivalent.  */
	  if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
	    return 0;

	  return 1;
	}
      else
	return (same_regs[REGNO (x)] == REGNO (y));

      break;

    case MEM:
      /* If memory modified or either volatile, not equivalent.
	 Else, check address. */
      if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);

    case ASM_INPUT:
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      break;

    case SET:
      /* Cancel a pending `same_regs' if setting equivalenced registers.
	 Then process source.  */
      if (GET_CODE (SET_DEST (x)) == REG
          && GET_CODE (SET_DEST (y)) == REG)
	{
          if (same_regs[REGNO (SET_DEST (x))] == REGNO (SET_DEST (y)))
	    {
	      same_regs[REGNO (SET_DEST (x))] = -1;
	      num_same_regs--;
	    }
	  else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
	    return 0;
	}
      else
	if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
	  return 0;

      return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    default:
      break;
    }

  if (x == y)
    return 1;

  /* Generic structural walk over the remaining rtx formats.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'n':
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'V':
	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
					XVECEXP (y, i, j), yinsn) == 0)
	      return 0;
	  break;

	case 'e':
	  if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
	    return 0;
	  break;

	case 'S':
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'u':
	  /* These are just backpointers, so they don't matter.  */
	  break;

	case '0':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  abort ();
	}
    }
  return 1;
}