]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/jump.c
Oops, missed ChangeLog in last checkin...
[thirdparty/gcc.git] / gcc / jump.c
CommitLineData
15a63be1 1/* Optimize jump instructions, for GNU compiler.
3b708058
JL
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000 Free Software Foundation, Inc.
15a63be1
RK
4
5This file is part of GNU CC.
6
7GNU CC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 2, or (at your option)
10any later version.
11
12GNU CC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GNU CC; see the file COPYING. If not, write to
a35311b0
RK
19the Free Software Foundation, 59 Temple Place - Suite 330,
20Boston, MA 02111-1307, USA. */
15a63be1
RK
21
22
23/* This is the jump-optimization pass of the compiler.
24 It is run two or three times: once before cse, sometimes once after cse,
25 and once after reload (before final).
26
27 jump_optimize deletes unreachable code and labels that are not used.
28 It also deletes jumps that jump to the following insn,
29 and simplifies jumps around unconditional jumps and jumps
30 to unconditional jumps.
31
32 Each CODE_LABEL has a count of the times it is used
33 stored in the LABEL_NUSES internal field, and each JUMP_INSN
34 has one label that it refers to stored in the
35 JUMP_LABEL internal field. With this we can detect labels that
36 become unused because of the deletion of all the jumps that
37 formerly used them. The JUMP_LABEL info is sometimes looked
38 at by later passes.
39
40 Optionally, cross-jumping can be done. Currently it is done
41 only the last time (when after reload and before final).
42 In fact, the code for cross-jumping now assumes that register
43 allocation has been done, since it uses `rtx_renumbered_equal_p'.
44
45 Jump optimization is done after cse when cse's constant-propagation
46 causes jumps to become unconditional or to be deleted.
47
48 Unreachable loops are not detected here, because the labels
49 have references and the insns appear reachable from the labels.
50 find_basic_blocks in flow.c finds and deletes such loops.
51
52 The subroutines delete_insn, redirect_jump, and invert_jump are used
53 from other passes as well. */
54
55#include "config.h"
670ee920 56#include "system.h"
15a63be1 57#include "rtl.h"
6baf1cc8 58#include "tm_p.h"
15a63be1
RK
59#include "flags.h"
60#include "hard-reg-set.h"
61#include "regs.h"
15a63be1
RK
62#include "insn-config.h"
63#include "insn-flags.h"
0c63f729 64#include "insn-attr.h"
e9a25f70 65#include "recog.h"
49ad7cfa 66#include "function.h"
3c86a619 67#include "expr.h"
15a63be1 68#include "real.h"
6adb4e3a 69#include "except.h"
2e107e9e 70#include "toplev.h"
15a63be1
RK
71
72/* ??? Eventually must record somehow the labels used by jumps
73 from nested functions. */
74/* Pre-record the next or previous real insn for each label?
75 No, this pass is very fast anyway. */
76/* Condense consecutive labels?
77 This would make life analysis faster, maybe. */
78/* Optimize jump y; x: ... y: jumpif... x?
79 Don't know if it is worth bothering with. */
80/* Optimize two cases of conditional jump to conditional jump?
81 This can never delete any instruction or make anything dead,
82 or even change what is live at any point.
83 So perhaps let combiner do it. */
84
85/* Vector indexed by uid.
86 For each CODE_LABEL, index by its uid to get first unconditional jump
87 that jumps to the label.
88 For each JUMP_INSN, index by its uid to get the next unconditional jump
89 that jumps to the same label.
90 Element 0 is the start of a chain of all return insns.
91 (It is safe to use element 0 because insn uid 0 is not used. */
92
93static rtx *jump_chain;
94
15a63be1
RK
95/* Maximum index in jump_chain. */
96
97static int max_jump_chain;
98
99/* Set nonzero by jump_optimize if control can fall through
100 to the end of the function. */
101int can_reach_end;
102
103/* Indicates whether death notes are significant in cross jump analysis.
104 Normally they are not significant, because of A and B jump to C,
105 and R dies in A, it must die in B. But this might not be true after
106 stack register conversion, and we must compare death notes in that
0f41302f 107 case. */
15a63be1
RK
108
109static int cross_jump_death_matters = 0;
110
3fe41456
KG
111static int init_label_info PARAMS ((rtx));
112static void delete_barrier_successors PARAMS ((rtx));
113static void mark_all_labels PARAMS ((rtx, int));
114static rtx delete_unreferenced_labels PARAMS ((rtx));
115static void delete_noop_moves PARAMS ((rtx));
d29c259b 116static int calculate_can_reach_end PARAMS ((rtx, int));
3fe41456
KG
117static int duplicate_loop_exit_test PARAMS ((rtx));
118static void find_cross_jump PARAMS ((rtx, rtx, int, rtx *, rtx *));
119static void do_cross_jump PARAMS ((rtx, rtx, rtx));
120static int jump_back_p PARAMS ((rtx, rtx));
121static int tension_vector_labels PARAMS ((rtx, int));
a76063a6 122static void mark_jump_label PARAMS ((rtx, rtx, int, int));
3fe41456
KG
123static void delete_computation PARAMS ((rtx));
124static void delete_from_jump_chain PARAMS ((rtx));
125static int delete_labelref_insn PARAMS ((rtx, rtx, int));
126static void mark_modified_reg PARAMS ((rtx, rtx, void *));
127static void redirect_tablejump PARAMS ((rtx, rtx));
0a1c58a2 128static void jump_optimize_1 PARAMS ((rtx, int, int, int, int, int));
341a243e 129#if ! defined(HAVE_cc0) && ! defined(HAVE_conditional_arithmetic)
3fe41456 130static rtx find_insert_position PARAMS ((rtx, rtx));
7a87758d 131#endif
3fe41456
KG
132static int returnjump_p_1 PARAMS ((rtx *, void *));
133static void delete_prior_computation PARAMS ((rtx, rtx));
0a1c58a2 134\f
c4403371
JL
135/* Main external entry point into the jump optimizer. See comments before
136 jump_optimize_1 for descriptions of the arguments. */
137void
138jump_optimize (f, cross_jump, noop_moves, after_regscan)
139 rtx f;
140 int cross_jump;
141 int noop_moves;
142 int after_regscan;
143{
0a1c58a2 144 jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, 0, 0);
c4403371
JL
145}
146
147/* Alternate entry into the jump optimizer. This entry point only rebuilds
148 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
149 instructions. */
150void
151rebuild_jump_labels (f)
152 rtx f;
153{
0a1c58a2 154 jump_optimize_1 (f, 0, 0, 0, 1, 0);
c4403371
JL
155}
156
0a1c58a2
JL
157/* Alternate entry into the jump optimizer. Do only trivial optimizations. */
158void
159jump_optimize_minimal (f)
160 rtx f;
161{
162 jump_optimize_1 (f, 0, 0, 0, 0, 1);
163}
15a63be1
RK
164\f
165/* Delete no-op jumps and optimize jumps to jumps
166 and jumps around jumps.
167 Delete unused labels and unreachable code.
168
169 If CROSS_JUMP is 1, detect matching code
170 before a jump and its destination and unify them.
171 If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.
172
173 If NOOP_MOVES is nonzero, delete no-op move insns.
174
175 If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
176 after regscan, and it is safe to use regno_first_uid and regno_last_uid.
177
c4403371
JL
178 If MARK_LABELS_ONLY is nonzero, then we only rebuild the jump chain
179 and JUMP_LABEL field for jumping insns.
180
15a63be1
RK
181 If `optimize' is zero, don't change any code,
182 just determine whether control drops off the end of the function.
183 This case occurs when we have -W and not -O.
184 It works because `delete_insn' checks the value of `optimize'
0a1c58a2
JL
185 and refrains from actually deleting when that is 0.
186
187 If MINIMAL is nonzero, then we only perform trivial optimizations:
188
189 * Removal of unreachable code after BARRIERs.
190 * Removal of unreferenced CODE_LABELs.
191 * Removal of a jump to the next instruction.
192 * Removal of a conditional jump followed by an unconditional jump
193 to the same target as the conditional jump.
194 * Simplify a conditional jump around an unconditional jump.
195 * Simplify a jump to a jump.
196 * Delete extraneous line number notes.
197 */
15a63be1 198
1ae5c6c2 199static void
0a1c58a2
JL
200jump_optimize_1 (f, cross_jump, noop_moves, after_regscan,
201 mark_labels_only, minimal)
15a63be1
RK
202 rtx f;
203 int cross_jump;
204 int noop_moves;
205 int after_regscan;
c4403371 206 int mark_labels_only;
0a1c58a2 207 int minimal;
15a63be1 208{
5828374f 209 register rtx insn, next;
15a63be1 210 int changed;
a87ef323 211 int old_max_reg;
15a63be1
RK
212 int first = 1;
213 int max_uid = 0;
214 rtx last_insn;
215
216 cross_jump_death_matters = (cross_jump == 2);
269ef46c 217 max_uid = init_label_info (f) + 1;
15a63be1 218
18f3f864
JL
219 /* If we are performing cross jump optimizations, then initialize
220 tables mapping UIDs to EH regions to avoid incorrect movement
221 of insns from one EH region to another. */
222 if (flag_exceptions && cross_jump)
223 init_insn_eh_region (f, max_uid);
224
e4884e95
RK
225 if (! mark_labels_only)
226 delete_barrier_successors (f);
15a63be1
RK
227
228 /* Leave some extra room for labels and duplicate exit test insns
229 we make. */
230 max_jump_chain = max_uid * 14 / 10;
67289ea6 231 jump_chain = (rtx *) xcalloc (max_jump_chain, sizeof (rtx));
15a63be1 232
269ef46c 233 mark_all_labels (f, cross_jump);
15a63be1
RK
234
235 /* Keep track of labels used from static data;
236 they cannot ever be deleted. */
237
238 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
239 LABEL_NUSES (XEXP (insn, 0))++;
240
6adb4e3a
MS
241 check_exception_handler_labels ();
242
243 /* Keep track of labels used for marking handlers for exception
244 regions; they cannot usually be deleted. */
245
246 for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
247 LABEL_NUSES (XEXP (insn, 0))++;
248
55a98783
JL
249 /* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
250 notes and recompute LABEL_NUSES. */
251 if (mark_labels_only)
67289ea6 252 goto end;
55a98783 253
0a1c58a2
JL
254 if (! minimal)
255 exception_optimize ();
6adb4e3a 256
269ef46c 257 last_insn = delete_unreferenced_labels (f);
15a63be1 258
15a63be1 259 if (noop_moves)
269ef46c 260 delete_noop_moves (f);
15a63be1 261
2156dfe3
RK
262 /* If we haven't yet gotten to reload and we have just run regscan,
263 delete any insn that sets a register that isn't used elsewhere.
264 This helps some of the optimizations below by having less insns
265 being jumped around. */
266
d29c259b 267 if (optimize && ! reload_completed && after_regscan)
2156dfe3
RK
268 for (insn = f; insn; insn = next)
269 {
270 rtx set = single_set (insn);
271
272 next = NEXT_INSN (insn);
273
274 if (set && GET_CODE (SET_DEST (set)) == REG
275 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
b1f21e0a 276 && REGNO_FIRST_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
b803fb99
RS
277 /* We use regno_last_note_uid so as not to delete the setting
278 of a reg that's used in notes. A subsequent optimization
279 might arrange to use that reg for real. */
b1f21e0a 280 && REGNO_LAST_NOTE_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
d008e26c 281 && ! side_effects_p (SET_SRC (set))
af37f0dd
JL
282 && ! find_reg_note (insn, REG_RETVAL, 0)
283 /* An ADDRESSOF expression can turn into a use of the internal arg
284 pointer, so do not delete the initialization of the internal
285 arg pointer yet. If it is truly dead, flow will delete the
286 initializing insn. */
287 && SET_DEST (set) != current_function_internal_arg_pointer)
2156dfe3
RK
288 delete_insn (insn);
289 }
290
15a63be1
RK
291 /* Now iterate optimizing jumps until nothing changes over one pass. */
292 changed = 1;
a87ef323 293 old_max_reg = max_reg_num ();
15a63be1
RK
294 while (changed)
295 {
15a63be1
RK
296 changed = 0;
297
298 for (insn = f; insn; insn = next)
299 {
300 rtx reallabelprev;
a544cfd2 301 rtx temp, temp1, temp2 = NULL_RTX, temp3, temp4, temp5, temp6;
15a63be1 302 rtx nlabel;
c16ddde3 303 int this_is_simplejump, this_is_condjump, reversep = 0;
3480bb98 304 int this_is_condjump_in_parallel;
a87ef323 305
15a63be1
RK
306 next = NEXT_INSN (insn);
307
308 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
309 jump. Try to optimize by duplicating the loop exit test if so.
310 This is only safe immediately after regscan, because it uses
d49256bb
RH
311 the values of regno_first_uid and regno_last_uid. */
312 if (after_regscan && GET_CODE (insn) == NOTE
15a63be1
RK
313 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
314 && (temp1 = next_nonnote_insn (insn)) != 0
315 && simplejump_p (temp1))
316 {
317 temp = PREV_INSN (insn);
318 if (duplicate_loop_exit_test (insn))
319 {
320 changed = 1;
321 next = NEXT_INSN (temp);
322 continue;
323 }
324 }
325
326 if (GET_CODE (insn) != JUMP_INSN)
327 continue;
328
329 this_is_simplejump = simplejump_p (insn);
330 this_is_condjump = condjump_p (insn);
3480bb98 331 this_is_condjump_in_parallel = condjump_in_parallel_p (insn);
15a63be1
RK
332
333 /* Tension the labels in dispatch tables. */
334
335 if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
336 changed |= tension_vector_labels (PATTERN (insn), 0);
337 if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
338 changed |= tension_vector_labels (PATTERN (insn), 1);
339
c5c76735
JL
340 /* See if this jump goes to another jump and redirect if so. */
341 nlabel = follow_jumps (JUMP_LABEL (insn));
342 if (nlabel != JUMP_LABEL (insn))
343 changed |= redirect_jump (insn, nlabel);
344
751b8992 345 if (! optimize || minimal)
7c22ee2b
RH
346 continue;
347
15a63be1
RK
348 /* If a dispatch table always goes to the same place,
349 get rid of it and replace the insn that uses it. */
350
351 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
352 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
353 {
354 int i;
355 rtx pat = PATTERN (insn);
356 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
357 int len = XVECLEN (pat, diff_vec_p);
358 rtx dispatch = prev_real_insn (insn);
dc9e9f3f 359 rtx set;
15a63be1
RK
360
361 for (i = 0; i < len; i++)
362 if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
363 != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
364 break;
dc9e9f3f 365
15a63be1 366 if (i == len
0546e268 367 && dispatch != 0
15a63be1
RK
368 && GET_CODE (dispatch) == JUMP_INSN
369 && JUMP_LABEL (dispatch) != 0
dc9e9f3f
RE
370 /* Don't mess with a casesi insn.
371 XXX according to the comment before computed_jump_p(),
372 all casesi insns should be a parallel of the jump
373 and a USE of a LABEL_REF. */
374 && ! ((set = single_set (dispatch)) != NULL
375 && (GET_CODE (SET_SRC (set)) == IF_THEN_ELSE))
15a63be1
RK
376 && next_real_insn (JUMP_LABEL (dispatch)) == insn)
377 {
378 redirect_tablejump (dispatch,
379 XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
380 changed = 1;
381 }
382 }
383
15a63be1
RK
384 /* If a jump references the end of the function, try to turn
385 it into a RETURN insn, possibly a conditional one. */
c5c76735 386 if (JUMP_LABEL (insn) != 0
8e318904
JL
387 && (next_active_insn (JUMP_LABEL (insn)) == 0
388 || GET_CODE (PATTERN (next_active_insn (JUMP_LABEL (insn))))
389 == RETURN))
5f4f0e22 390 changed |= redirect_jump (insn, NULL_RTX);
15a63be1 391
c5c76735
JL
392 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
393
15a63be1 394 /* Detect jump to following insn. */
c5c76735 395 if (reallabelprev == insn && this_is_condjump)
15a63be1 396 {
cd423ead 397 next = next_real_insn (JUMP_LABEL (insn));
15a63be1
RK
398 delete_jump (insn);
399 changed = 1;
400 continue;
401 }
402
c5c76735
JL
403 /* Detect a conditional jump going to the same place
404 as an immediately following unconditional jump. */
405 else if (this_is_condjump
406 && (temp = next_active_insn (insn)) != 0
407 && simplejump_p (temp)
408 && (next_active_insn (JUMP_LABEL (insn))
409 == next_active_insn (JUMP_LABEL (temp))))
410 {
411 /* Don't mess up test coverage analysis. */
412 temp2 = temp;
413 if (flag_test_coverage && !reload_completed)
414 for (temp2 = insn; temp2 != temp; temp2 = NEXT_INSN (temp2))
415 if (GET_CODE (temp2) == NOTE && NOTE_LINE_NUMBER (temp2) > 0)
416 break;
417
418 if (temp2 == temp)
419 {
420 delete_jump (insn);
421 changed = 1;
422 continue;
423 }
424 }
425
426 /* Detect a conditional jump jumping over an unconditional jump. */
427
428 else if ((this_is_condjump || this_is_condjump_in_parallel)
429 && ! this_is_simplejump
430 && reallabelprev != 0
431 && GET_CODE (reallabelprev) == JUMP_INSN
432 && prev_active_insn (reallabelprev) == insn
433 && no_labels_between_p (insn, reallabelprev)
434 && simplejump_p (reallabelprev))
435 {
436 /* When we invert the unconditional jump, we will be
437 decrementing the usage count of its old label.
438 Make sure that we don't delete it now because that
439 might cause the following code to be deleted. */
440 rtx prev_uses = prev_nonnote_insn (reallabelprev);
441 rtx prev_label = JUMP_LABEL (insn);
442
443 if (prev_label)
444 ++LABEL_NUSES (prev_label);
445
446 if (invert_jump (insn, JUMP_LABEL (reallabelprev)))
447 {
448 /* It is very likely that if there are USE insns before
449 this jump, they hold REG_DEAD notes. These REG_DEAD
450 notes are no longer valid due to this optimization,
451 and will cause the life-analysis that following passes
452 (notably delayed-branch scheduling) to think that
453 these registers are dead when they are not.
454
455 To prevent this trouble, we just remove the USE insns
456 from the insn chain. */
457
458 while (prev_uses && GET_CODE (prev_uses) == INSN
459 && GET_CODE (PATTERN (prev_uses)) == USE)
460 {
461 rtx useless = prev_uses;
462 prev_uses = prev_nonnote_insn (prev_uses);
463 delete_insn (useless);
464 }
465
466 delete_insn (reallabelprev);
467 changed = 1;
468 }
469
470 /* We can now safely delete the label if it is unreferenced
471 since the delete_insn above has deleted the BARRIER. */
472 if (prev_label && --LABEL_NUSES (prev_label) == 0)
473 delete_insn (prev_label);
474
475 next = NEXT_INSN (insn);
476 }
477
d45cf215 478 /* If we have an unconditional jump preceded by a USE, try to put
15a63be1
RK
479 the USE before the target and jump there. This simplifies many
480 of the optimizations below since we don't have to worry about
481 dealing with these USE insns. We only do this if the label
482 being branch to already has the identical USE or if code
483 never falls through to that label. */
484
c5c76735
JL
485 else if (this_is_simplejump
486 && (temp = prev_nonnote_insn (insn)) != 0
487 && GET_CODE (temp) == INSN
488 && GET_CODE (PATTERN (temp)) == USE
489 && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
490 && (GET_CODE (temp1) == BARRIER
491 || (GET_CODE (temp1) == INSN
492 && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
493 /* Don't do this optimization if we have a loop containing
494 only the USE instruction, and the loop start label has
495 a usage count of 1. This is because we will redo this
496 optimization everytime through the outer loop, and jump
497 opt will never exit. */
498 && ! ((temp2 = prev_nonnote_insn (temp)) != 0
499 && temp2 == JUMP_LABEL (insn)
500 && LABEL_NUSES (temp2) == 1))
15a63be1
RK
501 {
502 if (GET_CODE (temp1) == BARRIER)
503 {
2156dfe3 504 emit_insn_after (PATTERN (temp), temp1);
15a63be1
RK
505 temp1 = NEXT_INSN (temp1);
506 }
15a63be1 507
2156dfe3 508 delete_insn (temp);
15a63be1
RK
509 redirect_jump (insn, get_label_before (temp1));
510 reallabelprev = prev_real_insn (temp1);
511 changed = 1;
c5c76735 512 next = NEXT_INSN (insn);
15a63be1
RK
513 }
514
515 /* Simplify if (...) x = a; else x = b; by converting it
516 to x = b; if (...) x = a;
517 if B is sufficiently simple, the test doesn't involve X,
518 and nothing in the test modifies B or X.
519
520 If we have small register classes, we also can't do this if X
521 is a hard register.
522
523 If the "x = b;" insn has any REG_NOTES, we don't do this because
524 of the possibility that we are running after CSE and there is a
525 REG_EQUAL note that is only valid if the branch has already been
526 taken. If we move the insn with the REG_EQUAL note, we may
527 fold the comparison to always be false in a later CSE pass.
528 (We could also delete the REG_NOTES when moving the insn, but it
529 seems simpler to not move it.) An exception is that we can move
530 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
531 value is the same as "b".
532
533 INSN is the branch over the `else' part.
534
535 We set:
536
d45cf215 537 TEMP to the jump insn preceding "x = a;"
15a63be1
RK
538 TEMP1 to X
539 TEMP2 to the insn that sets "x = b;"
2156dfe3
RK
540 TEMP3 to the insn that sets "x = a;"
541 TEMP4 to the set of "x = b"; */
15a63be1
RK
542
543 if (this_is_simplejump
544 && (temp3 = prev_active_insn (insn)) != 0
545 && GET_CODE (temp3) == INSN
2156dfe3
RK
546 && (temp4 = single_set (temp3)) != 0
547 && GET_CODE (temp1 = SET_DEST (temp4)) == REG
f95182a4
ILT
548 && (! SMALL_REGISTER_CLASSES
549 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
15a63be1
RK
550 && (temp2 = next_active_insn (insn)) != 0
551 && GET_CODE (temp2) == INSN
2156dfe3
RK
552 && (temp4 = single_set (temp2)) != 0
553 && rtx_equal_p (SET_DEST (temp4), temp1)
0bef9b8d
RH
554 && ! side_effects_p (SET_SRC (temp4))
555 && ! may_trap_p (SET_SRC (temp4))
15a63be1
RK
556 && (REG_NOTES (temp2) == 0
557 || ((REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUAL
558 || REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUIV)
559 && XEXP (REG_NOTES (temp2), 1) == 0
560 && rtx_equal_p (XEXP (REG_NOTES (temp2), 0),
2156dfe3 561 SET_SRC (temp4))))
15a63be1
RK
562 && (temp = prev_active_insn (temp3)) != 0
563 && condjump_p (temp) && ! simplejump_p (temp)
564 /* TEMP must skip over the "x = a;" insn */
565 && prev_real_insn (JUMP_LABEL (temp)) == insn
566 && no_labels_between_p (insn, JUMP_LABEL (temp))
567 /* There must be no other entries to the "x = b;" insn. */
568 && no_labels_between_p (JUMP_LABEL (temp), temp2)
569 /* INSN must either branch to the insn after TEMP2 or the insn
570 after TEMP2 must branch to the same place as INSN. */
571 && (reallabelprev == temp2
2156dfe3
RK
572 || ((temp5 = next_active_insn (temp2)) != 0
573 && simplejump_p (temp5)
574 && JUMP_LABEL (temp5) == JUMP_LABEL (insn))))
15a63be1
RK
575 {
576 /* The test expression, X, may be a complicated test with
577 multiple branches. See if we can find all the uses of
578 the label that TEMP branches to without hitting a CALL_INSN
579 or a jump to somewhere else. */
580 rtx target = JUMP_LABEL (temp);
581 int nuses = LABEL_NUSES (target);
51723711
KG
582 rtx p;
583#ifdef HAVE_cc0
584 rtx q;
585#endif
15a63be1
RK
586
587 /* Set P to the first jump insn that goes around "x = a;". */
588 for (p = temp; nuses && p; p = prev_nonnote_insn (p))
589 {
590 if (GET_CODE (p) == JUMP_INSN)
591 {
592 if (condjump_p (p) && ! simplejump_p (p)
593 && JUMP_LABEL (p) == target)
594 {
595 nuses--;
596 if (nuses == 0)
597 break;
598 }
599 else
600 break;
601 }
602 else if (GET_CODE (p) == CALL_INSN)
603 break;
604 }
605
606#ifdef HAVE_cc0
607 /* We cannot insert anything between a set of cc and its use
608 so if P uses cc0, we must back up to the previous insn. */
609 q = prev_nonnote_insn (p);
610 if (q && GET_RTX_CLASS (GET_CODE (q)) == 'i'
611 && sets_cc0_p (PATTERN (q)))
612 p = q;
613#endif
614
615 if (p)
616 p = PREV_INSN (p);
617
618 /* If we found all the uses and there was no data conflict, we
619 can move the assignment unless we can branch into the middle
620 from somewhere. */
621 if (nuses == 0 && p
622 && no_labels_between_p (p, insn)
623 && ! reg_referenced_between_p (temp1, p, NEXT_INSN (temp3))
624 && ! reg_set_between_p (temp1, p, temp3)
2156dfe3 625 && (GET_CODE (SET_SRC (temp4)) == CONST_INT
8127f74f
JW
626 || ! modified_between_p (SET_SRC (temp4), p, temp2))
627 /* Verify that registers used by the jump are not clobbered
628 by the instruction being moved. */
1f5fb51f
HB
629 && ! regs_set_between_p (PATTERN (temp),
630 PREV_INSN (temp2),
8127f74f 631 NEXT_INSN (temp2)))
15a63be1 632 {
2156dfe3
RK
633 emit_insn_after_with_line_notes (PATTERN (temp2), p, temp2);
634 delete_insn (temp2);
15a63be1
RK
635
636 /* Set NEXT to an insn that we know won't go away. */
637 next = next_active_insn (insn);
638
639 /* Delete the jump around the set. Note that we must do
640 this before we redirect the test jumps so that it won't
641 delete the code immediately following the assignment
642 we moved (which might be a jump). */
643
644 delete_insn (insn);
645
646 /* We either have two consecutive labels or a jump to
647 a jump, so adjust all the JUMP_INSNs to branch to where
648 INSN branches to. */
649 for (p = NEXT_INSN (p); p != next; p = NEXT_INSN (p))
650 if (GET_CODE (p) == JUMP_INSN)
651 redirect_jump (p, target);
652
653 changed = 1;
c5c76735 654 next = NEXT_INSN (insn);
15a63be1
RK
655 continue;
656 }
657 }
658
66bd9361
JW
659 /* Simplify if (...) { x = a; goto l; } x = b; by converting it
660 to x = a; if (...) goto l; x = b;
661 if A is sufficiently simple, the test doesn't involve X,
662 and nothing in the test modifies A or X.
663
664 If we have small register classes, we also can't do this if X
665 is a hard register.
666
667 If the "x = a;" insn has any REG_NOTES, we don't do this because
668 of the possibility that we are running after CSE and there is a
669 REG_EQUAL note that is only valid if the branch has already been
670 taken. If we move the insn with the REG_EQUAL note, we may
671 fold the comparison to always be false in a later CSE pass.
672 (We could also delete the REG_NOTES when moving the insn, but it
673 seems simpler to not move it.) An exception is that we can move
674 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
675 value is the same as "a".
676
677 INSN is the goto.
678
679 We set:
680
681 TEMP to the jump insn preceding "x = a;"
682 TEMP1 to X
683 TEMP2 to the insn that sets "x = b;"
684 TEMP3 to the insn that sets "x = a;"
685 TEMP4 to the set of "x = a"; */
686
687 if (this_is_simplejump
688 && (temp2 = next_active_insn (insn)) != 0
689 && GET_CODE (temp2) == INSN
690 && (temp4 = single_set (temp2)) != 0
691 && GET_CODE (temp1 = SET_DEST (temp4)) == REG
f95182a4
ILT
692 && (! SMALL_REGISTER_CLASSES
693 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
66bd9361
JW
694 && (temp3 = prev_active_insn (insn)) != 0
695 && GET_CODE (temp3) == INSN
696 && (temp4 = single_set (temp3)) != 0
697 && rtx_equal_p (SET_DEST (temp4), temp1)
0bef9b8d
RH
698 && ! side_effects_p (SET_SRC (temp4))
699 && ! may_trap_p (SET_SRC (temp4))
66bd9361
JW
700 && (REG_NOTES (temp3) == 0
701 || ((REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUAL
702 || REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUIV)
703 && XEXP (REG_NOTES (temp3), 1) == 0
704 && rtx_equal_p (XEXP (REG_NOTES (temp3), 0),
705 SET_SRC (temp4))))
706 && (temp = prev_active_insn (temp3)) != 0
707 && condjump_p (temp) && ! simplejump_p (temp)
708 /* TEMP must skip over the "x = a;" insn */
709 && prev_real_insn (JUMP_LABEL (temp)) == insn
710 && no_labels_between_p (temp, insn))
711 {
712 rtx prev_label = JUMP_LABEL (temp);
713 rtx insert_after = prev_nonnote_insn (temp);
714
715#ifdef HAVE_cc0
716 /* We cannot insert anything between a set of cc and its use. */
717 if (insert_after && GET_RTX_CLASS (GET_CODE (insert_after)) == 'i'
718 && sets_cc0_p (PATTERN (insert_after)))
719 insert_after = prev_nonnote_insn (insert_after);
720#endif
721 ++LABEL_NUSES (prev_label);
722
723 if (insert_after
724 && no_labels_between_p (insert_after, temp)
7a7233ff 725 && ! reg_referenced_between_p (temp1, insert_after, temp3)
04bd0246
DE
726 && ! reg_referenced_between_p (temp1, temp3,
727 NEXT_INSN (temp2))
66bd9361 728 && ! reg_set_between_p (temp1, insert_after, temp)
0bef9b8d 729 && ! modified_between_p (SET_SRC (temp4), insert_after, temp)
8127f74f
JW
730 /* Verify that registers used by the jump are not clobbered
731 by the instruction being moved. */
1f5fb51f
HB
732 && ! regs_set_between_p (PATTERN (temp),
733 PREV_INSN (temp3),
8127f74f 734 NEXT_INSN (temp3))
66bd9361
JW
735 && invert_jump (temp, JUMP_LABEL (insn)))
736 {
737 emit_insn_after_with_line_notes (PATTERN (temp3),
738 insert_after, temp3);
739 delete_insn (temp3);
740 delete_insn (insn);
741 /* Set NEXT to an insn that we know won't go away. */
742 next = temp2;
743 changed = 1;
744 }
745 if (prev_label && --LABEL_NUSES (prev_label) == 0)
746 delete_insn (prev_label);
747 if (changed)
748 continue;
749 }
750
c5c76735
JL
751#if !defined(HAVE_cc0) && !defined(HAVE_conditional_arithmetic)
752
2156dfe3
RK
753 /* If we have if (...) x = exp; and branches are expensive,
754 EXP is a single insn, does not have any side effects, cannot
755 trap, and is not too costly, convert this to
756 t = exp; if (...) x = t;
757
758 Don't do this when we have CC0 because it is unlikely to help
759 and we'd need to worry about where to place the new insn and
760 the potential for conflicts. We also can't do this when we have
761 notes on the insn for the same reason as above.
762
c5c76735
JL
763 If we have conditional arithmetic, this will make this
764 harder to optimize later and isn't needed, so don't do it
765 in that case either.
766
2156dfe3
RK
767 We set:
768
769 TEMP to the "x = exp;" insn.
f903b91f 770 TEMP1 to the single set in the "x = exp;" insn.
2156dfe3
RK
771 TEMP2 to "x". */
772
773 if (! reload_completed
774 && this_is_condjump && ! this_is_simplejump
775 && BRANCH_COST >= 3
776 && (temp = next_nonnote_insn (insn)) != 0
a8fc41af 777 && GET_CODE (temp) == INSN
2156dfe3
RK
778 && REG_NOTES (temp) == 0
779 && (reallabelprev == temp
780 || ((temp2 = next_active_insn (temp)) != 0
781 && simplejump_p (temp2)
782 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
783 && (temp1 = single_set (temp)) != 0
784 && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
f95182a4
ILT
785 && (! SMALL_REGISTER_CLASSES
786 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
2156dfe3
RK
787 && GET_CODE (SET_SRC (temp1)) != REG
788 && GET_CODE (SET_SRC (temp1)) != SUBREG
789 && GET_CODE (SET_SRC (temp1)) != CONST_INT
790 && ! side_effects_p (SET_SRC (temp1))
791 && ! may_trap_p (SET_SRC (temp1))
97fa962f 792 && rtx_cost (SET_SRC (temp1), SET) < 10)
2156dfe3
RK
793 {
794 rtx new = gen_reg_rtx (GET_MODE (temp2));
795
956d6950
JL
796 if ((temp3 = find_insert_position (insn, temp))
797 && validate_change (temp, &SET_DEST (temp1), new, 0))
2156dfe3
RK
798 {
799 next = emit_insn_after (gen_move_insn (temp2, new), insn);
800 emit_insn_after_with_line_notes (PATTERN (temp),
956d6950 801 PREV_INSN (temp3), temp);
2156dfe3 802 delete_insn (temp);
5954c8a8 803 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
f903b91f
DM
804
805 if (after_regscan)
806 {
807 reg_scan_update (temp3, NEXT_INSN (next), old_max_reg);
808 old_max_reg = max_reg_num ();
809 }
2156dfe3
RK
810 }
811 }
812
813 /* Similarly, if it takes two insns to compute EXP but they
814 have the same destination. Here TEMP3 will be the second
815 insn and TEMP4 the SET from that insn. */
816
817 if (! reload_completed
818 && this_is_condjump && ! this_is_simplejump
819 && BRANCH_COST >= 4
820 && (temp = next_nonnote_insn (insn)) != 0
a8fc41af 821 && GET_CODE (temp) == INSN
2156dfe3
RK
822 && REG_NOTES (temp) == 0
823 && (temp3 = next_nonnote_insn (temp)) != 0
a8fc41af 824 && GET_CODE (temp3) == INSN
2156dfe3
RK
825 && REG_NOTES (temp3) == 0
826 && (reallabelprev == temp3
827 || ((temp2 = next_active_insn (temp3)) != 0
828 && simplejump_p (temp2)
829 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
830 && (temp1 = single_set (temp)) != 0
831 && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
832 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
f95182a4
ILT
833 && (! SMALL_REGISTER_CLASSES
834 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
2156dfe3
RK
835 && ! side_effects_p (SET_SRC (temp1))
836 && ! may_trap_p (SET_SRC (temp1))
97fa962f 837 && rtx_cost (SET_SRC (temp1), SET) < 10
2156dfe3
RK
838 && (temp4 = single_set (temp3)) != 0
839 && rtx_equal_p (SET_DEST (temp4), temp2)
840 && ! side_effects_p (SET_SRC (temp4))
841 && ! may_trap_p (SET_SRC (temp4))
97fa962f 842 && rtx_cost (SET_SRC (temp4), SET) < 10)
2156dfe3
RK
843 {
844 rtx new = gen_reg_rtx (GET_MODE (temp2));
845
956d6950
JL
846 if ((temp5 = find_insert_position (insn, temp))
847 && (temp6 = find_insert_position (insn, temp3))
848 && validate_change (temp, &SET_DEST (temp1), new, 0))
2156dfe3 849 {
956d6950
JL
850 /* Use the earliest of temp5 and temp6. */
851 if (temp5 != insn)
852 temp6 = temp5;
2156dfe3
RK
853 next = emit_insn_after (gen_move_insn (temp2, new), insn);
854 emit_insn_after_with_line_notes (PATTERN (temp),
956d6950 855 PREV_INSN (temp6), temp);
2156dfe3
RK
856 emit_insn_after_with_line_notes
857 (replace_rtx (PATTERN (temp3), temp2, new),
956d6950 858 PREV_INSN (temp6), temp3);
2156dfe3
RK
859 delete_insn (temp);
860 delete_insn (temp3);
5954c8a8 861 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
f903b91f
DM
862
863 if (after_regscan)
864 {
865 reg_scan_update (temp6, NEXT_INSN (next), old_max_reg);
866 old_max_reg = max_reg_num ();
867 }
2156dfe3
RK
868 }
869 }
870
871 /* Finally, handle the case where two insns are used to
872 compute EXP but a temporary register is used. Here we must
0f41302f 873 ensure that the temporary register is not used anywhere else. */
2156dfe3
RK
874
875 if (! reload_completed
876 && after_regscan
877 && this_is_condjump && ! this_is_simplejump
878 && BRANCH_COST >= 4
879 && (temp = next_nonnote_insn (insn)) != 0
a8fc41af 880 && GET_CODE (temp) == INSN
2156dfe3
RK
881 && REG_NOTES (temp) == 0
882 && (temp3 = next_nonnote_insn (temp)) != 0
a8fc41af 883 && GET_CODE (temp3) == INSN
2156dfe3
RK
884 && REG_NOTES (temp3) == 0
885 && (reallabelprev == temp3
886 || ((temp2 = next_active_insn (temp3)) != 0
887 && simplejump_p (temp2)
888 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
889 && (temp1 = single_set (temp)) != 0
c03c4711
RK
890 && (temp5 = SET_DEST (temp1),
891 (GET_CODE (temp5) == REG
892 || (GET_CODE (temp5) == SUBREG
893 && (temp5 = SUBREG_REG (temp5),
894 GET_CODE (temp5) == REG))))
2156dfe3 895 && REGNO (temp5) >= FIRST_PSEUDO_REGISTER
b1f21e0a
MM
896 && REGNO_FIRST_UID (REGNO (temp5)) == INSN_UID (temp)
897 && REGNO_LAST_UID (REGNO (temp5)) == INSN_UID (temp3)
2156dfe3
RK
898 && ! side_effects_p (SET_SRC (temp1))
899 && ! may_trap_p (SET_SRC (temp1))
97fa962f 900 && rtx_cost (SET_SRC (temp1), SET) < 10
2156dfe3
RK
901 && (temp4 = single_set (temp3)) != 0
902 && (temp2 = SET_DEST (temp4), GET_CODE (temp2) == REG)
903 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
f95182a4
ILT
904 && (! SMALL_REGISTER_CLASSES
905 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
2156dfe3
RK
906 && rtx_equal_p (SET_DEST (temp4), temp2)
907 && ! side_effects_p (SET_SRC (temp4))
908 && ! may_trap_p (SET_SRC (temp4))
97fa962f 909 && rtx_cost (SET_SRC (temp4), SET) < 10)
2156dfe3
RK
910 {
911 rtx new = gen_reg_rtx (GET_MODE (temp2));
912
956d6950
JL
913 if ((temp5 = find_insert_position (insn, temp))
914 && (temp6 = find_insert_position (insn, temp3))
915 && validate_change (temp3, &SET_DEST (temp4), new, 0))
2156dfe3 916 {
956d6950
JL
917 /* Use the earliest of temp5 and temp6. */
918 if (temp5 != insn)
919 temp6 = temp5;
2156dfe3
RK
920 next = emit_insn_after (gen_move_insn (temp2, new), insn);
921 emit_insn_after_with_line_notes (PATTERN (temp),
956d6950 922 PREV_INSN (temp6), temp);
2156dfe3 923 emit_insn_after_with_line_notes (PATTERN (temp3),
956d6950 924 PREV_INSN (temp6), temp3);
2156dfe3
RK
925 delete_insn (temp);
926 delete_insn (temp3);
5954c8a8 927 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
f903b91f
DM
928
929 if (after_regscan)
930 {
931 reg_scan_update (temp6, NEXT_INSN (next), old_max_reg);
932 old_max_reg = max_reg_num ();
933 }
2156dfe3
RK
934 }
935 }
936#endif /* HAVE_cc0 */
937
c5c76735 938#ifdef HAVE_conditional_arithmetic
2ee2956a
RH
939 /* ??? This is disabled in genconfig, as this simple-minded
940 transformation can incredibly lengthen register lifetimes.
941
86702e31 942 Consider this example:
2ee2956a
RH
943
944 234 (set (pc)
945 (if_then_else (ne (reg:DI 149) (const_int 0 [0x0]))
946 (label_ref 248) (pc)))
947 237 (set (reg/i:DI 0 $0) (const_int 1 [0x1]))
948 239 (set (pc) (label_ref 2382))
949 248 (code_label ("yybackup"))
950
951 This will be transformed to:
952
953 237 (set (reg/i:DI 0 $0)
954 (if_then_else:DI (eq (reg:DI 149) (const_int 0 [0x0]))
955 (const_int 1 [0x1]) (reg/i:DI 0 $0)))
956 239 (set (pc)
957 (if_then_else (eq (reg:DI 149) (const_int 0 [0x0]))
958 (label_ref 2382) (pc)))
959
960 which, from this narrow viewpoint looks fine. Except that
961 between this and 3 other ocurrences of the same pattern, $0
962 is now live for basically the entire function, and we'll
963 get an abort in caller_save.
964
965 Any replacement for this code should recall that a set of
966 a register that is not live need not, and indeed should not,
967 be conditionalized. Either that, or delay the transformation
968 until after register allocation. */
969
c5c76735
JL
970 /* See if this is a conditional jump around a small number of
971 instructions that we can conditionalize. Don't do this before
972 the initial CSE pass or after reload.
973
974 We reject any insns that have side effects or may trap.
975 Strictly speaking, this is not needed since the machine may
976 support conditionalizing these too, but we won't deal with that
977 now. Specifically, this means that we can't conditionalize a
978 CALL_INSN, which some machines, such as the ARC, can do, but
979 this is a very minor optimization. */
980 if (this_is_condjump && ! this_is_simplejump
d29c259b 981 && cse_not_expected && ! reload_completed
c5c76735
JL
982 && BRANCH_COST > 2
983 && can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (insn)), 0),
984 insn))
985 {
986 rtx ourcond = XEXP (SET_SRC (PATTERN (insn)), 0);
987 int num_insns = 0;
988 char *storage = (char *) oballoc (0);
989 int last_insn = 0, failed = 0;
990 rtx changed_jump = 0;
991
992 ourcond = gen_rtx (reverse_condition (GET_CODE (ourcond)),
993 VOIDmode, XEXP (ourcond, 0),
994 XEXP (ourcond, 1));
995
996 /* Scan forward BRANCH_COST real insns looking for the JUMP_LABEL
997 of this insn. We see if we think we can conditionalize the
998 insns we pass. For now, we only deal with insns that have
999 one SET. We stop after an insn that modifies anything in
1000 OURCOND, if we have too many insns, or if we have an insn
1001 with a side effect or that may trip. Note that we will
1002 be modifying any unconditional jumps we encounter to be
1003 conditional; this will have the effect of also doing this
1004 optimization on the "else" the next time around. */
1005 for (temp1 = NEXT_INSN (insn);
1006 num_insns <= BRANCH_COST && ! failed && temp1 != 0
1007 && GET_CODE (temp1) != CODE_LABEL;
1008 temp1 = NEXT_INSN (temp1))
1009 {
1010 /* Ignore everything but an active insn. */
1011 if (GET_RTX_CLASS (GET_CODE (temp1)) != 'i'
1012 || GET_CODE (PATTERN (temp1)) == USE
1013 || GET_CODE (PATTERN (temp1)) == CLOBBER)
1014 continue;
1015
1016 /* If this was an unconditional jump, record it since we'll
1017 need to remove the BARRIER if we succeed. We can only
1018 have one such jump since there must be a label after
1019 the BARRIER and it's either ours, in which case it's the
14a774a9
RK
1020 only one or some other, in which case we'd fail.
1021 Likewise if it's a CALL_INSN followed by a BARRIER. */
c5c76735 1022
14a774a9
RK
1023 if (simplejump_p (temp1)
1024 || (GET_CODE (temp1) == CALL_INSN
1025 && NEXT_INSN (temp1) != 0
1026 && GET_CODE (NEXT_INSN (temp1)) == BARRIER))
1027 {
1028 if (changed_jump == 0)
1029 changed_jump = temp1;
1030 else
1031 changed_jump
1032 = gen_rtx_INSN_LIST (VOIDmode, temp1, changed_jump);
1033 }
c5c76735
JL
1034
1035 /* See if we are allowed another insn and if this insn
1036 if one we think we may be able to handle. */
1037 if (++num_insns > BRANCH_COST
1038 || last_insn
14a774a9
RK
1039 || (((temp2 = single_set (temp1)) == 0
1040 || side_effects_p (SET_SRC (temp2))
1041 || may_trap_p (SET_SRC (temp2)))
1042 && GET_CODE (temp1) != CALL_INSN))
1043 failed = 1;
1044 else if (temp2 != 0)
c5c76735
JL
1045 validate_change (temp1, &SET_SRC (temp2),
1046 gen_rtx_IF_THEN_ELSE
1047 (GET_MODE (SET_DEST (temp2)),
1048 copy_rtx (ourcond),
1049 SET_SRC (temp2), SET_DEST (temp2)),
1050 1);
14a774a9
RK
1051 else
1052 {
1053 /* This is a CALL_INSN that doesn't have a SET. */
1054 rtx *call_loc = &PATTERN (temp1);
1055
1056 if (GET_CODE (*call_loc) == PARALLEL)
1057 call_loc = &XVECEXP (*call_loc, 0, 0);
1058
1059 validate_change (temp1, call_loc,
1060 gen_rtx_IF_THEN_ELSE
1061 (VOIDmode, copy_rtx (ourcond),
1062 *call_loc, const0_rtx),
1063 1);
1064 }
1065
c5c76735
JL
1066
1067 if (modified_in_p (ourcond, temp1))
1068 last_insn = 1;
1069 }
1070
1071 /* If we've reached our jump label, haven't failed, and all
1072 the changes above are valid, we can delete this jump
1073 insn. Also remove a BARRIER after any jump that used
1074 to be unconditional and remove any REG_EQUAL or REG_EQUIV
1075 that might have previously been present on insns we
1076 made conditional. */
1077 if (temp1 == JUMP_LABEL (insn) && ! failed
1078 && apply_change_group ())
1079 {
1080 for (temp1 = NEXT_INSN (insn); temp1 != JUMP_LABEL (insn);
1081 temp1 = NEXT_INSN (temp1))
1082 if (GET_RTX_CLASS (GET_CODE (temp1)) == 'i')
1083 for (temp2 = REG_NOTES (temp1); temp2 != 0;
1084 temp2 = XEXP (temp2, 1))
1085 if (REG_NOTE_KIND (temp2) == REG_EQUAL
1086 || REG_NOTE_KIND (temp2) == REG_EQUIV)
1087 remove_note (temp1, temp2);
1088
1089 if (changed_jump != 0)
1090 {
14a774a9
RK
1091 while (GET_CODE (changed_jump) == INSN_LIST)
1092 {
1093 delete_barrier (NEXT_INSN (XEXP (changed_jump, 0)));
1094 changed_jump = XEXP (changed_jump, 1);
1095 }
c5c76735 1096
14a774a9 1097 delete_barrier (NEXT_INSN (changed_jump));
c5c76735
JL
1098 }
1099
1100 delete_insn (insn);
1101 changed = 1;
1102 continue;
1103 }
1104 else
1105 {
1106 cancel_changes (0);
1107 obfree (storage);
1108 }
1109 }
1110#endif
f5da5c87
JH
1111 /* If branches are expensive, convert
1112 if (foo) bar++; to bar += (foo != 0);
1113 and similarly for "bar--;"
1114
1115 INSN is the conditional branch around the arithmetic. We set:
1116
1117 TEMP is the arithmetic insn.
1118 TEMP1 is the SET doing the arithmetic.
1119 TEMP2 is the operand being incremented or decremented.
1120 TEMP3 to the condition being tested.
1121 TEMP4 to the earliest insn used to find the condition. */
1122
1123 if ((BRANCH_COST >= 2
1124#ifdef HAVE_incscc
1125 || HAVE_incscc
1126#endif
1127#ifdef HAVE_decscc
1128 || HAVE_decscc
1129#endif
1130 )
1131 && ! reload_completed
1132 && this_is_condjump && ! this_is_simplejump
1133 && (temp = next_nonnote_insn (insn)) != 0
1134 && (temp1 = single_set (temp)) != 0
1135 && (temp2 = SET_DEST (temp1),
1136 GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT)
1137 && GET_CODE (SET_SRC (temp1)) == PLUS
1138 && (XEXP (SET_SRC (temp1), 1) == const1_rtx
1139 || XEXP (SET_SRC (temp1), 1) == constm1_rtx)
1140 && rtx_equal_p (temp2, XEXP (SET_SRC (temp1), 0))
1141 && ! side_effects_p (temp2)
1142 && ! may_trap_p (temp2)
1143 /* INSN must either branch to the insn after TEMP or the insn
1144 after TEMP must branch to the same place as INSN. */
1145 && (reallabelprev == temp
1146 || ((temp3 = next_active_insn (temp)) != 0
1147 && simplejump_p (temp3)
1148 && JUMP_LABEL (temp3) == JUMP_LABEL (insn)))
1149 && (temp3 = get_condition (insn, &temp4)) != 0
1150 /* We must be comparing objects whose modes imply the size.
1151 We could handle BLKmode if (1) emit_store_flag could
1152 and (2) we could find the size reliably. */
1153 && GET_MODE (XEXP (temp3, 0)) != BLKmode
1154 && can_reverse_comparison_p (temp3, insn))
1155 {
1156 rtx temp6, target = 0, seq, init_insn = 0, init = temp2;
1157 enum rtx_code code = reverse_condition (GET_CODE (temp3));
1158
1159 start_sequence ();
1160
1161 /* It must be the case that TEMP2 is not modified in the range
1162 [TEMP4, INSN). The one exception we make is if the insn
1163 before INSN sets TEMP2 to something which is also unchanged
1164 in that range. In that case, we can move the initialization
1165 into our sequence. */
1166
1167 if ((temp5 = prev_active_insn (insn)) != 0
1168 && no_labels_between_p (temp5, insn)
1169 && GET_CODE (temp5) == INSN
1170 && (temp6 = single_set (temp5)) != 0
1171 && rtx_equal_p (temp2, SET_DEST (temp6))
1172 && (CONSTANT_P (SET_SRC (temp6))
1173 || GET_CODE (SET_SRC (temp6)) == REG
1174 || GET_CODE (SET_SRC (temp6)) == SUBREG))
1175 {
1176 emit_insn (PATTERN (temp5));
1177 init_insn = temp5;
1178 init = SET_SRC (temp6);
1179 }
1180
1181 if (CONSTANT_P (init)
1182 || ! reg_set_between_p (init, PREV_INSN (temp4), insn))
1183 target = emit_store_flag (gen_reg_rtx (GET_MODE (temp2)), code,
1184 XEXP (temp3, 0), XEXP (temp3, 1),
1185 VOIDmode,
1186 (code == LTU || code == LEU
1187 || code == GTU || code == GEU), 1);
1188
1189 /* If we can do the store-flag, do the addition or
1190 subtraction. */
1191
1192 if (target)
1193 target = expand_binop (GET_MODE (temp2),
1194 (XEXP (SET_SRC (temp1), 1) == const1_rtx
1195 ? add_optab : sub_optab),
1196 temp2, target, temp2, 0, OPTAB_WIDEN);
1197
1198 if (target != 0)
1199 {
1200 /* Put the result back in temp2 in case it isn't already.
1201 Then replace the jump, possible a CC0-setting insn in
1202 front of the jump, and TEMP, with the sequence we have
1203 made. */
1204
1205 if (target != temp2)
1206 emit_move_insn (temp2, target);
1207
1208 seq = get_insns ();
1209 end_sequence ();
1210
1211 emit_insns_before (seq, temp4);
1212 delete_insn (temp);
1213
1214 if (init_insn)
1215 delete_insn (init_insn);
1216
1217 next = NEXT_INSN (insn);
1218#ifdef HAVE_cc0
1219 delete_insn (prev_nonnote_insn (insn));
1220#endif
1221 delete_insn (insn);
1222
1223 if (after_regscan)
1224 {
1225 reg_scan_update (seq, NEXT_INSN (next), old_max_reg);
1226 old_max_reg = max_reg_num ();
1227 }
1228
1229 changed = 1;
1230 continue;
1231 }
1232 else
1233 end_sequence ();
1234 }
c5c76735 1235
7209f1f9 1236 /* Try to use a conditional move (if the target has them), or a
c5c76735
JL
1237 store-flag insn. If the target has conditional arithmetic as
1238 well as conditional move, the above code will have done something.
1239 Note that we prefer the above code since it is more general: the
1240 code below can make changes that require work to undo.
1241
1242 The general case here is:
7209f1f9
DE
1243
1244 1) x = a; if (...) x = b; and
1245 2) if (...) x = b;
1246
1247 If the jump would be faster, the machine should not have defined
1248 the movcc or scc insns!. These cases are often made by the
3915de94 1249 previous optimization.
15a63be1 1250
7209f1f9
DE
1251 The second case is treated as x = x; if (...) x = b;.
1252
15a63be1
RK
1253 INSN here is the jump around the store. We set:
1254
1f081ffb 1255 TEMP to the "x op= b;" insn.
15a63be1 1256 TEMP1 to X.
7209f1f9 1257 TEMP2 to B.
15a63be1
RK
1258 TEMP3 to A (X in the second case).
1259 TEMP4 to the condition being tested.
1f081ffb
RH
1260 TEMP5 to the earliest insn used to find the condition.
1261 TEMP6 to the SET of TEMP. */
15a63be1
RK
1262
1263 if (/* We can't do this after reload has completed. */
1264 ! reload_completed
c5c76735
JL
1265#ifdef HAVE_conditional_arithmetic
1266 /* Defer this until after CSE so the above code gets the
1267 first crack at it. */
1268 && cse_not_expected
1269#endif
15a63be1
RK
1270 && this_is_condjump && ! this_is_simplejump
1271 /* Set TEMP to the "x = b;" insn. */
1272 && (temp = next_nonnote_insn (insn)) != 0
1273 && GET_CODE (temp) == INSN
1f081ffb
RH
1274 && (temp6 = single_set (temp)) != NULL_RTX
1275 && GET_CODE (temp1 = SET_DEST (temp6)) == REG
f95182a4
ILT
1276 && (! SMALL_REGISTER_CLASSES
1277 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
1f081ffb 1278 && ! side_effects_p (temp2 = SET_SRC (temp6))
0bef9b8d 1279 && ! may_trap_p (temp2)
2156dfe3
RK
1280 /* Allow either form, but prefer the former if both apply.
1281 There is no point in using the old value of TEMP1 if
1282 it is a register, since cse will alias them. It can
1283 lose if the old value were a hard register since CSE
c85f7c16
JL
1284 won't replace hard registers. Avoid using TEMP3 if
1285 small register classes and it is a hard register. */
1286 && (((temp3 = reg_set_last (temp1, insn)) != 0
1287 && ! (SMALL_REGISTER_CLASSES && GET_CODE (temp3) == REG
1288 && REGNO (temp3) < FIRST_PSEUDO_REGISTER))
7209f1f9
DE
1289 /* Make the latter case look like x = x; if (...) x = b; */
1290 || (temp3 = temp1, 1))
15a63be1
RK
1291 /* INSN must either branch to the insn after TEMP or the insn
1292 after TEMP must branch to the same place as INSN. */
1293 && (reallabelprev == temp
1294 || ((temp4 = next_active_insn (temp)) != 0
1295 && simplejump_p (temp4)
1296 && JUMP_LABEL (temp4) == JUMP_LABEL (insn)))
1297 && (temp4 = get_condition (insn, &temp5)) != 0
7124e1e5
RS
1298 /* We must be comparing objects whose modes imply the size.
1299 We could handle BLKmode if (1) emit_store_flag could
1300 and (2) we could find the size reliably. */
1301 && GET_MODE (XEXP (temp4, 0)) != BLKmode
04d23d7c
RK
1302 /* Even if branches are cheap, the store_flag optimization
1303 can win when the operation to be performed can be
1304 expressed directly. */
2156dfe3
RK
1305#ifdef HAVE_cc0
1306 /* If the previous insn sets CC0 and something else, we can't
1307 do this since we are going to delete that insn. */
1308
1309 && ! ((temp6 = prev_nonnote_insn (insn)) != 0
1310 && GET_CODE (temp6) == INSN
01ca1b91
RK
1311 && (sets_cc0_p (PATTERN (temp6)) == -1
1312 || (sets_cc0_p (PATTERN (temp6)) == 1
1313 && FIND_REG_INC_NOTE (temp6, NULL_RTX))))
2156dfe3
RK
1314#endif
1315 )
15a63be1 1316 {
7209f1f9
DE
1317#ifdef HAVE_conditional_move
1318 /* First try a conditional move. */
1319 {
1320 enum rtx_code code = GET_CODE (temp4);
1321 rtx var = temp1;
1322 rtx cond0, cond1, aval, bval;
1f081ffb 1323 rtx target, new_insn;
7209f1f9
DE
1324
1325 /* Copy the compared variables into cond0 and cond1, so that
1326 any side effects performed in or after the old comparison,
1327 will not affect our compare which will come later. */
1328 /* ??? Is it possible to just use the comparison in the jump
1329 insn? After all, we're going to delete it. We'd have
1330 to modify emit_conditional_move to take a comparison rtx
1331 instead or write a new function. */
144a5f9d 1332
7209f1f9
DE
1333 /* We want the target to be able to simplify comparisons with
1334 zero (and maybe other constants as well), so don't create
1335 pseudos for them. There's no need to either. */
144a5f9d
JL
1336 if (GET_CODE (XEXP (temp4, 0)) == CONST_INT
1337 || GET_CODE (XEXP (temp4, 0)) == CONST_DOUBLE)
1338 cond0 = XEXP (temp4, 0);
1339 else
1340 cond0 = gen_reg_rtx (GET_MODE (XEXP (temp4, 0)));
1341
7209f1f9
DE
1342 if (GET_CODE (XEXP (temp4, 1)) == CONST_INT
1343 || GET_CODE (XEXP (temp4, 1)) == CONST_DOUBLE)
1344 cond1 = XEXP (temp4, 1);
1345 else
1346 cond1 = gen_reg_rtx (GET_MODE (XEXP (temp4, 1)));
1347
1f081ffb
RH
1348 /* Careful about copying these values -- an IOR or what may
1349 need to do other things, like clobber flags. */
1350 /* ??? Assume for the moment that AVAL is ok. */
7209f1f9 1351 aval = temp3;
7209f1f9
DE
1352
1353 start_sequence ();
1f081ffb 1354
39cc9917
RH
1355 /* We're dealing with a single_set insn with no side effects
1356 on SET_SRC. We do need to be reasonably certain that if
1357 we need to force BVAL into a register that we won't
1358 clobber the flags -- general_operand should suffice. */
1359 if (general_operand (temp2, GET_MODE (var)))
1f081ffb
RH
1360 bval = temp2;
1361 else
1362 {
1363 bval = gen_reg_rtx (GET_MODE (var));
1364 new_insn = copy_rtx (temp);
1365 temp6 = single_set (new_insn);
1366 SET_DEST (temp6) = bval;
1367 emit_insn (PATTERN (new_insn));
1368 }
39cc9917 1369
7209f1f9
DE
1370 target = emit_conditional_move (var, code,
1371 cond0, cond1, VOIDmode,
1372 aval, bval, GET_MODE (var),
1373 (code == LTU || code == GEU
1374 || code == LEU || code == GTU));
1375
1376 if (target)
1377 {
9da8a3ae
RH
1378 rtx seq1, seq2, last;
1379 int copy_ok;
7209f1f9
DE
1380
1381 /* Save the conditional move sequence but don't emit it
1382 yet. On some machines, like the alpha, it is possible
1383 that temp5 == insn, so next generate the sequence that
1384 saves the compared values and then emit both
1385 sequences ensuring seq1 occurs before seq2. */
1386 seq2 = get_insns ();
1387 end_sequence ();
1388
9da8a3ae
RH
1389 /* "Now that we can't fail..." Famous last words.
1390 Generate the copy insns that preserve the compared
1391 values. */
7209f1f9
DE
1392 start_sequence ();
1393 emit_move_insn (cond0, XEXP (temp4, 0));
1394 if (cond1 != XEXP (temp4, 1))
1395 emit_move_insn (cond1, XEXP (temp4, 1));
1396 seq1 = get_insns ();
1397 end_sequence ();
1398
9da8a3ae
RH
1399 /* Validate the sequence -- this may be some weird
1400 bit-extract-and-test instruction for which there
1401 exists no complimentary bit-extract insn. */
1402 copy_ok = 1;
1403 for (last = seq1; last ; last = NEXT_INSN (last))
1404 if (recog_memoized (last) < 0)
1405 {
1406 copy_ok = 0;
1407 break;
1408 }
f903b91f 1409
9da8a3ae 1410 if (copy_ok)
f903b91f 1411 {
9da8a3ae
RH
1412 emit_insns_before (seq1, temp5);
1413
1414 /* Insert conditional move after insn, to be sure
1415 that the jump and a possible compare won't be
1416 separated. */
1417 last = emit_insns_after (seq2, insn);
1418
1419 /* ??? We can also delete the insn that sets X to A.
1420 Flow will do it too though. */
1421 delete_insn (temp);
1422 next = NEXT_INSN (insn);
1423 delete_jump (insn);
1424
1425 if (after_regscan)
1426 {
1427 reg_scan_update (seq1, NEXT_INSN (last),
1428 old_max_reg);
1429 old_max_reg = max_reg_num ();
1430 }
1431
1432 changed = 1;
1433 continue;
f903b91f 1434 }
7209f1f9
DE
1435 }
1436 else
2156dfe3 1437 end_sequence ();
7209f1f9
DE
1438 }
1439#endif
2156dfe3 1440
7209f1f9
DE
1441 /* That didn't work, try a store-flag insn.
1442
1443 We further divide the cases into:
1444
1445 1) x = a; if (...) x = b; and either A or B is zero,
1446 2) if (...) x = 0; and jumps are expensive,
1447 3) x = a; if (...) x = b; and A and B are constants where all
1448 the set bits in A are also set in B and jumps are expensive,
1449 4) x = a; if (...) x = b; and A and B non-zero, and jumps are
1450 more expensive, and
1451 5) if (...) x = b; if jumps are even more expensive. */
1452
1453 if (GET_MODE_CLASS (GET_MODE (temp1)) == MODE_INT
665853dc
BS
1454 /* We will be passing this as operand into expand_and. No
1455 good if it's not valid as an operand. */
1456 && general_operand (temp2, GET_MODE (temp2))
7209f1f9
DE
1457 && ((GET_CODE (temp3) == CONST_INT)
1458 /* Make the latter case look like
1459 x = x; if (...) x = 0; */
1460 || (temp3 = temp1,
1461 ((BRANCH_COST >= 2
1462 && temp2 == const0_rtx)
1463 || BRANCH_COST >= 3)))
1464 /* If B is zero, OK; if A is zero, can only do (1) if we
1465 can reverse the condition. See if (3) applies possibly
1466 by reversing the condition. Prefer reversing to (4) when
1467 branches are very expensive. */
04d23d7c
RK
1468 && (((BRANCH_COST >= 2
1469 || STORE_FLAG_VALUE == -1
1470 || (STORE_FLAG_VALUE == 1
1471 /* Check that the mask is a power of two,
1472 so that it can probably be generated
1473 with a shift. */
702d7434 1474 && GET_CODE (temp3) == CONST_INT
04d23d7c
RK
1475 && exact_log2 (INTVAL (temp3)) >= 0))
1476 && (reversep = 0, temp2 == const0_rtx))
1477 || ((BRANCH_COST >= 2
1478 || STORE_FLAG_VALUE == -1
1479 || (STORE_FLAG_VALUE == 1
702d7434 1480 && GET_CODE (temp2) == CONST_INT
04d23d7c
RK
1481 && exact_log2 (INTVAL (temp2)) >= 0))
1482 && temp3 == const0_rtx
7209f1f9
DE
1483 && (reversep = can_reverse_comparison_p (temp4, insn)))
1484 || (BRANCH_COST >= 2
1485 && GET_CODE (temp2) == CONST_INT
1486 && GET_CODE (temp3) == CONST_INT
1487 && ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp2)
1488 || ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp3)
1489 && (reversep = can_reverse_comparison_p (temp4,
1490 insn)))))
1491 || BRANCH_COST >= 3)
1492 )
1493 {
1494 enum rtx_code code = GET_CODE (temp4);
1495 rtx uval, cval, var = temp1;
1496 int normalizep;
1497 rtx target;
1498
1499 /* If necessary, reverse the condition. */
1500 if (reversep)
1501 code = reverse_condition (code), uval = temp2, cval = temp3;
1502 else
1503 uval = temp3, cval = temp2;
1504
1505 /* If CVAL is non-zero, normalize to -1. Otherwise, if UVAL
1506 is the constant 1, it is best to just compute the result
1507 directly. If UVAL is constant and STORE_FLAG_VALUE
1508 includes all of its bits, it is best to compute the flag
1509 value unnormalized and `and' it with UVAL. Otherwise,
1510 normalize to -1 and `and' with UVAL. */
1511 normalizep = (cval != const0_rtx ? -1
1512 : (uval == const1_rtx ? 1
1513 : (GET_CODE (uval) == CONST_INT
1514 && (INTVAL (uval) & ~STORE_FLAG_VALUE) == 0)
1515 ? 0 : -1));
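	 /* Illustrative examples of this choice (the constants are made up):
	    if CVAL == 0 and UVAL == 1, the 0/1 flag is the result itself
	    (normalizep == 1); if CVAL == 0, UVAL == 8 and STORE_FLAG_VALUE
	    is -1, the raw flag covers all of UVAL's bits and is simply
	    `and'ed with 8 (normalizep == 0); with STORE_FLAG_VALUE == 1 that
	    same UVAL forces normalization to -1 first (normalizep == -1).  */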
1516
1517 /* We will be putting the store-flag insn immediately in
1518 front of the comparison that was originally being done,
1519 so we know all the variables in TEMP4 will be valid.
1520 However, this might be in front of the assignment of
1521 A to VAR. If it is, it would clobber the store-flag
1522 we will be emitting.
1523
1524 Therefore, emit into a temporary which will be copied to
1525 VAR immediately after TEMP. */
2156dfe3
RK
1526
1527 start_sequence ();
7209f1f9
DE
1528 target = emit_store_flag (gen_reg_rtx (GET_MODE (var)), code,
1529 XEXP (temp4, 0), XEXP (temp4, 1),
1530 VOIDmode,
1531 (code == LTU || code == LEU
1532 || code == GEU || code == GTU),
1533 normalizep);
1534 if (target)
3915de94 1535 {
7209f1f9
DE
1536 rtx seq;
1537 rtx before = insn;
3915de94 1538
7209f1f9
DE
1539 seq = get_insns ();
1540 end_sequence ();
3915de94 1541
7209f1f9
DE
1542 /* Put the store-flag insns in front of the first insn
1543 used to compute the condition, to ensure that they
1544 see the same values as the current
1545 comparison. However, the remainder of the insns we
1546 generate will be placed directly in front of the
1547 jump insn, in case any of the pseudos we use
1548 are modified earlier. */
3915de94 1549
7209f1f9 1550 emit_insns_before (seq, temp5);
3915de94 1551
7209f1f9
DE
1552 start_sequence ();
1553
1554 /* Both CVAL and UVAL are non-zero. */
1555 if (cval != const0_rtx && uval != const0_rtx)
c71ebae3 1556 {
7209f1f9
DE
1557 rtx tem1, tem2;
1558
1559 tem1 = expand_and (uval, target, NULL_RTX);
1560 if (GET_CODE (cval) == CONST_INT
1561 && GET_CODE (uval) == CONST_INT
1562 && (INTVAL (cval) & INTVAL (uval)) == INTVAL (cval))
1563 tem2 = cval;
1564 else
1565 {
1566 tem2 = expand_unop (GET_MODE (var), one_cmpl_optab,
1567 target, NULL_RTX, 0);
1568 tem2 = expand_and (cval, tem2,
1569 (GET_CODE (tem2) == REG
1570 ? tem2 : 0));
1571 }
1572
1573 /* If we usually make new pseudos, do so here. This
1574 turns out to help machines that have conditional
1575 move insns. */
1576 /* ??? Conditional moves have already been handled.
1577 This may be obsolete. */
1578
1579 if (flag_expensive_optimizations)
1580 target = 0;
1581
1582 target = expand_binop (GET_MODE (var), ior_optab,
1583 tem1, tem2, target,
1584 1, OPTAB_WIDEN);
c71ebae3 1585 }
7209f1f9
DE
1586 else if (normalizep != 1)
1587 {
1588 /* We know that either CVAL or UVAL is zero. If
1589 UVAL is zero, negate TARGET and `and' with CVAL.
1590 Otherwise, `and' with UVAL. */
1591 if (uval == const0_rtx)
1592 {
1593 target = expand_unop (GET_MODE (var), one_cmpl_optab,
1594 target, NULL_RTX, 0);
1595 uval = cval;
1596 }
c71ebae3 1597
7209f1f9
DE
1598 target = expand_and (uval, target,
1599 (GET_CODE (target) == REG
1600 && ! preserve_subexpressions_p ()
1601 ? target : NULL_RTX));
1602 }
3915de94 1603
7209f1f9
DE
1604 emit_move_insn (var, target);
1605 seq = get_insns ();
1606 end_sequence ();
01ca1b91 1607#ifdef HAVE_cc0
7209f1f9
DE
1608 /* If INSN uses CC0, we must not separate it from the
1609 insn that sets cc0. */
1610 if (reg_mentioned_p (cc0_rtx, PATTERN (before)))
1611 before = prev_nonnote_insn (before);
01ca1b91 1612#endif
7209f1f9 1613 emit_insns_before (seq, before);
01ca1b91 1614
7209f1f9
DE
1615 delete_insn (temp);
1616 next = NEXT_INSN (insn);
1617 delete_jump (insn);
f903b91f
DM
1618
1619 if (after_regscan)
1620 {
1621 reg_scan_update (seq, NEXT_INSN (next), old_max_reg);
1622 old_max_reg = max_reg_num ();
1623 }
1624
7209f1f9
DE
1625 changed = 1;
1626 continue;
1627 }
1628 else
1629 end_sequence ();
15a63be1 1630 }
15a63be1
RK
1631 }
1632
15a63be1
RK
1633
1634 /* Simplify if (...) x = 1; else {...} if (x) ...
1635 We recognize this case scanning backwards as well.
1636
1637 TEMP is the assignment to x;
1638 TEMP1 is the label at the head of the second if. */
1639 /* ?? This should call get_condition to find the values being
1640 compared, instead of looking for a COMPARE insn when HAVE_cc0
1641 is not defined. This would allow it to work on the m88k. */
1642 /* ?? This optimization is only safe before cse is run if HAVE_cc0
1643 is not defined and the condition is tested by a separate compare
1644 insn. This is because the code below assumes that the result
1645 of the compare dies in the following branch.
1646
1647 Not only that, but there might be other insns between the
1648 compare and branch whose results are live. Those insns need
1649 to be executed.
1650
1651 A way to fix this is to move the insns at JUMP_LABEL (insn)
1652 to before INSN. If we are running before flow, they will
1653 be deleted if they aren't needed. But this doesn't work
1654 well after flow.
1655
1656 This is really a special-case of jump threading, anyway. The
1657 right thing to do is to replace this and jump threading with
1658 much simpler code in cse.
1659
1660 This code has been turned off in the non-cc0 case in the
1661 meantime. */
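 /* For illustration only (the labels L and M are hypothetical):

	x = 1;			<- TEMP, sets X to a known constant
	goto L;			<- INSN, the simplejump
	...
     L: if (x) goto M;		<- TEMP1, tests X

    Since X is known to be 1 whenever the jump at INSN is taken, the
    outcome of the test at L is known too, so INSN can be redirected
    past that test, straight to M or to its fall-through.  */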
1662
1663#ifdef HAVE_cc0
1664 else if (this_is_simplejump
1665 /* Safe to skip USE and CLOBBER insns here
1666 since they will not be deleted. */
1667 && (temp = prev_active_insn (insn))
1668 && no_labels_between_p (temp, insn)
1669 && GET_CODE (temp) == INSN
1670 && GET_CODE (PATTERN (temp)) == SET
1671 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1672 && CONSTANT_P (SET_SRC (PATTERN (temp)))
1673 && (temp1 = next_active_insn (JUMP_LABEL (insn)))
1674 /* If we find that the next value tested is `x'
1675 (TEMP1 is the insn where this happens), win. */
1676 && GET_CODE (temp1) == INSN
1677 && GET_CODE (PATTERN (temp1)) == SET
1678#ifdef HAVE_cc0
1679 /* Does temp1 `tst' the value of x? */
1680 && SET_SRC (PATTERN (temp1)) == SET_DEST (PATTERN (temp))
1681 && SET_DEST (PATTERN (temp1)) == cc0_rtx
1682 && (temp1 = next_nonnote_insn (temp1))
1683#else
1684 /* Does temp1 compare the value of x against zero? */
1685 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1686 && XEXP (SET_SRC (PATTERN (temp1)), 1) == const0_rtx
1687 && (XEXP (SET_SRC (PATTERN (temp1)), 0)
1688 == SET_DEST (PATTERN (temp)))
1689 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1690 && (temp1 = find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1691#endif
1692 && condjump_p (temp1))
1693 {
1694 /* Get the if_then_else from the condjump. */
1695 rtx choice = SET_SRC (PATTERN (temp1));
1696 if (GET_CODE (choice) == IF_THEN_ELSE)
1697 {
1698 enum rtx_code code = GET_CODE (XEXP (choice, 0));
1699 rtx val = SET_SRC (PATTERN (temp));
1700 rtx cond
1701 = simplify_relational_operation (code, GET_MODE (SET_DEST (PATTERN (temp))),
1702 val, const0_rtx);
1703 rtx ultimate;
1704
1705 if (cond == const_true_rtx)
1706 ultimate = XEXP (choice, 1);
1707 else if (cond == const0_rtx)
1708 ultimate = XEXP (choice, 2);
1709 else
1710 ultimate = 0;
1711
1712 if (ultimate == pc_rtx)
1713 ultimate = get_label_after (temp1);
1714 else if (ultimate && GET_CODE (ultimate) != RETURN)
1715 ultimate = XEXP (ultimate, 0);
1716
9e390837 1717 if (ultimate && JUMP_LABEL(insn) != ultimate)
15a63be1
RK
1718 changed |= redirect_jump (insn, ultimate);
1719 }
1720 }
1721#endif
1722
1723#if 0
1724 /* @@ This needs a bit of work before it will be right.
1725
1726 Any type of comparison can be accepted for the first and
1727 second compare. When rewriting the first jump, we must
1728 compute what conditions can reach label3, and use the
1729 appropriate code. We cannot simply reverse/swap the code
1730 of the first jump. In some cases, the second jump must be
1731 rewritten also.
1732
1733 For example,
1734 < == converts to > ==
1735 < != converts to == >
1736 etc.
1737
1738 If the code is written to only accept an '==' test for the second
1739 compare, then all that needs to be done is to swap the condition
1740 of the first branch.
1741
1742 It is questionable whether we want this optimization anyway,
1743 since if the user wrote code like this because he/she knew that
6dc42e49 1744 the jump to label1 is taken most of the time, then rewriting
15a63be1
RK
1745 this gives slower code. */
1746 /* @@ This should call get_condition to find the values being
1747 compared, instead of looking for a COMPARE insn when HAVE_cc0
1748 is not defined. This would allow it to work on the m88k. */
1749 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1750 is not defined and the condition is tested by a separate compare
1751 insn. This is because the code below assumes that the result
1752 of the compare dies in the following branch. */
1753
1754 /* Simplify test a ~= b
1755 condjump label1;
1756 test a == b
1757 condjump label2;
1758 jump label3;
1759 label1:
1760
1761 rewriting as
1762 test a ~~= b
1763 condjump label3
1764 test a == b
1765 condjump label2
1766 label1:
1767
1768 where ~= is an inequality, e.g. >, and ~~= is the swapped
1769 inequality, e.g. <.
1770
1771 We recognize this case scanning backwards.
1772
1773 TEMP is the conditional jump to `label2';
1774 TEMP1 is the test for `a == b';
1775 TEMP2 is the conditional jump to `label1';
1776 TEMP3 is the test for `a ~= b'. */
1777 else if (this_is_simplejump
1778 && (temp = prev_active_insn (insn))
1779 && no_labels_between_p (temp, insn)
1780 && condjump_p (temp)
1781 && (temp1 = prev_active_insn (temp))
1782 && no_labels_between_p (temp1, temp)
1783 && GET_CODE (temp1) == INSN
1784 && GET_CODE (PATTERN (temp1)) == SET
1785#ifdef HAVE_cc0
1786 && sets_cc0_p (PATTERN (temp1)) == 1
1787#else
1788 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1789 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1790 && (temp == find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1791#endif
1792 && (temp2 = prev_active_insn (temp1))
1793 && no_labels_between_p (temp2, temp1)
1794 && condjump_p (temp2)
1795 && JUMP_LABEL (temp2) == next_nonnote_insn (NEXT_INSN (insn))
1796 && (temp3 = prev_active_insn (temp2))
1797 && no_labels_between_p (temp3, temp2)
1798 && GET_CODE (PATTERN (temp3)) == SET
1799 && rtx_equal_p (SET_DEST (PATTERN (temp3)),
1800 SET_DEST (PATTERN (temp1)))
1801 && rtx_equal_p (SET_SRC (PATTERN (temp1)),
1802 SET_SRC (PATTERN (temp3)))
1803 && ! inequality_comparisons_p (PATTERN (temp))
1804 && inequality_comparisons_p (PATTERN (temp2)))
1805 {
1806 rtx fallthrough_label = JUMP_LABEL (temp2);
1807
1808 ++LABEL_NUSES (fallthrough_label);
1809 if (swap_jump (temp2, JUMP_LABEL (insn)))
1810 {
1811 delete_insn (insn);
1812 changed = 1;
1813 }
1814
1815 if (--LABEL_NUSES (fallthrough_label) == 0)
1816 delete_insn (fallthrough_label);
1817 }
1818#endif
1819 /* Simplify if (...) {... x = 1;} if (x) ...
1820
1821 We recognize this case backwards.
1822
1823 TEMP is the test of `x';
1824 TEMP1 is the assignment to `x' at the end of the
1825 previous statement. */
1826 /* @@ This should call get_condition to find the values being
1827 compared, instead of looking for a COMPARE insn when HAVE_cc0
1828 is not defined. This would allow it to work on the m88k. */
1829 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1830 is not defined and the condition is tested by a separate compare
1831 insn. This is because the code below assumes that the result
1832 of the compare dies in the following branch. */
2d20b9df
RS
1833
1834 /* ??? This has to be turned off. The problem is that the
1835 unconditional jump might indirectly end up branching to the
1836 label between TEMP1 and TEMP. We can't detect this, in general,
1837 since it may become a jump to there after further optimizations.
1838 If that jump is done, it will be deleted, so we will retry
1839 this optimization in the next pass, resulting in an infinite loop.
1840
1841 The present code prevents this by putting the jump after the
1842 label, but this is not logically correct. */
1843#if 0
15a63be1
RK
1844 else if (this_is_condjump
1845 /* Safe to skip USE and CLOBBER insns here
1846 since they will not be deleted. */
1847 && (temp = prev_active_insn (insn))
1848 && no_labels_between_p (temp, insn)
1849 && GET_CODE (temp) == INSN
1850 && GET_CODE (PATTERN (temp)) == SET
1851#ifdef HAVE_cc0
1852 && sets_cc0_p (PATTERN (temp)) == 1
1853 && GET_CODE (SET_SRC (PATTERN (temp))) == REG
1854#else
1855 /* TEMP must be a compare insn; we cannot accept a register
1856 to register move here, since it may not be simply a
1857 tst insn. */
1858 && GET_CODE (SET_SRC (PATTERN (temp))) == COMPARE
1859 && XEXP (SET_SRC (PATTERN (temp)), 1) == const0_rtx
1860 && GET_CODE (XEXP (SET_SRC (PATTERN (temp)), 0)) == REG
1861 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1862 && insn == find_next_ref (SET_DEST (PATTERN (temp)), temp)
1863#endif
1864 /* We may skip USE or CLOBBER insns here
1865 when checking for an opportunity, since we
1866 take care of them later. */
1867 && (temp1 = prev_active_insn (temp))
1868 && GET_CODE (temp1) == INSN
1869 && GET_CODE (PATTERN (temp1)) == SET
1870#ifdef HAVE_cc0
1871 && SET_SRC (PATTERN (temp)) == SET_DEST (PATTERN (temp1))
1872#else
1873 && (XEXP (SET_SRC (PATTERN (temp)), 0)
1874 == SET_DEST (PATTERN (temp1)))
1875#endif
1876 && CONSTANT_P (SET_SRC (PATTERN (temp1)))
1877 /* If this isn't true, cse will do the job. */
1878 && ! no_labels_between_p (temp1, temp))
1879 {
1880 /* Get the if_then_else from the condjump. */
1881 rtx choice = SET_SRC (PATTERN (insn));
1882 if (GET_CODE (choice) == IF_THEN_ELSE
1883 && (GET_CODE (XEXP (choice, 0)) == EQ
1884 || GET_CODE (XEXP (choice, 0)) == NE))
1885 {
1886 int want_nonzero = (GET_CODE (XEXP (choice, 0)) == NE);
1887 rtx last_insn;
1888 rtx ultimate;
1889 rtx p;
1890
1891 /* Get the place that condjump will jump to
1892 if it is reached from here. */
1893 if ((SET_SRC (PATTERN (temp1)) != const0_rtx)
1894 == want_nonzero)
1895 ultimate = XEXP (choice, 1);
1896 else
1897 ultimate = XEXP (choice, 2);
1898 /* Get it as a CODE_LABEL. */
1899 if (ultimate == pc_rtx)
1900 ultimate = get_label_after (insn);
1901 else
1902 /* Get the label out of the LABEL_REF. */
1903 ultimate = XEXP (ultimate, 0);
1904
2d20b9df
RS
1905 /* Insert the jump immediately before TEMP, specifically
1906 after the label that is between TEMP1 and TEMP. */
1907 last_insn = PREV_INSN (temp);
15a63be1
RK
1908
1909 /* If we would be branching to the next insn, the jump
1910 would immediately be deleted and then re-inserted in
1911 a subsequent pass over the code. So don't do anything
1912 in that case. */
1913 if (next_active_insn (last_insn)
1914 != next_active_insn (ultimate))
1915 {
1916 emit_barrier_after (last_insn);
1917 p = emit_jump_insn_after (gen_jump (ultimate),
1918 last_insn);
1919 JUMP_LABEL (p) = ultimate;
1920 ++LABEL_NUSES (ultimate);
1921 if (INSN_UID (ultimate) < max_jump_chain
1922 && INSN_CODE (p) < max_jump_chain)
1923 {
1924 jump_chain[INSN_UID (p)]
1925 = jump_chain[INSN_UID (ultimate)];
1926 jump_chain[INSN_UID (ultimate)] = p;
1927 }
1928 changed = 1;
1929 continue;
1930 }
1931 }
1932 }
2d20b9df 1933#endif
e0cd0770
JC
1934#ifdef HAVE_trap
1935 /* Detect a conditional jump jumping over an unconditional trap. */
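      /* For illustration only:  `if (cond) goto L;  trap;  L: ...'  becomes
	 `if (!cond) trap;  ...': the conditional trap is emitted in front
	 of the original comparison, and the unconditional trap and the
	 jump are then deleted.  */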
1936 else if (HAVE_trap
1937 && this_is_condjump && ! this_is_simplejump
1938 && reallabelprev != 0
1939 && GET_CODE (reallabelprev) == INSN
1940 && GET_CODE (PATTERN (reallabelprev)) == TRAP_IF
1941 && TRAP_CONDITION (PATTERN (reallabelprev)) == const_true_rtx
1942 && prev_active_insn (reallabelprev) == insn
1943 && no_labels_between_p (insn, reallabelprev)
1944 && (temp2 = get_condition (insn, &temp4))
1945 && can_reverse_comparison_p (temp2, insn))
1946 {
1947 rtx new = gen_cond_trap (reverse_condition (GET_CODE (temp2)),
1948 XEXP (temp2, 0), XEXP (temp2, 1),
1949 TRAP_CODE (PATTERN (reallabelprev)));
1950
1951 if (new)
1952 {
1953 emit_insn_before (new, temp4);
1954 delete_insn (reallabelprev);
1955 delete_jump (insn);
1956 changed = 1;
1957 continue;
1958 }
1959 }
1960 /* Detect a jump jumping to an unconditional trap. */
1961 else if (HAVE_trap && this_is_condjump
1962 && (temp = next_active_insn (JUMP_LABEL (insn)))
1963 && GET_CODE (temp) == INSN
1964 && GET_CODE (PATTERN (temp)) == TRAP_IF
1965 && (this_is_simplejump
1966 || (temp2 = get_condition (insn, &temp4))))
1967 {
1968 rtx tc = TRAP_CONDITION (PATTERN (temp));
1969
1970 if (tc == const_true_rtx
1971 || (! this_is_simplejump && rtx_equal_p (temp2, tc)))
1972 {
1973 rtx new;
1974 /* Replace an unconditional jump to a trap with a trap. */
1975 if (this_is_simplejump)
1976 {
1977 emit_barrier_after (emit_insn_before (gen_trap (), insn));
1978 delete_jump (insn);
1979 changed = 1;
1980 continue;
1981 }
1982 new = gen_cond_trap (GET_CODE (temp2), XEXP (temp2, 0),
1983 XEXP (temp2, 1),
1984 TRAP_CODE (PATTERN (temp)));
1985 if (new)
1986 {
1987 emit_insn_before (new, temp4);
1988 delete_jump (insn);
1989 changed = 1;
1990 continue;
1991 }
1992 }
1993 /* If the trap condition and jump condition are mutually
1994 exclusive, redirect the jump to the following insn. */
1995 else if (GET_RTX_CLASS (GET_CODE (tc)) == '<'
1996 && ! this_is_simplejump
1997 && swap_condition (GET_CODE (temp2)) == GET_CODE (tc)
1998 && rtx_equal_p (XEXP (tc, 0), XEXP (temp2, 0))
1999 && rtx_equal_p (XEXP (tc, 1), XEXP (temp2, 1))
2000 && redirect_jump (insn, get_label_after (temp)))
2001 {
2002 changed = 1;
2003 continue;
2004 }
2005 }
2006#endif
15a63be1
RK
2007 else
2008 {
15a63be1
RK
2009 /* Now that the jump has been tensioned,
2010 try cross jumping: check for identical code
0f41302f 2011 before the jump and before its target label. */
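	 /* For illustration only (A, B, C and L are hypothetical): given

		... A; B; C; goto L;	elsewhere:   ... A; B; C;  L: ...

	    the copies of A, B, C in front of the jump are deleted and the
	    jump is redirected to a label placed in front of the surviving
	    copy, so only one instance of the common tail remains.  */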
15a63be1
RK
2012
2013 /* First, cross jumping of conditional jumps: */
2014
2015 if (cross_jump && condjump_p (insn))
2016 {
2017 rtx newjpos, newlpos;
2018 rtx x = prev_real_insn (JUMP_LABEL (insn));
2019
2020 /* A conditional jump may be crossjumped
2021 only if the place it jumps to follows
2022 an opposing jump that comes back here. */
2023
2024 if (x != 0 && ! jump_back_p (x, insn))
2025 /* We have no opposing jump;
2026 cannot cross jump this insn. */
2027 x = 0;
2028
2029 newjpos = 0;
2030 /* TARGET is nonzero if it is ok to cross jump
2031 to code before TARGET. If so, see if it matches. */
2032 if (x != 0)
2033 find_cross_jump (insn, x, 2,
2034 &newjpos, &newlpos);
2035
2036 if (newjpos != 0)
2037 {
2038 do_cross_jump (insn, newjpos, newlpos);
2039 /* Make the old conditional jump
2040 into an unconditional one. */
2041 SET_SRC (PATTERN (insn))
38a448ca 2042 = gen_rtx_LABEL_REF (VOIDmode, JUMP_LABEL (insn));
15a63be1
RK
2043 INSN_CODE (insn) = -1;
2044 emit_barrier_after (insn);
2045 /* Add to jump_chain unless this is a new label
0f41302f 2046 whose UID is too large. */
15a63be1
RK
2047 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
2048 {
2049 jump_chain[INSN_UID (insn)]
2050 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2051 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
2052 }
2053 changed = 1;
2054 next = insn;
2055 }
2056 }
2057
2058 /* Cross jumping of unconditional jumps:
2059 a few differences. */
2060
2061 if (cross_jump && simplejump_p (insn))
2062 {
2063 rtx newjpos, newlpos;
2064 rtx target;
2065
2066 newjpos = 0;
2067
2068 /* TARGET is nonzero if it is ok to cross jump
2069 to code before TARGET. If so, see if it matches. */
2070 find_cross_jump (insn, JUMP_LABEL (insn), 1,
2071 &newjpos, &newlpos);
2072
2073 /* If we cannot cross jump to code before the label,
2074 see if we can cross jump to another jump to
2075 the same label. */
2076 /* Try each other jump to this label. */
2077 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
2078 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2079 target != 0 && newjpos == 0;
2080 target = jump_chain[INSN_UID (target)])
2081 if (target != insn
2082 && JUMP_LABEL (target) == JUMP_LABEL (insn)
2083 /* Ignore TARGET if it's deleted. */
2084 && ! INSN_DELETED_P (target))
2085 find_cross_jump (insn, target, 2,
2086 &newjpos, &newlpos);
2087
2088 if (newjpos != 0)
2089 {
2090 do_cross_jump (insn, newjpos, newlpos);
2091 changed = 1;
2092 next = insn;
2093 }
2094 }
2095
2096 /* This code was dead in the previous jump.c! */
2097 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
2098 {
2099 /* Return insns all "jump to the same place"
2100 so we can cross-jump between any two of them. */
2101
2102 rtx newjpos, newlpos, target;
2103
2104 newjpos = 0;
2105
2106 /* If we cannot cross jump to code before the label,
2107 see if we can cross jump to another jump to
2108 the same label. */
2109 /* Try each other jump to this label. */
2110 for (target = jump_chain[0];
2111 target != 0 && newjpos == 0;
2112 target = jump_chain[INSN_UID (target)])
2113 if (target != insn
2114 && ! INSN_DELETED_P (target)
2115 && GET_CODE (PATTERN (target)) == RETURN)
2116 find_cross_jump (insn, target, 2,
2117 &newjpos, &newlpos);
2118
2119 if (newjpos != 0)
2120 {
2121 do_cross_jump (insn, newjpos, newlpos);
2122 changed = 1;
2123 next = insn;
2124 }
2125 }
2126 }
2127 }
2128
2129 first = 0;
2130 }
2131
2132 /* Delete extraneous line number notes.
2133 Note that two consecutive notes for different lines are not really
2134 extraneous. There should be some indication where that line belonged,
2135 even if it became empty. */
2136
2137 {
2138 rtx last_note = 0;
2139
2140 for (insn = f; insn; insn = NEXT_INSN (insn))
2141 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0)
2142 {
2143 /* Delete this note if it is identical to previous note. */
2144 if (last_note
2145 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
2146 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
2147 {
2148 delete_insn (insn);
2149 continue;
2150 }
2151
2152 last_note = insn;
2153 }
2154 }
2155
efa90f05
JL
2156 /* CAN_REACH_END is persistent for each function. Once set it should
2157 not be cleared. This is especially true for the case where we
2158 delete the NOTE_FUNCTION_END note. CAN_REACH_END is cleared by
2159 the front-end before compiling each function. */
0a1c58a2 2160 if (! minimal && calculate_can_reach_end (last_insn, optimize != 0))
efa90f05 2161 can_reach_end = 1;
269ef46c 2162
67289ea6
MM
2163end:
2164 /* Clean up. */
2165 free (jump_chain);
269ef46c
DM
2166 jump_chain = 0;
2167}
2168\f
2169/* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
2170 notes whose labels don't occur in the insn any more. Returns the
2171 largest INSN_UID found. */
2172static int
2173init_label_info (f)
2174 rtx f;
2175{
2176 int largest_uid = 0;
2177 rtx insn;
2178
2179 for (insn = f; insn; insn = NEXT_INSN (insn))
2180 {
2181 if (GET_CODE (insn) == CODE_LABEL)
2182 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
2183 else if (GET_CODE (insn) == JUMP_INSN)
2184 JUMP_LABEL (insn) = 0;
2185 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
2186 {
2187 rtx note, next;
2188
2189 for (note = REG_NOTES (insn); note; note = next)
2190 {
2191 next = XEXP (note, 1);
2192 if (REG_NOTE_KIND (note) == REG_LABEL
2193 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
2194 remove_note (insn, note);
2195 }
2196 }
2197 if (INSN_UID (insn) > largest_uid)
2198 largest_uid = INSN_UID (insn);
2199 }
2200
2201 return largest_uid;
2202}
2203
e78d9500
JL
2204/* Delete insns following barriers, up to next label.
2205
2206 Also delete no-op jumps created by gcse. */
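/* A no-op jump here is a JUMP_INSN whose pattern is (set (pc) (pc));
   it merely falls through to the next insn, so it can be removed; see
   the test below.  */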
e4884e95 2207
269ef46c
DM
2208static void
2209delete_barrier_successors (f)
2210 rtx f;
2211{
2212 rtx insn;
2213
2214 for (insn = f; insn;)
2215 {
2216 if (GET_CODE (insn) == BARRIER)
2217 {
2218 insn = NEXT_INSN (insn);
312f6255
GK
2219
2220 never_reached_warning (insn);
2221
269ef46c
DM
2222 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
2223 {
2224 if (GET_CODE (insn) == NOTE
2225 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
2226 insn = NEXT_INSN (insn);
2227 else
2228 insn = delete_insn (insn);
2229 }
2230 /* INSN is now the code_label. */
2231 }
e4884e95 2232
e78d9500
JL
2233 /* Also remove (set (pc) (pc)) insns which can be created by
2234 gcse. We eliminate such insns now to avoid having them
2235 cause problems later. */
2236 else if (GET_CODE (insn) == JUMP_INSN
5dd34fe0 2237 && GET_CODE (PATTERN (insn)) == SET
e78d9500
JL
2238 && SET_SRC (PATTERN (insn)) == pc_rtx
2239 && SET_DEST (PATTERN (insn)) == pc_rtx)
2240 insn = delete_insn (insn);
2241
269ef46c
DM
2242 else
2243 insn = NEXT_INSN (insn);
2244 }
2245}
2246
2247/* Mark the label each jump jumps to.
2248 Combine consecutive labels, and count uses of labels.
2249
2250 For each label, make a chain (using `jump_chain')
2251 of all the *unconditional* jumps that jump to it;
2252 also make a chain of all returns.
2253
2254 CROSS_JUMP indicates whether we are doing cross jumping
2255 and, if we are, whether we will be paying attention to
2256 death notes or not. */
2257
2258static void
2259mark_all_labels (f, cross_jump)
2260 rtx f;
2261 int cross_jump;
2262{
2263 rtx insn;
2264
2265 for (insn = f; insn; insn = NEXT_INSN (insn))
2266 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2267 {
210ee0dd
BS
2268 if (GET_CODE (insn) == CALL_INSN
2269 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2270 {
2271 mark_all_labels (XEXP (PATTERN (insn), 0), cross_jump);
2272 mark_all_labels (XEXP (PATTERN (insn), 1), cross_jump);
2273 mark_all_labels (XEXP (PATTERN (insn), 2), cross_jump);
2274 continue;
2275 }
2276
a76063a6 2277 mark_jump_label (PATTERN (insn), insn, cross_jump, 0);
269ef46c
DM
2278 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
2279 {
2280 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
2281 {
2282 jump_chain[INSN_UID (insn)]
2283 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2284 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
2285 }
2286 if (GET_CODE (PATTERN (insn)) == RETURN)
2287 {
2288 jump_chain[INSN_UID (insn)] = jump_chain[0];
2289 jump_chain[0] = insn;
2290 }
2291 }
2292 }
2293}
2294
2295/* Delete all labels that are no longer referenced.
2296 Also find and return the last insn. */
2297
2298static rtx
2299delete_unreferenced_labels (f)
2300 rtx f;
2301{
2302 rtx final = NULL_RTX;
2303 rtx insn;
2304
2305 for (insn = f; insn; )
2306 {
8cd0faaf
CM
2307 if (GET_CODE (insn) == CODE_LABEL
2308 && LABEL_NUSES (insn) == 0
2309 && LABEL_ALTERNATE_NAME (insn) == NULL)
269ef46c
DM
2310 insn = delete_insn (insn);
2311 else
2312 {
2313 final = insn;
2314 insn = NEXT_INSN (insn);
2315 }
2316 }
2317
2318 return final;
2319}
2320
2321/* Delete various simple forms of moves which have no necessary
2322 side effect. */
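/* Illustrative examples of what is caught below (register numbers are
   made up):  (set (reg 5) (reg 5));  (set (mem X) (mem X)) where X is
   not volatile;  (set (reg 6) (reg 7)) when both pseudos were allocated
   to the same hard register;  and a ZERO_EXTRACT store of a register's
   low bits back into itself.  */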
2323
2324static void
2325delete_noop_moves (f)
2326 rtx f;
2327{
2328 rtx insn, next;
2329
2330 for (insn = f; insn; )
2331 {
2332 next = NEXT_INSN (insn);
2333
2334 if (GET_CODE (insn) == INSN)
2335 {
2336 register rtx body = PATTERN (insn);
2337
269ef46c
DM
2338 /* Detect and delete no-op move instructions
2339 resulting from not allocating a parameter in a register. */
2340
2341 if (GET_CODE (body) == SET
2342 && (SET_DEST (body) == SET_SRC (body)
2343 || (GET_CODE (SET_DEST (body)) == MEM
2344 && GET_CODE (SET_SRC (body)) == MEM
2345 && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
2346 && ! (GET_CODE (SET_DEST (body)) == MEM
2347 && MEM_VOLATILE_P (SET_DEST (body)))
2348 && ! (GET_CODE (SET_SRC (body)) == MEM
2349 && MEM_VOLATILE_P (SET_SRC (body))))
2350 delete_computation (insn);
2351
2352 /* Detect and ignore no-op move instructions
2353 resulting from smart or fortuitous register allocation. */
2354
2355 else if (GET_CODE (body) == SET)
2356 {
2357 int sreg = true_regnum (SET_SRC (body));
2358 int dreg = true_regnum (SET_DEST (body));
2359
2360 if (sreg == dreg && sreg >= 0)
2361 delete_insn (insn);
2362 else if (sreg >= 0 && dreg >= 0)
2363 {
2364 rtx trial;
2365 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
2366 sreg, NULL_PTR, dreg,
2367 GET_MODE (SET_SRC (body)));
2368
2369 if (tem != 0
2370 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
2371 {
2372 /* DREG may have been the target of a REG_DEAD note in
2373 the insn which makes INSN redundant. If so, reorg
2374 would still think it is dead. So search for such a
2375 note and delete it if we find it. */
2376 if (! find_regno_note (insn, REG_UNUSED, dreg))
2377 for (trial = prev_nonnote_insn (insn);
2378 trial && GET_CODE (trial) != CODE_LABEL;
2379 trial = prev_nonnote_insn (trial))
2380 if (find_regno_note (trial, REG_DEAD, dreg))
2381 {
2382 remove_death (dreg, trial);
2383 break;
2384 }
2385
2386 /* Deleting insn could lose a death-note for SREG. */
2387 if ((trial = find_regno_note (insn, REG_DEAD, sreg)))
2388 {
2389 /* Change this into a USE so that we won't emit
2390 code for it, but still can keep the note. */
2391 PATTERN (insn)
2392 = gen_rtx_USE (VOIDmode, XEXP (trial, 0));
2393 INSN_CODE (insn) = -1;
2394 /* Remove all reg notes but the REG_DEAD one. */
2395 REG_NOTES (insn) = trial;
2396 XEXP (trial, 1) = NULL_RTX;
2397 }
2398 else
2399 delete_insn (insn);
2400 }
2401 }
2402 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
2403 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
2404 NULL_PTR, 0,
2405 GET_MODE (SET_DEST (body))))
2406 {
2407 /* This handles the case where we have two consecutive
2408 assignments of the same constant to pseudos that didn't
2409 get a hard reg. Each SET from the constant will be
2410 converted into a SET of the spill register and an
2411 output reload will be made following it. This produces
2412 two loads of the same constant into the same spill
2413 register. */
2414
2415 rtx in_insn = insn;
2416
2417 /* Look back for a death note for the first reg.
2418 If there is one, it is no longer accurate. */
2419 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
2420 {
2421 if ((GET_CODE (in_insn) == INSN
2422 || GET_CODE (in_insn) == JUMP_INSN)
2423 && find_regno_note (in_insn, REG_DEAD, dreg))
2424 {
2425 remove_death (dreg, in_insn);
2426 break;
2427 }
2428 in_insn = PREV_INSN (in_insn);
2429 }
2430
2431 /* Delete the second load of the value. */
2432 delete_insn (insn);
2433 }
2434 }
2435 else if (GET_CODE (body) == PARALLEL)
2436 {
2437 /* If each part is a set between two identical registers or
2438 a USE or CLOBBER, delete the insn. */
2439 int i, sreg, dreg;
2440 rtx tem;
2441
2442 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
2443 {
2444 tem = XVECEXP (body, 0, i);
2445 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
2446 continue;
2447
2448 if (GET_CODE (tem) != SET
2449 || (sreg = true_regnum (SET_SRC (tem))) < 0
2450 || (dreg = true_regnum (SET_DEST (tem))) < 0
2451 || dreg != sreg)
2452 break;
2453 }
2454
2455 if (i < 0)
2456 delete_insn (insn);
2457 }
2458 /* Also delete insns to store bit fields if they are no-ops. */
2459 /* Not worth the hair to detect this in the big-endian case. */
2460 else if (! BYTES_BIG_ENDIAN
2461 && GET_CODE (body) == SET
2462 && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
2463 && XEXP (SET_DEST (body), 2) == const0_rtx
2464 && XEXP (SET_DEST (body), 0) == SET_SRC (body)
2465 && ! (GET_CODE (SET_SRC (body)) == MEM
2466 && MEM_VOLATILE_P (SET_SRC (body))))
2467 delete_insn (insn);
2468 }
2469 insn = next;
2470 }
2471}
2472
2473/* See if there is still a NOTE_INSN_FUNCTION_END in this function.
2474 If so, indicate that this function can drop off the end by returning
2475 1, else return 0.
2476
2477 LAST is the last insn in the function; the search proceeds
2478 backwards from it.
2479
2480 DELETE_FINAL_NOTE indicates whether we should delete the note
2481 if we find it. */
2482
2483static int
d29c259b 2484calculate_can_reach_end (last, delete_final_note)
269ef46c 2485 rtx last;
269ef46c
DM
2486 int delete_final_note;
2487{
2488 rtx insn = last;
2489 int n_labels = 1;
2490
2491 while (insn != NULL_RTX)
2492 {
2493 int ok = 0;
2494
2495 /* One label can follow the end-note: the return label. */
2496 if (GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
2497 ok = 1;
2498 /* Ordinary insns can follow it if returning a structure. */
2499 else if (GET_CODE (insn) == INSN)
2500 ok = 1;
2501 /* If machine uses explicit RETURN insns, no epilogue,
2502 then one of them follows the note. */
2503 else if (GET_CODE (insn) == JUMP_INSN
2504 && GET_CODE (PATTERN (insn)) == RETURN)
2505 ok = 1;
2506 /* A barrier can follow the return insn. */
2507 else if (GET_CODE (insn) == BARRIER)
2508 ok = 1;
2509 /* Other kinds of notes can follow also. */
2510 else if (GET_CODE (insn) == NOTE
2511 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
2512 ok = 1;
2513
2514 if (ok != 1)
2515 break;
15a63be1 2516
15a63be1 2517 insn = PREV_INSN (insn);
269ef46c 2518 }
15a63be1 2519
269ef46c
DM
2520 /* See if we backed up to the appropriate type of note. */
2521 if (insn != NULL_RTX
2522 && GET_CODE (insn) == NOTE
d29c259b 2523 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END)
15a63be1 2524 {
269ef46c
DM
2525 if (delete_final_note)
2526 delete_insn (insn);
2527 return 1;
15a63be1
RK
2528 }
2529
269ef46c 2530 return 0;
15a63be1 2531}
269ef46c 2532
15a63be1
RK
2533/* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
2534 jump. Assume that this unconditional jump is to the exit test code. If
2535 the code is sufficiently simple, make a copy of it before LOOP_START,
2536 followed by a jump to the exit of the loop. Then delete the unconditional
2537 jump after LOOP_START.
2538
15a63be1
RK
2539 Return 1 if we made the change, else 0.
2540
2541 This is only safe immediately after a regscan pass because it uses the
2542 values of regno_first_uid and regno_last_uid. */
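/* For illustration only (a rough source-level view; COND, BODY and the
   labels are made up).  A loop laid out as

	LOOP_BEG   goto TEST;
	TOP:	   BODY
	TEST:	   if (COND) goto TOP;
	LOOP_END

   becomes, roughly,

		   if (COND) goto TOP;		<- copy of the exit test
		   goto PAST;
	LOOP_BEG
	TOP:	   BODY
	TEST:	   if (COND) goto TOP;
	LOOP_END
	PAST:	   ...

   so the loop is entered through a copy of its exit test instead of
   through an unconditional jump.  */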
2543
2544static int
2545duplicate_loop_exit_test (loop_start)
2546 rtx loop_start;
2547{
e33477be 2548 rtx insn, set, reg, p, link;
5ca8e6f7 2549 rtx copy = 0, first_copy = 0;
15a63be1
RK
2550 int num_insns = 0;
2551 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
2552 rtx lastexit;
2553 int max_reg = max_reg_num ();
2554 rtx *reg_map = 0;
2555
2556 /* Scan the exit code. We do not perform this optimization if any insn:
2557
2558 is a CALL_INSN
2559 is a CODE_LABEL
2560 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
2561 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
2562 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
d143a890 2563 is not valid.
752e74f9
JL
2564
2565 We also do not do this if we find an insn with ASM_OPERANDS. While
2566 this restriction should not be necessary, copying an insn with
2567 ASM_OPERANDS can confuse asm_noperands in some cases.
15a63be1
RK
2568
2569 Also, don't do this if the exit code is more than 20 insns. */
2570
2571 for (insn = exitcode;
2572 insn
2573 && ! (GET_CODE (insn) == NOTE
2574 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
2575 insn = NEXT_INSN (insn))
2576 {
2577 switch (GET_CODE (insn))
2578 {
2579 case CODE_LABEL:
2580 case CALL_INSN:
2581 return 0;
2582 case NOTE:
fe464caf
RK
2583 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
2584 a jump immediately after the loop start that branches outside
2585 the loop but within an outer loop, near the exit test.
2586 If we copied this exit test and created a phony
2587 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
2588 before the exit test look like they could be safely moved
2589 out of the loop even though they may actually never be executed.
2590 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
2591
15a63be1 2592 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
fe464caf 2593 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
15a63be1 2594 return 0;
93de5c31
MM
2595
2596 if (optimize < 2
2597 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2598 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2599 /* If we were to duplicate this code, we would not move
2600 the BLOCK notes, and so debugging the moved code would
2601 be difficult. Thus, we only move the code with -O2 or
2602 higher. */
2603 return 0;
2604
15a63be1
RK
2605 break;
2606 case JUMP_INSN:
2607 case INSN:
d143a890
BS
2608 /* The code below would grossly mishandle REG_WAS_0 notes,
2609 so get rid of them here. */
2610 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
2611 remove_note (insn, p);
15a63be1 2612 if (++num_insns > 20
5f4f0e22 2613 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
da43a810 2614 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
15a63be1
RK
2615 return 0;
2616 break;
e9a25f70
JL
2617 default:
2618 break;
15a63be1
RK
2619 }
2620 }
2621
2622 /* Unless INSN is zero, we can do the optimization. */
2623 if (insn == 0)
2624 return 0;
2625
2626 lastexit = insn;
2627
2628 /* See if any insn sets a register only used in the loop exit code and
2629 not a user variable. If so, replace it with a new register. */
2630 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2631 if (GET_CODE (insn) == INSN
2632 && (set = single_set (insn)) != 0
e33477be
RK
2633 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
2634 || (GET_CODE (reg) == SUBREG
2635 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
2636 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
b1f21e0a 2637 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
15a63be1
RK
2638 {
2639 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
b1f21e0a 2640 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
15a63be1
RK
2641 break;
2642
2643 if (p != lastexit)
2644 {
2645 /* We can do the replacement. Allocate reg_map if this is the
2646 first replacement we found. */
2647 if (reg_map == 0)
67289ea6 2648 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
15a63be1 2649
e33477be 2650 REG_LOOP_TEST_P (reg) = 1;
15a63be1 2651
e33477be 2652 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
15a63be1
RK
2653 }
2654 }
2655
2656 /* Now copy each insn. */
2657 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
5ca8e6f7
PB
2658 {
2659 switch (GET_CODE (insn))
2660 {
2661 case BARRIER:
2662 copy = emit_barrier_before (loop_start);
2663 break;
2664 case NOTE:
2665 /* Only copy line-number notes. */
2666 if (NOTE_LINE_NUMBER (insn) >= 0)
2667 {
2668 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
2669 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
2670 }
2671 break;
2672
2673 case INSN:
da43a810 2674 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
5ca8e6f7
PB
2675 if (reg_map)
2676 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2677
a76063a6 2678 mark_jump_label (PATTERN (copy), copy, 0, 0);
5ca8e6f7
PB
2679
2680 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
2681 make them. */
2682 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2683 if (REG_NOTE_KIND (link) != REG_LABEL)
2684 REG_NOTES (copy)
da43a810 2685 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5ca8e6f7
PB
2686 XEXP (link, 0),
2687 REG_NOTES (copy)));
2688 if (reg_map && REG_NOTES (copy))
2689 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2690 break;
2691
2692 case JUMP_INSN:
da43a810 2693 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)), loop_start);
5ca8e6f7
PB
2694 if (reg_map)
2695 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
a76063a6 2696 mark_jump_label (PATTERN (copy), copy, 0, 0);
5ca8e6f7
PB
2697 if (REG_NOTES (insn))
2698 {
da43a810 2699 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
5ca8e6f7
PB
2700 if (reg_map)
2701 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2702 }
2703
2704 /* If this is a simple jump, add it to the jump chain. */
2705
2706 if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
2707 && simplejump_p (copy))
2708 {
2709 jump_chain[INSN_UID (copy)]
2710 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2711 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2712 }
2713 break;
2714
2715 default:
2716 abort ();
2717 }
15a63be1 2718
5ca8e6f7
PB
2719 /* Record the first insn we copied. We need it so that we can
2720 scan the copied insns for new pseudo registers. */
2721 if (! first_copy)
2722 first_copy = copy;
2723 }
15a63be1
RK
2724
2725 /* Now clean up by emitting a jump to the end label and deleting the jump
2726 at the start of the loop. */
9c066566 2727 if (! copy || GET_CODE (copy) != BARRIER)
15a63be1
RK
2728 {
2729 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
2730 loop_start);
5ca8e6f7
PB
2731
2732 /* Record the first insn we copied. We need it so that we can
2733 scan the copied insns for new pseudo registers. This may not
2734 be strictly necessary since we should have copied at least one
2735 insn above. But I am going to be safe. */
2736 if (! first_copy)
2737 first_copy = copy;
2738
a76063a6 2739 mark_jump_label (PATTERN (copy), copy, 0, 0);
15a63be1
RK
2740 if (INSN_UID (copy) < max_jump_chain
2741 && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
2742 {
2743 jump_chain[INSN_UID (copy)]
2744 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2745 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2746 }
2747 emit_barrier_before (loop_start);
2748 }
2749
5ca8e6f7
PB
2750 /* Now scan from the first insn we copied to the last insn we copied
2751 (copy) for new pseudo registers. Do this after the code to jump to
2752 the end label since that might create a new pseudo too. */
2753 reg_scan_update (first_copy, copy, max_reg);
2754
15a63be1
RK
2755 /* Mark the exit code as the virtual top of the converted loop. */
2756 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
2757
cd423ead 2758 delete_insn (next_nonnote_insn (loop_start));
67289ea6
MM
2759
2760 /* Clean up. */
2761 if (reg_map)
2762 free (reg_map);
cd423ead 2763
15a63be1
RK
2764 return 1;
2765}
2766\f
2767/* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
915f619f
JW
2768 loop-end notes between START and END out before START. Assume that
2769 END is not such a note. START may be such a note. Returns the value
2770 of the new starting insn, which may be different if the original start
2771 was such a note. */
15a63be1 2772
915f619f 2773rtx
15a63be1
RK
2774squeeze_notes (start, end)
2775 rtx start, end;
2776{
2777 rtx insn;
2778 rtx next;
2779
2780 for (insn = start; insn != end; insn = next)
2781 {
2782 next = NEXT_INSN (insn);
2783 if (GET_CODE (insn) == NOTE
2784 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
2785 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2786 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2787 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
2788 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
2789 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
2790 {
915f619f
JW
2791 if (insn == start)
2792 start = next;
2793 else
2794 {
2795 rtx prev = PREV_INSN (insn);
2796 PREV_INSN (insn) = PREV_INSN (start);
2797 NEXT_INSN (insn) = start;
2798 NEXT_INSN (PREV_INSN (insn)) = insn;
2799 PREV_INSN (NEXT_INSN (insn)) = insn;
2800 NEXT_INSN (prev) = next;
2801 PREV_INSN (next) = prev;
2802 }
15a63be1
RK
2803 }
2804 }
915f619f
JW
2805
2806 return start;
15a63be1
RK
2807}
2808\f
2809/* Compare the instructions before insn E1 with those before E2
2810 to find an opportunity for cross jumping.
2811 (This means detecting identical sequences of insns followed by
2812 jumps to the same place, or followed by a label and a jump
2813 to that label, and replacing one with a jump to the other.)
2814
2815 Assume E1 is a jump that jumps to label E2
2816 (that is not always true but it might as well be).
2817 Find the longest possible equivalent sequences
2818 and store the first insns of those sequences into *F1 and *F2.
2819 Store zero there if no equivalent preceding instructions are found.
2820
2821 We give up if we find a label in stream 1.
2822 Actually we could transfer that label into stream 2. */
2823
2824static void
2825find_cross_jump (e1, e2, minimum, f1, f2)
2826 rtx e1, e2;
2827 int minimum;
2828 rtx *f1, *f2;
2829{
2830 register rtx i1 = e1, i2 = e2;
2831 register rtx p1, p2;
2832 int lose = 0;
2833
2834 rtx last1 = 0, last2 = 0;
2835 rtx afterlast1 = 0, afterlast2 = 0;
15a63be1
RK
2836
2837 *f1 = 0;
2838 *f2 = 0;
2839
2840 while (1)
2841 {
2842 i1 = prev_nonnote_insn (i1);
2843
2844 i2 = PREV_INSN (i2);
2845 while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
2846 i2 = PREV_INSN (i2);
2847
2848 if (i1 == 0)
2849 break;
2850
2851 /* Don't allow the range of insns preceding E1 or E2
2852 to include the other (E2 or E1). */
2853 if (i2 == e1 || i1 == e2)
2854 break;
2855
2856 /* If we will get to this code by jumping, those jumps will be
2857 tensioned to go directly to the new label (before I2),
2858 so this cross-jumping won't cost extra. So reduce the minimum. */
2859 if (GET_CODE (i1) == CODE_LABEL)
2860 {
2861 --minimum;
2862 break;
2863 }
2864
2865 if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
2866 break;
2867
18f3f864
JL
2868 /* Avoid moving insns across EH regions if either of the insns
2869 can throw. */
2870 if (flag_exceptions
2871 && (asynchronous_exceptions || GET_CODE (i1) == CALL_INSN)
2872 && !in_same_eh_region (i1, i2))
2873 break;
2874
15a63be1
RK
2875 p1 = PATTERN (i1);
2876 p2 = PATTERN (i2);
2877
4d367579
DE
2878 /* If this is a CALL_INSN, compare register usage information.
2879 If we don't check this on stack register machines, the two
2880 CALL_INSNs might be merged leaving reg-stack.c with mismatching
2881 numbers of stack registers in the same basic block.
2882 If we don't check this on machines with delay slots, a delay slot may
2883 be filled that clobbers a parameter expected by the subroutine.
47b0bb94 2884
4d367579
DE
2885 ??? We take the simple route for now and assume that if they're
2886 equal, they were constructed identically. */
2887
2888 if (GET_CODE (i1) == CALL_INSN
2889 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
2890 CALL_INSN_FUNCTION_USAGE (i2)))
2891 lose = 1;
2892
2893#ifdef STACK_REGS
15a63be1
RK
2894 /* If cross_jump_death_matters is not 0, the insn's mode
2895 indicates whether or not the insn contains any stack-like
0f41302f 2896 regs. */
15a63be1 2897
21b2cd73 2898 if (!lose && cross_jump_death_matters && stack_regs_mentioned (i1))
15a63be1
RK
2899 {
2900 /* If register stack conversion has already been done, then
2901 death notes must also be compared before it is certain that
0f41302f 2902 the two instruction streams match. */
15a63be1
RK
2903
2904 rtx note;
2905 HARD_REG_SET i1_regset, i2_regset;
2906
2907 CLEAR_HARD_REG_SET (i1_regset);
2908 CLEAR_HARD_REG_SET (i2_regset);
2909
2910 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
2911 if (REG_NOTE_KIND (note) == REG_DEAD
2912 && STACK_REG_P (XEXP (note, 0)))
2913 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
2914
2915 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
2916 if (REG_NOTE_KIND (note) == REG_DEAD
2917 && STACK_REG_P (XEXP (note, 0)))
2918 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
2919
2920 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
2921
2922 lose = 1;
2923
2924 done:
2925 ;
2926 }
2927#endif
2928
44c389e4
JW
2929 /* Don't allow old-style asm or volatile extended asms to be accepted
2930 for cross jumping purposes. It is conceptually correct to allow
2931 them, since cross-jumping preserves the dynamic instruction order
2932 even though it is changing the static instruction order. However,
2933 if an asm is being used to emit an assembler pseudo-op, such as
2934 the MIPS `.set reorder' pseudo-op, then the static instruction order
2935 matters and it must be preserved. */
2936 if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
2937 || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
2938 || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
2939 lose = 1;
2940
2941 if (lose || GET_CODE (p1) != GET_CODE (p2)
15a63be1
RK
2942 || ! rtx_renumbered_equal_p (p1, p2))
2943 {
2944 /* The following code helps take care of G++ cleanups. */
2945 rtx equiv1;
2946 rtx equiv2;
2947
2948 if (!lose && GET_CODE (p1) == GET_CODE (p2)
5f4f0e22
CH
2949 && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
2950 || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
2951 && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
2952 || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
15a63be1
RK
2953 /* If the equivalences are not to a constant, they may
2954 reference pseudos that no longer exist, so we can't
2955 use them. */
2956 && CONSTANT_P (XEXP (equiv1, 0))
2957 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
2958 {
2959 rtx s1 = single_set (i1);
2960 rtx s2 = single_set (i2);
2961 if (s1 != 0 && s2 != 0
2962 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
2963 {
2964 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
2965 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
2966 if (! rtx_renumbered_equal_p (p1, p2))
2967 cancel_changes (0);
2968 else if (apply_change_group ())
2969 goto win;
2970 }
2971 }
2972
2973 /* Insns fail to match; cross jumping is limited to the following
2974 insns. */
2975
2976#ifdef HAVE_cc0
2977 /* Don't allow the insn after a compare to be shared by
2978 cross-jumping unless the compare is also shared.
2979 Here, if either of these non-matching insns is a compare,
2980 exclude the following insn from possible cross-jumping. */
2981 if (sets_cc0_p (p1) || sets_cc0_p (p2))
2982 last1 = afterlast1, last2 = afterlast2, ++minimum;
2983#endif
2984
2985 /* If cross-jumping here will feed a jump-around-jump
2986 optimization, this jump won't cost extra, so reduce
2987 the minimum. */
2988 if (GET_CODE (i1) == JUMP_INSN
2989 && JUMP_LABEL (i1)
2990 && prev_real_insn (JUMP_LABEL (i1)) == e1)
2991 --minimum;
2992 break;
2993 }
2994
2995 win:
2996 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
2997 {
2998 /* Ok, this insn is potentially includable in a cross-jump here. */
2999 afterlast1 = last1, afterlast2 = last2;
3000 last1 = i1, last2 = i2, --minimum;
3001 }
3002 }
3003
15a63be1
RK
3004 if (minimum <= 0 && last1 != 0 && last1 != e1)
3005 *f1 = last1, *f2 = last2;
3006}
3007
3008static void
3009do_cross_jump (insn, newjpos, newlpos)
3010 rtx insn, newjpos, newlpos;
3011{
3012 /* Find an existing label at this point
3013 or make a new one if there is none. */
3014 register rtx label = get_label_before (newlpos);
3015
3016 /* Make the same jump insn jump to the new point. */
3017 if (GET_CODE (PATTERN (insn)) == RETURN)
3018 {
3019 /* Remove from jump chain of returns. */
3020 delete_from_jump_chain (insn);
3021 /* Change the insn. */
3022 PATTERN (insn) = gen_jump (label);
3023 INSN_CODE (insn) = -1;
3024 JUMP_LABEL (insn) = label;
3025 LABEL_NUSES (label)++;
3026 /* Add to the new label's jump chain. */
3027 if (INSN_UID (label) < max_jump_chain
3028 && INSN_UID (insn) < max_jump_chain)
3029 {
3030 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
3031 jump_chain[INSN_UID (label)] = insn;
3032 }
3033 }
3034 else
3035 redirect_jump (insn, label);
3036
3037 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
3038 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
3039 the NEWJPOS stream. */
3040
3041 while (newjpos != insn)
3042 {
3043 rtx lnote;
3044
3045 for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
3046 if ((REG_NOTE_KIND (lnote) == REG_EQUAL
3047 || REG_NOTE_KIND (lnote) == REG_EQUIV)
3048 && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
3049 && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
3050 remove_note (newlpos, lnote);
3051
3052 delete_insn (newjpos);
3053 newjpos = next_real_insn (newjpos);
3054 newlpos = next_real_insn (newlpos);
3055 }
3056}
3057\f
3058/* Return the label before INSN, or put a new label there. */
3059
3060rtx
3061get_label_before (insn)
3062 rtx insn;
3063{
3064 rtx label;
3065
3066 /* Find an existing label at this point
3067 or make a new one if there is none. */
3068 label = prev_nonnote_insn (insn);
3069
3070 if (label == 0 || GET_CODE (label) != CODE_LABEL)
3071 {
3072 rtx prev = PREV_INSN (insn);
3073
15a63be1
RK
3074 label = gen_label_rtx ();
3075 emit_label_after (label, prev);
3076 LABEL_NUSES (label) = 0;
3077 }
3078 return label;
3079}
3080
3081/* Return the label after INSN, or put a new label there. */
3082
3083rtx
3084get_label_after (insn)
3085 rtx insn;
3086{
3087 rtx label;
3088
3089 /* Find an existing label at this point
3090 or make a new one if there is none. */
3091 label = next_nonnote_insn (insn);
3092
3093 if (label == 0 || GET_CODE (label) != CODE_LABEL)
3094 {
15a63be1
RK
3095 label = gen_label_rtx ();
3096 emit_label_after (label, insn);
3097 LABEL_NUSES (label) = 0;
3098 }
3099 return label;
3100}
3101\f
3102/* Return 1 if INSN is a jump that jumps to right after TARGET
3103 only on the condition that TARGET itself would drop through.
3104 Assumes that TARGET is a conditional jump. */
3105
3106static int
3107jump_back_p (insn, target)
3108 rtx insn, target;
3109{
3110 rtx cinsn, ctarget;
3111 enum rtx_code codei, codet;
3112
3113 if (simplejump_p (insn) || ! condjump_p (insn)
3114 || simplejump_p (target)
3115 || target != prev_real_insn (JUMP_LABEL (insn)))
3116 return 0;
3117
3118 cinsn = XEXP (SET_SRC (PATTERN (insn)), 0);
3119 ctarget = XEXP (SET_SRC (PATTERN (target)), 0);
3120
3121 codei = GET_CODE (cinsn);
3122 codet = GET_CODE (ctarget);
3123
3124 if (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx)
3125 {
3126 if (! can_reverse_comparison_p (cinsn, insn))
3127 return 0;
3128 codei = reverse_condition (codei);
3129 }
3130
3131 if (XEXP (SET_SRC (PATTERN (target)), 2) == pc_rtx)
3132 {
3133 if (! can_reverse_comparison_p (ctarget, target))
3134 return 0;
3135 codet = reverse_condition (codet);
3136 }
3137
3138 return (codei == codet
3139 && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
3140 && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
3141}
3142\f
3143/* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
3144 return non-zero if it is safe to reverse this comparison. It is if our
3145 floating-point is not IEEE, if this is an NE or EQ comparison, or if
3146 this is known to be an integer comparison. */
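/* For example (illustrative): reversing (lt X Y) to (ge X Y) is wrong
   under IEEE arithmetic when either operand is a NaN, since both tests
   are then false.  Reversing NE to EQ and vice versa stays correct even
   with NaNs, which is why those codes are always accepted.  */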
3147
3148int
3149can_reverse_comparison_p (comparison, insn)
3150 rtx comparison;
3151 rtx insn;
3152{
3153 rtx arg0;
3154
3155 /* If this is not actually a comparison, we can't reverse it. */
3156 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
3157 return 0;
3158
3159 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3160 /* If this is an NE comparison, it is safe to reverse it to an EQ
3161 comparison and vice versa, even for floating point. If no operands
3162 are NaNs, the reversal is valid. If some operand is a NaN, EQ is
3163 always false and NE is always true, so the reversal is also valid. */
9b2e59ad 3164 || flag_fast_math
15a63be1
RK
3165 || GET_CODE (comparison) == NE
3166 || GET_CODE (comparison) == EQ)
3167 return 1;
3168
3169 arg0 = XEXP (comparison, 0);
3170
3171 /* Make sure ARG0 is one of the actual objects being compared. If we
3172 can't do this, we can't be sure the comparison can be reversed.
3173
3174 Handle cc0 and a MODE_CC register. */
3175 if ((GET_CODE (arg0) == REG && GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC)
3176#ifdef HAVE_cc0
3177 || arg0 == cc0_rtx
3178#endif
3179 )
3180 {
3181 rtx prev = prev_nonnote_insn (insn);
f8d41a69 3182 rtx set;
15a63be1 3183
c5c76735
JL
3184 /* First see if the condition code mode alone is enough to say we can
3185 reverse the condition. If not, then search backwards for a set of
3186 ARG0. We do not need to check for an insn clobbering it since valid
3187 code will contain a set with no intervening clobber. But
3188 stop when we reach a label. */
3189#ifdef REVERSIBLE_CC_MODE
3190 if (GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC
3191 && REVERSIBLE_CC_MODE (GET_MODE (arg0)))
3192 return 1;
3193#endif
3194
3195 for (prev = prev_nonnote_insn (insn);
3196 prev != 0 && GET_CODE (prev) != CODE_LABEL;
3197 prev = prev_nonnote_insn (prev))
3198 if ((set = single_set (prev)) != 0
3199 && rtx_equal_p (SET_DEST (set), arg0))
3200 {
3201 arg0 = SET_SRC (set);
15a63be1 3202
c5c76735
JL
3203 if (GET_CODE (arg0) == COMPARE)
3204 arg0 = XEXP (arg0, 0);
3205 break;
3206 }
15a63be1
RK
3207 }
3208
3209 /* We can reverse this if ARG0 is a CONST_INT or if its mode is
3210 not VOIDmode and neither a MODE_CC nor MODE_FLOAT type. */
3211 return (GET_CODE (arg0) == CONST_INT
3212 || (GET_MODE (arg0) != VOIDmode
3213 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_CC
3214 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_FLOAT));
3215}
3216
1eb8759b
RH
3217/* Given an rtx-code for a comparison, return the code for the negated
3218 comparison. If no such code exists, return UNKNOWN.
3219
3220 WATCH OUT! reverse_condition is not safe to use on a jump that might
3221 be acting on the results of an IEEE floating point comparison, because
3222 of the special treatment of non-signaling nans in comparisons.
15a63be1
RK
3223 Use can_reverse_comparison_p to be sure. */
3224
3225enum rtx_code
3226reverse_condition (code)
3227 enum rtx_code code;
3228{
3229 switch (code)
3230 {
3231 case EQ:
3232 return NE;
15a63be1
RK
3233 case NE:
3234 return EQ;
15a63be1
RK
3235 case GT:
3236 return LE;
15a63be1
RK
3237 case GE:
3238 return LT;
15a63be1
RK
3239 case LT:
3240 return GE;
15a63be1
RK
3241 case LE:
3242 return GT;
15a63be1
RK
3243 case GTU:
3244 return LEU;
15a63be1
RK
3245 case GEU:
3246 return LTU;
15a63be1
RK
3247 case LTU:
3248 return GEU;
15a63be1
RK
3249 case LEU:
3250 return GTU;
1eb8759b
RH
3251 case UNORDERED:
3252 return ORDERED;
3253 case ORDERED:
3254 return UNORDERED;
3255
3256 case UNLT:
3257 case UNLE:
3258 case UNGT:
3259 case UNGE:
3260 case UNEQ:
7913f3d0 3261 case LTGT:
1eb8759b 3262 return UNKNOWN;
15a63be1
RK
3263
3264 default:
3265 abort ();
15a63be1
RK
3266 }
3267}
3268
7913f3d0
RH
3269/* Similar, but we're allowed to generate unordered comparisons, which
3270 makes it safe for IEEE floating-point. Of course, we have to recognize
3271 that the target will support them too... */
3272
3273enum rtx_code
3274reverse_condition_maybe_unordered (code)
3275 enum rtx_code code;
3276{
3277 /* Non-IEEE formats don't have unordered conditions. */
3278 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
3279 return reverse_condition (code);
3280
3281 switch (code)
3282 {
3283 case EQ:
3284 return NE;
3285 case NE:
3286 return EQ;
3287 case GT:
3288 return UNLE;
3289 case GE:
3290 return UNLT;
3291 case LT:
3292 return UNGE;
3293 case LE:
3294 return UNGT;
3295 case LTGT:
3296 return UNEQ;
3297 case GTU:
3298 return LEU;
3299 case GEU:
3300 return LTU;
3301 case LTU:
3302 return GEU;
3303 case LEU:
3304 return GTU;
3305 case UNORDERED:
3306 return ORDERED;
3307 case ORDERED:
3308 return UNORDERED;
3309 case UNLT:
3310 return GE;
3311 case UNLE:
3312 return GT;
3313 case UNGT:
3314 return LE;
3315 case UNGE:
3316 return LT;
3317 case UNEQ:
3318 return LTGT;
3319
3320 default:
3321 abort ();
3322 }
3323}
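/* Illustrative sketch, not part of the original file: choosing between
   the two reversal routines.  For an IEEE floating-point comparison, LT
   must reverse to UNGE rather than GE so that the reversed test is also
   true when an operand is a NaN.  MODE is assumed to be the mode of the
   compared operands.  */
#if 0
static enum rtx_code
reversed_code_for_mode (code, mode)
     enum rtx_code code;
     enum machine_mode mode;
{
  if (FLOAT_MODE_P (mode))
    return reverse_condition_maybe_unordered (code);
  return reverse_condition (code);
}
#endif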
3324
15a63be1
RK
3325/* Similar, but return the code when two operands of a comparison are swapped.
3326 This IS safe for IEEE floating-point. */
3327
3328enum rtx_code
3329swap_condition (code)
3330 enum rtx_code code;
3331{
3332 switch (code)
3333 {
3334 case EQ:
3335 case NE:
1eb8759b
RH
3336 case UNORDERED:
3337 case ORDERED:
3338 case UNEQ:
7913f3d0 3339 case LTGT:
15a63be1
RK
3340 return code;
3341
3342 case GT:
3343 return LT;
15a63be1
RK
3344 case GE:
3345 return LE;
15a63be1
RK
3346 case LT:
3347 return GT;
15a63be1
RK
3348 case LE:
3349 return GE;
15a63be1
RK
3350 case GTU:
3351 return LTU;
15a63be1
RK
3352 case GEU:
3353 return LEU;
15a63be1
RK
3354 case LTU:
3355 return GTU;
15a63be1
RK
3356 case LEU:
3357 return GEU;
1eb8759b
RH
3358 case UNLT:
3359 return UNGT;
3360 case UNLE:
3361 return UNGE;
3362 case UNGT:
3363 return UNLT;
3364 case UNGE:
3365 return UNLE;
3366
15a63be1
RK
3367 default:
3368 abort ();
15a63be1
RK
3369 }
3370}
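/* Illustrative sketch, not part of the original file: a typical use of
   swap_condition is canonicalizing a comparison so that a constant ends
   up as the second operand.  Unlike reverse_condition, swapping the
   operands is always safe for IEEE floating point.  */
#if 0
static rtx
canonicalize_comparison (cond)
     rtx cond;
{
  /* (LT (const_int 4) (reg)) becomes (GT (reg) (const_int 4)).  */
  if (GET_CODE (XEXP (cond, 0)) == CONST_INT)
    return gen_rtx_fmt_ee (swap_condition (GET_CODE (cond)),
			   GET_MODE (cond),
			   XEXP (cond, 1), XEXP (cond, 0));
  return cond;
}
#endif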
3371
3372/* Given a comparison CODE, return the corresponding unsigned comparison.
3373 If CODE is an equality comparison or already an unsigned comparison,
3374 CODE is returned. */
3375
3376enum rtx_code
3377unsigned_condition (code)
3378 enum rtx_code code;
3379{
3380 switch (code)
3381 {
3382 case EQ:
3383 case NE:
3384 case GTU:
3385 case GEU:
3386 case LTU:
3387 case LEU:
3388 return code;
3389
3390 case GT:
3391 return GTU;
15a63be1
RK
3392 case GE:
3393 return GEU;
15a63be1
RK
3394 case LT:
3395 return LTU;
15a63be1
RK
3396 case LE:
3397 return LEU;
3398
3399 default:
3400 abort ();
3401 }
3402}
3403
3404/* Similarly, return the signed version of a comparison. */
3405
3406enum rtx_code
3407signed_condition (code)
3408 enum rtx_code code;
3409{
3410 switch (code)
3411 {
3412 case EQ:
3413 case NE:
3414 case GT:
3415 case GE:
3416 case LT:
3417 case LE:
3418 return code;
3419
3420 case GTU:
3421 return GT;
15a63be1
RK
3422 case GEU:
3423 return GE;
15a63be1
RK
3424 case LTU:
3425 return LT;
15a63be1
RK
3426 case LEU:
3427 return LE;
3428
3429 default:
3430 abort ();
3431 }
3432}
3433\f
3434/* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
3435 truth of CODE1 implies the truth of CODE2. */
3436
3437int
3438comparison_dominates_p (code1, code2)
3439 enum rtx_code code1, code2;
3440{
3441 if (code1 == code2)
3442 return 1;
3443
3444 switch (code1)
3445 {
3446 case EQ:
7913f3d0
RH
3447 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
3448 || code2 == ORDERED)
15a63be1
RK
3449 return 1;
3450 break;
3451
3452 case LT:
7913f3d0 3453 if (code2 == LE || code2 == NE || code2 == ORDERED)
15a63be1
RK
3454 return 1;
3455 break;
3456
3457 case GT:
7913f3d0
RH
3458 if (code2 == GE || code2 == NE || code2 == ORDERED)
3459 return 1;
3460 break;
3461
3462 case GE:
3463 case LE:
3464 if (code2 == ORDERED)
3465 return 1;
3466 break;
3467
3468 case LTGT:
3469 if (code2 == NE || code2 == ORDERED)
15a63be1
RK
3470 return 1;
3471 break;
3472
3473 case LTU:
b0c38416 3474 if (code2 == LEU || code2 == NE)
15a63be1
RK
3475 return 1;
3476 break;
3477
3478 case GTU:
b0c38416 3479 if (code2 == GEU || code2 == NE)
15a63be1
RK
3480 return 1;
3481 break;
7913f3d0
RH
3482
3483 case UNORDERED:
3484 if (code2 == NE)
3485 return 1;
3486 break;
e9a25f70
JL
3487
3488 default:
3489 break;
15a63be1
RK
3490 }
3491
3492 return 0;
3493}
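/* Illustrative sketch, not part of the original file, modeled on the use
   in thread_jumps below: given the codes of two consecutive conditional
   branches whose operands are known to be equal and unmodified, return
   the label the first branch may be redirected to, or NULL_RTX if
   nothing is known.  B2 is the second branch; get_label_after is the
   helper thread_jumps itself uses.  */
#if 0
static rtx
threaded_target (code1, code2, b2)
     enum rtx_code code1, code2;
     rtx b2;
{
  if (comparison_dominates_p (code1, code2))
    return JUMP_LABEL (b2);		/* Second branch is always taken.  */
  if (comparison_dominates_p (code1, reverse_condition (code2)))
    return get_label_after (b2);	/* Second branch is never taken.  */
  return NULL_RTX;
}
#endif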
3494\f
3495/* Return 1 if INSN is an unconditional jump and nothing else. */
3496
3497int
3498simplejump_p (insn)
3499 rtx insn;
3500{
3501 return (GET_CODE (insn) == JUMP_INSN
3502 && GET_CODE (PATTERN (insn)) == SET
3503 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
3504 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
3505}
3506
3507/* Return nonzero if INSN is a (possibly) conditional jump
3508 and nothing more. */
3509
3510int
3511condjump_p (insn)
3512 rtx insn;
3513{
3514 register rtx x = PATTERN (insn);
c5c76735
JL
3515
3516 if (GET_CODE (x) != SET
3517 || GET_CODE (SET_DEST (x)) != PC)
3480bb98 3518 return 0;
c5c76735
JL
3519
3520 x = SET_SRC (x);
3521 if (GET_CODE (x) == LABEL_REF)
3480bb98 3522 return 1;
c5c76735
JL
3523 else return (GET_CODE (x) == IF_THEN_ELSE
3524 && ((GET_CODE (XEXP (x, 2)) == PC
3525 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
3526 || GET_CODE (XEXP (x, 1)) == RETURN))
3527 || (GET_CODE (XEXP (x, 1)) == PC
3528 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
3529 || GET_CODE (XEXP (x, 2)) == RETURN))));
3530
3480bb98
JL
3531 return 0;
3532}
3533
c5c76735
JL
3534/* Return nonzero if INSN is a (possibly) conditional jump inside a
3535 PARALLEL. */
3480bb98
JL
3536
3537int
3538condjump_in_parallel_p (insn)
3539 rtx insn;
3540{
3541 register rtx x = PATTERN (insn);
3542
3543 if (GET_CODE (x) != PARALLEL)
3544 return 0;
3545 else
3546 x = XVECEXP (x, 0, 0);
3547
15a63be1
RK
3548 if (GET_CODE (x) != SET)
3549 return 0;
3550 if (GET_CODE (SET_DEST (x)) != PC)
3551 return 0;
3552 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
3553 return 1;
3554 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3555 return 0;
3556 if (XEXP (SET_SRC (x), 2) == pc_rtx
3557 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3558 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3559 return 1;
3560 if (XEXP (SET_SRC (x), 1) == pc_rtx
3561 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3562 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3563 return 1;
3564 return 0;
3565}
3566
d804ed43
RH
3567/* Return the label of a conditional jump. */
3568
3569rtx
3570condjump_label (insn)
3571 rtx insn;
3572{
3573 register rtx x = PATTERN (insn);
3574
3575 if (GET_CODE (x) == PARALLEL)
3576 x = XVECEXP (x, 0, 0);
3577 if (GET_CODE (x) != SET)
3578 return NULL_RTX;
3579 if (GET_CODE (SET_DEST (x)) != PC)
3580 return NULL_RTX;
3581 x = SET_SRC (x);
3582 if (GET_CODE (x) == LABEL_REF)
3583 return x;
3584 if (GET_CODE (x) != IF_THEN_ELSE)
3585 return NULL_RTX;
3586 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
3587 return XEXP (x, 1);
3588 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
3589 return XEXP (x, 2);
3590 return NULL_RTX;
3591}
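/* Illustrative note, not part of the original file: the RTL shapes the
   predicates above classify.  A simple jump is
       (set (pc) (label_ref L))
   and a conditional jump is
       (set (pc) (if_then_else (cond) (label_ref L) (pc)))
   possibly with the arms exchanged, or wrapped in a PARALLEL for
   condjump_in_parallel_p.  A minimal caller sketch:  */
#if 0
static rtx
branch_target_or_null (insn)
     rtx insn;
{
  if (GET_CODE (insn) == JUMP_INSN
      && (condjump_p (insn) || condjump_in_parallel_p (insn)))
    /* NULL_RTX when the taken arm is a RETURN.  */
    return condjump_label (insn);
  return NULL_RTX;
}
#endif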
3592
e881bb1b
RH
3593/* Return true if INSN is a (possibly conditional) return insn. */
3594
3595static int
3596returnjump_p_1 (loc, data)
3597 rtx *loc;
3598 void *data ATTRIBUTE_UNUSED;
3599{
3600 rtx x = *loc;
06be9173 3601 return x && GET_CODE (x) == RETURN;
e881bb1b
RH
3602}
3603
3604int
3605returnjump_p (insn)
3606 rtx insn;
3607{
3608 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
3609}
3610
d0e80719
RH
3611/* Return true if INSN is a jump that only transfers control and
3612 nothing more. */
3613
3614int
3615onlyjump_p (insn)
3616 rtx insn;
3617{
3618 rtx set;
3619
3620 if (GET_CODE (insn) != JUMP_INSN)
3621 return 0;
3622
3623 set = single_set (insn);
3624 if (set == NULL)
3625 return 0;
3626 if (GET_CODE (SET_DEST (set)) != PC)
3627 return 0;
3628 if (side_effects_p (SET_SRC (set)))
3629 return 0;
3630
3631 return 1;
3632}
3633
51d87cd9
BS
3634#ifdef HAVE_cc0
3635
15a63be1
RK
3636/* Return 1 if X is an RTX that does nothing but set the condition codes
3637 and CLOBBER or USE registers.
3638 Return -1 if X does explicitly set the condition codes,
3639 but also does other things. */
3640
3641int
3642sets_cc0_p (x)
e51712db 3643 rtx x ATTRIBUTE_UNUSED;
15a63be1 3644{
15a63be1
RK
3645 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
3646 return 1;
3647 if (GET_CODE (x) == PARALLEL)
3648 {
3649 int i;
3650 int sets_cc0 = 0;
3651 int other_things = 0;
3652 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3653 {
3654 if (GET_CODE (XVECEXP (x, 0, i)) == SET
3655 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
3656 sets_cc0 = 1;
3657 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
3658 other_things = 1;
3659 }
3660 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
3661 }
3662 return 0;
15a63be1 3663}
51d87cd9 3664#endif
15a63be1
RK
3665\f
3666/* Follow any unconditional jump at LABEL;
3667 return the ultimate label reached by any such chain of jumps.
3668 If LABEL is not followed by a jump, return LABEL.
2d20b9df
RS
3669 If the chain loops or we can't find the end, return LABEL,
3670 since that tells the caller to avoid changing the insn.
15a63be1
RK
3671
3672 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
3673 a USE or CLOBBER. */
3674
3675rtx
3676follow_jumps (label)
3677 rtx label;
3678{
3679 register rtx insn;
3680 register rtx next;
3681 register rtx value = label;
3682 register int depth;
3683
3684 for (depth = 0;
3685 (depth < 10
3686 && (insn = next_active_insn (value)) != 0
3687 && GET_CODE (insn) == JUMP_INSN
a9cc9061
JL
3688 && ((JUMP_LABEL (insn) != 0 && simplejump_p (insn))
3689 || GET_CODE (PATTERN (insn)) == RETURN)
15a63be1
RK
3690 && (next = NEXT_INSN (insn))
3691 && GET_CODE (next) == BARRIER);
3692 depth++)
3693 {
3694 /* Don't chain through the insn that jumps into a loop
3695 from outside the loop,
3696 since that would create multiple loop entry jumps
3697 and prevent loop optimization. */
3698 rtx tem;
3699 if (!reload_completed)
3700 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
3701 if (GET_CODE (tem) == NOTE
f6a6a1b3
DE
3702 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
3703 /* ??? Optional. Disables some optimizations, but makes
3704 gcov output more accurate with -O. */
3705 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
15a63be1
RK
3706 return value;
3707
3708 /* If we have found a cycle, make the insn jump to itself. */
3709 if (JUMP_LABEL (insn) == label)
2d20b9df 3710 return label;
b209b3c5
JVA
3711
3712 tem = next_active_insn (JUMP_LABEL (insn));
3713 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
3714 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
3715 break;
3716
15a63be1
RK
3717 value = JUMP_LABEL (insn);
3718 }
2d20b9df
RS
3719 if (depth == 10)
3720 return label;
15a63be1
RK
3721 return value;
3722}
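/* Illustrative sketch, not part of the original file: shortening a single
   jump through a chain, in the same style as tension_vector_labels below.
   JUMP is assumed to be a jump insn whose JUMP_LABEL has been set.  */
#if 0
static void
tension_one_jump (jump)
     rtx jump;
{
  rtx olabel = JUMP_LABEL (jump);
  rtx nlabel = follow_jumps (olabel);

  /* redirect_jump updates LABEL_NUSES for both labels and deletes the
     old label if it becomes unused.  */
  if (nlabel && nlabel != olabel)
    redirect_jump (jump, nlabel);
}
#endif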
3723
3724/* Assuming that field IDX of X is a vector of label_refs,
3725 replace each of them by the ultimate label reached by it.
3726 Return nonzero if a change is made.
3727 The chaining is done by follow_jumps, which before reload does not chain across a NOTE_INSN_LOOP_BEG. */
3728
3729static int
3730tension_vector_labels (x, idx)
3731 register rtx x;
3732 register int idx;
3733{
3734 int changed = 0;
3735 register int i;
3736 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
3737 {
3738 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
3739 register rtx nlabel = follow_jumps (olabel);
3740 if (nlabel && nlabel != olabel)
3741 {
3742 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
3743 ++LABEL_NUSES (nlabel);
3744 if (--LABEL_NUSES (olabel) == 0)
3745 delete_insn (olabel);
3746 changed = 1;
3747 }
3748 }
3749 return changed;
3750}
3751\f
3752/* Find all CODE_LABELs referred to in X, and increment their use counts.
3753 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
3754 in INSN, then store one of them in JUMP_LABEL (INSN).
3755 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
3756 referenced in INSN, add a REG_LABEL note containing that label to INSN.
3757 Also, when there are consecutive labels, canonicalize on the last of them.
3758
3759 Note that two labels separated by a loop-beginning note
3760 must be kept distinct if we have not yet done loop-optimization,
3761 because the gap between them is where loop-optimize
3762 will want to move invariant code to. CROSS_JUMP tells us
3763 that loop-optimization is done with.
3764
3765 Once reload has completed (CROSS_JUMP non-zero), we need not consider
3766 two labels distinct if they are separated by only USE or CLOBBER insns. */
3767
3768static void
a76063a6 3769mark_jump_label (x, insn, cross_jump, in_mem)
15a63be1
RK
3770 register rtx x;
3771 rtx insn;
3772 int cross_jump;
a76063a6 3773 int in_mem;
15a63be1
RK
3774{
3775 register RTX_CODE code = GET_CODE (x);
3776 register int i;
6f7d635c 3777 register const char *fmt;
15a63be1
RK
3778
3779 switch (code)
3780 {
3781 case PC:
3782 case CC0:
3783 case REG:
3784 case SUBREG:
3785 case CONST_INT:
15a63be1
RK
3786 case CONST_DOUBLE:
3787 case CLOBBER:
3788 case CALL:
3789 return;
3790
d7ea4cf6 3791 case MEM:
a76063a6
CP
3792 in_mem = 1;
3793 break;
3794
3795 case SYMBOL_REF:
3796 if (!in_mem)
3797 return;
3798
d7ea4cf6 3799 /* If this is a constant-pool reference, see if it is a label. */
a76063a6
CP
3800 if (CONSTANT_POOL_ADDRESS_P (x))
3801 mark_jump_label (get_pool_constant (x), insn, cross_jump, in_mem);
d7ea4cf6
RK
3802 break;
3803
15a63be1
RK
3804 case LABEL_REF:
3805 {
5c5e36c5
RK
3806 rtx label = XEXP (x, 0);
3807 rtx olabel = label;
3808 rtx note;
3809 rtx next;
3810
15a63be1
RK
3811 if (GET_CODE (label) != CODE_LABEL)
3812 abort ();
5c5e36c5 3813
705f26cf
RS
3814 /* Ignore references to labels of containing functions. */
3815 if (LABEL_REF_NONLOCAL_P (x))
3816 break;
5c5e36c5 3817
15a63be1
RK
3818 /* If there are other labels following this one,
3819 replace it with the last of the consecutive labels. */
3820 for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
3821 {
3822 if (GET_CODE (next) == CODE_LABEL)
3823 label = next;
3824 else if (cross_jump && GET_CODE (next) == INSN
3825 && (GET_CODE (PATTERN (next)) == USE
3826 || GET_CODE (PATTERN (next)) == CLOBBER))
3827 continue;
3828 else if (GET_CODE (next) != NOTE)
3829 break;
3830 else if (! cross_jump
3831 && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
f6a6a1b3
DE
3832 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
3833 /* ??? Optional. Disables some optimizations, but
3834 makes gcov output more accurate with -O. */
3835 || (flag_test_coverage && NOTE_LINE_NUMBER (next) > 0)))
15a63be1
RK
3836 break;
3837 }
5c5e36c5 3838
15a63be1 3839 XEXP (x, 0) = label;
ac9b3c97
R
3840 if (! insn || ! INSN_DELETED_P (insn))
3841 ++LABEL_NUSES (label);
5c5e36c5 3842
15a63be1
RK
3843 if (insn)
3844 {
3845 if (GET_CODE (insn) == JUMP_INSN)
3846 JUMP_LABEL (insn) = label;
5c5e36c5
RK
3847
3848 /* If we've changed OLABEL and we had a REG_LABEL note
3849 for it, update it as well. */
3850 else if (label != olabel
3851 && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
3852 XEXP (note, 0) = label;
3853
3854 /* Otherwise, add a REG_LABEL note for LABEL unless there already
3855 is one. */
0e690bdb 3856 else if (! find_reg_note (insn, REG_LABEL, label))
15a63be1 3857 {
e105f02c
JL
3858 /* This code used to ignore labels which referred to dispatch
3859 tables to avoid flow.c generating worse code.
3860
3861 However, in the presence of global optimizations like
3862 gcse which call find_basic_blocks without calling
3863 life_analysis, not recording such labels will lead
3864 to compiler aborts because of inconsistencies in the
3865 flow graph. So we go ahead and record the label.
3866
3867 It may also be the case that the optimization argument
3868 is no longer valid because of the more accurate cfg
3869 we build in find_basic_blocks -- it no longer pessimizes
3870 code when it finds a REG_LABEL note. */
3871 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, label,
3872 REG_NOTES (insn));
15a63be1
RK
3873 }
3874 }
3875 return;
3876 }
3877
3878 /* Do walk the labels in a vector, but not the first operand of an
3879 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
3880 case ADDR_VEC:
3881 case ADDR_DIFF_VEC:
ac9b3c97
R
3882 if (! INSN_DELETED_P (insn))
3883 {
3884 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
15a63be1 3885
ac9b3c97 3886 for (i = 0; i < XVECLEN (x, eltnum); i++)
a76063a6
CP
3887 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX,
3888 cross_jump, in_mem);
ac9b3c97 3889 }
e9a25f70
JL
3890 return;
3891
3892 default:
3893 break;
15a63be1
RK
3894 }
3895
3896 fmt = GET_RTX_FORMAT (code);
3897 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3898 {
3899 if (fmt[i] == 'e')
a76063a6 3900 mark_jump_label (XEXP (x, i), insn, cross_jump, in_mem);
15a63be1
RK
3901 else if (fmt[i] == 'E')
3902 {
3903 register int j;
3904 for (j = 0; j < XVECLEN (x, i); j++)
a76063a6 3905 mark_jump_label (XVECEXP (x, i, j), insn, cross_jump, in_mem);
15a63be1
RK
3906 }
3907 }
3908}
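/* Illustrative sketch, not part of the original file: the bookkeeping a
   pass performs after emitting a brand-new jump insn.  mark_jump_label
   fills in JUMP_LABEL and increments LABEL_NUSES of the label it finds,
   so nothing further is needed.  NEW_JUMP is hypothetical.  */
#if 0
static void
note_new_jump (new_jump)
     rtx new_jump;
{
  JUMP_LABEL (new_jump) = 0;
  mark_jump_label (PATTERN (new_jump), new_jump, 0, 0);
}
#endif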
3909
3910/* If all INSN does is set the pc, delete it,
3911 and delete the insn that set the condition codes for it
3912 if that's what the previous thing was. */
3913
3914void
3915delete_jump (insn)
3916 rtx insn;
3917{
3e5478ea
RK
3918 register rtx set = single_set (insn);
3919
3920 if (set && GET_CODE (SET_DEST (set)) == PC)
3921 delete_computation (insn);
3922}
3923
14a774a9
RK
3924/* Verify INSN is a BARRIER and delete it. */
3925
3926void
3927delete_barrier (insn)
3928 rtx insn;
3929{
3930 if (GET_CODE (insn) != BARRIER)
3931 abort ();
3932
3933 delete_insn (insn);
3934}
3935
cfe2d2e7
JW
3936/* Recursively delete prior insns that compute the value (used only by INSN
3937 which the caller is deleting) stored in the register mentioned by NOTE
3938 which is a REG_DEAD note associated with INSN. */
3939
3940static void
3941delete_prior_computation (note, insn)
3942 rtx note;
3943 rtx insn;
3944{
3945 rtx our_prev;
3946 rtx reg = XEXP (note, 0);
3947
3948 for (our_prev = prev_nonnote_insn (insn);
6f1661e5
JW
3949 our_prev && (GET_CODE (our_prev) == INSN
3950 || GET_CODE (our_prev) == CALL_INSN);
cfe2d2e7
JW
3951 our_prev = prev_nonnote_insn (our_prev))
3952 {
3953 rtx pat = PATTERN (our_prev);
3954
6f1661e5
JW
3955 /* If we reach a CALL which is not calling a const function
3956 or the callee pops the arguments, then give up. */
3957 if (GET_CODE (our_prev) == CALL_INSN
3958 && (! CONST_CALL_P (our_prev)
3959 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
3960 break;
3961
cfe2d2e7
JW
3962 /* If we reach a SEQUENCE, it is too complex to try to
3963 do anything with it, so give up. */
3964 if (GET_CODE (pat) == SEQUENCE)
3965 break;
3966
3967 if (GET_CODE (pat) == USE
3968 && GET_CODE (XEXP (pat, 0)) == INSN)
3969 /* reorg creates USEs that look like this. We leave them
3970 alone because reorg needs them for its own purposes. */
3971 break;
3972
3973 if (reg_set_p (reg, pat))
3974 {
6f1661e5 3975 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
cfe2d2e7
JW
3976 break;
3977
3978 if (GET_CODE (pat) == PARALLEL)
3979 {
3980 /* If we find a SET of something else, we can't
3981 delete the insn. */
3982
3983 int i;
3984
3985 for (i = 0; i < XVECLEN (pat, 0); i++)
3986 {
3987 rtx part = XVECEXP (pat, 0, i);
3988
3989 if (GET_CODE (part) == SET
3990 && SET_DEST (part) != reg)
3991 break;
3992 }
3993
3994 if (i == XVECLEN (pat, 0))
3995 delete_computation (our_prev);
3996 }
3997 else if (GET_CODE (pat) == SET
3998 && GET_CODE (SET_DEST (pat)) == REG)
3999 {
4000 int dest_regno = REGNO (SET_DEST (pat));
4001 int dest_endregno
4002 = dest_regno + (dest_regno < FIRST_PSEUDO_REGISTER
4003 ? HARD_REGNO_NREGS (dest_regno,
4004 GET_MODE (SET_DEST (pat))) : 1);
4005 int regno = REGNO (reg);
4006 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
4007 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
4008
4009 if (dest_regno >= regno
4010 && dest_endregno <= endregno)
4011 delete_computation (our_prev);
4012
4013 /* We may have a multi-word hard register and some, but not
4014 all, of the words of the register are needed in subsequent
4015 insns. Write REG_UNUSED notes for those parts that were not
4016 needed. */
4017 else if (dest_regno <= regno
6f1661e5 4018 && dest_endregno >= endregno)
cfe2d2e7
JW
4019 {
4020 int i;
4021
4022 REG_NOTES (our_prev)
4023 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (our_prev));
4024
4025 for (i = dest_regno; i < dest_endregno; i++)
4026 if (! find_regno_note (our_prev, REG_UNUSED, i))
4027 break;
4028
4029 if (i == dest_endregno)
4030 delete_computation (our_prev);
4031 }
4032 }
4033
4034 break;
4035 }
4036
4037 /* If PAT references the register that dies here, it is an
4038 additional use. Hence any prior SET isn't dead. However, this
4039 insn becomes the new place for the REG_DEAD note. */
4040 if (reg_overlap_mentioned_p (reg, pat))
4041 {
4042 XEXP (note, 1) = REG_NOTES (our_prev);
4043 REG_NOTES (our_prev) = note;
4044 break;
4045 }
4046 }
4047}
4048
3e5478ea
RK
4049/* Delete INSN and recursively delete insns that compute values used only
4050 by INSN. This uses the REG_DEAD notes computed during flow analysis.
4051 If we are running before flow.c, we need do nothing since flow.c will
4052 delete dead code. We also can't know if the registers being used are
4053 dead or not at this point.
4054
4055 Otherwise, look at all our REG_DEAD notes. If a previous insn does
4056 nothing other than set a register that dies in this insn, we can delete
4057 that insn as well.
4058
4059 On machines with CC0, if CC0 is used in this insn, we may be able to
4060 delete the insn that set it. */
4061
8cd2aff2 4062static void
3e5478ea
RK
4063delete_computation (insn)
4064 rtx insn;
4065{
4066 rtx note, next;
cfe2d2e7 4067 rtx set;
15a63be1 4068
15a63be1 4069#ifdef HAVE_cc0
2fb95912 4070 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
3e5478ea 4071 {
77472c5a 4072 rtx prev = prev_nonnote_insn (insn);
15a63be1
RK
4073 /* We assume that at this stage
4074 CC's are always set explicitly
4075 and always immediately before the jump that
4076 will use them. So if the previous insn
4077 exists to set the CC's, delete it
4078 (unless it performs auto-increments, etc.). */
4079 if (prev && GET_CODE (prev) == INSN
4080 && sets_cc0_p (PATTERN (prev)))
4081 {
4082 if (sets_cc0_p (PATTERN (prev)) > 0
cfe2d2e7 4083 && ! side_effects_p (PATTERN (prev)))
3e5478ea 4084 delete_computation (prev);
15a63be1
RK
4085 else
4086 /* Otherwise, show that cc0 won't be used. */
38a448ca
RH
4087 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
4088 cc0_rtx, REG_NOTES (prev));
15a63be1 4089 }
77472c5a 4090 }
3e5478ea 4091#endif
15a63be1 4092
0c63f729
JL
4093#ifdef INSN_SCHEDULING
4094 /* ?!? The schedulers do not keep REG_DEAD notes accurate after
4095 reload has completed. The schedulers need to be fixed. Until
4096 they are, we must not rely on the death notes here. */
4097 if (reload_completed && flag_schedule_insns_after_reload)
4098 {
4099 delete_insn (insn);
4100 return;
4101 }
4102#endif
4103
6f1661e5
JW
4104 /* The REG_DEAD note may have been omitted for a register
4105 which is both set and used by the insn. */
cfe2d2e7 4106 set = single_set (insn);
6f1661e5
JW
4107 if (set && GET_CODE (SET_DEST (set)) == REG)
4108 {
4109 int dest_regno = REGNO (SET_DEST (set));
4110 int dest_endregno
4111 = dest_regno + (dest_regno < FIRST_PSEUDO_REGISTER
4112 ? HARD_REGNO_NREGS (dest_regno,
4113 GET_MODE (SET_DEST (set))) : 1);
4114 int i;
4115
4116 for (i = dest_regno; i < dest_endregno; i++)
4117 {
4118 if (! refers_to_regno_p (i, i + 1, SET_SRC (set), NULL_PTR)
4119 || find_regno_note (insn, REG_DEAD, i))
4120 continue;
4121
4122 note = gen_rtx_EXPR_LIST (REG_DEAD, (i < FIRST_PSEUDO_REGISTER
4123 ? gen_rtx_REG (reg_raw_mode[i], i)
4124 : SET_DEST (set)), NULL_RTX);
4125 delete_prior_computation (note, insn);
4126 }
4127 }
cfe2d2e7 4128
77472c5a
TW
4129 for (note = REG_NOTES (insn); note; note = next)
4130 {
77472c5a 4131 next = XEXP (note, 1);
15a63be1 4132
77472c5a
TW
4133 if (REG_NOTE_KIND (note) != REG_DEAD
4134 /* Verify that the REG_NOTE is legitimate. */
4135 || GET_CODE (XEXP (note, 0)) != REG)
4136 continue;
15a63be1 4137
cfe2d2e7 4138 delete_prior_computation (note, insn);
15a63be1 4139 }
3e5478ea 4140
77472c5a 4141 delete_insn (insn);
15a63be1
RK
4142}
4143\f
4144/* Delete insn INSN from the chain of insns and update label ref counts.
4145 May delete some following insns as a consequence; may even delete
4146 a label elsewhere and insns that follow it.
4147
4148 Returns the first insn after INSN that was not deleted. */
4149
4150rtx
4151delete_insn (insn)
4152 register rtx insn;
4153{
4154 register rtx next = NEXT_INSN (insn);
4155 register rtx prev = PREV_INSN (insn);
196cedd0
RS
4156 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
4157 register int dont_really_delete = 0;
15a63be1
RK
4158
4159 while (next && INSN_DELETED_P (next))
4160 next = NEXT_INSN (next);
4161
4162 /* This insn is already deleted => return first following nondeleted. */
4163 if (INSN_DELETED_P (insn))
4164 return next;
4165
55a98783
JL
4166 if (was_code_label)
4167 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
4168
ae32f34a
RH
4169 /* Don't delete user-declared labels. When optimizing, convert them
4170 to special NOTEs instead. When not optimizing, leave them alone. */
4171 if (was_code_label && LABEL_NAME (insn) != 0)
196cedd0 4172 {
ae32f34a
RH
4173 if (! optimize)
4174 dont_really_delete = 1;
4175 else if (! dont_really_delete)
4176 {
4177 PUT_CODE (insn, NOTE);
4178 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
4179 NOTE_SOURCE_FILE (insn) = 0;
4180 dont_really_delete = 1;
4181 }
196cedd0
RS
4182 }
4183 else
4184 /* Mark this insn as deleted. */
4185 INSN_DELETED_P (insn) = 1;
15a63be1
RK
4186
4187 /* If this is an unconditional jump, delete it from the jump chain. */
4188 if (simplejump_p (insn))
4189 delete_from_jump_chain (insn);
4190
4191 /* If instruction is followed by a barrier,
4192 delete the barrier too. */
4193
4194 if (next != 0 && GET_CODE (next) == BARRIER)
4195 {
4196 INSN_DELETED_P (next) = 1;
4197 next = NEXT_INSN (next);
4198 }
4199
4200 /* Patch out INSN (and the barrier if any) */
4201
d29c259b 4202 if (! dont_really_delete)
15a63be1
RK
4203 {
4204 if (prev)
4205 {
4206 NEXT_INSN (prev) = next;
4207 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
4208 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
4209 XVECLEN (PATTERN (prev), 0) - 1)) = next;
4210 }
4211
4212 if (next)
4213 {
4214 PREV_INSN (next) = prev;
4215 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
4216 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
4217 }
4218
4219 if (prev && NEXT_INSN (prev) == 0)
4220 set_last_insn (prev);
4221 }
4222
4223 /* If deleting a jump, decrement the count of the label,
4224 and delete the label if it is now unused. */
4225
4226 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1fe65930
RH
4227 {
4228 rtx lab = JUMP_LABEL (insn), lab_next;
4229
4230 if (--LABEL_NUSES (lab) == 0)
4231 {
4232 /* This can delete NEXT or PREV,
4233 either directly if NEXT is JUMP_LABEL (INSN),
4234 or indirectly through more levels of jumps. */
4235 delete_insn (lab);
4236
4237 /* I feel a little doubtful about this loop,
4238 but I see no clean and sure alternative way
4239 to find the first insn after INSN that is not now deleted.
4240 I hope this works. */
4241 while (next && INSN_DELETED_P (next))
4242 next = NEXT_INSN (next);
4243 return next;
4244 }
4245 else if ((lab_next = next_nonnote_insn (lab)) != NULL
4246 && GET_CODE (lab_next) == JUMP_INSN
4247 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
4248 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
4249 {
4250 /* If we're deleting the tablejump, delete the dispatch table.
4251 We may not be able to kill the label immediately preceding
4252 just yet, as it might be referenced in code leading up to
4253 the tablejump. */
4254 delete_insn (lab_next);
4255 }
4256 }
15a63be1 4257
3c7d7a4a
DE
4258 /* Likewise if we're deleting a dispatch table. */
4259
4260 if (GET_CODE (insn) == JUMP_INSN
4261 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
4262 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
4263 {
4264 rtx pat = PATTERN (insn);
4265 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
4266 int len = XVECLEN (pat, diff_vec_p);
4267
4268 for (i = 0; i < len; i++)
4269 if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
4270 delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
4271 while (next && INSN_DELETED_P (next))
4272 next = NEXT_INSN (next);
4273 return next;
4274 }
4275
15a63be1
RK
4276 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
4277 prev = PREV_INSN (prev);
4278
4279 /* If INSN was a label and a dispatch table follows it,
4280 delete the dispatch table. The tablejump must have gone already.
4281 It isn't useful to fall through into a table. */
4282
196cedd0 4283 if (was_code_label
15a63be1
RK
4284 && NEXT_INSN (insn) != 0
4285 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
4286 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
4287 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
4288 next = delete_insn (NEXT_INSN (insn));
4289
4290 /* If INSN was a label, delete insns following it if now unreachable. */
4291
196cedd0 4292 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
15a63be1
RK
4293 {
4294 register RTX_CODE code;
4295 while (next != 0
8cd2aff2 4296 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
4134d7fc 4297 || code == NOTE || code == BARRIER
2e1dbf22 4298 || (code == CODE_LABEL && INSN_DELETED_P (next))))
15a63be1
RK
4299 {
4300 if (code == NOTE
4301 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
4302 next = NEXT_INSN (next);
2e1dbf22
RS
4303 /* Keep going past other deleted labels to delete what follows. */
4304 else if (code == CODE_LABEL && INSN_DELETED_P (next))
4305 next = NEXT_INSN (next);
15a63be1
RK
4306 else
4307 /* Note: if this deletes a jump, it can cause more
4308 deletion of unreachable code, after a different label.
4309 As long as the value from this recursive call is correct,
4310 this invocation functions correctly. */
4311 next = delete_insn (next);
4312 }
4313 }
4314
4315 return next;
4316}
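/* Illustrative sketch, not part of the original file: the idiom for
   deleting insns while walking the chain.  Because delete_insn may also
   remove a following barrier, an unused label and the dead code after
   it, the caller resumes from its return value rather than from
   NEXT_INSN of the deleted insn.  dead_insn_p is a hypothetical
   predicate.  */
#if 0
static void
delete_insns_matching (f)
     rtx f;
{
  rtx insn = f;

  while (insn != 0)
    {
      if (! INSN_DELETED_P (insn) && dead_insn_p (insn))
	insn = delete_insn (insn);
      else
	insn = NEXT_INSN (insn);
    }
}
#endif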
4317
4318/* Advance from INSN till reaching something not deleted
4319 then return that. May return INSN itself. */
4320
4321rtx
4322next_nondeleted_insn (insn)
4323 rtx insn;
4324{
4325 while (INSN_DELETED_P (insn))
4326 insn = NEXT_INSN (insn);
4327 return insn;
4328}
4329\f
4330/* Delete a range of insns from FROM to TO, inclusive.
4331 This is for the sake of peephole optimization, so assume
4332 that whatever these insns do will still be done by a new
4333 peephole insn that will replace them. */
4334
4335void
4336delete_for_peephole (from, to)
4337 register rtx from, to;
4338{
4339 register rtx insn = from;
4340
4341 while (1)
4342 {
4343 register rtx next = NEXT_INSN (insn);
4344 register rtx prev = PREV_INSN (insn);
4345
4346 if (GET_CODE (insn) != NOTE)
4347 {
4348 INSN_DELETED_P (insn) = 1;
4349
4350 /* Patch this insn out of the chain. */
4351 /* We don't do this all at once, because we
4352 must preserve all NOTEs. */
4353 if (prev)
4354 NEXT_INSN (prev) = next;
4355
4356 if (next)
4357 PREV_INSN (next) = prev;
4358 }
4359
4360 if (insn == to)
4361 break;
4362 insn = next;
4363 }
4364
4365 /* Note that if TO is an unconditional jump
4366 we *do not* delete the BARRIER that follows,
4367 since the peephole that replaces this sequence
4368 is also an unconditional jump in that case. */
4369}
4370\f
312f6255
GK
4371/* We have determined that INSN is never reached, and are about to
4372 delete it. Print a warning if the user asked for one.
4373
4374 To try to make this warning more useful, this should only be called
4375 once per basic block not reached, and it only warns when the basic
4376 block contains more than one line from the current function, and
4377 contains at least one operation. CSE and inlining can duplicate insns,
4378 so it's possible to get spurious warnings from this. */
4379
4380void
4381never_reached_warning (avoided_insn)
4382 rtx avoided_insn;
4383{
4384 rtx insn;
4385 rtx a_line_note = NULL;
4386 int two_avoided_lines = 0;
4387 int contains_insn = 0;
4388
4389 if (! warn_notreached)
4390 return;
4391
4392 /* Scan forwards, looking at LINE_NUMBER notes, until
4393 we hit a LABEL or we run out of insns. */
4394
4395 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
4396 {
4397 if (GET_CODE (insn) == CODE_LABEL)
4398 break;
4399 else if (GET_CODE (insn) == NOTE /* A line number note? */
4400 && NOTE_LINE_NUMBER (insn) >= 0)
4401 {
4402 if (a_line_note == NULL)
4403 a_line_note = insn;
4404 else
4405 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
4406 != NOTE_LINE_NUMBER (insn));
4407 }
4408 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4409 contains_insn = 1;
4410 }
4411 if (two_avoided_lines && contains_insn)
4412 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
4413 NOTE_LINE_NUMBER (a_line_note),
4414 "will never be executed");
4415}
4416\f
15a63be1
RK
4417/* Invert the condition of the jump JUMP, and make it jump
4418 to label NLABEL instead of where it jumps now. */
4419
4420int
4421invert_jump (jump, nlabel)
4422 rtx jump, nlabel;
4423{
15a63be1
RK
4424 /* We have to either invert the condition and change the label or
4425 do neither. Either operation could fail. We first try to invert
4426 the jump. If that succeeds, we try changing the label. If that fails,
4427 we invert the jump back to what it was. */
4428
4429 if (! invert_exp (PATTERN (jump), jump))
4430 return 0;
4431
4432 if (redirect_jump (jump, nlabel))
f6a6a1b3
DE
4433 {
4434 if (flag_branch_probabilities)
4435 {
4436 rtx note = find_reg_note (jump, REG_BR_PROB, 0);
4437
4438 /* An inverted jump means that a probability taken becomes a
4439 probability not taken. Subtract the branch probability from the
4440 probability base to convert it back to a taken probability.
4441 (We don't flip the probability on a branch that's never taken.) */
4442 if (note && XINT (XEXP (note, 0), 0) >= 0)
4443 XINT (XEXP (note, 0), 0) = REG_BR_PROB_BASE - XINT (XEXP (note, 0), 0);
4444 }
4445
4446 return 1;
4447 }
15a63be1
RK
4448
4449 if (! invert_exp (PATTERN (jump), jump))
4450 /* This should just be putting it back the way it was. */
4451 abort ();
4452
4453 return 0;
4454}
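/* Illustrative sketch, not part of the original file: the classic "jump
   around a jump" simplification expressed with invert_jump.  COND_JUMP
   conditionally skips over UNCOND_JUMP; if the condition can be
   inverted and aimed at UNCOND_JUMP's target, the unconditional jump
   becomes dead.  Both parameter names are hypothetical.  */
#if 0
static int
collapse_jump_around_jump (cond_jump, uncond_jump)
     rtx cond_jump, uncond_jump;
{
  /* invert_jump both reverses the condition and redirects the jump,
     undoing its work if either step fails to re-recognize.  */
  if (! invert_jump (cond_jump, JUMP_LABEL (uncond_jump)))
    return 0;
  delete_jump (uncond_jump);
  return 1;
}
#endif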
4455
4456/* Invert the jump condition of rtx X contained in jump insn, INSN.
4457
4458 Return 1 if we can do so, 0 if we cannot find a way to do so that
4459 matches a pattern. */
4460
4214a505 4461int
15a63be1
RK
4462invert_exp (x, insn)
4463 rtx x;
4464 rtx insn;
4465{
4466 register RTX_CODE code;
4467 register int i;
6f7d635c 4468 register const char *fmt;
15a63be1
RK
4469
4470 code = GET_CODE (x);
4471
4472 if (code == IF_THEN_ELSE)
4473 {
4474 register rtx comp = XEXP (x, 0);
4475 register rtx tem;
4476
4477 /* We can do this in two ways: The preferable way, which can only
4478 be done if this is not an integer comparison, is to reverse
4479 the comparison code. Otherwise, swap the THEN-part and ELSE-part
4480 of the IF_THEN_ELSE. If we can't do either, fail. */
4481
4482 if (can_reverse_comparison_p (comp, insn)
4483 && validate_change (insn, &XEXP (x, 0),
38a448ca
RH
4484 gen_rtx_fmt_ee (reverse_condition (GET_CODE (comp)),
4485 GET_MODE (comp), XEXP (comp, 0),
4486 XEXP (comp, 1)), 0))
15a63be1
RK
4487 return 1;
4488
4489 tem = XEXP (x, 1);
4490 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
4491 validate_change (insn, &XEXP (x, 2), tem, 1);
4492 return apply_change_group ();
4493 }
4494
4495 fmt = GET_RTX_FORMAT (code);
4496 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4497 {
4498 if (fmt[i] == 'e')
9d0878fd
KG
4499 {
4500 if (! invert_exp (XEXP (x, i), insn))
4501 return 0;
4502 }
d4757e6a 4503 else if (fmt[i] == 'E')
15a63be1
RK
4504 {
4505 register int j;
4506 for (j = 0; j < XVECLEN (x, i); j++)
4507 if (!invert_exp (XVECEXP (x, i, j), insn))
4508 return 0;
4509 }
4510 }
4511
4512 return 1;
4513}
4514\f
4515/* Make jump JUMP jump to label NLABEL instead of where it jumps now.
4516 If the old jump target label is unused as a result,
4517 it and the code following it may be deleted.
4518
4519 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
4520 RETURN insn.
4521
4522 The return value will be 1 if the change was made, 0 if it wasn't (this
4523 can only occur for NLABEL == 0). */
4524
4525int
4526redirect_jump (jump, nlabel)
4527 rtx jump, nlabel;
4528{
4529 register rtx olabel = JUMP_LABEL (jump);
4530
4531 if (nlabel == olabel)
4532 return 1;
4533
4534 if (! redirect_exp (&PATTERN (jump), olabel, nlabel, jump))
4535 return 0;
4536
4537 /* If this is an unconditional branch, delete it from the jump_chain of
4538 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
4539 have UID's in range and JUMP_CHAIN is valid). */
4540 if (jump_chain && (simplejump_p (jump)
4541 || GET_CODE (PATTERN (jump)) == RETURN))
4542 {
4543 int label_index = nlabel ? INSN_UID (nlabel) : 0;
4544
4545 delete_from_jump_chain (jump);
2d20b9df
RS
4546 if (label_index < max_jump_chain
4547 && INSN_UID (jump) < max_jump_chain)
15a63be1
RK
4548 {
4549 jump_chain[INSN_UID (jump)] = jump_chain[label_index];
4550 jump_chain[label_index] = jump;
4551 }
4552 }
4553
4554 JUMP_LABEL (jump) = nlabel;
4555 if (nlabel)
4556 ++LABEL_NUSES (nlabel);
4557
d4cf5733
JM
4558 /* If we're eliding the jump over exception cleanups at the end of a
4559 function, move the function end note so that -Wreturn-type works. */
4560 if (olabel && NEXT_INSN (olabel)
4561 && GET_CODE (NEXT_INSN (olabel)) == NOTE
4562 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
4563 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
4564
15a63be1
RK
4565 if (olabel && --LABEL_NUSES (olabel) == 0)
4566 delete_insn (olabel);
4567
4568 return 1;
4569}
4570
4571/* Delete the instruction JUMP from any jump chain it might be on. */
4572
4573static void
4574delete_from_jump_chain (jump)
4575 rtx jump;
4576{
4577 int index;
4578 rtx olabel = JUMP_LABEL (jump);
4579
4580 /* Handle unconditional jumps. */
4581 if (jump_chain && olabel != 0
4582 && INSN_UID (olabel) < max_jump_chain
4583 && simplejump_p (jump))
4584 index = INSN_UID (olabel);
4585 /* Handle return insns. */
4586 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
4587 index = 0;
4588 else return;
4589
4590 if (jump_chain[index] == jump)
4591 jump_chain[index] = jump_chain[INSN_UID (jump)];
4592 else
4593 {
4594 rtx insn;
4595
4596 for (insn = jump_chain[index];
4597 insn != 0;
4598 insn = jump_chain[INSN_UID (insn)])
4599 if (jump_chain[INSN_UID (insn)] == jump)
4600 {
4601 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
4602 break;
4603 }
4604 }
4605}
4606
4607/* If NLABEL is nonzero, throughout the rtx at LOC,
4608 alter (LABEL_REF OLABEL) to (LABEL_REF NLABEL). If OLABEL is
4609 zero, alter (RETURN) to (LABEL_REF NLABEL).
4610
4611 If NLABEL is zero, alter (LABEL_REF OLABEL) to (RETURN) and check
4612 validity with validate_change. Convert (set (pc) (label_ref olabel))
4613 to (return).
4614
4615 Return 0 if we found a change we would like to make but it is invalid.
4616 Otherwise, return 1. */
4617
4214a505 4618int
15a63be1
RK
4619redirect_exp (loc, olabel, nlabel, insn)
4620 rtx *loc;
4621 rtx olabel, nlabel;
4622 rtx insn;
4623{
4624 register rtx x = *loc;
4625 register RTX_CODE code = GET_CODE (x);
4626 register int i;
6f7d635c 4627 register const char *fmt;
15a63be1
RK
4628
4629 if (code == LABEL_REF)
4630 {
4631 if (XEXP (x, 0) == olabel)
4632 {
4633 if (nlabel)
4634 XEXP (x, 0) = nlabel;
4635 else
38a448ca 4636 return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);
15a63be1
RK
4637 return 1;
4638 }
4639 }
4640 else if (code == RETURN && olabel == 0)
4641 {
38a448ca 4642 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
15a63be1 4643 if (loc == &PATTERN (insn))
38a448ca 4644 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
15a63be1
RK
4645 return validate_change (insn, loc, x, 0);
4646 }
4647
4648 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
4649 && GET_CODE (SET_SRC (x)) == LABEL_REF
4650 && XEXP (SET_SRC (x), 0) == olabel)
38a448ca 4651 return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);
15a63be1
RK
4652
4653 fmt = GET_RTX_FORMAT (code);
4654 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4655 {
4656 if (fmt[i] == 'e')
9d0878fd
KG
4657 {
4658 if (! redirect_exp (&XEXP (x, i), olabel, nlabel, insn))
4659 return 0;
4660 }
d4757e6a 4661 else if (fmt[i] == 'E')
15a63be1
RK
4662 {
4663 register int j;
4664 for (j = 0; j < XVECLEN (x, i); j++)
4665 if (! redirect_exp (&XVECEXP (x, i, j), olabel, nlabel, insn))
4666 return 0;
4667 }
4668 }
4669
4670 return 1;
4671}
4672\f
4673/* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
4674
4675 If the old jump target label (before the dispatch table) becomes unused,
4676 it and the dispatch table may be deleted. In that case, find the insn
6dc42e49 4677 before the jump that references that label and delete it and its logical successors
15a63be1
RK
4678 too. */
4679
8cd2aff2 4680static void
15a63be1
RK
4681redirect_tablejump (jump, nlabel)
4682 rtx jump, nlabel;
4683{
4684 register rtx olabel = JUMP_LABEL (jump);
4685
4686 /* Add this jump to the jump_chain of NLABEL. */
4687 if (jump_chain && INSN_UID (nlabel) < max_jump_chain
4688 && INSN_UID (jump) < max_jump_chain)
4689 {
4690 jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
4691 jump_chain[INSN_UID (nlabel)] = jump;
4692 }
4693
4694 PATTERN (jump) = gen_jump (nlabel);
4695 JUMP_LABEL (jump) = nlabel;
4696 ++LABEL_NUSES (nlabel);
4697 INSN_CODE (jump) = -1;
4698
4699 if (--LABEL_NUSES (olabel) == 0)
4700 {
4701 delete_labelref_insn (jump, olabel, 0);
4702 delete_insn (olabel);
4703 }
4704}
4705
4706/* Find the insn referencing LABEL that is a logical predecessor of INSN.
4707 If we found one, delete it and then delete this insn if DELETE_THIS is
4708 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
4709
4710static int
4711delete_labelref_insn (insn, label, delete_this)
4712 rtx insn, label;
4713 int delete_this;
4714{
4715 int deleted = 0;
4716 rtx link;
4717
4718 if (GET_CODE (insn) != NOTE
4719 && reg_mentioned_p (label, PATTERN (insn)))
4720 {
4721 if (delete_this)
4722 {
4723 delete_insn (insn);
4724 deleted = 1;
4725 }
4726 else
4727 return 1;
4728 }
4729
4730 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
4731 if (delete_labelref_insn (XEXP (link, 0), label, 1))
4732 {
4733 if (delete_this)
4734 {
4735 delete_insn (insn);
4736 deleted = 1;
4737 }
4738 else
4739 return 1;
4740 }
4741
4742 return deleted;
4743}
4744\f
4745/* Like rtx_equal_p except that it considers two REGs as equal
4fe73cc1
RK
4746 if they renumber to the same value and considers two commutative
4747 operations to be the same if the order of the operands has been
8fc001f9
JL
4748 reversed.
4749
4750 ??? Addition is not commutative on the PA due to the weird implicit
4751 space register selection rules for memory addresses. Therefore, we
4752 don't consider a + b == b + a.
4753
4754 We could/should make this test a little tighter. Possibly only
4755 disabling it on the PA via some backend macro or only disabling this
4756 case when the PLUS is inside a MEM. */
15a63be1
RK
4757
4758int
4759rtx_renumbered_equal_p (x, y)
4760 rtx x, y;
4761{
4762 register int i;
4763 register RTX_CODE code = GET_CODE (x);
6f7d635c 4764 register const char *fmt;
15a63be1
RK
4765
4766 if (x == y)
4767 return 1;
4fe73cc1 4768
15a63be1
RK
4769 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
4770 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
4771 && GET_CODE (SUBREG_REG (y)) == REG)))
4772 {
4fe73cc1
RK
4773 int reg_x = -1, reg_y = -1;
4774 int word_x = 0, word_y = 0;
15a63be1
RK
4775
4776 if (GET_MODE (x) != GET_MODE (y))
4777 return 0;
4778
4779 /* If we haven't done any renumbering, don't
4780 make any assumptions. */
4781 if (reg_renumber == 0)
4782 return rtx_equal_p (x, y);
4783
4784 if (code == SUBREG)
4785 {
4fe73cc1
RK
4786 reg_x = REGNO (SUBREG_REG (x));
4787 word_x = SUBREG_WORD (x);
4788
4789 if (reg_renumber[reg_x] >= 0)
4790 {
4791 reg_x = reg_renumber[reg_x] + word_x;
4792 word_x = 0;
4793 }
15a63be1 4794 }
4fe73cc1 4795
15a63be1
RK
4796 else
4797 {
4fe73cc1
RK
4798 reg_x = REGNO (x);
4799 if (reg_renumber[reg_x] >= 0)
4800 reg_x = reg_renumber[reg_x];
15a63be1 4801 }
4fe73cc1 4802
15a63be1
RK
4803 if (GET_CODE (y) == SUBREG)
4804 {
4fe73cc1
RK
4805 reg_y = REGNO (SUBREG_REG (y));
4806 word_y = SUBREG_WORD (y);
4807
4808 if (reg_renumber[reg_y] >= 0)
4809 {
4810 reg_y = reg_renumber[reg_y];
4811 word_y = 0;
4812 }
15a63be1 4813 }
4fe73cc1 4814
15a63be1
RK
4815 else
4816 {
4fe73cc1
RK
4817 reg_y = REGNO (y);
4818 if (reg_renumber[reg_y] >= 0)
4819 reg_y = reg_renumber[reg_y];
15a63be1 4820 }
4fe73cc1
RK
4821
4822 return reg_x >= 0 && reg_x == reg_y && word_x == word_y;
15a63be1 4823 }
4fe73cc1 4824
15a63be1
RK
4825 /* Now we have disposed of all the cases
4826 in which different rtx codes can match. */
4827 if (code != GET_CODE (y))
4828 return 0;
4fe73cc1 4829
15a63be1
RK
4830 switch (code)
4831 {
4832 case PC:
4833 case CC0:
4834 case ADDR_VEC:
4835 case ADDR_DIFF_VEC:
4836 return 0;
4837
4838 case CONST_INT:
38b3167e 4839 return INTVAL (x) == INTVAL (y);
15a63be1
RK
4840
4841 case LABEL_REF:
705f26cf
RS
4842 /* We can't assume nonlocal labels have their following insns yet. */
4843 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
4844 return XEXP (x, 0) == XEXP (y, 0);
4fe73cc1 4845
15a63be1
RK
4846 /* Two label-refs are equivalent if they point at labels
4847 in the same position in the instruction stream. */
4848 return (next_real_insn (XEXP (x, 0))
4849 == next_real_insn (XEXP (y, 0)));
4850
4851 case SYMBOL_REF:
4852 return XSTR (x, 0) == XSTR (y, 0);
e9a25f70 4853
bba596a3
RH
4854 case CODE_LABEL:
4855 /* If we didn't match EQ equality above, they aren't the same. */
4856 return 0;
4857
e9a25f70
JL
4858 default:
4859 break;
15a63be1
RK
4860 }
4861
4862 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
4863
4864 if (GET_MODE (x) != GET_MODE (y))
4865 return 0;
4866
4fe73cc1 4867 /* For commutative operations, the RTXs match if the operands match in any
8fc001f9
JL
4868 order. Also handle the simple binary and unary cases without a loop.
4869
4870 ??? Don't consider PLUS a commutative operator; see comments above. */
4871 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4872 && code != PLUS)
4fe73cc1
RK
4873 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4874 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
4875 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
4876 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
4877 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4878 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4879 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
4880 else if (GET_RTX_CLASS (code) == '1')
4881 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
4882
15a63be1
RK
4883 /* Compare the elements. If any pair of corresponding elements
4884 fail to match, return 0 for the whole thing. */
4885
4886 fmt = GET_RTX_FORMAT (code);
4887 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4888 {
4889 register int j;
4890 switch (fmt[i])
4891 {
5f4f0e22
CH
4892 case 'w':
4893 if (XWINT (x, i) != XWINT (y, i))
4894 return 0;
4895 break;
4896
15a63be1
RK
4897 case 'i':
4898 if (XINT (x, i) != XINT (y, i))
4899 return 0;
4900 break;
4901
4902 case 's':
4903 if (strcmp (XSTR (x, i), XSTR (y, i)))
4904 return 0;
4905 break;
4906
4907 case 'e':
4908 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
4909 return 0;
4910 break;
4911
4912 case 'u':
4913 if (XEXP (x, i) != XEXP (y, i))
4914 return 0;
4915 /* fall through. */
4916 case '0':
4917 break;
4918
4919 case 'E':
4920 if (XVECLEN (x, i) != XVECLEN (y, i))
4921 return 0;
4922 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4923 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
4924 return 0;
4925 break;
4926
4927 default:
4928 abort ();
4929 }
4930 }
4931 return 1;
4932}
4933\f
4934/* If X is a hard register or equivalent to one or a subregister of one,
4935 return the hard register number. If X is a pseudo register that was not
4936 assigned a hard register, return the pseudo register number. Otherwise,
4937 return -1. Any rtx is valid for X. */
4938
4939int
4940true_regnum (x)
4941 rtx x;
4942{
4943 if (GET_CODE (x) == REG)
4944 {
4945 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
4946 return reg_renumber[REGNO (x)];
4947 return REGNO (x);
4948 }
4949 if (GET_CODE (x) == SUBREG)
4950 {
4951 int base = true_regnum (SUBREG_REG (x));
4952 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
4953 return SUBREG_WORD (x) + base;
4954 }
4955 return -1;
4956}
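/* Illustrative sketch, not part of the original file: after register
   allocation, true_regnum lets two operands be compared by the hard
   register they actually occupy, which is how the cross-jumping code
   treats renumbered pseudos as equal.  */
#if 0
static int
same_hard_reg_p (x, y)
     rtx x, y;
{
  int rx = true_regnum (x);
  int ry = true_regnum (y);

  return rx >= 0 && rx < FIRST_PSEUDO_REGISTER && rx == ry;
}
#endif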
4957\f
4958/* Optimize code of the form:
4959
4960 for (x = a[i]; x; ...)
4961 ...
4962 for (x = a[i]; x; ...)
4963 ...
4964 foo:
4965
4966 Loop optimize will change the above code into
4967
4968 if (x = a[i])
4969 for (;;)
4970 { ...; if (! (x = ...)) break; }
4971 if (x = a[i])
4972 for (;;)
4973 { ...; if (! (x = ...)) break; }
4974 foo:
4975
4976 In general, if the first test fails, the program can branch
4977 directly to `foo' and skip the second try which is doomed to fail.
4978 We run this after loop optimization and before flow analysis. */
4979
4980/* When comparing the insn patterns, we track the fact that different
4981 pseudo-register numbers may have been used in each computation.
4982 The following array stores an equivalence -- same_regs[I] == J means
4983 that pseudo register I was used in the first set of tests in a context
4984 where J was used in the second set. We also count the number of such
4985 pending equivalences. If nonzero, the expressions really aren't the
4986 same. */
4987
7ee8a9d5 4988static int *same_regs;
15a63be1
RK
4989
4990static int num_same_regs;
4991
4992/* Track any registers modified between the target of the first jump and
4993 the second jump. They never compare equal. */
4994
4995static char *modified_regs;
4996
4997/* Record if memory was modified. */
4998
4999static int modified_mem;
5000
5001/* Called via note_stores on each insn between the target of the first
5002 branch and the second branch. It marks any changed registers. */
5003
5004static void
84832317 5005mark_modified_reg (dest, x, data)
15a63be1 5006 rtx dest;
487a6e06 5007 rtx x ATTRIBUTE_UNUSED;
84832317 5008 void *data ATTRIBUTE_UNUSED;
15a63be1 5009{
770ae6cc
RK
5010 int regno;
5011 unsigned int i;
15a63be1
RK
5012
5013 if (GET_CODE (dest) == SUBREG)
5014 dest = SUBREG_REG (dest);
5015
5016 if (GET_CODE (dest) == MEM)
5017 modified_mem = 1;
5018
5019 if (GET_CODE (dest) != REG)
5020 return;
5021
5022 regno = REGNO (dest);
5023 if (regno >= FIRST_PSEUDO_REGISTER)
5024 modified_regs[regno] = 1;
5025 else
5026 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
5027 modified_regs[regno + i] = 1;
5028}
5029
5030/* F is the first insn in the chain of insns. */
5031
5032void
aa38b201 5033thread_jumps (f, max_reg, flag_before_loop)
15a63be1
RK
5034 rtx f;
5035 int max_reg;
aa38b201 5036 int flag_before_loop;
15a63be1
RK
5037{
5038 /* Basic algorithm is to find a conditional branch,
5039 the label it may branch to, and the branch after
5040 that label. If the two branches test the same condition,
5041 walk back from both branch paths until the insn patterns
5042 differ, or code labels are hit. If we make it back to
5043 the target of the first branch, then we know that the first branch
5044 will either always succeed or always fail depending on the relative
5045 senses of the two branches. So adjust the first branch accordingly
5046 in this case. */
5047
5048 rtx label, b1, b2, t1, t2;
5049 enum rtx_code code1, code2;
5050 rtx b1op0, b1op1, b2op0, b2op1;
5051 int changed = 1;
5052 int i;
7ee8a9d5 5053 int *all_reset;
15a63be1
RK
5054
5055 /* Allocate register tables and quick-reset table. */
67289ea6
MM
5056 modified_regs = (char *) xmalloc (max_reg * sizeof (char));
5057 same_regs = (int *) xmalloc (max_reg * sizeof (int));
5058 all_reset = (int *) xmalloc (max_reg * sizeof (int));
15a63be1
RK
5059 for (i = 0; i < max_reg; i++)
5060 all_reset[i] = -1;
5061
5062 while (changed)
5063 {
5064 changed = 0;
5065
5066 for (b1 = f; b1; b1 = NEXT_INSN (b1))
5067 {
5068 /* Get to a candidate branch insn. */
5069 if (GET_CODE (b1) != JUMP_INSN
5070 || ! condjump_p (b1) || simplejump_p (b1)
5071 || JUMP_LABEL (b1) == 0)
5072 continue;
5073
5074 bzero (modified_regs, max_reg * sizeof (char));
5075 modified_mem = 0;
5076
4c9a05bc
RK
5077 bcopy ((char *) all_reset, (char *) same_regs,
5078 max_reg * sizeof (int));
15a63be1
RK
5079 num_same_regs = 0;
5080
5081 label = JUMP_LABEL (b1);
5082
5083 /* Look for a branch after the target. Record any registers and
5084 memory modified between the target and the branch. Stop when we
5085 get to a label since we can't know what was changed there. */
5086 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
5087 {
5088 if (GET_CODE (b2) == CODE_LABEL)
5089 break;
5090
5091 else if (GET_CODE (b2) == JUMP_INSN)
5092 {
5093 /* If this is an unconditional jump and is the only use of
5094 its target label, we can follow it. */
5095 if (simplejump_p (b2)
5096 && JUMP_LABEL (b2) != 0
5097 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
5098 {
5099 b2 = JUMP_LABEL (b2);
5100 continue;
5101 }
5102 else
5103 break;
5104 }
5105
5106 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
5107 continue;
5108
5109 if (GET_CODE (b2) == CALL_INSN)
5110 {
5111 modified_mem = 1;
5112 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5113 if (call_used_regs[i] && ! fixed_regs[i]
5114 && i != STACK_POINTER_REGNUM
5115 && i != FRAME_POINTER_REGNUM
cbe23927 5116 && i != HARD_FRAME_POINTER_REGNUM
15a63be1
RK
5117 && i != ARG_POINTER_REGNUM)
5118 modified_regs[i] = 1;
5119 }
5120
84832317 5121 note_stores (PATTERN (b2), mark_modified_reg, NULL);
15a63be1
RK
5122 }
5123
5124 /* Check the next candidate branch insn from the label
5125 of the first. */
5126 if (b2 == 0
5127 || GET_CODE (b2) != JUMP_INSN
5128 || b2 == b1
5129 || ! condjump_p (b2)
5130 || simplejump_p (b2))
5131 continue;
5132
5133 /* Get the comparison codes and operands, reversing the
5134 codes if appropriate. If we don't have comparison codes,
5135 we can't do anything. */
5136 b1op0 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 0);
5137 b1op1 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 1);
5138 code1 = GET_CODE (XEXP (SET_SRC (PATTERN (b1)), 0));
5139 if (XEXP (SET_SRC (PATTERN (b1)), 1) == pc_rtx)
5140 code1 = reverse_condition (code1);
5141
5142 b2op0 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 0);
5143 b2op1 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 1);
5144 code2 = GET_CODE (XEXP (SET_SRC (PATTERN (b2)), 0));
5145 if (XEXP (SET_SRC (PATTERN (b2)), 1) == pc_rtx)
5146 code2 = reverse_condition (code2);
5147
5148 /* If they test the same things and knowing that B1 branches
5149 tells us whether or not B2 branches, check if we
5150 can thread the branch. */
5151 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
5152 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
5153 && (comparison_dominates_p (code1, code2)
1eb8759b
RH
5154 || (can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (b1)),
5155 0),
5156 b1)
5157 && comparison_dominates_p (code1, reverse_condition (code2)))))
5158
15a63be1
RK
5159 {
5160 t1 = prev_nonnote_insn (b1);
5161 t2 = prev_nonnote_insn (b2);
5162
5163 while (t1 != 0 && t2 != 0)
5164 {
15a63be1
RK
5165 if (t2 == label)
5166 {
5167 /* We have reached the target of the first branch.
5168 If there are no pending register equivalents,
5169 we know that this branch will either always
5170 succeed (if the senses of the two branches are
5171 the same) or always fail (if not). */
5172 rtx new_label;
5173
5174 if (num_same_regs != 0)
5175 break;
5176
5177 if (comparison_dominates_p (code1, code2))
5178 new_label = JUMP_LABEL (b2);
5179 else
5180 new_label = get_label_after (b2);
5181
aa38b201
TG
5182 if (JUMP_LABEL (b1) != new_label)
5183 {
5184 rtx prev = PREV_INSN (new_label);
5185
5186 if (flag_before_loop
c71407f9 5187 && GET_CODE (prev) == NOTE
aa38b201
TG
5188 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
5189 {
5190 /* Don't thread to the loop label. If a loop
5191 label is reused, loop optimization will
5192 be disabled for that loop. */
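			  /* (Illustrative note, editorial: emit_label_after
			     below places the fresh label after
			     PREV_INSN (prev), i.e. immediately before the
			     NOTE_INSN_LOOP_BEG, so B1 is redirected there
			     rather than to the loop's own label.)  */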
5193 new_label = gen_label_rtx ();
5194 emit_label_after (new_label, PREV_INSN (prev));
5195 }
5196 changed |= redirect_jump (b1, new_label);
5197 }
5198 break;
5199 }
5200
5201 /* If either of these is not a normal insn (it might be
5202 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
5203 have already been skipped above.) Similarly, fail
5204 if the insns are different. */
5205 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
5206 || recog_memoized (t1) != recog_memoized (t2)
5207 || ! rtx_equal_for_thread_p (PATTERN (t1),
5208 PATTERN (t2), t2))
5209 break;
5210
5211 t1 = prev_nonnote_insn (t1);
5212 t2 = prev_nonnote_insn (t2);
5213 }
5214 }
5215 }
5216 }
5217
5218 /* Clean up. */
5219 free (modified_regs);
5220 free (same_regs);
5221 free (all_reset);
5222}
5223\f
5224/* This is like RTX_EQUAL_P except that it knows about our handling of
5225 possibly equivalent registers and knows to consider volatile and
5226 modified objects as not equal.
5227
5228 YINSN is the insn containing Y. */
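/* Illustration (editorial; hypothetical register numbers): a register
   recorded in modified_regs is not accepted as trivially equal to itself;
   with the pending pair same_regs[100] == 105,
   (plus:SI (reg:SI 100) (const_int 4)) matches
   (plus:SI (reg:SI 105) (const_int 4)); and a volatile MEM, or any MEM seen
   after modified_mem is set, matches nothing.  */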
5229
5230int
5231rtx_equal_for_thread_p (x, y, yinsn)
5232 rtx x, y;
5233 rtx yinsn;
5234{
5235 register int i;
5236 register int j;
5237 register enum rtx_code code;
6f7d635c 5238 register const char *fmt;
5239
5240 code = GET_CODE (x);
5241 /* Rtx's of different codes cannot be equal. */
5242 if (code != GET_CODE (y))
5243 return 0;
5244
5245 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
5246 (REG:SI x) and (REG:HI x) are NOT equivalent. */
5247
5248 if (GET_MODE (x) != GET_MODE (y))
5249 return 0;
5250
5251 /* For floating-point, consider everything unequal. This is a bit
5252 pessimistic, but this pass would only rarely do anything for FP
5253 anyway. */
5254 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
5255 && FLOAT_MODE_P (GET_MODE (x)) && ! flag_fast_math)
5256 return 0;
5257
 5258 /* For commutative operations, the RTXs match if the operands match in
 5259 either order. Also handle the simple binary and unary cases without a loop. */
5260 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5261 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
5262 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
5263 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
5264 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
413c72c2 5265 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
5266 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
5267 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
413c72c2 5268 else if (GET_RTX_CLASS (code) == '1')
c5ea5f3b 5269 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
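  /* Illustration (editorial; hypothetical operands): PLUS is commutative, so
     (plus:SI (reg:SI 100) (const_int 4)) may match
     (plus:SI (const_int 4) (reg:SI 105)) when regs 100 and 105 are (or
     become) equivalent -- both operand orders are tried above.  MINUS is not
     commutative; it takes the '2' case and the operands must match in order.  */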
413c72c2 5270
5271 /* Handle special-cases first. */
5272 switch (code)
5273 {
5274 case REG:
5275 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
5276 return 1;
5277
 5278 /* If neither is a user variable or a hard register, check for possible
 5279 equivalence. */
5280 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
5281 || REGNO (x) < FIRST_PSEUDO_REGISTER
5282 || REGNO (y) < FIRST_PSEUDO_REGISTER)
5283 return 0;
5284
5285 if (same_regs[REGNO (x)] == -1)
5286 {
5287 same_regs[REGNO (x)] = REGNO (y);
5288 num_same_regs++;
5289
5290 /* If this is the first time we are seeing a register on the `Y'
5291 side, see if it is the last use. If not, we can't thread the
5292 jump, so mark it as not equivalent. */
b1f21e0a 5293 if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
5294 return 0;
5295
5296 return 1;
5297 }
5298 else
770ae6cc 5299 return (same_regs[REGNO (x)] == (int) REGNO (y));
5300
5301 break;
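      /* The rules above, by way of a hypothetical example (editorial): the
	 first time pseudo (reg 100) on the X side lines up with pseudo
	 (reg 105) on the Y side, the pair is recorded -- but only if YINSN
	 is the last use of reg 105, since threading must not change a later
	 use of it.  From then on reg 100 may match only reg 105; hard
	 registers and user variables are never paired.  */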
5302
5303 case MEM:
6dc42e49 5304 /* If memory modified or either volatile, not equivalent.
0f41302f 5305 Else, check address. */
5306 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
5307 return 0;
5308
5309 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
5310
5311 case ASM_INPUT:
5312 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
5313 return 0;
5314
5315 break;
5316
5317 case SET:
5318 /* Cancel a pending `same_regs' if setting equivalenced registers.
5319 Then process source. */
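      /* Hypothetical example (editorial): with the pair 100 <-> 105 pending,
	 comparing (set (reg:SI 100) ...) against (set (reg:SI 105) ...)
	 discharges the pair -- each side now computes the value for itself --
	 and only the sources remain to be compared.  Sets of registers that
	 are neither identical nor a pending pair make the insns unequal.  */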
5320 if (GET_CODE (SET_DEST (x)) == REG
5321 && GET_CODE (SET_DEST (y)) == REG)
5322 {
770ae6cc 5323 if (same_regs[REGNO (SET_DEST (x))] == (int) REGNO (SET_DEST (y)))
5324 {
5325 same_regs[REGNO (SET_DEST (x))] = -1;
5326 num_same_regs--;
5327 }
5328 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
5329 return 0;
5330 }
5331 else
5332 if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
5333 return 0;
5334
5335 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
5336
5337 case LABEL_REF:
5338 return XEXP (x, 0) == XEXP (y, 0);
5339
5340 case SYMBOL_REF:
5341 return XSTR (x, 0) == XSTR (y, 0);
5342
5343 default:
5344 break;
5345 }
5346
5347 if (x == y)
5348 return 1;
5349
5350 fmt = GET_RTX_FORMAT (code);
5351 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5352 {
5353 switch (fmt[i])
5354 {
5355 case 'w':
5356 if (XWINT (x, i) != XWINT (y, i))
5357 return 0;
5358 break;
5359
5360 case 'n':
5361 case 'i':
5362 if (XINT (x, i) != XINT (y, i))
5363 return 0;
5364 break;
5365
5366 case 'V':
5367 case 'E':
5368 /* Two vectors must have the same length. */
5369 if (XVECLEN (x, i) != XVECLEN (y, i))
5370 return 0;
5371
5372 /* And the corresponding elements must match. */
5373 for (j = 0; j < XVECLEN (x, i); j++)
5374 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
5375 XVECEXP (y, i, j), yinsn) == 0)
5376 return 0;
5377 break;
5378
5379 case 'e':
5380 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
5381 return 0;
5382 break;
5383
5384 case 'S':
5385 case 's':
5386 if (strcmp (XSTR (x, i), XSTR (y, i)))
5387 return 0;
5388 break;
5389
5390 case 'u':
5391 /* These are just backpointers, so they don't matter. */
5392 break;
5393
5394 case '0':
8f985ec4 5395 case 't':
5396 break;
5397
5398 /* It is believed that rtx's at this level will never
5399 contain anything but integers and other rtx's,
5400 except for within LABEL_REFs and SYMBOL_REFs. */
5401 default:
5402 abort ();
5403 }
5404 }
5405 return 1;
5406}
5407\f
5408
341a243e 5409#if !defined(HAVE_cc0) && !defined(HAVE_conditional_arithmetic)
 5410/* Return the insn that NEW can safely be inserted in front of, starting
 5411 the search at the jump insn INSN. Return 0 if it is not safe to do this
 5412 jump optimization. Note that NEW must contain a single set. */
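/* Illustrative example (editorial; hypothetical pattern and register
   numbers): if NEW is
       (parallel [(set (reg:SI 100) (plus:SI (reg:SI 101) (reg:SI 102)))
		  (clobber (reg:CC 17))])
   and the jump INSN uses hard register 17 (say, the condition code), NEW
   cannot go directly before INSN.  If the previous active insn is the
   compare that sets register 17 and it does not mention reg 100, NEW can be
   inserted before that compare instead; otherwise we give up and return 0.  */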
5413
5414static rtx
5415find_insert_position (insn, new)
5416 rtx insn;
5417 rtx new;
5418{
5419 int i;
5420 rtx prev;
5421
 5422 /* If NEW contains no clobbers, it is safe to insert NEW before INSN. */
5423 if (GET_CODE (PATTERN (new)) != PARALLEL)
5424 return insn;
5425
5426 for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
5427 if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
5428 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5429 insn))
5430 break;
5431
5432 if (i < 0)
5433 return insn;
5434
5435 /* There is a good chance that the previous insn PREV sets the thing
5436 being clobbered (often the CC in a hard reg). If PREV does not
5437 use what NEW sets, we can insert NEW before PREV. */
5438
5439 prev = prev_active_insn (insn);
5440 for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
5441 if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
5442 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5443 insn)
5444 && ! modified_in_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5445 prev))
5446 return 0;
5447
5448 return reg_mentioned_p (SET_DEST (single_set (new)), prev) ? 0 : prev;
5449}
06abe5a4 5450#endif /* !HAVE_cc0 */