]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/ifcvt.c
* Makefile.in, alias.c, basic-block.h, bb-reorder.c, bitmap.c,
[thirdparty/gcc.git] / gcc / ifcvt.c
1 /* If-conversion support.
2 Copyright (C) 2000, 2001 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23
24 #include "rtl.h"
25 #include "regs.h"
26 #include "function.h"
27 #include "flags.h"
28 #include "insn-config.h"
29 #include "recog.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
32 #include "expr.h"
33 #include "real.h"
34 #include "output.h"
35 #include "toplev.h"
36 #include "tm_p.h"
37
38
/* Provide zero fall-back definitions for the optional named patterns,
   so the rest of this file can test them unconditionally instead of
   wrapping every use in #ifdef.  */

#ifndef HAVE_conditional_execution
#define HAVE_conditional_execution 0
#endif
#ifndef HAVE_conditional_move
#define HAVE_conditional_move 0
#endif
#ifndef HAVE_incscc
#define HAVE_incscc 0
#endif
#ifndef HAVE_decscc
#define HAVE_decscc 0
#endif
#ifndef HAVE_trap
#define HAVE_trap 0
#endif
#ifndef HAVE_conditional_trap
#define HAVE_conditional_trap 0
#endif

/* Maximum number of insns we are willing to convert to conditional
   execution per block; targets may override, otherwise it scales with
   the cost of a branch.  */
#ifndef MAX_CONDITIONAL_EXECUTE
#define MAX_CONDITIONAL_EXECUTE   (BRANCH_COST + 1)
#endif

/* Typed null constants for edges and basic blocks.  */
#define NULL_EDGE	((struct edge_def *)NULL)
#define NULL_BLOCK	((struct basic_block_def *)NULL)
64
/* # of IF-THEN or IF-THEN-ELSE blocks we looked at.  */
static int num_possible_if_blocks;

/* # of IF-THEN or IF-THEN-ELSE blocks that were converted to
   conditional execution.  */
static int num_updated_if_blocks;

/* # of basic blocks that were removed.  */
static int num_removed_blocks;

/* True if life data is valid at present.  */
static bool life_data_ok;

/* The post-dominator relation on the original block numbers.  */
static sbitmap *post_dominators;
80
/* Forward references for the static functions defined below.  */
static int count_bb_insns		PARAMS ((basic_block));
static rtx first_active_insn		PARAMS ((basic_block));
static int last_active_insn_p		PARAMS ((basic_block, rtx));
static int seq_contains_jump		PARAMS ((rtx));

static int cond_exec_process_insns	PARAMS ((rtx, rtx, rtx, rtx, int));
static rtx cond_exec_get_condition	PARAMS ((rtx));
static int cond_exec_process_if_block	PARAMS ((basic_block, basic_block,
						 basic_block, basic_block));

static rtx noce_get_condition		PARAMS ((rtx, rtx *));
static int noce_operand_ok		PARAMS ((rtx));
static int noce_process_if_block	PARAMS ((basic_block, basic_block,
						 basic_block, basic_block));

static int process_if_block		PARAMS ((basic_block, basic_block,
						 basic_block, basic_block));
static void merge_if_block		PARAMS ((basic_block, basic_block,
						 basic_block, basic_block));

static int find_if_header		PARAMS ((basic_block));
static int find_if_block		PARAMS ((basic_block, edge, edge));
static int find_if_case_1		PARAMS ((basic_block, edge, edge));
static int find_if_case_2		PARAMS ((basic_block, edge, edge));
static int find_cond_trap		PARAMS ((basic_block, edge, edge));
static int find_memory			PARAMS ((rtx *, void *));
static int dead_or_predicable		PARAMS ((basic_block, basic_block,
						 basic_block, basic_block, int));
static void noce_emit_move_insn	PARAMS ((rtx, rtx));
111 \f
/* Abuse the basic_block AUX field to store the original block index,
   as well as a flag indicating that the block should be rescanned for
   life analysis.  The index lives in the upper bits; bit 0 is the
   "rescan for life analysis" flag.  */

#define SET_ORIG_INDEX(BB,I)	((BB)->aux = (void *)((size_t)(I) << 1))
#define ORIG_INDEX(BB)		((size_t)(BB)->aux >> 1)
#define SET_UPDATE_LIFE(BB)	((BB)->aux = (void *)((size_t)(BB)->aux | 1))
#define UPDATE_LIFE(BB)		((size_t)(BB)->aux & 1)
120
121 \f
122 /* Count the number of non-jump active insns in BB. */
123
124 static int
125 count_bb_insns (bb)
126 basic_block bb;
127 {
128 int count = 0;
129 rtx insn = bb->head;
130
131 while (1)
132 {
133 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
134 count++;
135
136 if (insn == bb->end)
137 break;
138 insn = NEXT_INSN (insn);
139 }
140
141 return count;
142 }
143
144 /* Return the first non-jump active insn in the basic block. */
145
146 static rtx
147 first_active_insn (bb)
148 basic_block bb;
149 {
150 rtx insn = bb->head;
151
152 if (GET_CODE (insn) == CODE_LABEL)
153 {
154 if (insn == bb->end)
155 return NULL_RTX;
156 insn = NEXT_INSN (insn);
157 }
158
159 while (GET_CODE (insn) == NOTE)
160 {
161 if (insn == bb->end)
162 return NULL_RTX;
163 insn = NEXT_INSN (insn);
164 }
165
166 if (GET_CODE (insn) == JUMP_INSN)
167 return NULL_RTX;
168
169 return insn;
170 }
171
172 /* Return true if INSN is the last active non-jump insn in BB. */
173
174 static int
175 last_active_insn_p (bb, insn)
176 basic_block bb;
177 rtx insn;
178 {
179 do
180 {
181 if (insn == bb->end)
182 return TRUE;
183 insn = NEXT_INSN (insn);
184 }
185 while (GET_CODE (insn) == NOTE);
186
187 return GET_CODE (insn) == JUMP_INSN;
188 }
189
190 /* It is possible, especially when having dealt with multi-word
191 arithmetic, for the expanders to have emitted jumps. Search
192 through the sequence and return TRUE if a jump exists so that
193 we can abort the conversion. */
194
195 static int
196 seq_contains_jump (insn)
197 rtx insn;
198 {
199 while (insn)
200 {
201 if (GET_CODE (insn) == JUMP_INSN)
202 return 1;
203 insn = NEXT_INSN (insn);
204 }
205 return 0;
206 }
207 \f
/* Go through a bunch of insns, converting them to conditional
   execution format if possible.  Return TRUE if all of the non-note
   insns were processed.

   The COND_EXEC replacements are queued with validate_change in group
   mode (last argument 1); nothing takes effect until the caller runs
   apply_change_group, so a failure here can be cleanly undone with
   cancel_changes.  */

static int
cond_exec_process_insns (start, end, test, prob_val, mod_ok)
     rtx start;			/* first insn to look at */
     rtx end;			/* last insn to look at */
     rtx test;			/* conditional execution test */
     rtx prob_val;		/* probability of branch taken.  */
     int mod_ok;		/* true if modifications ok last insn.  */
{
  int must_be_last = FALSE;	/* set when an insn clobbers TEST; only the
				   last insn in the range may do that.  */
  rtx insn;
  rtx pattern;

  for (insn = start; ; insn = NEXT_INSN (insn))
    {
      /* Notes need no conversion.  */
      if (GET_CODE (insn) == NOTE)
	goto insn_done;

      if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
	abort ();

      /* Remove USE insns that get in the way.  */
      if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
	{
	  /* ??? Ug.  Actually unlinking the thing is problematic,
	     given what we'd have to coordinate with our callers.
	     Turn the USE into a deleted note in place instead.  */
	  PUT_CODE (insn, NOTE);
	  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	  NOTE_SOURCE_FILE (insn) = 0;
	  goto insn_done;
	}

      /* Last insn wasn't last?  A previous insn modified TEST but more
	 active insns follow; the predicate would be stale.  */
      if (must_be_last)
	return FALSE;

      if (modified_in_p (test, insn))
	{
	  if (!mod_ok)
	    return FALSE;
	  must_be_last = TRUE;
	}

      /* Now build the conditional form of the instruction.  */
      pattern = PATTERN (insn);

      /* If the machine needs to modify the insn being conditionally executed,
         say for example to force a constant integer operand into a temp
         register, do so here.  */
#ifdef IFCVT_MODIFY_INSN
      IFCVT_MODIFY_INSN (pattern, insn);
      if (! pattern)
	return FALSE;
#endif

      /* Queue the replacement PATTERN -> (cond_exec TEST PATTERN).  */
      validate_change (insn, &PATTERN (insn),
		       gen_rtx_COND_EXEC (VOIDmode, copy_rtx (test),
					  pattern), 1);

      /* A predicated call keeps the branch probability as a note.  */
      if (GET_CODE (insn) == CALL_INSN && prob_val)
	validate_change (insn, &REG_NOTES (insn),
			 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
					  REG_NOTES (insn)), 1);

    insn_done:
      if (insn == end)
	break;
    }

  return TRUE;
}
282
283 /* Return the condition for a jump. Do not do any special processing. */
284
285 static rtx
286 cond_exec_get_condition (jump)
287 rtx jump;
288 {
289 rtx test_if, cond;
290
291 if (any_condjump_p (jump))
292 test_if = SET_SRC (pc_set (jump));
293 else
294 return NULL_RTX;
295 cond = XEXP (test_if, 0);
296
297 /* If this branches to JUMP_LABEL when the condition is false,
298 reverse the condition. */
299 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
300 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
301 {
302 enum rtx_code rev = reversed_comparison_code (cond, jump);
303 if (rev == UNKNOWN)
304 return NULL_RTX;
305
306 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
307 XEXP (cond, 1));
308 }
309
310 return cond;
311 }
312
/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
   to conditional execution.  Return TRUE if we were successful at
   converting the block.  */

static int
cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb)
     basic_block test_bb;	/* Basic block test is in */
     basic_block then_bb;	/* Basic block for THEN block */
     basic_block else_bb;	/* Basic block for ELSE block */
     basic_block join_bb;	/* Basic block the join label is in */
{
  rtx test_expr;		/* expression in IF_THEN_ELSE that is tested */
  rtx then_start;		/* first insn in THEN block */
  rtx then_end;			/* last insn + 1 in THEN block */
  rtx else_start = NULL_RTX;	/* first insn in ELSE block or NULL */
  rtx else_end = NULL_RTX;	/* last insn + 1 in ELSE block */
  int max;			/* max # of insns to convert.  */
  int then_mod_ok;		/* whether conditional mods are ok in THEN */
  rtx true_expr;		/* test for else block insns */
  rtx false_expr;		/* test for then block insns */
  rtx true_prob_val;		/* probability of else block */
  rtx false_prob_val;		/* probability of then block */
  int n_insns;
  enum rtx_code false_code;

  /* Find the conditional jump to the ELSE or JOIN part, and isolate
     the test.  */
  test_expr = cond_exec_get_condition (test_bb->end);
  if (! test_expr)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump,
     then we can not do conditional execution conversion on this block.  */
  if (!onlyjump_p (test_bb->end))
    return FALSE;

  /* Collect the bounds of where we're to search.  */

  then_start = then_bb->head;
  then_end = then_bb->end;

  /* Skip a label heading THEN block.  */
  if (GET_CODE (then_start) == CODE_LABEL)
    then_start = NEXT_INSN (then_start);

  /* Skip a (use (const_int 0)) or branch as the final insn.  */
  if (GET_CODE (then_end) == INSN
      && GET_CODE (PATTERN (then_end)) == USE
      && GET_CODE (XEXP (PATTERN (then_end), 0)) == CONST_INT)
    then_end = PREV_INSN (then_end);
  else if (GET_CODE (then_end) == JUMP_INSN)
    then_end = PREV_INSN (then_end);

  if (else_bb)
    {
      /* Skip the ELSE block's label.  */
      else_start = NEXT_INSN (else_bb->head);
      else_end = else_bb->end;

      /* Skip a (use (const_int 0)) or branch as the final insn.  */
      if (GET_CODE (else_end) == INSN
	  && GET_CODE (PATTERN (else_end)) == USE
	  && GET_CODE (XEXP (PATTERN (else_end), 0)) == CONST_INT)
	else_end = PREV_INSN (else_end);
      else if (GET_CODE (else_end) == JUMP_INSN)
	else_end = PREV_INSN (else_end);
    }

  /* How many instructions should we convert in total?  An IF-THEN-ELSE
     gets twice the budget of a plain IF-THEN.  */
  n_insns = 0;
  if (else_bb)
    {
      max = 2 * MAX_CONDITIONAL_EXECUTE;
      n_insns = count_bb_insns (else_bb);
    }
  else
    max = MAX_CONDITIONAL_EXECUTE;
  n_insns += count_bb_insns (then_bb);
  if (n_insns > max)
    return FALSE;

  /* Map test_expr/test_jump into the appropriate MD tests to use on
     the conditionally executed code.  The branch is taken to the ELSE
     or JOIN part, so THEN insns are predicated on the reversed test
     (FALSE_EXPR) and ELSE insns on the test itself (TRUE_EXPR).  */

  true_expr = test_expr;

  false_code = reversed_comparison_code (true_expr, test_bb->end);
  if (false_code != UNKNOWN)
    false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
				 XEXP (true_expr, 0), XEXP (true_expr, 1));
  else
    false_expr = NULL_RTX;

#ifdef IFCVT_MODIFY_TESTS
  /* If the machine description needs to modify the tests, such as setting a
     conditional execution register from a comparison, it can do so here.  */
  IFCVT_MODIFY_TESTS (true_expr, false_expr, test_bb, then_bb, else_bb,
		      join_bb);

  /* See if the conversion failed.  */
  if (!true_expr || !false_expr)
    goto fail;
#endif

  /* Derive the taken/not-taken probabilities from the branch note,
     if any.  */
  true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
  if (true_prob_val)
    {
      true_prob_val = XEXP (true_prob_val, 0);
      false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
    }
  else
    false_prob_val = NULL_RTX;

  /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
     on the THEN block, since the ELSE block still needs it afterward.  */
  then_mod_ok = (else_bb == NULL_BLOCK);

  /* Go through the THEN and ELSE blocks converting the insns if possible
     to conditional execution.  Changes are queued; nothing is committed
     until apply_change_group below.  */

  if (then_end
      && (! false_expr
	  || ! cond_exec_process_insns (then_start, then_end, false_expr,
					false_prob_val, then_mod_ok)))
    goto fail;

  if (else_bb
      && ! cond_exec_process_insns (else_start, else_end,
				    true_expr, true_prob_val, TRUE))
    goto fail;

  if (! apply_change_group ())
    return FALSE;

#ifdef IFCVT_MODIFY_FINAL
  /* Do any machine dependent final modifications.  */
  IFCVT_MODIFY_FINAL (test_bb, then_bb, else_bb, join_bb);
#endif

  /* Conversion succeeded.  */
  if (rtl_dump_file)
    fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
	     n_insns, (n_insns == 1) ? " was" : "s were");

  /* Merge the blocks!  */
  merge_if_block (test_bb, then_bb, else_bb, join_bb);
  return TRUE;

 fail:
#ifdef IFCVT_MODIFY_CANCEL
  /* Cancel any machine dependent changes.  */
  IFCVT_MODIFY_CANCEL (test_bb, then_bb, else_bb, join_bb);
#endif

  /* Discard all queued validate_change replacements.  */
  cancel_changes (0);
  return FALSE;
}
470 \f
/* Used by noce_process_if_block to communicate with its subroutines.

   The subroutines know that A and B may be evaluated freely.  They
   know that X is a register.  They should insert new instructions
   before cond_earliest.  */

struct noce_if_info
{
  basic_block test_bb;		/* block containing the conditional jump */
  rtx insn_a, insn_b;		/* insns setting X in the THEN/ELSE arms */
  rtx x, a, b;			/* destination and the two source values */
  rtx jump, cond, cond_earliest; /* the jump, its condition, and the first
				    insn of the comparison sequence */
};

static rtx noce_emit_store_flag		PARAMS ((struct noce_if_info *,
						 rtx, int, int));
static int noce_try_store_flag		PARAMS ((struct noce_if_info *));
static int noce_try_store_flag_inc	PARAMS ((struct noce_if_info *));
static int noce_try_store_flag_constants PARAMS ((struct noce_if_info *));
static int noce_try_store_flag_mask	PARAMS ((struct noce_if_info *));
static rtx noce_emit_cmove		PARAMS ((struct noce_if_info *,
						 rtx, enum rtx_code, rtx,
						 rtx, rtx, rtx));
static int noce_try_cmove		PARAMS ((struct noce_if_info *));
static int noce_try_cmove_arith	PARAMS ((struct noce_if_info *));
static rtx noce_get_alt_condition	PARAMS ((struct noce_if_info *,
						 rtx, rtx *));
static int noce_try_minmax		PARAMS ((struct noce_if_info *));
static int noce_try_abs		PARAMS ((struct noce_if_info *));
500
/* Helper function for noce_try_store_flag*.

   Emit insns that store the result of IF_INFO's condition into X.
   If REVERSEP, use the reversed condition.  NORMALIZE is passed on to
   emit_store_flag.  Return the rtx holding the result (X on the direct
   path, possibly another rtx from emit_store_flag), or NULL_RTX on
   failure.  On the direct-emission success path, IF_INFO->cond_earliest
   is updated to the jump.  */

static rtx
noce_emit_store_flag (if_info, x, reversep, normalize)
     struct noce_if_info *if_info;
     rtx x;
     int reversep, normalize;
{
  rtx cond = if_info->cond;
  int cond_complex;
  enum rtx_code code;

  /* "Complex" means one of the comparison operands is not a
     general_operand, so the normal emit_store_flag path can't use it.  */
  cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
		  || ! general_operand (XEXP (cond, 1), VOIDmode));

  /* If earliest == jump, or when the condition is complex, try to
     build the store_flag insn directly.  */

  if (cond_complex)
    cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);

  /* Callers must have checked that the condition is reversible before
     passing REVERSEP.  */
  if (reversep)
    code = reversed_comparison_code (cond, if_info->jump);
  else
    code = GET_CODE (cond);

  if ((if_info->cond_earliest == if_info->jump || cond_complex)
      && (normalize == 0 || STORE_FLAG_VALUE == normalize))
    {
      rtx tmp;

      /* Build (set X (code op0 op1)) by hand and see whether the
	 target recognizes it as-is.  */
      tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
			    XEXP (cond, 1));
      tmp = gen_rtx_SET (VOIDmode, x, tmp);

      start_sequence ();
      tmp = emit_insn (tmp);

      if (recog_memoized (tmp) >= 0)
	{
	  tmp = get_insns ();
	  end_sequence ();
	  emit_insns (tmp);

	  /* The new insn must go where the jump was, not earlier.  */
	  if_info->cond_earliest = if_info->jump;

	  return x;
	}

      end_sequence ();
    }

  /* Don't even try if the comparison operands are weird.  */
  if (cond_complex)
    return NULL_RTX;

  /* Fall back to the generic store-flag expander; the last argument
     marks unsigned comparisons.  */
  return emit_store_flag (x, code, XEXP (cond, 0),
			  XEXP (cond, 1), VOIDmode,
			  (code == LTU || code == LEU
			   || code == GEU || code == GTU), normalize);
}
562
/* Emit an instruction to move Y into X.  X may be an ordinary operand
   (handled by emit_move_insn) or a STRICT_LOW_PART of a SUBREG, in
   which case the move is done as a bit-field store into the inner
   register.  */
static void
noce_emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode outmode, inmode;
  rtx outer, inner;
  int bitpos;

  if (GET_CODE (x) != STRICT_LOW_PART)
    {
      emit_move_insn (x, y);
      return;
    }

  /* X is (strict_low_part OUTER) where OUTER is a subreg of INNER.  */
  outer = XEXP (x, 0);
  inner = XEXP (outer, 0);
  outmode = GET_MODE (outer);
  inmode = GET_MODE (inner);
  /* NOTE(review): bit position is derived directly from SUBREG_BYTE;
     presumably correct for the targets using this path — confirm for
     big-endian subreg numbering.  */
  bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
  store_bit_field (inner, GET_MODE_BITSIZE (outmode),
		   bitpos, outmode, y, GET_MODE_BITSIZE (inmode),
		   GET_MODE_BITSIZE (inmode));
}
587
588 /* Convert "if (test) x = 1; else x = 0".
589
590 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
591 tried in noce_try_store_flag_constants after noce_try_cmove has had
592 a go at the conversion. */
593
594 static int
595 noce_try_store_flag (if_info)
596 struct noce_if_info *if_info;
597 {
598 int reversep;
599 rtx target, seq;
600
601 if (GET_CODE (if_info->b) == CONST_INT
602 && INTVAL (if_info->b) == STORE_FLAG_VALUE
603 && if_info->a == const0_rtx)
604 reversep = 0;
605 else if (if_info->b == const0_rtx
606 && GET_CODE (if_info->a) == CONST_INT
607 && INTVAL (if_info->a) == STORE_FLAG_VALUE
608 && (reversed_comparison_code (if_info->cond, if_info->jump)
609 != UNKNOWN))
610 reversep = 1;
611 else
612 return FALSE;
613
614 start_sequence ();
615
616 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
617 if (target)
618 {
619 if (target != if_info->x)
620 noce_emit_move_insn (if_info->x, target);
621
622 seq = get_insns ();
623 end_sequence ();
624 emit_insns_before (seq, if_info->cond_earliest);
625
626 return TRUE;
627 }
628 else
629 {
630 end_sequence ();
631 return FALSE;
632 }
633 }
634
/* Convert "if (test) x = a; else x = b", for A and B constant.
   The transform computes a store-flag value and then combines it with
   the constants arithmetically (add, shift, or, and/add), choosing the
   cheapest normalization based on STORE_FLAG_VALUE and BRANCH_COST.  */

static int
noce_try_store_flag_constants (if_info)
     struct noce_if_info *if_info;
{
  rtx target, seq;
  int reversep;
  HOST_WIDE_INT itrue, ifalse, diff, tmp;
  int normalize, can_reverse;
  enum machine_mode mode;

  if (! no_new_pseudos
      && GET_CODE (if_info->a) == CONST_INT
      && GET_CODE (if_info->b) == CONST_INT)
    {
      mode = GET_MODE (if_info->x);
      ifalse = INTVAL (if_info->a);
      itrue = INTVAL (if_info->b);
      diff = trunc_int_for_mode (itrue - ifalse, mode);

      can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
		     != UNKNOWN);

      /* Pick the store-flag normalization (0/1/-1) and decide whether
	 the condition should be reversed to put the constants in the
	 form the arithmetic below expects.  */
      reversep = 0;
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
	normalize = 0;
      else if (ifalse == 0 && exact_log2 (itrue) >= 0
	       && (STORE_FLAG_VALUE == 1
		   || BRANCH_COST >= 2))
	normalize = 1;
      else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
	       && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
	normalize = 1, reversep = 1;
      else if (itrue == -1
	       && (STORE_FLAG_VALUE == -1
		   || BRANCH_COST >= 2))
	normalize = -1;
      else if (ifalse == -1 && can_reverse
	       && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
	normalize = -1, reversep = 1;
      else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
	       || BRANCH_COST >= 3)
	normalize = -1;
      else
	return FALSE;

      if (reversep)
	{
	  /* Swap the constants to match the reversed condition.  */
	  tmp = itrue; itrue = ifalse; ifalse = tmp;
	  diff = trunc_int_for_mode (-diff, mode);
	}

      start_sequence ();
      target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
      if (! target)
	{
	  end_sequence ();
	  return FALSE;
	}

      /* if (test) x = 3; else x = 4;
	 =>   x = 3 + (test == 0);  */
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
	{
	  target = expand_simple_binop (mode,
					(diff == STORE_FLAG_VALUE
					 ? PLUS : MINUS),
					GEN_INT (ifalse), target, if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = 8; else x = 0;
	 =>   x = (test != 0) << 3;  */
      else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
	{
	  target = expand_simple_binop (mode, ASHIFT,
					target, GEN_INT (tmp), if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = -1; else x = b;
	 =>   x = -(test != 0) | b;  */
      else if (itrue == -1)
	{
	  target = expand_simple_binop (mode, IOR,
					target, GEN_INT (ifalse), if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = a; else x = b;
	 =>   x = (-(test != 0) & (b - a)) + a;  */
      else
	{
	  target = expand_simple_binop (mode, AND,
					target, GEN_INT (diff), if_info->x, 0,
					OPTAB_WIDEN);
	  if (target)
	    target = expand_simple_binop (mode, PLUS,
					  target, GEN_INT (ifalse),
					  if_info->x, 0, OPTAB_WIDEN);
	}

      if (! target)
	{
	  end_sequence ();
	  return FALSE;
	}

      if (target != if_info->x)
	noce_emit_move_insn (if_info->x, target);

      seq = get_insns ();
      end_sequence ();

      /* The expanders may have emitted jumps (e.g. for multi-word
	 arithmetic); those cannot be converted.  */
      if (seq_contains_jump (seq))
	return FALSE;

      emit_insns_before (seq, if_info->cond_earliest);

      return TRUE;
    }

  return FALSE;
}
760
/* Convert "if (test) foo++" into "foo += (test != 0)", and
   similarly for "foo--".  */

static int
noce_try_store_flag_inc (if_info)
     struct noce_if_info *if_info;
{
  rtx target, seq;
  int subtract, normalize;

  /* Match X = X +/- 1 in the THEN arm with no ELSE arm, provided the
     condition is reversible and the transform looks profitable.  */
  if (! no_new_pseudos
      && (BRANCH_COST >= 2
	  || HAVE_incscc
	  || HAVE_decscc)
      /* Should be no `else' case to worry about.  */
      && if_info->b == if_info->x
      && GET_CODE (if_info->a) == PLUS
      && (XEXP (if_info->a, 1) == const1_rtx
	  || XEXP (if_info->a, 1) == constm1_rtx)
      && rtx_equal_p (XEXP (if_info->a, 0), if_info->x)
      && (reversed_comparison_code (if_info->cond, if_info->jump)
	  != UNKNOWN))
    {
      /* Arrange for the store-flag value to add (or subtract) exactly
	 the +/-1 increment, depending on STORE_FLAG_VALUE.  */
      if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
	subtract = 0, normalize = 0;
      else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
	subtract = 1, normalize = 0;
      else
	subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));

      start_sequence ();

      /* Store the flag into a fresh pseudo; reversep == 1 because the
	 increment happens when the branch is NOT taken.  */
      target = noce_emit_store_flag (if_info,
				     gen_reg_rtx (GET_MODE (if_info->x)),
				     1, normalize);

      if (target)
	target = expand_simple_binop (GET_MODE (if_info->x),
				      subtract ? MINUS : PLUS,
				      if_info->x, target, if_info->x,
				      0, OPTAB_WIDEN);
      if (target)
	{
	  if (target != if_info->x)
	    noce_emit_move_insn (if_info->x, target);

	  seq = get_insns ();
	  end_sequence ();

	  /* Bail if the expanders emitted any jumps.  */
	  if (seq_contains_jump (seq))
	    return FALSE;

	  emit_insns_before (seq, if_info->cond_earliest);

	  return TRUE;
	}

      end_sequence ();
    }

  return FALSE;
}
823
/* Convert "if (test) x = 0;" to "x &= -(test == 0);".  */

static int
noce_try_store_flag_mask (if_info)
     struct noce_if_info *if_info;
{
  rtx target, seq;
  int reversep;

  /* Either the THEN arm stores 0 (use the condition as-is), or the
     ELSE arm stores 0 (reverse the condition).  Note REVERSEP is
     deliberately assigned inside the condition: it is only set on
     the second alternative.  */
  reversep = 0;
  if (! no_new_pseudos
      && (BRANCH_COST >= 2
	  || STORE_FLAG_VALUE == -1)
      && ((if_info->a == const0_rtx
	   && rtx_equal_p (if_info->b, if_info->x))
	  || ((reversep = (reversed_comparison_code (if_info->cond,
						     if_info->jump)
			   != UNKNOWN))
	      && if_info->b == const0_rtx
	      && rtx_equal_p (if_info->a, if_info->x))))
    {
      /* Build an all-ones/all-zeros mask (normalize == -1) and AND it
	 into X.  */
      start_sequence ();
      target = noce_emit_store_flag (if_info,
				     gen_reg_rtx (GET_MODE (if_info->x)),
				     reversep, -1);
      if (target)
	target = expand_simple_binop (GET_MODE (if_info->x), AND,
				      if_info->x, target, if_info->x, 0,
				      OPTAB_WIDEN);

      if (target)
	{
	  if (target != if_info->x)
	    noce_emit_move_insn (if_info->x, target);

	  seq = get_insns ();
	  end_sequence ();

	  /* Bail if the expanders emitted any jumps.  */
	  if (seq_contains_jump (seq))
	    return FALSE;

	  emit_insns_before (seq, if_info->cond_earliest);

	  return TRUE;
	}

      end_sequence ();
    }

  return FALSE;
}
875
/* Helper function for noce_try_cmove and noce_try_cmove_arith.

   Emit a conditional move of VTRUE/VFALSE into X, selected by the
   comparison (CODE CMP_A CMP_B).  Return the rtx holding the result,
   or NULL_RTX on failure.  */

static rtx
noce_emit_cmove (if_info, x, code, cmp_a, cmp_b, vfalse, vtrue)
     struct noce_if_info *if_info;
     rtx x, cmp_a, cmp_b, vfalse, vtrue;
     enum rtx_code code;
{
  /* If earliest == jump, try to build the cmove insn directly.
     This is helpful when combine has created some complex condition
     (like for alpha's cmovlbs) that we can't hope to regenerate
     through the normal interface.  */

  if (if_info->cond_earliest == if_info->jump)
    {
      rtx tmp;

      /* Hand-build (set X (if_then_else (CODE CMP_A CMP_B) VTRUE VFALSE))
	 and see whether the target recognizes it.  */
      tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
      tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
      tmp = gen_rtx_SET (VOIDmode, x, tmp);

      start_sequence ();
      tmp = emit_insn (tmp);

      if (recog_memoized (tmp) >= 0)
	{
	  tmp = get_insns ();
	  end_sequence ();
	  emit_insns (tmp);

	  return x;
	}

      end_sequence ();
    }

  /* Don't even try if the comparison operands are weird.  */
  if (! general_operand (cmp_a, GET_MODE (cmp_a))
      || ! general_operand (cmp_b, GET_MODE (cmp_b)))
    return NULL_RTX;

#if HAVE_conditional_move
  return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
				vtrue, vfalse, GET_MODE (x),
				(code == LTU || code == GEU
				 || code == LEU || code == GTU));
#else
  /* We'll never get here, as noce_process_if_block doesn't call the
     functions involved.  Ifdef code, however, should be discouraged
     because it leads to typos in the code not selected.  However,
     emit_conditional_move won't exist either.  */
  return NULL_RTX;
#endif
}
930
931 /* Try only simple constants and registers here. More complex cases
932 are handled in noce_try_cmove_arith after noce_try_store_flag_arith
933 has had a go at it. */
934
935 static int
936 noce_try_cmove (if_info)
937 struct noce_if_info *if_info;
938 {
939 enum rtx_code code;
940 rtx target, seq;
941
942 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
943 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
944 {
945 start_sequence ();
946
947 code = GET_CODE (if_info->cond);
948 target = noce_emit_cmove (if_info, if_info->x, code,
949 XEXP (if_info->cond, 0),
950 XEXP (if_info->cond, 1),
951 if_info->a, if_info->b);
952
953 if (target)
954 {
955 if (target != if_info->x)
956 noce_emit_move_insn (if_info->x, target);
957
958 seq = get_insns ();
959 end_sequence ();
960 emit_insns_before (seq, if_info->cond_earliest);
961 return TRUE;
962 }
963 else
964 {
965 end_sequence ();
966 return FALSE;
967 }
968 }
969
970 return FALSE;
971 }
972
/* Try more complex cases involving conditional_move: arms that are
   arbitrary expressions are evaluated unconditionally into registers
   first, then a cmove selects between the registers.  Two MEM arms
   are handled by cmov'ing their addresses and doing a single load.  */

static int
noce_try_cmove_arith (if_info)
     struct noce_if_info *if_info;
{
  rtx a = if_info->a;
  rtx b = if_info->b;
  rtx x = if_info->x;
  rtx insn_a, insn_b;
  rtx tmp, target;
  int is_mem = 0;
  enum rtx_code code;

  /* A conditional move from two memory sources is equivalent to a
     conditional on their addresses followed by a load.  Don't do this
     early because it'll screw alias analysis.  Note that we've
     already checked for no side effects.  */
  if (! no_new_pseudos && cse_not_expected
      && GET_CODE (a) == MEM && GET_CODE (b) == MEM
      && BRANCH_COST >= 5)
    {
      /* Work on the addresses; X becomes a pseudo to hold the
	 selected address.  */
      a = XEXP (a, 0);
      b = XEXP (b, 0);
      x = gen_reg_rtx (Pmode);
      is_mem = 1;
    }

  /* ??? We could handle this if we knew that a load from A or B could
     not fault.  This is also true if we've already loaded
     from the address along the path from ENTRY.  */
  else if (may_trap_p (a) || may_trap_p (b))
    return FALSE;

  /* if (test) x = a + b; else x = c - d;
     => y = a + b;
        x = c - d;
	if (test)
	  x = y;
  */

  code = GET_CODE (if_info->cond);
  insn_a = if_info->insn_a;
  insn_b = if_info->insn_b;

  /* Possibly rearrange operands to make things come out more natural.
     Prefer B in the "false" slot when it is X itself or already a
     general operand.  */
  if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
    {
      int reversep = 0;
      if (rtx_equal_p (b, x))
	reversep = 1;
      else if (general_operand (b, GET_MODE (b)))
	reversep = 1;

      if (reversep)
	{
	  code = reversed_comparison_code (if_info->cond, if_info->jump);
	  tmp = a, a = b, b = tmp;
	  tmp = insn_a, insn_a = insn_b, insn_b = tmp;
	}
    }

  start_sequence ();

  /* If either operand is complex, load it into a register first.
     The best way to do this is to copy the original insn.  In this
     way we preserve any clobbers etc that the insn may have had.
     This is of course not possible in the IS_MEM case.  */
  if (! general_operand (a, GET_MODE (a)))
    {
      rtx set;

      if (no_new_pseudos)
	goto end_seq_and_fail;

      if (is_mem)
	{
	  tmp = gen_reg_rtx (GET_MODE (a));
	  tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
	}
      else if (! insn_a)
	goto end_seq_and_fail;
      else
	{
	  /* Re-emit a copy of the original insn with its destination
	     redirected to a fresh pseudo.  */
	  a = gen_reg_rtx (GET_MODE (a));
	  tmp = copy_rtx (insn_a);
	  set = single_set (tmp);
	  SET_DEST (set) = a;
	  tmp = emit_insn (PATTERN (tmp));
	}
      if (recog_memoized (tmp) < 0)
	goto end_seq_and_fail;
    }
  if (! general_operand (b, GET_MODE (b)))
    {
      rtx set;

      if (no_new_pseudos)
	goto end_seq_and_fail;

      if (is_mem)
	{
	  tmp = gen_reg_rtx (GET_MODE (b));
	  tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));
	}
      else if (! insn_b)
	goto end_seq_and_fail;
      else
	{
	  /* Same trick for B's insn.  */
	  b = gen_reg_rtx (GET_MODE (b));
	  tmp = copy_rtx (insn_b);
	  set = single_set (tmp);
	  SET_DEST (set) = b;
	  tmp = emit_insn (PATTERN (tmp));
	}
      if (recog_memoized (tmp) < 0)
	goto end_seq_and_fail;
    }

  target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
			    XEXP (if_info->cond, 1), a, b);

  if (! target)
    goto end_seq_and_fail;

  /* If we're handling a memory for above, emit the load now.  */
  if (is_mem)
    {
      tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);

      /* Copy over flags as appropriate.  Conservative: volatile if
	 either source was, struct/scalar only if both agreed.  */
      if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
	MEM_VOLATILE_P (tmp) = 1;
      if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
	MEM_IN_STRUCT_P (tmp) = 1;
      if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
	MEM_SCALAR_P (tmp) = 1;
      if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
	set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));

      noce_emit_move_insn (if_info->x, tmp);
    }
  else if (target != x)
    noce_emit_move_insn (x, target);

  tmp = get_insns ();
  end_sequence ();
  emit_insns_before (tmp, if_info->cond_earliest);
  return TRUE;

 end_seq_and_fail:
  end_sequence ();
  return FALSE;
}
1127
1128 /* For most cases, the simplified condition we found is the best
1129 choice, but this is not the case for the min/max/abs transforms.
1130 For these we wish to know that it is A or B in the condition. */
1131
static rtx
noce_get_alt_condition (if_info, target, earliest)
     struct noce_if_info *if_info;
     rtx target;
     rtx *earliest;
{
  rtx cond, set, insn;
  int reverse;

  /* If target is already mentioned in the known condition, return it.  */
  if (reg_mentioned_p (target, if_info->cond))
    {
      *earliest = if_info->cond_earliest;
      return if_info->cond;
    }

  /* Otherwise, dig the raw comparison out of the conditional jump.  */
  set = pc_set (if_info->jump);
  cond = XEXP (SET_SRC (set), 0);
  /* The sense is reversed when the taken arm (operand 2 of the
     IF_THEN_ELSE) is the branch target, i.e. the jump is taken when
     the condition is false.  */
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);

  /* If we're looking for a constant, try to make the conditional
     have that constant in it.  There are two reasons why it may
     not have the constant we want:

     1. GCC may have needed to put the constant in a register, because
        the target can't compare directly against that constant.  For
        this case, we look for a SET immediately before the comparison
        that puts a constant in that register.

     2. GCC may have canonicalized the conditional, for example
	replacing "if x < 4" with "if x <= 3".  We can undo that (or
	make equivalent types of changes) to get the constants we need
	if they're off by one in the right direction.  */

  if (GET_CODE (target) == CONST_INT)
    {
      enum rtx_code code = GET_CODE (if_info->cond);
      rtx op_a = XEXP (if_info->cond, 0);
      rtx op_b = XEXP (if_info->cond, 1);
      rtx prev_insn;

      /* First, look to see if we put a constant in a register.  */
      prev_insn = PREV_INSN (if_info->cond_earliest);
      if (prev_insn
	  && INSN_P (prev_insn)
	  && GET_CODE (PATTERN (prev_insn)) == SET)
	{
	  /* Prefer a REG_EQUAL/REG_EQUIV note over the raw SET_SRC,
	     since the note records the known constant value.  */
	  rtx src = find_reg_equal_equiv_note (prev_insn);
	  if (!src)
	    src = SET_SRC (PATTERN (prev_insn));
	  if (GET_CODE (src) == CONST_INT)
	    {
	      /* Substitute the constant for whichever operand the
		 preceding SET defined.  */
	      if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
		op_a = src;
	      else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
		op_b = src;

	      /* Canonicalize: keep any constant as the second operand,
		 swapping the comparison code to compensate.  */
	      if (GET_CODE (op_a) == CONST_INT)
		{
		  rtx tmp = op_a;
		  op_a = op_b;
		  op_b = tmp;
		  code = swap_condition (code);
		}
	    }
	}

      /* Now, look to see if we can get the right constant by
	 adjusting the conditional.  Each case nudges the constant by
	 one and weakens/strengthens the comparison to match, e.g.
	 "x < C+1" becomes "x <= C".  */
      if (GET_CODE (op_b) == CONST_INT)
	{
	  HOST_WIDE_INT desired_val = INTVAL (target);
	  HOST_WIDE_INT actual_val = INTVAL (op_b);

	  switch (code)
	    {
	    case LT:
	      if (actual_val == desired_val + 1)
		{
		  code = LE;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case LE:
	      if (actual_val == desired_val - 1)
		{
		  code = LT;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case GT:
	      if (actual_val == desired_val - 1)
		{
		  code = GE;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case GE:
	      if (actual_val == desired_val + 1)
		{
		  code = GT;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    default:
	      break;
	    }
	}

      /* If we made any changes, generate a new conditional that is
	 equivalent to what we started with, but has the right
	 constants in it.  */
      if (code != GET_CODE (if_info->cond)
	  || op_a != XEXP (if_info->cond, 0)
	  || op_b != XEXP (if_info->cond, 1))
	{
	  cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
	  *earliest = if_info->cond_earliest;
	  return cond;
	}
    }

  /* Fall back on the generic machinery to recover the comparison.  */
  cond = canonicalize_condition (if_info->jump, cond, reverse,
				 earliest, target);
  if (! cond || ! reg_mentioned_p (target, cond))
    return NULL;

  /* We almost certainly searched back to a different place.
     Need to re-verify correct lifetimes.  */

  /* X may not be mentioned in the range (cond_earliest, jump].  */
  for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
    if (INSN_P (insn) && reg_mentioned_p (if_info->x, insn))
      return NULL;

  /* A and B may not be modified in the range [cond_earliest, jump).  */
  for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (modified_in_p (if_info->a, insn)
	    || modified_in_p (if_info->b, insn)))
      return NULL;

  return cond;
}
1278
1279 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1280
1281 static int
1282 noce_try_minmax (if_info)
1283 struct noce_if_info *if_info;
1284 {
1285 rtx cond, earliest, target, seq;
1286 enum rtx_code code, op;
1287 int unsignedp;
1288
1289 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1290 if (no_new_pseudos)
1291 return FALSE;
1292
1293 /* ??? Reject FP modes since we don't know how 0 vs -0 or NaNs
1294 will be resolved with an SMIN/SMAX. It wouldn't be too hard
1295 to get the target to tell us... */
1296 if (FLOAT_MODE_P (GET_MODE (if_info->x))
1297 && TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
1298 && ! flag_unsafe_math_optimizations)
1299 return FALSE;
1300
1301 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1302 if (!cond)
1303 return FALSE;
1304
1305 /* Verify the condition is of the form we expect, and canonicalize
1306 the comparison code. */
1307 code = GET_CODE (cond);
1308 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1309 {
1310 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1311 return FALSE;
1312 }
1313 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1314 {
1315 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1316 return FALSE;
1317 code = swap_condition (code);
1318 }
1319 else
1320 return FALSE;
1321
1322 /* Determine what sort of operation this is. Note that the code is for
1323 a taken branch, so the code->operation mapping appears backwards. */
1324 switch (code)
1325 {
1326 case LT:
1327 case LE:
1328 case UNLT:
1329 case UNLE:
1330 op = SMAX;
1331 unsignedp = 0;
1332 break;
1333 case GT:
1334 case GE:
1335 case UNGT:
1336 case UNGE:
1337 op = SMIN;
1338 unsignedp = 0;
1339 break;
1340 case LTU:
1341 case LEU:
1342 op = UMAX;
1343 unsignedp = 1;
1344 break;
1345 case GTU:
1346 case GEU:
1347 op = UMIN;
1348 unsignedp = 1;
1349 break;
1350 default:
1351 return FALSE;
1352 }
1353
1354 start_sequence ();
1355
1356 target = expand_simple_binop (GET_MODE (if_info->x), op,
1357 if_info->a, if_info->b,
1358 if_info->x, unsignedp, OPTAB_WIDEN);
1359 if (! target)
1360 {
1361 end_sequence ();
1362 return FALSE;
1363 }
1364 if (target != if_info->x)
1365 noce_emit_move_insn (if_info->x, target);
1366
1367 seq = get_insns ();
1368 end_sequence ();
1369
1370 if (seq_contains_jump (seq))
1371 return FALSE;
1372
1373 emit_insns_before (seq, earliest);
1374 if_info->cond = cond;
1375 if_info->cond_earliest = earliest;
1376
1377 return TRUE;
1378 }
1379
1380 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc. */
1381
static int
noce_try_abs (if_info)
     struct noce_if_info *if_info;
{
  rtx cond, earliest, target, seq, a, b, c;
  int negate;

  /* ??? Can't guarantee that expand_binop won't create pseudos.  */
  if (no_new_pseudos)
    return FALSE;

  /* Recognize A and B as constituting an ABS or NABS.  One arm must be
     the negation of the other; NEGATE records which way around.  */
  a = if_info->a;
  b = if_info->b;
  if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
    negate = 0;
  else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
    {
      c = a; a = b; b = c;
      negate = 1;
    }
  else
    return FALSE;

  /* Find a form of the condition that mentions B directly.  */
  cond = noce_get_alt_condition (if_info, b, &earliest);
  if (!cond)
    return FALSE;

  /* Verify the condition is of the form we expect: B compared against
     some C (which must turn out to be zero, checked below).  */
  if (rtx_equal_p (XEXP (cond, 0), b))
    c = XEXP (cond, 1);
  else if (rtx_equal_p (XEXP (cond, 1), b))
    c = XEXP (cond, 0);
  else
    return FALSE;

  /* Verify that C is zero.  Search backward through the block for
     a REG_EQUAL note if necessary.  */
  if (REG_P (c))
    {
      rtx insn, note = NULL;
      for (insn = earliest;
	   insn != if_info->test_bb->head;
	   insn = PREV_INSN (insn))
	if (INSN_P (insn)
	    && ((note = find_reg_note (insn, REG_EQUAL, c))
		|| (note = find_reg_note (insn, REG_EQUIV, c))))
	  break;
      if (! note)
	return FALSE;
      c = XEXP (note, 0);
    }
  /* The comparison constant may have been spilled to the constant
     pool; fetch the actual constant in that case.  */
  if (GET_CODE (c) == MEM
      && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
    c = get_pool_constant (XEXP (c, 0));

  /* Work around funny ideas get_condition has wrt canonicalization.
     Note that these rtx constants are known to be CONST_INT, and
     therefore imply integer comparisons.  "x > -1" and "x < 1" are
     both accepted as stand-ins for "x >= 0" / "x <= 0".  */
  if (c == constm1_rtx && GET_CODE (cond) == GT)
    ;
  else if (c == const1_rtx && GET_CODE (cond) == LT)
    ;
  else if (c != CONST0_RTX (GET_MODE (b)))
    return FALSE;

  /* Determine what sort of operation this is.  A branch taken on
     "b < 0" skips the negated arm, so it flips the NABS sense.  */
  switch (GET_CODE (cond))
    {
    case LT:
    case LE:
    case UNLT:
    case UNLE:
      negate = !negate;
      break;
    case GT:
    case GE:
    case UNGT:
    case UNGE:
      break;
    default:
      return FALSE;
    }

  start_sequence ();

  target = expand_simple_unop (GET_MODE (if_info->x), ABS, b, if_info->x, 0);

  /* ??? It's a quandary whether cmove would be better here, especially
     for integers.  Perhaps combine will clean things up.  */
  if (target && negate)
    target = expand_simple_unop (GET_MODE (target), NEG, target, if_info->x, 0);

  if (! target)
    {
      end_sequence ();
      return FALSE;
    }

  if (target != if_info->x)
    noce_emit_move_insn (if_info->x, target);

  seq = get_insns ();
  end_sequence ();

  /* A branching expansion defeats the purpose of if-conversion.  */
  if (seq_contains_jump (seq))
    return FALSE;

  emit_insns_before (seq, earliest);
  if_info->cond = cond;
  if_info->cond_earliest = earliest;

  return TRUE;
}
1497
1498 /* Look for the condition for the jump first. We'd prefer to avoid
1499 get_condition if we can -- it tries to look back for the contents
1500 of an original compare. On targets that use normal integers for
1501 comparisons, e.g. alpha, this is wasteful. */
1502
1503 static rtx
1504 noce_get_condition (jump, earliest)
1505 rtx jump;
1506 rtx *earliest;
1507 {
1508 rtx cond;
1509 rtx set;
1510
1511 /* If the condition variable is a register and is MODE_INT, accept it.
1512 Otherwise, fall back on get_condition. */
1513
1514 if (! any_condjump_p (jump))
1515 return NULL_RTX;
1516
1517 set = pc_set (jump);
1518
1519 cond = XEXP (SET_SRC (set), 0);
1520 if (GET_CODE (XEXP (cond, 0)) == REG
1521 && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_INT)
1522 {
1523 *earliest = jump;
1524
1525 /* If this branches to JUMP_LABEL when the condition is false,
1526 reverse the condition. */
1527 if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1528 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump))
1529 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1530 GET_MODE (cond), XEXP (cond, 0),
1531 XEXP (cond, 1));
1532 }
1533 else
1534 cond = get_condition (jump, earliest);
1535
1536 return cond;
1537 }
1538
1539 /* Return true if OP is ok for if-then-else processing. */
1540
1541 static int
1542 noce_operand_ok (op)
1543 rtx op;
1544 {
1545 /* We special-case memories, so handle any of them with
1546 no address side effects. */
1547 if (GET_CODE (op) == MEM)
1548 return ! side_effects_p (XEXP (op, 0));
1549
1550 if (side_effects_p (op))
1551 return FALSE;
1552
1553 /* ??? Unfortuantely may_trap_p can't look at flag_trapping_math, due to
1554 being linked into the genfoo programs. This is probably a mistake.
1555 With finite operands, most fp operations don't trap. */
1556 if (!flag_trapping_math && FLOAT_MODE_P (GET_MODE (op)))
1557 switch (GET_CODE (op))
1558 {
1559 case DIV:
1560 case MOD:
1561 case UDIV:
1562 case UMOD:
1563 /* ??? This is kinda lame -- almost every target will have forced
1564 the constant into a register first. But given the expense of
1565 division, this is probably for the best. */
1566 return (CONSTANT_P (XEXP (op, 1))
1567 && XEXP (op, 1) != CONST0_RTX (GET_MODE (op))
1568 && ! may_trap_p (XEXP (op, 0)));
1569
1570 default:
1571 switch (GET_RTX_CLASS (GET_CODE (op)))
1572 {
1573 case '1':
1574 return ! may_trap_p (XEXP (op, 0));
1575 case 'c':
1576 case '2':
1577 return ! may_trap_p (XEXP (op, 0)) && ! may_trap_p (XEXP (op, 1));
1578 }
1579 break;
1580 }
1581
1582 return ! may_trap_p (op);
1583 }
1584
/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
   without using conditional execution.  Return TRUE if we were
   successful at converting the block.  */
1588
static int
noce_process_if_block (test_bb, then_bb, else_bb, join_bb)
     basic_block test_bb;	/* Basic block test is in */
     basic_block then_bb;	/* Basic block for THEN block */
     basic_block else_bb;	/* Basic block for ELSE block */
     basic_block join_bb;	/* Basic block the join label is in */
{
  /* We're looking for patterns of the form

     (1) if (...) x = a; else x = b;
     (2) x = b; if (...) x = a;
     (3) if (...) x = a;   // as if with an initial x = x.

     The later patterns require jumps to be more expensive.

     ??? For future expansion, look for multiple X in such patterns.  */

  struct noce_if_info if_info;
  rtx insn_a, insn_b;
  rtx set_a, set_b;
  rtx orig_x, x, a, b;
  rtx jump, cond, insn;

  /* If this is not a standard conditional jump, we can't parse it.  */
  jump = test_bb->end;
  cond = noce_get_condition (jump, &if_info.cond_earliest);
  if (! cond)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump,
     then we can not do if-conversion on this block.  */
  if (! onlyjump_p (jump))
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Look for one of the potential sets.  The THEN block must consist
     of exactly one single-set insn.  */
  insn_a = first_active_insn (then_bb);
  if (! insn_a
      || ! last_active_insn_p (then_bb, insn_a)
      || (set_a = single_set (insn_a)) == NULL_RTX)
    return FALSE;

  x = SET_DEST (set_a);
  a = SET_SRC (set_a);

  /* Look for the other potential set.  Make sure we've got equivalent
     destinations.  */
  /* ??? This is overconservative.  Storing to two different mems is
     as easy as conditionally computing the address.  Storing to a
     single mem merely requires a scratch memory to use as one of the
     destination addresses; often the memory immediately below the
     stack pointer is available for this.  */
  set_b = NULL_RTX;
  if (else_bb)
    {
      /* Pattern (1): the ELSE block must also be a lone single-set
	 insn, storing to the same destination.  */
      insn_b = first_active_insn (else_bb);
      if (! insn_b
	  || ! last_active_insn_p (else_bb, insn_b)
	  || (set_b = single_set (insn_b)) == NULL_RTX
	  || ! rtx_equal_p (x, SET_DEST (set_b)))
	return FALSE;
    }
  else
    {
      /* Pattern (2): look for an "x = b" insn just before the
	 comparison.  It is safe to ignore if X feeds the condition,
	 A, or B itself; fall back to pattern (3) in that case.  */
      insn_b = prev_nonnote_insn (if_info.cond_earliest);
      if (! insn_b
	  || GET_CODE (insn_b) != INSN
	  || (set_b = single_set (insn_b)) == NULL_RTX
	  || ! rtx_equal_p (x, SET_DEST (set_b))
	  || reg_mentioned_p (x, cond)
	  || reg_mentioned_p (x, a)
	  || reg_mentioned_p (x, SET_SRC (set_b)))
	insn_b = set_b = NULL_RTX;
    }
  /* Pattern (3) behaves as if there were an initial "x = x".  */
  b = (set_b ? SET_SRC (set_b) : x);

  /* X may not be mentioned in the range (cond_earliest, jump].  */
  for (insn = jump; insn != if_info.cond_earliest; insn = PREV_INSN (insn))
    if (INSN_P (insn) && reg_mentioned_p (x, insn))
      return FALSE;

  /* A and B may not be modified in the range [cond_earliest, jump).  */
  for (insn = if_info.cond_earliest; insn != jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (modified_in_p (a, insn) || modified_in_p (b, insn)))
      return FALSE;

  /* Only operate on register destinations, and even then avoid extending
     the lifetime of hard registers on small register class machines.  */
  orig_x = x;
  if (GET_CODE (x) != REG
      || (SMALL_REGISTER_CLASSES
	  && REGNO (x) < FIRST_PSEUDO_REGISTER))
    {
      if (no_new_pseudos)
	return FALSE;
      /* Compute into a fresh pseudo; the final copy back to ORIG_X is
	 emitted on the success path below.  */
      x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
				 ? XEXP (x, 0) : x));
    }

  /* Don't operate on sources that may trap or are volatile.  */
  if (! noce_operand_ok (a) || ! noce_operand_ok (b))
    return FALSE;

  /* Set up the info block for our subroutines.  */
  if_info.test_bb = test_bb;
  if_info.cond = cond;
  if_info.jump = jump;
  if_info.insn_a = insn_a;
  if_info.insn_b = insn_b;
  if_info.x = x;
  if_info.a = a;
  if_info.b = b;

  /* Try optimizations in some approximation of a useful order.  */
  /* ??? Should first look to see if X is live incoming at all.  If it
     isn't, we don't need anything but an unconditional set.  */

  /* Look and see if A and B are really the same.  Avoid creating silly
     cmove constructs that no one will fix up later.  */
  if (rtx_equal_p (a, b))
    {
      /* If we have an INSN_B, we don't have to create any new rtl.  Just
	 move the instruction that we already have.  If we don't have an
	 INSN_B, that means that A == X, and we've got a noop move.  In
	 that case don't do anything and let the code below delete INSN_A.  */
      if (insn_b && else_bb)
	{
	  rtx note;

	  if (else_bb && insn_b == else_bb->end)
	    else_bb->end = PREV_INSN (insn_b);
	  reorder_insns (insn_b, insn_b, PREV_INSN (if_info.cond_earliest));

	  /* If there was a REG_EQUAL note, delete it since it may have been
	     true due to this insn being after a jump.  */
	  if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
	    remove_note (insn_b, note);

	  insn_b = NULL_RTX;
	}
      /* If we have "x = b; if (...) x = a;", and x has side-effects, then
	 x must be executed twice.  */
      else if (insn_b && side_effects_p (orig_x))
	return FALSE;

      x = orig_x;
      goto success;
    }

  if (noce_try_store_flag (&if_info))
    goto success;
  if (noce_try_minmax (&if_info))
    goto success;
  if (noce_try_abs (&if_info))
    goto success;
  if (HAVE_conditional_move
      && noce_try_cmove (&if_info))
    goto success;
  if (! HAVE_conditional_execution)
    {
      if (noce_try_store_flag_constants (&if_info))
	goto success;
      if (noce_try_store_flag_inc (&if_info))
	goto success;
      if (noce_try_store_flag_mask (&if_info))
	goto success;
      if (HAVE_conditional_move
	  && noce_try_cmove_arith (&if_info))
	goto success;
    }

  return FALSE;

 success:
  /* The original sets may now be killed.  */
  if (insn_a == then_bb->end)
    then_bb->end = PREV_INSN (insn_a);
  flow_delete_insn (insn_a);

  /* Several special cases here: First, we may have reused insn_b above,
     in which case insn_b is now NULL.  Second, we want to delete insn_b
     if it came from the ELSE block, because it follows the now correct
     write that appears in the TEST block.  However, if we got insn_b from
     the TEST block, it may in fact be loading data needed for the comparison.
     We'll let life_analysis remove the insn if it's really dead.  */
  if (insn_b && else_bb)
    {
      if (insn_b == else_bb->end)
	else_bb->end = PREV_INSN (insn_b);
      flow_delete_insn (insn_b);
    }

  /* The new insns will have been inserted before cond_earliest.  We should
     be able to remove the jump with impunity, but the condition itself may
     have been modified by gcse to be shared across basic blocks.  */
  test_bb->end = PREV_INSN (jump);
  flow_delete_insn (jump);

  /* If we used a temporary, fix it up now.  */
  if (orig_x != x)
    {
      start_sequence ();
      noce_emit_move_insn (orig_x, x);
      insn_b = gen_sequence ();
      end_sequence ();

      test_bb->end = emit_insn_after (insn_b, test_bb->end);
    }

  /* Merge the blocks!  */
  merge_if_block (test_bb, then_bb, else_bb, join_bb);

  return TRUE;
}
1807 \f
1808 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
1809 straight line code. Return true if successful. */
1810
1811 static int
1812 process_if_block (test_bb, then_bb, else_bb, join_bb)
1813 basic_block test_bb; /* Basic block test is in */
1814 basic_block then_bb; /* Basic block for THEN block */
1815 basic_block else_bb; /* Basic block for ELSE block */
1816 basic_block join_bb; /* Basic block the join label is in */
1817 {
1818 if (! reload_completed
1819 && noce_process_if_block (test_bb, then_bb, else_bb, join_bb))
1820 return TRUE;
1821
1822 if (HAVE_conditional_execution
1823 && reload_completed
1824 && cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb))
1825 return TRUE;
1826
1827 return FALSE;
1828 }
1829
1830 /* Merge the blocks and mark for local life update. */
1831
static void
merge_if_block (test_bb, then_bb, else_bb, join_bb)
     basic_block test_bb;	/* Basic block test is in */
     basic_block then_bb;	/* Basic block for THEN block */
     basic_block else_bb;	/* Basic block for ELSE block */
     basic_block join_bb;	/* Basic block the join label is in */
{
  basic_block combo_bb;

  /* All block merging is done into the lower block numbers.  */

  combo_bb = test_bb;

  /* First merge TEST block into THEN block.  This is a no-brainer since
     the THEN block did not have a code label to begin with.  */

  if (life_data_ok)
    COPY_REG_SET (combo_bb->global_live_at_end, then_bb->global_live_at_end);
  merge_blocks_nomove (combo_bb, then_bb);
  num_removed_blocks++;

  /* The ELSE block, if it existed, had a label.  That label count
     will almost always be zero, but odd things can happen when labels
     get their addresses taken.  */
  if (else_bb)
    {
      merge_blocks_nomove (combo_bb, else_bb);
      num_removed_blocks++;
    }

  /* If there was no join block reported, that means it was not adjacent
     to the others, and so we cannot merge them.  */

  if (! join_bb)
    {
      /* The outgoing edge for the current COMBO block should already
	 be correct.  Verify this.  */
      if (combo_bb->succ == NULL_EDGE)
	abort ();

      /* There should still be a branch at the end of the THEN or ELSE
	 blocks taking us to our final destination.  */
      if (GET_CODE (combo_bb->end) != JUMP_INSN)
	abort ();
    }

  /* The JOIN block may have had quite a number of other predecessors too.
     Since we've already merged the TEST, THEN and ELSE blocks, we should
     have only one remaining edge from our if-then-else diamond.  If there
     is more than one remaining edge, it must come from elsewhere.  There
     may be zero incoming edges if the THEN block didn't actually join
     back up (as with a call to abort).  */
  else if ((join_bb->pred == NULL
	    || join_bb->pred->pred_next == NULL)
	   && join_bb != EXIT_BLOCK_PTR)
    {
      /* We can merge the JOIN.  */
      if (life_data_ok)
	COPY_REG_SET (combo_bb->global_live_at_end,
		      join_bb->global_live_at_end);
      merge_blocks_nomove (combo_bb, join_bb);
      num_removed_blocks++;
    }
  else
    {
      /* We cannot merge the JOIN.  */

      /* The outgoing edge for the current COMBO block should already
	 be correct.  Verify this.  */
      if (combo_bb->succ->succ_next != NULL_EDGE
	  || combo_bb->succ->dest != join_bb)
	abort ();

      /* Remove the jump and cruft from the end of the COMBO block.  */
      if (join_bb != EXIT_BLOCK_PTR)
	tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
    }

  /* Make sure we update life info properly.  */
  SET_UPDATE_LIFE (combo_bb);

  num_updated_if_blocks++;
}
1915 \f
1916 /* Find a block ending in a simple IF condition. Return TRUE if
1917 we were able to transform it in some way. */
1918
1919 static int
1920 find_if_header (test_bb)
1921 basic_block test_bb;
1922 {
1923 edge then_edge;
1924 edge else_edge;
1925
1926 /* The kind of block we're looking for has exactly two successors. */
1927 if ((then_edge = test_bb->succ) == NULL_EDGE
1928 || (else_edge = then_edge->succ_next) == NULL_EDGE
1929 || else_edge->succ_next != NULL_EDGE)
1930 return FALSE;
1931
1932 /* Neither edge should be abnormal. */
1933 if ((then_edge->flags & EDGE_COMPLEX)
1934 || (else_edge->flags & EDGE_COMPLEX))
1935 return FALSE;
1936
1937 /* The THEN edge is canonically the one that falls through. */
1938 if (then_edge->flags & EDGE_FALLTHRU)
1939 ;
1940 else if (else_edge->flags & EDGE_FALLTHRU)
1941 {
1942 edge e = else_edge;
1943 else_edge = then_edge;
1944 then_edge = e;
1945 }
1946 else
1947 /* Otherwise this must be a multiway branch of some sort. */
1948 return FALSE;
1949
1950 if (find_if_block (test_bb, then_edge, else_edge))
1951 goto success;
1952 if (HAVE_trap && HAVE_conditional_trap
1953 && find_cond_trap (test_bb, then_edge, else_edge))
1954 goto success;
1955 if (post_dominators
1956 && (! HAVE_conditional_execution || reload_completed))
1957 {
1958 if (find_if_case_1 (test_bb, then_edge, else_edge))
1959 goto success;
1960 if (find_if_case_2 (test_bb, then_edge, else_edge))
1961 goto success;
1962 }
1963
1964 return FALSE;
1965
1966 success:
1967 if (rtl_dump_file)
1968 fprintf (rtl_dump_file, "Conversion succeeded.\n");
1969 return TRUE;
1970 }
1971
/* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
   block.  If so, we'll try to convert the insns to not require the branch.
   Return TRUE if we were successful at converting the block.  */
1975
static int
find_if_block (test_bb, then_edge, else_edge)
     basic_block test_bb;
     edge then_edge, else_edge;
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  basic_block join_bb = NULL_BLOCK;
  edge then_succ = then_bb->succ;
  edge else_succ = else_bb->succ;
  int next_index;

  /* The THEN block of an IF-THEN combo must have exactly one predecessor.  */
  if (then_bb->pred->pred_next != NULL_EDGE)
    return FALSE;

  /* The THEN block of an IF-THEN combo must have zero or one successors.  */
  if (then_succ != NULL_EDGE
      && (then_succ->succ_next != NULL_EDGE
	  || (then_succ->flags & EDGE_COMPLEX)))
    return FALSE;

  /* If the THEN block has no successors, conditional execution can still
     make a conditional call.  Don't do this unless the ELSE block has
     only one incoming edge -- the CFG manipulation is too ugly otherwise.
     Check for the last insn of the THEN block being an indirect jump, which
     is listed as not having any successors, but confuses the rest of the CE
     code processing.  XXX we should fix this in the future.  */
  if (then_succ == NULL)
    {
      if (else_bb->pred->pred_next == NULL_EDGE)
	{
	  rtx last_insn = then_bb->end;

	  /* Skip trailing notes to find the real last insn.  */
	  while (last_insn
		 && GET_CODE (last_insn) == NOTE
		 && last_insn != then_bb->head)
	    last_insn = PREV_INSN (last_insn);

	  /* Reject indirect jumps (any non-simple jump).  */
	  if (last_insn
	      && GET_CODE (last_insn) == JUMP_INSN
	      && ! simplejump_p (last_insn))
	    return FALSE;

	  join_bb = else_bb;
	  else_bb = NULL_BLOCK;
	}
      else
	return FALSE;
    }

  /* If the THEN block's successor is the other edge out of the TEST block,
     then we have an IF-THEN combo without an ELSE.  */
  else if (then_succ->dest == else_bb)
    {
      join_bb = else_bb;
      else_bb = NULL_BLOCK;
    }

  /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
     has exactly one predecessor and one successor, and the outgoing edge
     is not complex, then we have an IF-THEN-ELSE combo.  */
  else if (else_succ != NULL_EDGE
	   && then_succ->dest == else_succ->dest
	   && else_bb->pred->pred_next == NULL_EDGE
	   && else_succ->succ_next == NULL_EDGE
	   && ! (else_succ->flags & EDGE_COMPLEX))
    join_bb = else_succ->dest;

  /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination.  */
  else
    return FALSE;

  num_possible_if_blocks++;

  if (rtl_dump_file)
    {
      if (else_bb)
	fprintf (rtl_dump_file,
		 "\nIF-THEN-ELSE block found, start %d, then %d, else %d, join %d\n",
		 test_bb->index, then_bb->index, else_bb->index,
		 join_bb->index);
      else
	fprintf (rtl_dump_file,
		 "\nIF-THEN block found, start %d, then %d, join %d\n",
		 test_bb->index, then_bb->index, join_bb->index);
    }

  /* Make sure IF, THEN, and ELSE, blocks are adjacent.  Actually, we
     get the first condition for free, since we've already asserted that
     there's a fallthru edge from IF to THEN.  */
  /* ??? As an enhancement, move the ELSE block.  Have to deal with
     BLOCK notes, if by no other means than aborting the merge if they
     exist.  Sticky enough I don't want to think about it now.  */
  next_index = then_bb->index;
  if (else_bb && ++next_index != else_bb->index)
    return FALSE;
  if (++next_index != join_bb->index && join_bb->index != EXIT_BLOCK)
    {
      /* A non-adjacent JOIN can still be handled for IF-THEN-ELSE by
	 simply not merging it; an IF-THEN with a non-adjacent join
	 cannot be converted at all.  */
      if (else_bb)
	join_bb = NULL;
      else
	return FALSE;
    }

  /* Do the real work.  */
  return process_if_block (test_bb, then_bb, else_bb, join_bb);
}
2084
2085 /* Convert a branch over a trap, or a branch to a trap,
2086 into a conditional trap. */
2087
static int
find_cond_trap (test_bb, then_edge, else_edge)
     basic_block test_bb;
     edge then_edge, else_edge;
{
  basic_block then_bb, else_bb, join_bb, trap_bb;
  rtx trap, jump, cond, cond_earliest, seq;
  enum rtx_code code;

  then_bb = then_edge->dest;
  else_bb = else_edge->dest;
  join_bb = NULL;

  /* Locate the block with the trap instruction.  */
  /* ??? While we look for no successors, we really ought to allow
     EH successors.  Need to fix merge_if_block for that to work.  */
  /* ??? We can't currently handle merging the blocks if they are not
     already adjacent.  Prevent lossage in merge_if_block by detecting
     this now.  */
  if (then_bb->succ == NULL)
    {
      /* Branch over the trap: THEN is the trap block, ELSE joins.  */
      trap_bb = then_bb;
      if (else_bb->index != then_bb->index + 1)
	return FALSE;
      join_bb = else_bb;
      else_bb = NULL;
    }
  else if (else_bb->succ == NULL)
    {
      /* Branch to the trap: ELSE is the trap block.  */
      trap_bb = else_bb;
      if (else_bb->index != then_bb->index + 1)
	else_bb = NULL;
      else if (then_bb->succ
	       && ! then_bb->succ->succ_next
	       && ! (then_bb->succ->flags & EDGE_COMPLEX)
	       && then_bb->succ->dest->index == else_bb->index + 1)
	join_bb = then_bb->succ->dest;
    }
  else
    return FALSE;

  /* Don't confuse a conditional return with something we want to
     optimize here.  */
  if (trap_bb == EXIT_BLOCK_PTR)
    return FALSE;

  /* The only instruction in the THEN block must be the trap.  */
  trap = first_active_insn (trap_bb);
  if (! (trap == trap_bb->end
	 && GET_CODE (PATTERN (trap)) == TRAP_IF
	 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
    return FALSE;

  if (rtl_dump_file)
    {
      if (trap_bb == then_bb)
	fprintf (rtl_dump_file,
		 "\nTRAP-IF block found, start %d, trap %d",
		 test_bb->index, then_bb->index);
      else
	fprintf (rtl_dump_file,
		 "\nTRAP-IF block found, start %d, then %d, trap %d",
		 test_bb->index, then_bb->index, trap_bb->index);
      if (join_bb)
	fprintf (rtl_dump_file, ", join %d\n", join_bb->index);
      else
	fputc ('\n', rtl_dump_file);
    }

  /* If this is not a standard conditional jump, we can't parse it.  */
  jump = test_bb->end;
  cond = noce_get_condition (jump, &cond_earliest);
  if (! cond)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump,
     then we can not do if-conversion on this block.  */
  if (! onlyjump_p (jump))
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Reverse the comparison code, if necessary.  The trap should fire
     exactly when the original branch would have reached it.  */
  code = GET_CODE (cond);
  if (then_bb == trap_bb)
    {
      code = reversed_comparison_code (cond, jump);
      if (code == UNKNOWN)
	return FALSE;
    }

  /* Attempt to generate the conditional trap.  */
  seq = gen_cond_trap (code, XEXP (cond, 0), XEXP (cond, 1),
		       TRAP_CODE (PATTERN (trap)));
  if (seq == NULL)
    return FALSE;

  /* Emit the new insns before cond_earliest; delete the old jump
     and trap insns.  */

  emit_insn_before (seq, cond_earliest);

  test_bb->end = PREV_INSN (jump);
  flow_delete_insn (jump);

  trap_bb->end = PREV_INSN (trap);
  flow_delete_insn (trap);

  /* Merge the blocks!  If the trap block was the branch target (and is
     now empty and not adjacent), delete it outright before merging the
     remaining diamond.  */
  if (trap_bb != then_bb && ! else_bb)
    {
      flow_delete_block (trap_bb);
      num_removed_blocks++;
    }
  merge_if_block (test_bb, then_bb, else_bb, join_bb);

  return TRUE;
}
2208
2209 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
2210 transformable, but not necessarily the other. There need be no
2211 JOIN block.
2212
   Return TRUE if we were successful at converting the block.
2214
2215 Cases we'd like to look at:
2216
2217 (1)
2218 if (test) goto over; // x not live
2219 x = a;
2220 goto label;
2221 over:
2222
2223 becomes
2224
2225 x = a;
2226 if (! test) goto label;
2227
2228 (2)
2229 if (test) goto E; // x not live
2230 x = big();
2231 goto L;
2232 E:
2233 x = b;
2234 goto M;
2235
2236 becomes
2237
2238 x = b;
2239 if (test) goto M;
2240 x = big();
2241 goto L;
2242
2243 (3) // This one's really only interesting for targets that can do
2244 // multiway branching, e.g. IA-64 BBB bundles. For other targets
2245 // it results in multiple branches on a cache line, which often
2246 // does not sit well with predictors.
2247
2248 if (test1) goto E; // predicted not taken
2249 x = a;
2250 if (test2) goto F;
2251 ...
2252 E:
2253 x = b;
2254 J:
2255
2256 becomes
2257
2258 x = a;
2259 if (test1) goto E;
2260 if (test2) goto F;
2261
2262 Notes:
2263
2264 (A) Don't do (2) if the branch is predicted against the block we're
2265 eliminating. Do it anyway if we can eliminate a branch; this requires
2266 that the sole successor of the eliminated block postdominate the other
2267 side of the if.
2268
2269 (B) With CE, on (3) we can steal from both sides of the if, creating
2270
2271 if (test1) x = a;
2272 if (!test1) x = b;
2273 if (test1) goto J;
2274 if (test2) goto F;
2275 ...
2276 J:
2277
2278 Again, this is most useful if J postdominates.
2279
2280 (C) CE substitutes for helpful life information.
2281
2282 (D) These heuristics need a lot of work. */
2283
2284 /* Tests for case 1 above. */
2285
2286 static int
2287 find_if_case_1 (test_bb, then_edge, else_edge)
2288 basic_block test_bb;
2289 edge then_edge, else_edge;
2290 {
2291 basic_block then_bb = then_edge->dest;
2292 basic_block else_bb = else_edge->dest, new_bb;
2293 edge then_succ = then_bb->succ;
2294
2295 /* THEN has one successor. */
2296 if (!then_succ || then_succ->succ_next != NULL)
2297 return FALSE;
2298
2299 /* THEN does not fall through, but is not strange either. */
2300 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
2301 return FALSE;
2302
2303 /* THEN has one predecessor. */
2304 if (then_bb->pred->pred_next != NULL)
2305 return FALSE;
2306
2307 /* THEN must do something. */
2308 if (forwarder_block_p (then_bb))
2309 return FALSE;
2310
2311 num_possible_if_blocks++;
2312 if (rtl_dump_file)
2313 fprintf (rtl_dump_file,
2314 "\nIF-CASE-1 found, start %d, then %d\n",
2315 test_bb->index, then_bb->index);
2316
2317 /* THEN is small. */
2318 if (count_bb_insns (then_bb) > BRANCH_COST)
2319 return FALSE;
2320
2321 /* Registers set are dead, or are predicable. */
2322 if (! dead_or_predicable (test_bb, then_bb, else_bb,
2323 then_bb->succ->dest, 1))
2324 return FALSE;
2325
2326 /* Conversion went ok, including moving the insns and fixing up the
2327 jump. Adjust the CFG to match. */
2328
2329 SET_UPDATE_LIFE (test_bb);
2330 bitmap_operation (test_bb->global_live_at_end,
2331 else_bb->global_live_at_start,
2332 then_bb->global_live_at_end, BITMAP_IOR);
2333
2334 new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb), else_bb);
2335 /* Make rest of code believe that the newly created block is the THEN_BB
2336 block we are going to remove. */
2337 if (new_bb)
2338 {
2339 new_bb->aux = then_bb->aux;
2340 SET_UPDATE_LIFE (then_bb);
2341 }
2342 flow_delete_block (then_bb);
2343 /* We've possibly created jump to next insn, cleanup_cfg will solve that
2344 later. */
2345
2346 num_removed_blocks++;
2347 num_updated_if_blocks++;
2348
2349 return TRUE;
2350 }
2351
2352 /* Test for case 2 above. */
2353
2354 static int
2355 find_if_case_2 (test_bb, then_edge, else_edge)
2356 basic_block test_bb;
2357 edge then_edge, else_edge;
2358 {
2359 basic_block then_bb = then_edge->dest;
2360 basic_block else_bb = else_edge->dest;
2361 edge else_succ = else_bb->succ;
2362 rtx note;
2363
2364 /* ELSE has one successor. */
2365 if (!else_succ || else_succ->succ_next != NULL)
2366 return FALSE;
2367
2368 /* ELSE outgoing edge is not complex. */
2369 if (else_succ->flags & EDGE_COMPLEX)
2370 return FALSE;
2371
2372 /* ELSE has one predecessor. */
2373 if (else_bb->pred->pred_next != NULL)
2374 return FALSE;
2375
2376 /* THEN is not EXIT. */
2377 if (then_bb->index < 0)
2378 return FALSE;
2379
2380 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
2381 note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
2382 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
2383 ;
2384 else if (else_succ->dest->index < 0
2385 || TEST_BIT (post_dominators[ORIG_INDEX (then_bb)],
2386 ORIG_INDEX (else_succ->dest)))
2387 ;
2388 else
2389 return FALSE;
2390
2391 num_possible_if_blocks++;
2392 if (rtl_dump_file)
2393 fprintf (rtl_dump_file,
2394 "\nIF-CASE-2 found, start %d, else %d\n",
2395 test_bb->index, else_bb->index);
2396
2397 /* ELSE is small. */
2398 if (count_bb_insns (then_bb) > BRANCH_COST)
2399 return FALSE;
2400
2401 /* Registers set are dead, or are predicable. */
2402 if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
2403 return FALSE;
2404
2405 /* Conversion went ok, including moving the insns and fixing up the
2406 jump. Adjust the CFG to match. */
2407
2408 SET_UPDATE_LIFE (test_bb);
2409 bitmap_operation (test_bb->global_live_at_end,
2410 then_bb->global_live_at_start,
2411 else_bb->global_live_at_end, BITMAP_IOR);
2412
2413 flow_delete_block (else_bb);
2414
2415 num_removed_blocks++;
2416 num_updated_if_blocks++;
2417
2418 /* ??? We may now fallthru from one of THEN's successors into a join
2419 block. Rerun cleanup_cfg? Examine things manually? Wait? */
2420
2421 return TRUE;
2422 }
2423
2424 /* A subroutine of dead_or_predicable called through for_each_rtx.
2425 Return 1 if a memory is found. */
2426
2427 static int
2428 find_memory (px, data)
2429 rtx *px;
2430 void *data ATTRIBUTE_UNUSED;
2431 {
2432 return GET_CODE (*px) == MEM;
2433 }
2434
/* Used by the code above to perform the actual rtl transformations.
   Return TRUE if successful.

   TEST_BB is the block containing the conditional branch.  MERGE_BB
   is the block containing the code to manipulate.  OTHER_BB is the
   opposite arm of the IF; the registers live at its start constrain
   what MERGE_BB's code may clobber.  NEW_DEST is the label TEST_BB
   should be branching to after the conversion.  REVERSEP is true if
   the sense of the branch should be reversed.  */

static int
dead_or_predicable (test_bb, merge_bb, other_bb, new_dest, reversep)
     basic_block test_bb, merge_bb, other_bb;
     basic_block new_dest;
     int reversep;
{
  rtx head, end, jump, earliest, old_dest, new_label;

  /* The conditional branch we are going to retarget.  */
  jump = test_bb->end;

  /* Find the extent of the real code in the merge block.  */
  head = merge_bb->head;
  end = merge_bb->end;

  /* Skip the block's label, if any.  */
  if (GET_CODE (head) == CODE_LABEL)
    head = NEXT_INSN (head);
  /* Likewise a leading note; if that was all there was, the block
     has no body to move.  */
  if (GET_CODE (head) == NOTE)
    {
      if (head == end)
	{
	  head = end = NULL_RTX;
	  goto no_body;
	}
      head = NEXT_INSN (head);
    }

  /* Drop a trailing jump from the range; again, the block may turn
     out to have no real body.  */
  if (GET_CODE (end) == JUMP_INSN)
    {
      if (head == end)
	{
	  head = end = NULL_RTX;
	  goto no_body;
	}
      end = PREV_INSN (end);
    }

  /* Disable handling dead code by conditional execution if the machine needs
     to do anything funny with the tests, etc.  */
#ifndef IFCVT_MODIFY_TESTS
  if (HAVE_conditional_execution)
    {
      /* In the conditional execution case, we have things easy.  We know
	 the condition is reversible.  We don't have to check life info,
	 because we're going to conditionally execute the code anyway.
	 All that's left is making sure the insns involved can actually
	 be predicated.  */

      rtx cond, prob_val;

      cond = cond_exec_get_condition (jump);
      if (! cond)
	return FALSE;

      /* Extract the branch probability so it can be attached to the
	 predicated insns (inverted below if need be).  */
      prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
      if (prob_val)
	prob_val = XEXP (prob_val, 0);

      if (reversep)
	{
	  enum rtx_code rev = reversed_comparison_code (cond, jump);
	  if (rev == UNKNOWN)
	    return FALSE;
	  cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
				 XEXP (cond, 1));
	  if (prob_val)
	    prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
	}

      if (! cond_exec_process_insns (head, end, cond, prob_val, 0))
	goto cancel;

      /* No insns are emitted before the jump in this case, so the
	 move point below is the jump itself.  */
      earliest = jump;
    }
  else
#endif
    {
      /* In the non-conditional execution case, we have to verify that there
	 are no trapping operations, no calls, no references to memory, and
	 that any registers modified are dead at the branch site.  */

      rtx insn, cond, prev;
      regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
      regset merge_set, tmp, test_live, test_set;
      struct propagate_block_info *pbi;
      int i, fail = 0;

      /* Check for no calls or trapping operations.  */
      for (insn = head; ; insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CALL_INSN)
	    return FALSE;
	  if (INSN_P (insn))
	    {
	      if (may_trap_p (PATTERN (insn)))
		return FALSE;

	      /* ??? Even non-trapping memories such as stack frame
		 references must be avoided.  For stores, we collect
		 no lifetime info; for reads, we'd have to assert
		 true_dependence false against every store in the
		 TEST range.  */
	      if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
		return FALSE;
	    }
	  if (insn == end)
	    break;
	}

      if (! any_condjump_p (jump))
	return FALSE;

      /* Find the extent of the conditional.  */
      cond = noce_get_condition (jump, &earliest);
      if (! cond)
	return FALSE;

      /* Collect:
	 MERGE_SET = set of registers set in MERGE_BB
	 TEST_LIVE = set of registers live at EARLIEST
	 TEST_SET  = set of registers set between EARLIEST and the
		     end of the block.  */

      tmp = INITIALIZE_REG_SET (tmp_head);
      merge_set = INITIALIZE_REG_SET (merge_set_head);
      test_live = INITIALIZE_REG_SET (test_live_head);
      test_set = INITIALIZE_REG_SET (test_set_head);

      /* ??? bb->local_set is only valid during calculate_global_regs_live,
	 so we must recompute usage for MERGE_BB.  Not so bad, I suppose,
	 since we've already asserted that MERGE_BB is small.  */
      propagate_block (merge_bb, tmp, merge_set, merge_set, 0);

      /* For small register class machines, don't lengthen lifetimes of
	 hard registers before reload.  */
      if (SMALL_REGISTER_CLASSES && ! reload_completed)
	{
	  EXECUTE_IF_SET_IN_BITMAP
	    (merge_set, 0, i,
	     {
	       if (i < FIRST_PSEUDO_REGISTER
		   && ! fixed_regs[i]
		   && ! global_regs[i])
		 fail = 1;
	     });
	}

      /* For TEST, we're interested in a range of insns, not a whole block.
	 Moreover, we're interested in the insns live from OTHER_BB.  */

      COPY_REG_SET (test_live, other_bb->global_live_at_start);
      pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
				       0);

      /* Scan backwards from the jump to EARLIEST, accumulating the
	 registers live and set over that range.  */
      for (insn = jump; ; insn = prev)
	{
	  prev = propagate_one_insn (pbi, insn);
	  if (insn == earliest)
	    break;
	}

      free_propagate_block_info (pbi);

      /* We can perform the transformation if
	   MERGE_SET & (TEST_SET | TEST_LIVE)
	 and
	   TEST_SET & merge_bb->global_live_at_start
	 are empty.  */

      bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
      bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
      EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);

      bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
			BITMAP_AND);
      EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);

      FREE_REG_SET (tmp);
      FREE_REG_SET (merge_set);
      FREE_REG_SET (test_live);
      FREE_REG_SET (test_set);

      if (fail)
	return FALSE;
    }

 no_body:
  /* We don't want to use normal invert_jump or redirect_jump because
     we don't want to delete_insn called.  Also, we want to do our own
     change group management.  */

  old_dest = JUMP_LABEL (jump);
  new_label = block_label (new_dest);
  if (reversep
      ? ! invert_jump_1 (jump, new_label)
      : ! redirect_jump_1 (jump, new_label))
    goto cancel;

  if (! apply_change_group ())
    return FALSE;

  /* Maintain the label reference counts by hand, since we bypassed
     the normal redirection routines.  */
  if (old_dest)
    LABEL_NUSES (old_dest) -= 1;
  if (new_label)
    LABEL_NUSES (new_label) += 1;
  JUMP_LABEL (jump) = new_label;

  if (reversep)
    invert_br_probabilities (jump);

  redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
  if (reversep)
    {
      /* The branch and fallthru edges have swapped roles; swap their
	 execution counts and probabilities to match.  */
      gcov_type count, probability;
      count = BRANCH_EDGE (test_bb)->count;
      BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
      FALLTHRU_EDGE (test_bb)->count = count;
      probability = BRANCH_EDGE (test_bb)->probability;
      BRANCH_EDGE (test_bb)->probability = FALLTHRU_EDGE (test_bb)->probability;
      FALLTHRU_EDGE (test_bb)->probability = probability;
    }

  /* Move the insns out of MERGE_BB to before the branch.  */
  if (head != NULL)
    {
      if (end == merge_bb->end)
	merge_bb->end = PREV_INSN (head);

      /* Block-structure notes must stay in place, so shrink the range
	 to exclude any left at its tail.  */
      head = squeeze_notes (head, end);
      if (GET_CODE (end) == NOTE
	  && (NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_END
	      || NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_BEG
	      || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_BEG
	      || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_END
	      || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_CONT
	      || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_VTOP))
	{
	  if (head == end)
	    return TRUE;
	  end = PREV_INSN (end);
	}

      reorder_insns (head, end, PREV_INSN (earliest));
    }
  return TRUE;

 cancel:
  /* Undo any pending jump redirections before reporting failure.  */
  cancel_changes (0);
  return FALSE;
}
2692 \f
2693 /* Main entry point for all if-conversion. */
2694
2695 void
2696 if_convert (x_life_data_ok)
2697 int x_life_data_ok;
2698 {
2699 int block_num;
2700
2701 num_possible_if_blocks = 0;
2702 num_updated_if_blocks = 0;
2703 num_removed_blocks = 0;
2704 life_data_ok = (x_life_data_ok != 0);
2705
2706 /* Free up basic_block_for_insn so that we don't have to keep it
2707 up to date, either here or in merge_blocks_nomove. */
2708 free_basic_block_vars (1);
2709
2710 /* Compute postdominators if we think we'll use them. */
2711 post_dominators = NULL;
2712 if (HAVE_conditional_execution || life_data_ok)
2713 {
2714 post_dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
2715 calculate_dominance_info (NULL, post_dominators, CDI_POST_DOMINATORS);
2716 }
2717
2718 /* Record initial block numbers. */
2719 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2720 SET_ORIG_INDEX (BASIC_BLOCK (block_num), block_num);
2721
2722 /* Go through each of the basic blocks looking for things to convert. */
2723 for (block_num = 0; block_num < n_basic_blocks; )
2724 {
2725 basic_block bb = BASIC_BLOCK (block_num);
2726 if (find_if_header (bb))
2727 block_num = bb->index;
2728 else
2729 block_num++;
2730 }
2731
2732 if (post_dominators)
2733 sbitmap_vector_free (post_dominators);
2734
2735 if (rtl_dump_file)
2736 fflush (rtl_dump_file);
2737
2738 /* Rebuild basic_block_for_insn for update_life_info and for gcse. */
2739 compute_bb_for_insn (get_max_uid ());
2740
2741 /* Rebuild life info for basic blocks that require it. */
2742 if (num_removed_blocks && life_data_ok)
2743 {
2744 sbitmap update_life_blocks = sbitmap_alloc (n_basic_blocks);
2745 sbitmap_zero (update_life_blocks);
2746
2747 /* If we allocated new pseudos, we must resize the array for sched1. */
2748 if (max_regno < max_reg_num ())
2749 {
2750 max_regno = max_reg_num ();
2751 allocate_reg_info (max_regno, FALSE, FALSE);
2752 }
2753
2754 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2755 if (UPDATE_LIFE (BASIC_BLOCK (block_num)))
2756 SET_BIT (update_life_blocks, block_num);
2757
2758 count_or_remove_death_notes (update_life_blocks, 1);
2759 /* ??? See about adding a mode that verifies that the initial
2760 set of blocks don't let registers come live. */
2761 update_life_info (update_life_blocks, UPDATE_LIFE_GLOBAL,
2762 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2763 | PROP_KILL_DEAD_CODE);
2764
2765 sbitmap_free (update_life_blocks);
2766 }
2767
2768 /* Write the final stats. */
2769 if (rtl_dump_file && num_possible_if_blocks > 0)
2770 {
2771 fprintf (rtl_dump_file,
2772 "\n%d possible IF blocks searched.\n",
2773 num_possible_if_blocks);
2774 fprintf (rtl_dump_file,
2775 "%d IF blocks converted.\n",
2776 num_updated_if_blocks);
2777 fprintf (rtl_dump_file,
2778 "%d basic blocks deleted.\n\n\n",
2779 num_removed_blocks);
2780 }
2781
2782 #ifdef ENABLE_CHECKING
2783 verify_flow_info ();
2784 #endif
2785 }