gcc/ifcvt.c
1 /* If-conversion support.
2 Copyright (C) 2000, 2001, 2002 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23
24 #include "rtl.h"
25 #include "regs.h"
26 #include "function.h"
27 #include "flags.h"
28 #include "insn-config.h"
29 #include "recog.h"
30 #include "except.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "expr.h"
34 #include "real.h"
35 #include "output.h"
36 #include "toplev.h"
37 #include "tm_p.h"
38
39
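/* An illustrative overview: given source of the form

       if (test)
         x = a;
       else
         x = b;

   this pass either predicates both assignments as COND_EXEC insns on
   targets with conditional execution (cond_exec_process_if_block), or
   replaces the branch with straight-line code such as a conditional
   move or a store-flag sequence (the noce_* routines, where "noce"
   means no conditional execution).  */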
40 #ifndef HAVE_conditional_execution
41 #define HAVE_conditional_execution 0
42 #endif
43 #ifndef HAVE_conditional_move
44 #define HAVE_conditional_move 0
45 #endif
46 #ifndef HAVE_incscc
47 #define HAVE_incscc 0
48 #endif
49 #ifndef HAVE_decscc
50 #define HAVE_decscc 0
51 #endif
52 #ifndef HAVE_trap
53 #define HAVE_trap 0
54 #endif
55 #ifndef HAVE_conditional_trap
56 #define HAVE_conditional_trap 0
57 #endif
58
59 #ifndef MAX_CONDITIONAL_EXECUTE
60 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
61 #endif
62
63 #define NULL_EDGE ((struct edge_def *)NULL)
64 #define NULL_BLOCK ((struct basic_block_def *)NULL)
65
66 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at */
67 static int num_possible_if_blocks;
68
69 /* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
70 execution. */
71 static int num_updated_if_blocks;
72
73 /* # of basic blocks that were removed. */
74 static int num_removed_blocks;
75
76 /* Whether conditional execution changes were made. */
77 static int cond_exec_changed_p;
78
79 /* True if life data ok at present. */
80 static bool life_data_ok;
81
82 /* The post-dominator relation on the original block numbers. */
83 static dominance_info post_dominators;
84
85 /* Forward references. */
86 static int count_bb_insns PARAMS ((basic_block));
87 static rtx first_active_insn PARAMS ((basic_block));
88 static rtx last_active_insn PARAMS ((basic_block, int));
89 static int seq_contains_jump PARAMS ((rtx));
90 static basic_block block_fallthru PARAMS ((basic_block));
91 static int cond_exec_process_insns PARAMS ((ce_if_block_t *,
92 rtx, rtx, rtx, rtx, int));
93 static rtx cond_exec_get_condition PARAMS ((rtx));
94 static int cond_exec_process_if_block PARAMS ((ce_if_block_t *, int));
95 static rtx noce_get_condition PARAMS ((rtx, rtx *));
96 static int noce_operand_ok PARAMS ((rtx));
97 static int noce_process_if_block PARAMS ((ce_if_block_t *));
98 static int process_if_block PARAMS ((ce_if_block_t *));
99 static void merge_if_block PARAMS ((ce_if_block_t *));
100 static int find_cond_trap PARAMS ((basic_block, edge, edge));
101 static basic_block find_if_header PARAMS ((basic_block, int));
102 static int block_jumps_and_fallthru_p PARAMS ((basic_block, basic_block));
103 static int find_if_block PARAMS ((ce_if_block_t *));
104 static int find_if_case_1 PARAMS ((basic_block, edge, edge));
105 static int find_if_case_2 PARAMS ((basic_block, edge, edge));
106 static int find_memory PARAMS ((rtx *, void *));
107 static int dead_or_predicable PARAMS ((basic_block, basic_block,
108 basic_block, basic_block, int));
109 static void noce_emit_move_insn PARAMS ((rtx, rtx));
110 static rtx block_has_only_trap PARAMS ((basic_block));
111 \f
112 /* Count the number of non-jump active insns in BB. */
113
114 static int
115 count_bb_insns (bb)
116 basic_block bb;
117 {
118 int count = 0;
119 rtx insn = bb->head;
120
121 while (1)
122 {
123 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
124 count++;
125
126 if (insn == bb->end)
127 break;
128 insn = NEXT_INSN (insn);
129 }
130
131 return count;
132 }
133
134 /* Return the first non-jump active insn in the basic block. */
135
136 static rtx
137 first_active_insn (bb)
138 basic_block bb;
139 {
140 rtx insn = bb->head;
141
142 if (GET_CODE (insn) == CODE_LABEL)
143 {
144 if (insn == bb->end)
145 return NULL_RTX;
146 insn = NEXT_INSN (insn);
147 }
148
149 while (GET_CODE (insn) == NOTE)
150 {
151 if (insn == bb->end)
152 return NULL_RTX;
153 insn = NEXT_INSN (insn);
154 }
155
156 if (GET_CODE (insn) == JUMP_INSN)
157 return NULL_RTX;
158
159 return insn;
160 }
161
162 /* Return the last active non-jump insn in the basic block. */
163
164 static rtx
165 last_active_insn (bb, skip_use_p)
166 basic_block bb;
167 int skip_use_p;
168 {
169 rtx insn = bb->end;
170 rtx head = bb->head;
171
172 while (GET_CODE (insn) == NOTE
173 || GET_CODE (insn) == JUMP_INSN
174 || (skip_use_p
175 && GET_CODE (insn) == INSN
176 && GET_CODE (PATTERN (insn)) == USE))
177 {
178 if (insn == head)
179 return NULL_RTX;
180 insn = PREV_INSN (insn);
181 }
182
183 if (GET_CODE (insn) == CODE_LABEL)
184 return NULL_RTX;
185
186 return insn;
187 }
188
189 /* It is possible, especially when having dealt with multi-word
190 arithmetic, for the expanders to have emitted jumps. Search
191 through the sequence and return TRUE if a jump exists so that
192 we can abort the conversion. */
193
194 static int
195 seq_contains_jump (insn)
196 rtx insn;
197 {
198 while (insn)
199 {
200 if (GET_CODE (insn) == JUMP_INSN)
201 return 1;
202 insn = NEXT_INSN (insn);
203 }
204 return 0;
205 }
206
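/* Return the basic block reached by BB's fallthru edge, or NULL_BLOCK
   if BB has no fallthru edge.  */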
207 static basic_block
208 block_fallthru (bb)
209 basic_block bb;
210 {
211 edge e;
212
213 for (e = bb->succ;
214 e != NULL_EDGE && (e->flags & EDGE_FALLTHRU) == 0;
215 e = e->succ_next)
216 ;
217
218 return (e) ? e->dest : NULL_BLOCK;
219 }
220 \f
221 /* Go through a bunch of insns, converting them to conditional
222 execution format if possible. Return TRUE if all of the non-note
223 insns were processed. */
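/* For example, a simple move (set (reg:SI 0) (reg:SI 1)) guarded by a
   test T becomes (cond_exec T (set (reg:SI 0) (reg:SI 1))); if the
   insn is already a COND_EXEC, T is ANDed with its existing test
   instead.  */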
224
225 static int
226 cond_exec_process_insns (ce_info, start, end, test, prob_val, mod_ok)
227 ce_if_block_t *ce_info ATTRIBUTE_UNUSED; /* if block information */
228 rtx start; /* first insn to look at */
229 rtx end; /* last insn to look at */
230 rtx test; /* conditional execution test */
231 rtx prob_val; /* probability of branch taken. */
232 int mod_ok; /* true if modifications are ok on the last insn. */
233 {
234 int must_be_last = FALSE;
235 rtx insn;
236 rtx xtest;
237 rtx pattern;
238
239 if (!start || !end)
240 return FALSE;
241
242 for (insn = start; ; insn = NEXT_INSN (insn))
243 {
244 if (GET_CODE (insn) == NOTE)
245 goto insn_done;
246
247 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
248 abort ();
249
250 /* Remove USE insns that get in the way. */
251 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
252 {
253 /* ??? Ug. Actually unlinking the thing is problematic,
254 given what we'd have to coordinate with our callers. */
255 PUT_CODE (insn, NOTE);
256 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
257 NOTE_SOURCE_FILE (insn) = 0;
258 goto insn_done;
259 }
260
261 /* Last insn wasn't last? */
262 if (must_be_last)
263 return FALSE;
264
265 if (modified_in_p (test, insn))
266 {
267 if (!mod_ok)
268 return FALSE;
269 must_be_last = TRUE;
270 }
271
272 /* Now build the conditional form of the instruction. */
273 pattern = PATTERN (insn);
274 xtest = copy_rtx (test);
275
276 /* If this is already a COND_EXEC, rewrite the test to be an AND of the
277 two conditions. */
278 if (GET_CODE (pattern) == COND_EXEC)
279 {
280 if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
281 return FALSE;
282
283 xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
284 COND_EXEC_TEST (pattern));
285 pattern = COND_EXEC_CODE (pattern);
286 }
287
288 pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);
289
290 /* If the machine needs to modify the insn being conditionally executed,
291 say for example to force a constant integer operand into a temp
292 register, do so here. */
293 #ifdef IFCVT_MODIFY_INSN
294 IFCVT_MODIFY_INSN (ce_info, pattern, insn);
295 if (! pattern)
296 return FALSE;
297 #endif
298
299 validate_change (insn, &PATTERN (insn), pattern, 1);
300
301 if (GET_CODE (insn) == CALL_INSN && prob_val)
302 validate_change (insn, &REG_NOTES (insn),
303 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
304 REG_NOTES (insn)), 1);
305
306 insn_done:
307 if (insn == end)
308 break;
309 }
310
311 return TRUE;
312 }
313
314 /* Return the condition for a jump. Do not do any special processing. */
315
316 static rtx
317 cond_exec_get_condition (jump)
318 rtx jump;
319 {
320 rtx test_if, cond;
321
322 if (any_condjump_p (jump))
323 test_if = SET_SRC (pc_set (jump));
324 else
325 return NULL_RTX;
326 cond = XEXP (test_if, 0);
327
328 /* If this branches to JUMP_LABEL when the condition is false,
329 reverse the condition. */
330 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
331 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
332 {
333 enum rtx_code rev = reversed_comparison_code (cond, jump);
334 if (rev == UNKNOWN)
335 return NULL_RTX;
336
337 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
338 XEXP (cond, 1));
339 }
340
341 return cond;
342 }
343
344 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
345 to conditional execution. Return TRUE if we were successful at
346 converting the block. */
347
348 static int
349 cond_exec_process_if_block (ce_info, do_multiple_p)
350 ce_if_block_t * ce_info; /* if block information */
351 int do_multiple_p; /* != 0 if we should handle && and || blocks */
352 {
353 basic_block test_bb = ce_info->test_bb; /* last test block */
354 basic_block then_bb = ce_info->then_bb; /* THEN */
355 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
356 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
357 rtx then_start; /* first insn in THEN block */
358 rtx then_end; /* last insn + 1 in THEN block */
359 rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
360 rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
361 int max; /* max # of insns to convert. */
362 int then_mod_ok; /* whether conditional mods are ok in THEN */
363 rtx true_expr; /* test for else block insns */
364 rtx false_expr; /* test for then block insns */
365 rtx true_prob_val; /* probability of else block */
366 rtx false_prob_val; /* probability of then block */
367 int n_insns;
368 enum rtx_code false_code;
369
370 /* If test is comprised of && or || elements, and we've failed at handling
371 all of them together, just use the last test if it is the special case of
372 && elements without an ELSE block. */
373 if (!do_multiple_p && ce_info->num_multiple_test_blocks)
374 {
375 if (else_bb || ! ce_info->and_and_p)
376 return FALSE;
377
378 ce_info->test_bb = test_bb = ce_info->last_test_bb;
379 ce_info->num_multiple_test_blocks = 0;
380 ce_info->num_and_and_blocks = 0;
381 ce_info->num_or_or_blocks = 0;
382 }
383
384 /* Find the conditional jump to the ELSE or JOIN part, and isolate
385 the test. */
386 test_expr = cond_exec_get_condition (test_bb->end);
387 if (! test_expr)
388 return FALSE;
389
390 /* If the conditional jump is more than just a conditional jump,
391 then we cannot do conditional execution conversion on this block. */
392 if (! onlyjump_p (test_bb->end))
393 return FALSE;
394
395 /* Collect the bounds of where we're to search, skipping any labels, jumps
396 and notes at the beginning and end of the block. Then count the total
397 number of insns and see if it is small enough to convert. */
398 then_start = first_active_insn (then_bb);
399 then_end = last_active_insn (then_bb, TRUE);
400 n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
401 max = MAX_CONDITIONAL_EXECUTE;
402
403 if (else_bb)
404 {
405 max *= 2;
406 else_start = first_active_insn (else_bb);
407 else_end = last_active_insn (else_bb, TRUE);
408 n_insns += ce_info->num_else_insns = count_bb_insns (else_bb);
409 }
410
411 if (n_insns > max)
412 return FALSE;
413
414 /* Map test_expr/test_jump into the appropriate MD tests to use on
415 the conditionally executed code. */
416
417 true_expr = test_expr;
418
419 false_code = reversed_comparison_code (true_expr, test_bb->end);
420 if (false_code != UNKNOWN)
421 false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
422 XEXP (true_expr, 0), XEXP (true_expr, 1));
423 else
424 false_expr = NULL_RTX;
425
426 #ifdef IFCVT_MODIFY_TESTS
427 /* If the machine description needs to modify the tests, such as setting a
428 conditional execution register from a comparison, it can do so here. */
429 IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);
430
431 /* See if the conversion failed */
432 if (!true_expr || !false_expr)
433 goto fail;
434 #endif
435
436 true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
437 if (true_prob_val)
438 {
439 true_prob_val = XEXP (true_prob_val, 0);
440 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
441 }
442 else
443 false_prob_val = NULL_RTX;
444
445 /* If we have && or || tests, do them here. These tests are in the adjacent
446 blocks after the first block containing the test. */
447 if (ce_info->num_multiple_test_blocks > 0)
448 {
449 basic_block bb = test_bb;
450 basic_block last_test_bb = ce_info->last_test_bb;
451
452 if (! false_expr)
453 goto fail;
454
455 do
456 {
457 rtx start, end;
458 rtx t, f;
459
460 bb = block_fallthru (bb);
461 start = first_active_insn (bb);
462 end = last_active_insn (bb, TRUE);
463 if (start
464 && ! cond_exec_process_insns (ce_info, start, end, false_expr,
465 false_prob_val, FALSE))
466 goto fail;
467
468 /* If the conditional jump is more than just a conditional jump, then
469 we cannot do conditional execution conversion on this block. */
470 if (! onlyjump_p (bb->end))
471 goto fail;
472
473 /* Find the conditional jump and isolate the test. */
474 t = cond_exec_get_condition (bb->end);
475 if (! t)
476 goto fail;
477
478 f = gen_rtx_fmt_ee (reverse_condition (GET_CODE (t)),
479 GET_MODE (t),
480 XEXP (t, 0),
481 XEXP (t, 1));
482
483 if (ce_info->and_and_p)
484 {
485 t = gen_rtx_AND (GET_MODE (t), true_expr, t);
486 f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
487 }
488 else
489 {
490 t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
491 f = gen_rtx_AND (GET_MODE (t), false_expr, f);
492 }
493
494 /* If the machine description needs to modify the tests, such as
495 setting a conditional execution register from a comparison, it can
496 do so here. */
497 #ifdef IFCVT_MODIFY_MULTIPLE_TESTS
498 IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);
499
500 /* See if the conversion failed */
501 if (!t || !f)
502 goto fail;
503 #endif
504
505 true_expr = t;
506 false_expr = f;
507 }
508 while (bb != last_test_bb);
509 }
510
511 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
512 in the THEN block. */
513 then_mod_ok = (else_bb == NULL_BLOCK);
514
515 /* Go through the THEN and ELSE blocks converting the insns if possible
516 to conditional execution. */
517
518 if (then_end
519 && (! false_expr
520 || ! cond_exec_process_insns (ce_info, then_start, then_end,
521 false_expr, false_prob_val,
522 then_mod_ok)))
523 goto fail;
524
525 if (else_bb && else_end
526 && ! cond_exec_process_insns (ce_info, else_start, else_end,
527 true_expr, true_prob_val, TRUE))
528 goto fail;
529
530 /* If we cannot apply the changes, fail. Do not go through the normal fail
531 processing, since apply_change_group will call cancel_changes. */
532 if (! apply_change_group ())
533 {
534 #ifdef IFCVT_MODIFY_CANCEL
535 /* Cancel any machine dependent changes. */
536 IFCVT_MODIFY_CANCEL (ce_info);
537 #endif
538 return FALSE;
539 }
540
541 #ifdef IFCVT_MODIFY_FINAL
542 /* Do any machine dependent final modifications */
543 IFCVT_MODIFY_FINAL (ce_info);
544 #endif
545
546 /* Conversion succeeded. */
547 if (rtl_dump_file)
548 fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
549 n_insns, (n_insns == 1) ? " was" : "s were");
550
551 /* Merge the blocks! */
552 merge_if_block (ce_info);
553 cond_exec_changed_p = TRUE;
554 return TRUE;
555
556 fail:
557 #ifdef IFCVT_MODIFY_CANCEL
558 /* Cancel any machine dependent changes. */
559 IFCVT_MODIFY_CANCEL (ce_info);
560 #endif
561
562 cancel_changes (0);
563 return FALSE;
564 }
565 \f
566 /* Used by noce_process_if_block to communicate with its subroutines.
567
568 The subroutines know that A and B may be evaluated freely. They
569 know that X is a register. They should insert new instructions
570 before cond_earliest. */
571
572 struct noce_if_info
573 {
574 basic_block test_bb;
575 rtx insn_a, insn_b;
576 rtx x, a, b;
577 rtx jump, cond, cond_earliest;
578 };
579
580 static rtx noce_emit_store_flag PARAMS ((struct noce_if_info *,
581 rtx, int, int));
582 static int noce_try_store_flag PARAMS ((struct noce_if_info *));
583 static int noce_try_store_flag_inc PARAMS ((struct noce_if_info *));
584 static int noce_try_store_flag_constants PARAMS ((struct noce_if_info *));
585 static int noce_try_store_flag_mask PARAMS ((struct noce_if_info *));
586 static rtx noce_emit_cmove PARAMS ((struct noce_if_info *,
587 rtx, enum rtx_code, rtx,
588 rtx, rtx, rtx));
589 static int noce_try_cmove PARAMS ((struct noce_if_info *));
590 static int noce_try_cmove_arith PARAMS ((struct noce_if_info *));
591 static rtx noce_get_alt_condition PARAMS ((struct noce_if_info *,
592 rtx, rtx *));
593 static int noce_try_minmax PARAMS ((struct noce_if_info *));
594 static int noce_try_abs PARAMS ((struct noce_if_info *));
595
596 /* Helper function for noce_try_store_flag*. */
597
598 static rtx
599 noce_emit_store_flag (if_info, x, reversep, normalize)
600 struct noce_if_info *if_info;
601 rtx x;
602 int reversep, normalize;
603 {
604 rtx cond = if_info->cond;
605 int cond_complex;
606 enum rtx_code code;
607
608 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
609 || ! general_operand (XEXP (cond, 1), VOIDmode));
610
611 /* If earliest == jump, or when the condition is complex, try to
612 build the store_flag insn directly. */
613
614 if (cond_complex)
615 cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);
616
617 if (reversep)
618 code = reversed_comparison_code (cond, if_info->jump);
619 else
620 code = GET_CODE (cond);
621
622 if ((if_info->cond_earliest == if_info->jump || cond_complex)
623 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
624 {
625 rtx tmp;
626
627 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
628 XEXP (cond, 1));
629 tmp = gen_rtx_SET (VOIDmode, x, tmp);
630
631 start_sequence ();
632 tmp = emit_insn (tmp);
633
634 if (recog_memoized (tmp) >= 0)
635 {
636 tmp = get_insns ();
637 end_sequence ();
638 emit_insn (tmp);
639
640 if_info->cond_earliest = if_info->jump;
641
642 return x;
643 }
644
645 end_sequence ();
646 }
647
648 /* Don't even try if the comparison operands are weird. */
649 if (cond_complex)
650 return NULL_RTX;
651
652 return emit_store_flag (x, code, XEXP (cond, 0),
653 XEXP (cond, 1), VOIDmode,
654 (code == LTU || code == LEU
655 || code == GEU || code == GTU), normalize);
656 }
657
658 /* Emit instruction to move an rtx into STRICT_LOW_PART. */
659 static void
660 noce_emit_move_insn (x, y)
661 rtx x, y;
662 {
663 enum machine_mode outmode, inmode;
664 rtx outer, inner;
665 int bitpos;
666
667 if (GET_CODE (x) != STRICT_LOW_PART)
668 {
669 emit_move_insn (x, y);
670 return;
671 }
672
673 outer = XEXP (x, 0);
674 inner = XEXP (outer, 0);
675 outmode = GET_MODE (outer);
676 inmode = GET_MODE (inner);
677 bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
678 store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y,
679 GET_MODE_BITSIZE (inmode));
680 }
681
682 /* Convert "if (test) x = 1; else x = 0".
683
684 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
685 tried in noce_try_store_flag_constants after noce_try_cmove has had
686 a go at the conversion. */
687
688 static int
689 noce_try_store_flag (if_info)
690 struct noce_if_info *if_info;
691 {
692 int reversep;
693 rtx target, seq;
694
695 if (GET_CODE (if_info->b) == CONST_INT
696 && INTVAL (if_info->b) == STORE_FLAG_VALUE
697 && if_info->a == const0_rtx)
698 reversep = 0;
699 else if (if_info->b == const0_rtx
700 && GET_CODE (if_info->a) == CONST_INT
701 && INTVAL (if_info->a) == STORE_FLAG_VALUE
702 && (reversed_comparison_code (if_info->cond, if_info->jump)
703 != UNKNOWN))
704 reversep = 1;
705 else
706 return FALSE;
707
708 start_sequence ();
709
710 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
711 if (target)
712 {
713 if (target != if_info->x)
714 noce_emit_move_insn (if_info->x, target);
715
716 seq = get_insns ();
717 end_sequence ();
718 emit_insn_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
719
720 return TRUE;
721 }
722 else
723 {
724 end_sequence ();
725 return FALSE;
726 }
727 }
728
729 /* Convert "if (test) x = a; else x = b", for A and B constant. */
730
731 static int
732 noce_try_store_flag_constants (if_info)
733 struct noce_if_info *if_info;
734 {
735 rtx target, seq;
736 int reversep;
737 HOST_WIDE_INT itrue, ifalse, diff, tmp;
738 int normalize, can_reverse;
739 enum machine_mode mode;
740
741 if (! no_new_pseudos
742 && GET_CODE (if_info->a) == CONST_INT
743 && GET_CODE (if_info->b) == CONST_INT)
744 {
745 mode = GET_MODE (if_info->x);
746 ifalse = INTVAL (if_info->a);
747 itrue = INTVAL (if_info->b);
748
749 /* Make sure we can represent the difference between the two values. */
750 if ((itrue - ifalse > 0)
751 != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
752 return FALSE;
753
754 diff = trunc_int_for_mode (itrue - ifalse, mode);
755
756 can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
757 != UNKNOWN);
758
759 reversep = 0;
760 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
761 normalize = 0;
762 else if (ifalse == 0 && exact_log2 (itrue) >= 0
763 && (STORE_FLAG_VALUE == 1
764 || BRANCH_COST >= 2))
765 normalize = 1;
766 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
767 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
768 normalize = 1, reversep = 1;
769 else if (itrue == -1
770 && (STORE_FLAG_VALUE == -1
771 || BRANCH_COST >= 2))
772 normalize = -1;
773 else if (ifalse == -1 && can_reverse
774 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
775 normalize = -1, reversep = 1;
776 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
777 || BRANCH_COST >= 3)
778 normalize = -1;
779 else
780 return FALSE;
781
782 if (reversep)
783 {
784 tmp = itrue; itrue = ifalse; ifalse = tmp;
785 diff = trunc_int_for_mode (-diff, mode);
786 }
787
788 start_sequence ();
789 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
790 if (! target)
791 {
792 end_sequence ();
793 return FALSE;
794 }
795
796 /* if (test) x = 3; else x = 4;
797 => x = 3 + (test == 0); */
798 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
799 {
800 target = expand_simple_binop (mode,
801 (diff == STORE_FLAG_VALUE
802 ? PLUS : MINUS),
803 GEN_INT (ifalse), target, if_info->x, 0,
804 OPTAB_WIDEN);
805 }
806
807 /* if (test) x = 8; else x = 0;
808 => x = (test != 0) << 3; */
809 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
810 {
811 target = expand_simple_binop (mode, ASHIFT,
812 target, GEN_INT (tmp), if_info->x, 0,
813 OPTAB_WIDEN);
814 }
815
816 /* if (test) x = -1; else x = b;
817 => x = -(test != 0) | b; */
818 else if (itrue == -1)
819 {
820 target = expand_simple_binop (mode, IOR,
821 target, GEN_INT (ifalse), if_info->x, 0,
822 OPTAB_WIDEN);
823 }
824
825 /* if (test) x = a; else x = b;
826 => x = (-(test != 0) & (b - a)) + a; */
827 else
828 {
829 target = expand_simple_binop (mode, AND,
830 target, GEN_INT (diff), if_info->x, 0,
831 OPTAB_WIDEN);
832 if (target)
833 target = expand_simple_binop (mode, PLUS,
834 target, GEN_INT (ifalse),
835 if_info->x, 0, OPTAB_WIDEN);
836 }
837
838 if (! target)
839 {
840 end_sequence ();
841 return FALSE;
842 }
843
844 if (target != if_info->x)
845 noce_emit_move_insn (if_info->x, target);
846
847 seq = get_insns ();
848 end_sequence ();
849
850 if (seq_contains_jump (seq))
851 return FALSE;
852
853 emit_insn_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
854
855 return TRUE;
856 }
857
858 return FALSE;
859 }
860
861 /* Convert "if (test) foo++" into "foo += (test != 0)", and
862 similarly for "foo--". */
863
864 static int
865 noce_try_store_flag_inc (if_info)
866 struct noce_if_info *if_info;
867 {
868 rtx target, seq;
869 int subtract, normalize;
870
871 if (! no_new_pseudos
872 && (BRANCH_COST >= 2
873 || HAVE_incscc
874 || HAVE_decscc)
875 /* Should be no `else' case to worry about. */
876 && if_info->b == if_info->x
877 && GET_CODE (if_info->a) == PLUS
878 && (XEXP (if_info->a, 1) == const1_rtx
879 || XEXP (if_info->a, 1) == constm1_rtx)
880 && rtx_equal_p (XEXP (if_info->a, 0), if_info->x)
881 && (reversed_comparison_code (if_info->cond, if_info->jump)
882 != UNKNOWN))
883 {
884 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
885 subtract = 0, normalize = 0;
886 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
887 subtract = 1, normalize = 0;
888 else
889 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
890
891 start_sequence ();
892
893 target = noce_emit_store_flag (if_info,
894 gen_reg_rtx (GET_MODE (if_info->x)),
895 1, normalize);
896
897 if (target)
898 target = expand_simple_binop (GET_MODE (if_info->x),
899 subtract ? MINUS : PLUS,
900 if_info->x, target, if_info->x,
901 0, OPTAB_WIDEN);
902 if (target)
903 {
904 if (target != if_info->x)
905 noce_emit_move_insn (if_info->x, target);
906
907 seq = get_insns ();
908 end_sequence ();
909
910 if (seq_contains_jump (seq))
911 return FALSE;
912
913 emit_insn_before_scope (seq, if_info->jump,
914 INSN_SCOPE (if_info->insn_a));
915
916 return TRUE;
917 }
918
919 end_sequence ();
920 }
921
922 return FALSE;
923 }
924
925 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
926
927 static int
928 noce_try_store_flag_mask (if_info)
929 struct noce_if_info *if_info;
930 {
931 rtx target, seq;
932 int reversep;
933
934 reversep = 0;
935 if (! no_new_pseudos
936 && (BRANCH_COST >= 2
937 || STORE_FLAG_VALUE == -1)
938 && ((if_info->a == const0_rtx
939 && rtx_equal_p (if_info->b, if_info->x))
940 || ((reversep = (reversed_comparison_code (if_info->cond,
941 if_info->jump)
942 != UNKNOWN))
943 && if_info->b == const0_rtx
944 && rtx_equal_p (if_info->a, if_info->x))))
945 {
946 start_sequence ();
947 target = noce_emit_store_flag (if_info,
948 gen_reg_rtx (GET_MODE (if_info->x)),
949 reversep, -1);
950 if (target)
951 target = expand_simple_binop (GET_MODE (if_info->x), AND,
952 if_info->x, target, if_info->x, 0,
953 OPTAB_WIDEN);
954
955 if (target)
956 {
957 if (target != if_info->x)
958 noce_emit_move_insn (if_info->x, target);
959
960 seq = get_insns ();
961 end_sequence ();
962
963 if (seq_contains_jump (seq))
964 return FALSE;
965
966 emit_insn_before_scope (seq, if_info->jump,
967 INSN_SCOPE (if_info->insn_a));
968
969 return TRUE;
970 }
971
972 end_sequence ();
973 }
974
975 return FALSE;
976 }
977
978 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
979
980 static rtx
981 noce_emit_cmove (if_info, x, code, cmp_a, cmp_b, vfalse, vtrue)
982 struct noce_if_info *if_info;
983 rtx x, cmp_a, cmp_b, vfalse, vtrue;
984 enum rtx_code code;
985 {
986 /* If earliest == jump, try to build the cmove insn directly.
987 This is helpful when combine has created some complex condition
988 (like for alpha's cmovlbs) that we can't hope to regenerate
989 through the normal interface. */
990
991 if (if_info->cond_earliest == if_info->jump)
992 {
993 rtx tmp;
994
995 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
996 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
997 tmp = gen_rtx_SET (VOIDmode, x, tmp);
998
999 start_sequence ();
1000 tmp = emit_insn (tmp);
1001
1002 if (recog_memoized (tmp) >= 0)
1003 {
1004 tmp = get_insns ();
1005 end_sequence ();
1006 emit_insn (tmp);
1007
1008 return x;
1009 }
1010
1011 end_sequence ();
1012 }
1013
1014 /* Don't even try if the comparison operands are weird. */
1015 if (! general_operand (cmp_a, GET_MODE (cmp_a))
1016 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
1017 return NULL_RTX;
1018
1019 #if HAVE_conditional_move
1020 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
1021 vtrue, vfalse, GET_MODE (x),
1022 (code == LTU || code == GEU
1023 || code == LEU || code == GTU));
1024 #else
1025 /* We'll never get here, as noce_process_if_block doesn't call the
1026 functions involved. Ifdef code, however, should be discouraged
1027 because it leads to typos in the code not selected. However,
1028 emit_conditional_move won't exist either. */
1029 return NULL_RTX;
1030 #endif
1031 }
1032
1033 /* Try only simple constants and registers here. More complex cases
1034 are handled in noce_try_cmove_arith after noce_try_store_flag_arith
1035 has had a go at it. */
1036
1037 static int
1038 noce_try_cmove (if_info)
1039 struct noce_if_info *if_info;
1040 {
1041 enum rtx_code code;
1042 rtx target, seq;
1043
1044 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
1045 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
1046 {
1047 start_sequence ();
1048
1049 code = GET_CODE (if_info->cond);
1050 target = noce_emit_cmove (if_info, if_info->x, code,
1051 XEXP (if_info->cond, 0),
1052 XEXP (if_info->cond, 1),
1053 if_info->a, if_info->b);
1054
1055 if (target)
1056 {
1057 if (target != if_info->x)
1058 noce_emit_move_insn (if_info->x, target);
1059
1060 seq = get_insns ();
1061 end_sequence ();
1062 emit_insn_before_scope (seq, if_info->jump,
1063 INSN_SCOPE (if_info->insn_a));
1064 return TRUE;
1065 }
1066 else
1067 {
1068 end_sequence ();
1069 return FALSE;
1070 }
1071 }
1072
1073 return FALSE;
1074 }
1075
1076 /* Try more complex cases involving conditional_move. */
1077
1078 static int
1079 noce_try_cmove_arith (if_info)
1080 struct noce_if_info *if_info;
1081 {
1082 rtx a = if_info->a;
1083 rtx b = if_info->b;
1084 rtx x = if_info->x;
1085 rtx insn_a, insn_b;
1086 rtx tmp, target;
1087 int is_mem = 0;
1088 enum rtx_code code;
1089
1090 /* A conditional move from two memory sources is equivalent to a
1091 conditional on their addresses followed by a load. Don't do this
1092 early because it'll screw alias analysis. Note that we've
1093 already checked for no side effects. */
1094 if (! no_new_pseudos && cse_not_expected
1095 && GET_CODE (a) == MEM && GET_CODE (b) == MEM
1096 && BRANCH_COST >= 5)
1097 {
1098 a = XEXP (a, 0);
1099 b = XEXP (b, 0);
1100 x = gen_reg_rtx (Pmode);
1101 is_mem = 1;
1102 }
1103
1104 /* ??? We could handle this if we knew that a load from A or B could
1105 not fault. This is also true if we've already loaded
1106 from the address along the path from ENTRY. */
1107 else if (may_trap_p (a) || may_trap_p (b))
1108 return FALSE;
1109
1110 /* if (test) x = a + b; else x = c - d;
1111 => y = a + b;
1112 x = c - d;
1113 if (test)
1114 x = y;
1115 */
1116
1117 code = GET_CODE (if_info->cond);
1118 insn_a = if_info->insn_a;
1119 insn_b = if_info->insn_b;
1120
1121 /* Possibly rearrange operands to make things come out more natural. */
1122 if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
1123 {
1124 int reversep = 0;
1125 if (rtx_equal_p (b, x))
1126 reversep = 1;
1127 else if (general_operand (b, GET_MODE (b)))
1128 reversep = 1;
1129
1130 if (reversep)
1131 {
1132 code = reversed_comparison_code (if_info->cond, if_info->jump);
1133 tmp = a, a = b, b = tmp;
1134 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
1135 }
1136 }
1137
1138 start_sequence ();
1139
1140 /* If either operand is complex, load it into a register first.
1141 The best way to do this is to copy the original insn. In this
1142 way we preserve any clobbers etc that the insn may have had.
1143 This is of course not possible in the IS_MEM case. */
1144 if (! general_operand (a, GET_MODE (a)))
1145 {
1146 rtx set;
1147
1148 if (no_new_pseudos)
1149 goto end_seq_and_fail;
1150
1151 if (is_mem)
1152 {
1153 tmp = gen_reg_rtx (GET_MODE (a));
1154 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1155 }
1156 else if (! insn_a)
1157 goto end_seq_and_fail;
1158 else
1159 {
1160 a = gen_reg_rtx (GET_MODE (a));
1161 tmp = copy_rtx (insn_a);
1162 set = single_set (tmp);
1163 SET_DEST (set) = a;
1164 tmp = emit_insn (PATTERN (tmp));
1165 }
1166 if (recog_memoized (tmp) < 0)
1167 goto end_seq_and_fail;
1168 }
1169 if (! general_operand (b, GET_MODE (b)))
1170 {
1171 rtx set;
1172
1173 if (no_new_pseudos)
1174 goto end_seq_and_fail;
1175
1176 if (is_mem)
1177 {
1178 tmp = gen_reg_rtx (GET_MODE (b));
1179 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));
1180 }
1181 else if (! insn_b)
1182 goto end_seq_and_fail;
1183 else
1184 {
1185 b = gen_reg_rtx (GET_MODE (b));
1186 tmp = copy_rtx (insn_b);
1187 set = single_set (tmp);
1188 SET_DEST (set) = b;
1189 tmp = emit_insn (PATTERN (tmp));
1190 }
1191 if (recog_memoized (tmp) < 0)
1192 goto end_seq_and_fail;
1193 }
1194
1195 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1196 XEXP (if_info->cond, 1), a, b);
1197
1198 if (! target)
1199 goto end_seq_and_fail;
1200
1201 /* If we're handling the memory case set up above, emit the load now. */
1202 if (is_mem)
1203 {
1204 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1205
1206 /* Copy over flags as appropriate. */
1207 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1208 MEM_VOLATILE_P (tmp) = 1;
1209 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1210 MEM_IN_STRUCT_P (tmp) = 1;
1211 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1212 MEM_SCALAR_P (tmp) = 1;
1213 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1214 set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
1215 set_mem_align (tmp,
1216 MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));
1217
1218 noce_emit_move_insn (if_info->x, tmp);
1219 }
1220 else if (target != x)
1221 noce_emit_move_insn (x, target);
1222
1223 tmp = get_insns ();
1224 end_sequence ();
1225 emit_insn_before_scope (tmp, if_info->jump, INSN_SCOPE (if_info->insn_a));
1226 return TRUE;
1227
1228 end_seq_and_fail:
1229 end_sequence ();
1230 return FALSE;
1231 }
1232
1233 /* For most cases, the simplified condition we found is the best
1234 choice, but this is not the case for the min/max/abs transforms.
1235 For these we wish to know that it is A or B in the condition. */
1236
1237 static rtx
1238 noce_get_alt_condition (if_info, target, earliest)
1239 struct noce_if_info *if_info;
1240 rtx target;
1241 rtx *earliest;
1242 {
1243 rtx cond, set, insn;
1244 int reverse;
1245
1246 /* If target is already mentioned in the known condition, return it. */
1247 if (reg_mentioned_p (target, if_info->cond))
1248 {
1249 *earliest = if_info->cond_earliest;
1250 return if_info->cond;
1251 }
1252
1253 set = pc_set (if_info->jump);
1254 cond = XEXP (SET_SRC (set), 0);
1255 reverse
1256 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1257 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
1258
1259 /* If we're looking for a constant, try to make the conditional
1260 have that constant in it. There are two reasons why it may
1261 not have the constant we want:
1262
1263 1. GCC may have needed to put the constant in a register, because
1264 the target can't compare directly against that constant. For
1265 this case, we look for a SET immediately before the comparison
1266 that puts a constant in that register.
1267
1268 2. GCC may have canonicalized the conditional, for example
1269 replacing "if x < 4" with "if x <= 3". We can undo that (or
1270 make equivalent types of changes) to get the constants we need
1271 if they're off by one in the right direction. */
1272
1273 if (GET_CODE (target) == CONST_INT)
1274 {
1275 enum rtx_code code = GET_CODE (if_info->cond);
1276 rtx op_a = XEXP (if_info->cond, 0);
1277 rtx op_b = XEXP (if_info->cond, 1);
1278 rtx prev_insn;
1279
1280 /* First, look to see if we put a constant in a register. */
1281 prev_insn = PREV_INSN (if_info->cond_earliest);
1282 if (prev_insn
1283 && INSN_P (prev_insn)
1284 && GET_CODE (PATTERN (prev_insn)) == SET)
1285 {
1286 rtx src = find_reg_equal_equiv_note (prev_insn);
1287 if (!src)
1288 src = SET_SRC (PATTERN (prev_insn));
1289 if (GET_CODE (src) == CONST_INT)
1290 {
1291 if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
1292 op_a = src;
1293 else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
1294 op_b = src;
1295
1296 if (GET_CODE (op_a) == CONST_INT)
1297 {
1298 rtx tmp = op_a;
1299 op_a = op_b;
1300 op_b = tmp;
1301 code = swap_condition (code);
1302 }
1303 }
1304 }
1305
1306 /* Now, look to see if we can get the right constant by
1307 adjusting the conditional. */
1308 if (GET_CODE (op_b) == CONST_INT)
1309 {
1310 HOST_WIDE_INT desired_val = INTVAL (target);
1311 HOST_WIDE_INT actual_val = INTVAL (op_b);
1312
1313 switch (code)
1314 {
1315 case LT:
1316 if (actual_val == desired_val + 1)
1317 {
1318 code = LE;
1319 op_b = GEN_INT (desired_val);
1320 }
1321 break;
1322 case LE:
1323 if (actual_val == desired_val - 1)
1324 {
1325 code = LT;
1326 op_b = GEN_INT (desired_val);
1327 }
1328 break;
1329 case GT:
1330 if (actual_val == desired_val - 1)
1331 {
1332 code = GE;
1333 op_b = GEN_INT (desired_val);
1334 }
1335 break;
1336 case GE:
1337 if (actual_val == desired_val + 1)
1338 {
1339 code = GT;
1340 op_b = GEN_INT (desired_val);
1341 }
1342 break;
1343 default:
1344 break;
1345 }
1346 }
1347
1348 /* If we made any changes, generate a new conditional that is
1349 equivalent to what we started with, but has the right
1350 constants in it. */
1351 if (code != GET_CODE (if_info->cond)
1352 || op_a != XEXP (if_info->cond, 0)
1353 || op_b != XEXP (if_info->cond, 1))
1354 {
1355 cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
1356 *earliest = if_info->cond_earliest;
1357 return cond;
1358 }
1359 }
1360
1361 cond = canonicalize_condition (if_info->jump, cond, reverse,
1362 earliest, target);
1363 if (! cond || ! reg_mentioned_p (target, cond))
1364 return NULL;
1365
1366 /* We almost certainly searched back to a different place.
1367 Need to re-verify correct lifetimes. */
1368
1369 /* X may not be mentioned in the range (cond_earliest, jump]. */
1370 for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
1371 if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
1372 return NULL;
1373
1374 /* A and B may not be modified in the range [cond_earliest, jump). */
1375 for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
1376 if (INSN_P (insn)
1377 && (modified_in_p (if_info->a, insn)
1378 || modified_in_p (if_info->b, insn)))
1379 return NULL;
1380
1381 return cond;
1382 }
1383
1384 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1385
1386 static int
1387 noce_try_minmax (if_info)
1388 struct noce_if_info *if_info;
1389 {
1390 rtx cond, earliest, target, seq;
1391 enum rtx_code code, op;
1392 int unsignedp;
1393
1394 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1395 if (no_new_pseudos)
1396 return FALSE;
1397
1398 /* ??? Reject modes with NaNs or signed zeros since we don't know how
1399 they will be resolved with an SMIN/SMAX. It wouldn't be too hard
1400 to get the target to tell us... */
1401 if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
1402 || HONOR_NANS (GET_MODE (if_info->x)))
1403 return FALSE;
1404
1405 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1406 if (!cond)
1407 return FALSE;
1408
1409 /* Verify the condition is of the form we expect, and canonicalize
1410 the comparison code. */
1411 code = GET_CODE (cond);
1412 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1413 {
1414 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1415 return FALSE;
1416 }
1417 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1418 {
1419 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1420 return FALSE;
1421 code = swap_condition (code);
1422 }
1423 else
1424 return FALSE;
1425
1426 /* Determine what sort of operation this is. Note that the code is for
1427 a taken branch, so the code->operation mapping appears backwards. */
1428 switch (code)
1429 {
1430 case LT:
1431 case LE:
1432 case UNLT:
1433 case UNLE:
1434 op = SMAX;
1435 unsignedp = 0;
1436 break;
1437 case GT:
1438 case GE:
1439 case UNGT:
1440 case UNGE:
1441 op = SMIN;
1442 unsignedp = 0;
1443 break;
1444 case LTU:
1445 case LEU:
1446 op = UMAX;
1447 unsignedp = 1;
1448 break;
1449 case GTU:
1450 case GEU:
1451 op = UMIN;
1452 unsignedp = 1;
1453 break;
1454 default:
1455 return FALSE;
1456 }
1457
1458 start_sequence ();
1459
1460 target = expand_simple_binop (GET_MODE (if_info->x), op,
1461 if_info->a, if_info->b,
1462 if_info->x, unsignedp, OPTAB_WIDEN);
1463 if (! target)
1464 {
1465 end_sequence ();
1466 return FALSE;
1467 }
1468 if (target != if_info->x)
1469 noce_emit_move_insn (if_info->x, target);
1470
1471 seq = get_insns ();
1472 end_sequence ();
1473
1474 if (seq_contains_jump (seq))
1475 return FALSE;
1476
1477 emit_insn_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
1478 if_info->cond = cond;
1479 if_info->cond_earliest = earliest;
1480
1481 return TRUE;
1482 }
1483
1484 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc. */
1485
1486 static int
1487 noce_try_abs (if_info)
1488 struct noce_if_info *if_info;
1489 {
1490 rtx cond, earliest, target, seq, a, b, c;
1491 int negate;
1492
1493 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1494 if (no_new_pseudos)
1495 return FALSE;
1496
1497 /* Recognize A and B as constituting an ABS or NABS. */
1498 a = if_info->a;
1499 b = if_info->b;
1500 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
1501 negate = 0;
1502 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
1503 {
1504 c = a; a = b; b = c;
1505 negate = 1;
1506 }
1507 else
1508 return FALSE;
1509
1510 cond = noce_get_alt_condition (if_info, b, &earliest);
1511 if (!cond)
1512 return FALSE;
1513
1514 /* Verify the condition is of the form we expect. */
1515 if (rtx_equal_p (XEXP (cond, 0), b))
1516 c = XEXP (cond, 1);
1517 else if (rtx_equal_p (XEXP (cond, 1), b))
1518 c = XEXP (cond, 0);
1519 else
1520 return FALSE;
1521
1522 /* Verify that C is zero. Search backward through the block for
1523 a REG_EQUAL note if necessary. */
1524 if (REG_P (c))
1525 {
1526 rtx insn, note = NULL;
1527 for (insn = earliest;
1528 insn != if_info->test_bb->head;
1529 insn = PREV_INSN (insn))
1530 if (INSN_P (insn)
1531 && ((note = find_reg_note (insn, REG_EQUAL, c))
1532 || (note = find_reg_note (insn, REG_EQUIV, c))))
1533 break;
1534 if (! note)
1535 return FALSE;
1536 c = XEXP (note, 0);
1537 }
1538 if (GET_CODE (c) == MEM
1539 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
1540 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
1541 c = get_pool_constant (XEXP (c, 0));
1542
1543 /* Work around funny ideas get_condition has wrt canonicalization.
1544 Note that these rtx constants are known to be CONST_INT, and
1545 therefore imply integer comparisons. */
1546 if (c == constm1_rtx && GET_CODE (cond) == GT)
1547 ;
1548 else if (c == const1_rtx && GET_CODE (cond) == LT)
1549 ;
1550 else if (c != CONST0_RTX (GET_MODE (b)))
1551 return FALSE;
1552
1553 /* Determine what sort of operation this is. */
1554 switch (GET_CODE (cond))
1555 {
1556 case LT:
1557 case LE:
1558 case UNLT:
1559 case UNLE:
1560 negate = !negate;
1561 break;
1562 case GT:
1563 case GE:
1564 case UNGT:
1565 case UNGE:
1566 break;
1567 default:
1568 return FALSE;
1569 }
1570
1571 start_sequence ();
1572
1573 target = expand_simple_unop (GET_MODE (if_info->x), ABS, b, if_info->x, 0);
1574
1575 /* ??? It's a quandary whether cmove would be better here, especially
1576 for integers. Perhaps combine will clean things up. */
1577 if (target && negate)
1578 target = expand_simple_unop (GET_MODE (target), NEG, target, if_info->x, 0);
1579
1580 if (! target)
1581 {
1582 end_sequence ();
1583 return FALSE;
1584 }
1585
1586 if (target != if_info->x)
1587 noce_emit_move_insn (if_info->x, target);
1588
1589 seq = get_insns ();
1590 end_sequence ();
1591
1592 if (seq_contains_jump (seq))
1593 return FALSE;
1594
1595 emit_insn_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
1596 if_info->cond = cond;
1597 if_info->cond_earliest = earliest;
1598
1599 return TRUE;
1600 }
1601
1602 /* Similar to get_condition, only the resulting condition must be
1603 valid at JUMP, instead of at EARLIEST. */
1604
1605 static rtx
1606 noce_get_condition (jump, earliest)
1607 rtx jump;
1608 rtx *earliest;
1609 {
1610 rtx cond, set, tmp, insn;
1611 bool reverse;
1612
1613 if (! any_condjump_p (jump))
1614 return NULL_RTX;
1615
1616 set = pc_set (jump);
1617
1618 /* If this branches to JUMP_LABEL when the condition is false,
1619 reverse the condition. */
1620 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1621 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
1622
1623 /* If the condition variable is a register and is MODE_INT, accept it. */
1624
1625 cond = XEXP (SET_SRC (set), 0);
1626 tmp = XEXP (cond, 0);
1627 if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT)
1628 {
1629 *earliest = jump;
1630
1631 if (reverse)
1632 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1633 GET_MODE (cond), tmp, XEXP (cond, 1));
1634 return cond;
1635 }
1636
1637 /* Otherwise, fall back on canonicalize_condition to do the dirty
1638 work of manipulating MODE_CC values and COMPARE rtx codes. */
1639
1640 tmp = canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX);
1641 if (!tmp)
1642 return NULL_RTX;
1643
1644 /* We are going to insert code before JUMP, not before EARLIEST.
1645 We must therefore be certain that the given condition is valid
1646 at JUMP by virtue of not having been modified since. */
1647 for (insn = *earliest; insn != jump; insn = NEXT_INSN (insn))
1648 if (INSN_P (insn) && modified_in_p (tmp, insn))
1649 break;
1650 if (insn == jump)
1651 return tmp;
1652
1653 /* The condition was modified. See if we can get a partial result
1654 that doesn't follow all the reversals. Perhaps combine can fold
1655 them together later. */
1656 tmp = XEXP (tmp, 0);
1657 if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
1658 return NULL_RTX;
1659 tmp = canonicalize_condition (jump, cond, reverse, earliest, tmp);
1660 if (!tmp)
1661 return NULL_RTX;
1662
1663 /* For sanity's sake, re-validate the new result. */
1664 for (insn = *earliest; insn != jump; insn = NEXT_INSN (insn))
1665 if (INSN_P (insn) && modified_in_p (tmp, insn))
1666 return NULL_RTX;
1667
1668 return tmp;
1669 }
1670
1671 /* Return true if OP is ok for if-then-else processing. */
1672
1673 static int
1674 noce_operand_ok (op)
1675 rtx op;
1676 {
1677 /* We special-case memories, so handle any of them with
1678 no address side effects. */
1679 if (GET_CODE (op) == MEM)
1680 return ! side_effects_p (XEXP (op, 0));
1681
1682 if (side_effects_p (op))
1683 return FALSE;
1684
1685 return ! may_trap_p (op);
1686 }
1687
1688 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1689 without using conditional execution. Return TRUE if we were
1690 successful at converting the block. */
1691
1692 static int
1693 noce_process_if_block (ce_info)
1694 struct ce_if_block * ce_info;
1695 {
1696 basic_block test_bb = ce_info->test_bb; /* test block */
1697 basic_block then_bb = ce_info->then_bb; /* THEN */
1698 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
1699 struct noce_if_info if_info;
1700 rtx insn_a, insn_b;
1701 rtx set_a, set_b;
1702 rtx orig_x, x, a, b;
1703 rtx jump, cond, insn;
1704
1705 /* We're looking for patterns of the form
1706
1707 (1) if (...) x = a; else x = b;
1708 (2) x = b; if (...) x = a;
1709 (3) if (...) x = a; // as if with an initial x = x.
1710
1711 The latter patterns require jumps to be more expensive.
1712
1713 ??? For future expansion, look for multiple X in such patterns. */
1714
1715 /* If test is comprised of && or || elements, don't handle it unless it is
1716 the special case of && elements without an ELSE block. */
1717 if (ce_info->num_multiple_test_blocks)
1718 {
1719 if (else_bb || ! ce_info->and_and_p)
1720 return FALSE;
1721
1722 ce_info->test_bb = test_bb = ce_info->last_test_bb;
1723 ce_info->num_multiple_test_blocks = 0;
1724 ce_info->num_and_and_blocks = 0;
1725 ce_info->num_or_or_blocks = 0;
1726 }
1727
1728 /* If this is not a standard conditional jump, we can't parse it. */
1729 jump = test_bb->end;
1730 cond = noce_get_condition (jump, &if_info.cond_earliest);
1731 if (! cond)
1732 return FALSE;
1733
1734 /* If the conditional jump is more than just a conditional
1735 jump, then we cannot do if-conversion on this block. */
1736 if (! onlyjump_p (jump))
1737 return FALSE;
1738
1739 /* We must be comparing objects whose modes imply the size. */
1740 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1741 return FALSE;
1742
1743 /* Look for one of the potential sets. */
1744 insn_a = first_active_insn (then_bb);
1745 if (! insn_a
1746 || insn_a != last_active_insn (then_bb, FALSE)
1747 || (set_a = single_set (insn_a)) == NULL_RTX)
1748 return FALSE;
1749
1750 x = SET_DEST (set_a);
1751 a = SET_SRC (set_a);
1752
1753 /* Look for the other potential set. Make sure we've got equivalent
1754 destinations. */
1755 /* ??? This is overconservative. Storing to two different mems is
1756 as easy as conditionally computing the address. Storing to a
1757 single mem merely requires a scratch memory to use as one of the
1758 destination addresses; often the memory immediately below the
1759 stack pointer is available for this. */
1760 set_b = NULL_RTX;
1761 if (else_bb)
1762 {
1763 insn_b = first_active_insn (else_bb);
1764 if (! insn_b
1765 || insn_b != last_active_insn (else_bb, FALSE)
1766 || (set_b = single_set (insn_b)) == NULL_RTX
1767 || ! rtx_equal_p (x, SET_DEST (set_b)))
1768 return FALSE;
1769 }
1770 else
1771 {
1772 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1773 if (! insn_b
1774 || GET_CODE (insn_b) != INSN
1775 || (set_b = single_set (insn_b)) == NULL_RTX
1776 || ! rtx_equal_p (x, SET_DEST (set_b))
1777 || reg_overlap_mentioned_p (x, cond)
1778 || reg_overlap_mentioned_p (x, a)
1779 || reg_overlap_mentioned_p (x, SET_SRC (set_b)))
1780 insn_b = set_b = NULL_RTX;
1781 }
1782 b = (set_b ? SET_SRC (set_b) : x);
1783
1784 /* X may not be mentioned in the range (cond_earliest, jump].
1785 Note the use of reg_overlap_mentioned_p, which handles memories
1786 properly, as opposed to reg_mentioned_p, which doesn't. */
1787 for (insn = jump; insn != if_info.cond_earliest; insn = PREV_INSN (insn))
1788 if (INSN_P (insn) && reg_overlap_mentioned_p (x, PATTERN (insn)))
1789 return FALSE;
1790
1791 /* A and B may not be modified in the range [cond_earliest, jump). */
1792 for (insn = if_info.cond_earliest; insn != jump; insn = NEXT_INSN (insn))
1793 if (INSN_P (insn)
1794 && (modified_in_p (a, insn) || modified_in_p (b, insn)))
1795 return FALSE;
1796
1797 /* Only operate on register destinations, and even then avoid extending
1798 the lifetime of hard registers on small register class machines. */
1799 orig_x = x;
1800 if (GET_CODE (x) != REG
1801 || (SMALL_REGISTER_CLASSES
1802 && REGNO (x) < FIRST_PSEUDO_REGISTER))
1803 {
1804 if (no_new_pseudos)
1805 return FALSE;
1806 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
1807 ? XEXP (x, 0) : x));
1808 }
1809
1810 /* Don't operate on sources that may trap or are volatile. */
1811 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
1812 return FALSE;
1813
1814 /* Set up the info block for our subroutines. */
1815 if_info.test_bb = test_bb;
1816 if_info.cond = cond;
1817 if_info.jump = jump;
1818 if_info.insn_a = insn_a;
1819 if_info.insn_b = insn_b;
1820 if_info.x = x;
1821 if_info.a = a;
1822 if_info.b = b;
1823
1824 /* Try optimizations in some approximation of a useful order. */
1825 /* ??? Should first look to see if X is live incoming at all. If it
1826 isn't, we don't need anything but an unconditional set. */
1827
1828 /* Look and see if A and B are really the same. Avoid creating silly
1829 cmove constructs that no one will fix up later. */
1830 if (rtx_equal_p (a, b))
1831 {
1832 /* If we have an INSN_B, we don't have to create any new rtl. Just
1833 move the instruction that we already have. If we don't have an
1834 INSN_B, that means that A == X, and we've got a noop move. In
1835 that case don't do anything and let the code below delete INSN_A. */
1836 if (insn_b && else_bb)
1837 {
1838 rtx note;
1839
1840 if (else_bb && insn_b == else_bb->end)
1841 else_bb->end = PREV_INSN (insn_b);
1842 reorder_insns (insn_b, insn_b, PREV_INSN (if_info.cond_earliest));
1843
1844 /* If there was a REG_EQUAL note, delete it since it may have been
1845 true due to this insn being after a jump. */
1846 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
1847 remove_note (insn_b, note);
1848
1849 insn_b = NULL_RTX;
1850 }
1851 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
1852 x must be executed twice. */
1853 else if (insn_b && side_effects_p (orig_x))
1854 return FALSE;
1855
1856 x = orig_x;
1857 goto success;
1858 }
1859
1860 if (noce_try_store_flag (&if_info))
1861 goto success;
1862 if (noce_try_minmax (&if_info))
1863 goto success;
1864 if (noce_try_abs (&if_info))
1865 goto success;
1866 if (HAVE_conditional_move
1867 && noce_try_cmove (&if_info))
1868 goto success;
1869 if (! HAVE_conditional_execution)
1870 {
1871 if (noce_try_store_flag_constants (&if_info))
1872 goto success;
1873 if (noce_try_store_flag_inc (&if_info))
1874 goto success;
1875 if (noce_try_store_flag_mask (&if_info))
1876 goto success;
1877 if (HAVE_conditional_move
1878 && noce_try_cmove_arith (&if_info))
1879 goto success;
1880 }
1881
1882 return FALSE;
1883
1884 success:
1885 /* The original sets may now be killed. */
1886 delete_insn (insn_a);
1887
1888 /* Several special cases here: First, we may have reused insn_b above,
1889 in which case insn_b is now NULL. Second, we want to delete insn_b
1890 if it came from the ELSE block, because it follows the now correct
1891 write that appears in the TEST block. However, if we got insn_b from
1892 the TEST block, it may in fact be loading data needed for the comparison.
1893 We'll let life_analysis remove the insn if it's really dead. */
1894 if (insn_b && else_bb)
1895 delete_insn (insn_b);
1896
1897 /* The new insns will have been inserted before cond_earliest. We should
1898 be able to remove the jump with impunity, but the condition itself may
1899 have been modified by gcse to be shared across basic blocks. */
1900 delete_insn (jump);
1901
1902 /* If we used a temporary, fix it up now. */
1903 if (orig_x != x)
1904 {
1905 start_sequence ();
1906 noce_emit_move_insn (copy_rtx (orig_x), x);
1907 insn_b = get_insns ();
1908 end_sequence ();
1909
1910 emit_insn_after_scope (insn_b, test_bb->end, INSN_SCOPE (insn_a));
1911 }
1912
1913 /* Merge the blocks! */
1914 merge_if_block (ce_info);
1915
1916 return TRUE;
1917 }
1918 \f
1919 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
1920 straight line code. Return true if successful. */
1921
1922 static int
1923 process_if_block (ce_info)
1924 struct ce_if_block * ce_info;
1925 {
1926 if (! reload_completed
1927 && noce_process_if_block (ce_info))
1928 return TRUE;
1929
1930 if (HAVE_conditional_execution && reload_completed)
1931 {
1932 /* If we have && and || tests, try to first handle combining the && and
1933 || tests into the conditional code, and if that fails, go back and
1934 handle it without the && and ||, which at present handles the && case
1935 if there was no ELSE block. */
1936 if (cond_exec_process_if_block (ce_info, TRUE))
1937 return TRUE;
1938
1939 if (ce_info->num_multiple_test_blocks)
1940 {
1941 cancel_changes (0);
1942
1943 if (cond_exec_process_if_block (ce_info, FALSE))
1944 return TRUE;
1945 }
1946 }
1947
1948 return FALSE;
1949 }
1950
1951 /* Merge the blocks and mark for local life update. */
1952
1953 static void
1954 merge_if_block (ce_info)
1955 struct ce_if_block * ce_info;
1956 {
1957 basic_block test_bb = ce_info->test_bb; /* last test block */
1958 basic_block then_bb = ce_info->then_bb; /* THEN */
1959 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
1960 basic_block join_bb = ce_info->join_bb; /* join block */
1961 basic_block combo_bb;
1962
1963 /* All block merging is done into the lower block numbers. */
1964
1965 combo_bb = test_bb;
1966
1967 /* Merge any basic blocks to handle && and || subtests. Each of
1968 the blocks is on the fallthru path from the predecessor block. */
1969 if (ce_info->num_multiple_test_blocks > 0)
1970 {
1971 basic_block bb = test_bb;
1972 basic_block last_test_bb = ce_info->last_test_bb;
1973 basic_block fallthru = block_fallthru (bb);
1974
1975 do
1976 {
1977 bb = fallthru;
1978 fallthru = block_fallthru (bb);
1979 if (post_dominators)
1980 delete_from_dominance_info (post_dominators, bb);
1981 merge_blocks_nomove (combo_bb, bb);
1982 num_removed_blocks++;
1983 }
1984 while (bb != last_test_bb);
1985 }
1986
1987 /* Merge TEST block into THEN block. Normally the THEN block won't have a
1988 label, but it might if there were || tests. That label's count should be
1989 zero, and it normally should be removed. */
1990
1991 if (then_bb)
1992 {
1993 if (combo_bb->global_live_at_end)
1994 COPY_REG_SET (combo_bb->global_live_at_end,
1995 then_bb->global_live_at_end);
1996 if (post_dominators)
1997 delete_from_dominance_info (post_dominators, then_bb);
1998 merge_blocks_nomove (combo_bb, then_bb);
1999 num_removed_blocks++;
2000 }
2001
2002 /* The ELSE block, if it existed, had a label. That label count
2003 will almost always be zero, but odd things can happen when labels
2004 get their addresses taken. */
2005 if (else_bb)
2006 {
2007 if (post_dominators)
2008 delete_from_dominance_info (post_dominators, else_bb);
2009 merge_blocks_nomove (combo_bb, else_bb);
2010 num_removed_blocks++;
2011 }
2012
2013 /* If there was no join block reported, that means it was not adjacent
2014 to the others, and so we cannot merge them. */
2015
2016 if (! join_bb)
2017 {
2018 rtx last = combo_bb->end;
2019
2020 /* The outgoing edge for the current COMBO block should already
2021 be correct. Verify this. */
2022 if (combo_bb->succ == NULL_EDGE)
2023 {
2024 if (find_reg_note (last, REG_NORETURN, NULL))
2025 ;
2026 else if (GET_CODE (last) == INSN
2027 && GET_CODE (PATTERN (last)) == TRAP_IF
2028 && TRAP_CONDITION (PATTERN (last)) == const_true_rtx)
2029 ;
2030 else
2031 abort ();
2032 }
2033
2034 /* There should still be something at the end of the THEN or ELSE
2035 blocks taking us to our final destination. */
2036 else if (GET_CODE (last) == JUMP_INSN)
2037 ;
2038 else if (combo_bb->succ->dest == EXIT_BLOCK_PTR
2039 && GET_CODE (last) == CALL_INSN
2040 && SIBLING_CALL_P (last))
2041 ;
2042 else if ((combo_bb->succ->flags & EDGE_EH)
2043 && can_throw_internal (last))
2044 ;
2045 else
2046 abort ();
2047 }
2048
2049 /* The JOIN block may have had quite a number of other predecessors too.
2050 Since we've already merged the TEST, THEN and ELSE blocks, we should
2051 have only one remaining edge from our if-then-else diamond. If there
2052 is more than one remaining edge, it must come from elsewhere. There
2053 may be zero incoming edges if the THEN block didn't actually join
2054 back up (as with a call to abort). */
2055 else if ((join_bb->pred == NULL
2056 || join_bb->pred->pred_next == NULL)
2057 && join_bb != EXIT_BLOCK_PTR)
2058 {
2059 /* We can merge the JOIN. */
2060 if (combo_bb->global_live_at_end)
2061 COPY_REG_SET (combo_bb->global_live_at_end,
2062 join_bb->global_live_at_end);
2063
2064 if (post_dominators)
2065 delete_from_dominance_info (post_dominators, join_bb);
2066 merge_blocks_nomove (combo_bb, join_bb);
2067 num_removed_blocks++;
2068 }
2069 else
2070 {
2071 /* We cannot merge the JOIN. */
2072
2073 /* The outgoing edge for the current COMBO block should already
2074 be correct. Verify this. */
2075 if (combo_bb->succ->succ_next != NULL_EDGE
2076 || combo_bb->succ->dest != join_bb)
2077 abort ();
2078
2079 /* Remove the jump and cruft from the end of the COMBO block. */
2080 if (join_bb != EXIT_BLOCK_PTR)
2081 tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
2082 }
2083
2084 num_updated_if_blocks++;
2085 }
2086 \f
2087 /* Find a block ending in a simple IF condition and try to transform it
2088 in some way. When converting a multi-block condition, put the new code
2089 in the first such block and delete the rest. Return a pointer to this
2090 first block if some transformation was done. Return NULL otherwise. */
2091
2092 static basic_block
2093 find_if_header (test_bb, pass)
2094 basic_block test_bb;
2095 int pass;
2096 {
2097 ce_if_block_t ce_info;
2098 edge then_edge;
2099 edge else_edge;
2100
2101 /* The kind of block we're looking for has exactly two successors. */
2102 if ((then_edge = test_bb->succ) == NULL_EDGE
2103 || (else_edge = then_edge->succ_next) == NULL_EDGE
2104 || else_edge->succ_next != NULL_EDGE)
2105 return NULL;
2106
2107 /* Neither edge should be abnormal. */
2108 if ((then_edge->flags & EDGE_COMPLEX)
2109 || (else_edge->flags & EDGE_COMPLEX))
2110 return NULL;
2111
2112 /* The THEN edge is canonically the one that falls through. */
2113 if (then_edge->flags & EDGE_FALLTHRU)
2114 ;
2115 else if (else_edge->flags & EDGE_FALLTHRU)
2116 {
2117 edge e = else_edge;
2118 else_edge = then_edge;
2119 then_edge = e;
2120 }
2121 else
2122 /* Otherwise this must be a multiway branch of some sort. */
2123 return NULL;
2124
2125 memset ((PTR) &ce_info, '\0', sizeof (ce_info));
2126 ce_info.test_bb = test_bb;
2127 ce_info.then_bb = then_edge->dest;
2128 ce_info.else_bb = else_edge->dest;
2129 ce_info.pass = pass;
2130
2131 #ifdef IFCVT_INIT_EXTRA_FIELDS
2132 IFCVT_INIT_EXTRA_FIELDS (&ce_info);
2133 #endif
2134
2135 if (find_if_block (&ce_info))
2136 goto success;
2137
2138 if (HAVE_trap && HAVE_conditional_trap
2139 && find_cond_trap (test_bb, then_edge, else_edge))
2140 goto success;
2141
2142 if (post_dominators
2143 && (! HAVE_conditional_execution || reload_completed))
2144 {
2145 if (find_if_case_1 (test_bb, then_edge, else_edge))
2146 goto success;
2147 if (find_if_case_2 (test_bb, then_edge, else_edge))
2148 goto success;
2149 }
2150
2151 return NULL;
2152
2153 success:
2154 if (rtl_dump_file)
2155 fprintf (rtl_dump_file, "Conversion succeeded on pass %d.\n", pass);
2156 return ce_info.test_bb;
2157 }
2158
2159 /* Determine whether a block has two edges, one falling through to the next
2160 block and the other jumping to a specific block, so that we can tell if the
2161 block is part of an && test or an || test. Return -1 if not, otherwise
2162 the number of non-note, non-jump, non-USE/CLOBBER insns in the block. */
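/* For example (a source-level sketch): for

	if (a && b) ... else ...

   the block testing A conditionally jumps to the ELSE block when A is
   false and falls through to the block testing B, so calling this
   function with the A block and the ELSE block returns the count of its
   remaining insns, marking it as part of an && test.  For || chains the
   jump goes to the THEN block instead.  */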
2163
2164 static int
2165 block_jumps_and_fallthru_p (cur_bb, target_bb)
2166 basic_block cur_bb;
2167 basic_block target_bb;
2168 {
2169 edge cur_edge;
2170 int fallthru_p = FALSE;
2171 int jump_p = FALSE;
2172 rtx insn;
2173 rtx end;
2174 int n_insns = 0;
2175
2176 if (!cur_bb || !target_bb)
2177 return -1;
2178
2179 /* If no edges, obviously it doesn't jump or fallthru. */
2180 if (cur_bb->succ == NULL_EDGE)
2181 return -1;
2182
2183 for (cur_edge = cur_bb->succ;
2184 cur_edge != NULL_EDGE;
2185 cur_edge = cur_edge->succ_next)
2186 {
2187 if (cur_edge->flags & EDGE_COMPLEX)
2188 /* Anything complex isn't what we want. */
2189 return -1;
2190
2191 else if (cur_edge->flags & EDGE_FALLTHRU)
2192 fallthru_p = TRUE;
2193
2194 else if (cur_edge->dest == target_bb)
2195 jump_p = TRUE;
2196
2197 else
2198 return -1;
2199 }
2200
2201 if ((jump_p & fallthru_p) == 0)
2202 return -1;
2203
2204 /* Don't allow calls in the block, since this is used to group && and ||
2205 together for conditional execution support. ??? We should support
2206 conditional execution across calls for IA-64 some day, but
2207 for now it makes the code simpler. */
2208 end = cur_bb->end;
2209 insn = cur_bb->head;
2210
2211 while (insn != NULL_RTX)
2212 {
2213 if (GET_CODE (insn) == CALL_INSN)
2214 return -1;
2215
2216 if (INSN_P (insn)
2217 && GET_CODE (insn) != JUMP_INSN
2218 && GET_CODE (PATTERN (insn)) != USE
2219 && GET_CODE (PATTERN (insn)) != CLOBBER)
2220 n_insns++;
2221
2222 if (insn == end)
2223 break;
2224
2225 insn = NEXT_INSN (insn);
2226 }
2227
2228 return n_insns;
2229 }
2230
2231 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
2232 block. If so, we'll try to convert the insns to not require the branch.
2233 Return TRUE if we were successful at converting the block. */
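/* The shapes we look for, sketched at the source level:

	IF-THEN:	  if (test) { then; }  join;
	IF-THEN-ELSE:	  if (test) { then; } else { else; }  join;

   i.e. a TEST block falling through to the THEN block, with the other
   edge going either directly to the JOIN block or to an ELSE block that
   in turn reaches the same JOIN block.  A chain of && or || test blocks
   may fall through into the final TEST block.  */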
2234
2235 static int
2236 find_if_block (ce_info)
2237 struct ce_if_block * ce_info;
2238 {
2239 basic_block test_bb = ce_info->test_bb;
2240 basic_block then_bb = ce_info->then_bb;
2241 basic_block else_bb = ce_info->else_bb;
2242 basic_block join_bb = NULL_BLOCK;
2243 edge then_succ = then_bb->succ;
2244 edge else_succ = else_bb->succ;
2245 int then_predecessors;
2246 int else_predecessors;
2247 edge cur_edge;
2248 basic_block next;
2249
2250 ce_info->last_test_bb = test_bb;
2251
2252 /* Discover if any fall through predecessors of the current test basic block
2253 were && tests (which jump to the else block) or || tests (which jump to
2254 the then block). */
2255 if (HAVE_conditional_execution && reload_completed
2256 && test_bb->pred != NULL_EDGE
2257 && test_bb->pred->pred_next == NULL_EDGE
2258 && test_bb->pred->flags == EDGE_FALLTHRU)
2259 {
2260 basic_block bb = test_bb->pred->src;
2261 basic_block target_bb;
2262 int max_insns = MAX_CONDITIONAL_EXECUTE;
2263 int n_insns;
2264
2265 /* Determine if the preceding block is an && or || block. */
2266 if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
2267 {
2268 ce_info->and_and_p = TRUE;
2269 target_bb = else_bb;
2270 }
2271 else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
2272 {
2273 ce_info->and_and_p = FALSE;
2274 target_bb = then_bb;
2275 }
2276 else
2277 target_bb = NULL_BLOCK;
2278
2279 if (target_bb && n_insns <= max_insns)
2280 {
2281 int total_insns = 0;
2282 int blocks = 0;
2283
2284 ce_info->last_test_bb = test_bb;
2285
2286 /* Found at least one && or || block, look for more. */
2287 do
2288 {
2289 ce_info->test_bb = test_bb = bb;
2290 total_insns += n_insns;
2291 blocks++;
2292
2293 if (bb->pred == NULL_EDGE || bb->pred->pred_next != NULL_EDGE)
2294 break;
2295
2296 bb = bb->pred->src;
2297 n_insns = block_jumps_and_fallthru_p (bb, target_bb);
2298 }
2299 while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);
2300
2301 ce_info->num_multiple_test_blocks = blocks;
2302 ce_info->num_multiple_test_insns = total_insns;
2303
2304 if (ce_info->and_and_p)
2305 ce_info->num_and_and_blocks = blocks;
2306 else
2307 ce_info->num_or_or_blocks = blocks;
2308 }
2309 }
2310
2311 /* Count the number of edges the THEN and ELSE blocks have. */
2312 then_predecessors = 0;
2313 for (cur_edge = then_bb->pred;
2314 cur_edge != NULL_EDGE;
2315 cur_edge = cur_edge->pred_next)
2316 {
2317 then_predecessors++;
2318 if (cur_edge->flags & EDGE_COMPLEX)
2319 return FALSE;
2320 }
2321
2322 else_predecessors = 0;
2323 for (cur_edge = else_bb->pred;
2324 cur_edge != NULL_EDGE;
2325 cur_edge = cur_edge->pred_next)
2326 {
2327 else_predecessors++;
2328 if (cur_edge->flags & EDGE_COMPLEX)
2329 return FALSE;
2330 }
2331
2332 /* The THEN block of an IF-THEN combo must have exactly one predecessor,
2333 other than any || blocks which jump to the THEN block. */
2334 if ((then_predecessors - ce_info->num_or_or_blocks) != 1)
2335 return FALSE;
2336
2337 /* The THEN block of an IF-THEN combo must have zero or one successors. */
2338 if (then_succ != NULL_EDGE
2339 && (then_succ->succ_next != NULL_EDGE
2340 || (then_succ->flags & EDGE_COMPLEX)))
2341 return FALSE;
2342
2343 /* If the THEN block has no successors, conditional execution can still
2344 make a conditional call. Don't do this unless the ELSE block has
2345 only one incoming edge -- the CFG manipulation is too ugly otherwise.
2346 Check for the last insn of the THEN block being an indirect jump, which
2347 is listed as not having any successors, but confuses the rest of the CE
2348 code processing. ??? We should fix this in the future. */
2349 if (then_succ == NULL)
2350 {
2351 if (else_bb->pred->pred_next == NULL_EDGE)
2352 {
2353 rtx last_insn = then_bb->end;
2354
2355 while (last_insn
2356 && GET_CODE (last_insn) == NOTE
2357 && last_insn != then_bb->head)
2358 last_insn = PREV_INSN (last_insn);
2359
2360 if (last_insn
2361 && GET_CODE (last_insn) == JUMP_INSN
2362 && ! simplejump_p (last_insn))
2363 return FALSE;
2364
2365 join_bb = else_bb;
2366 else_bb = NULL_BLOCK;
2367 }
2368 else
2369 return FALSE;
2370 }
2371
2372 /* If the THEN block's successor is the other edge out of the TEST block,
2373 then we have an IF-THEN combo without an ELSE. */
2374 else if (then_succ->dest == else_bb)
2375 {
2376 join_bb = else_bb;
2377 else_bb = NULL_BLOCK;
2378 }
2379
2380 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
2381 has exactly one predecessor and one successor, and the outgoing edge
2382 is not complex, then we have an IF-THEN-ELSE combo. */
2383 else if (else_succ != NULL_EDGE
2384 && then_succ->dest == else_succ->dest
2385 && else_bb->pred->pred_next == NULL_EDGE
2386 && else_succ->succ_next == NULL_EDGE
2387 && ! (else_succ->flags & EDGE_COMPLEX))
2388 join_bb = else_succ->dest;
2389
2390 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
2391 else
2392 return FALSE;
2393
2394 num_possible_if_blocks++;
2395
2396 if (rtl_dump_file)
2397 {
2398 fprintf (rtl_dump_file, "\nIF-THEN%s block found, pass %d, start block %d [insn %d], then %d [%d]",
2399 (else_bb) ? "-ELSE" : "",
2400 ce_info->pass,
2401 test_bb->index, (test_bb->head) ? (int)INSN_UID (test_bb->head) : -1,
2402 then_bb->index, (then_bb->head) ? (int)INSN_UID (then_bb->head) : -1);
2403
2404 if (else_bb)
2405 fprintf (rtl_dump_file, ", else %d [%d]",
2406 else_bb->index, (else_bb->head) ? (int)INSN_UID (else_bb->head) : -1);
2407
2408 fprintf (rtl_dump_file, ", join %d [%d]",
2409 join_bb->index, (join_bb->head) ? (int)INSN_UID (join_bb->head) : -1);
2410
2411 if (ce_info->num_multiple_test_blocks > 0)
2412 fprintf (rtl_dump_file, ", %d %s block%s last test %d [%d]",
2413 ce_info->num_multiple_test_blocks,
2414 (ce_info->and_and_p) ? "&&" : "||",
2415 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
2416 ce_info->last_test_bb->index,
2417 ((ce_info->last_test_bb->head)
2418 ? (int)INSN_UID (ce_info->last_test_bb->head)
2419 : -1));
2420
2421 fputc ('\n', rtl_dump_file);
2422 }
2423
2424 /* Make sure IF, THEN, and ELSE, blocks are adjacent. Actually, we get the
2425 first condition for free, since we've already asserted that there's a
2426 fallthru edge from IF to THEN. Likewise for the && and || blocks, since
2427 we checked the FALLTHRU flag, those are already adjacent to the last IF
2428 block. */
2429 /* ??? As an enhancement, move the ELSE block. Have to deal with
2430 BLOCK notes, if by no other means than aborting the merge if they
2431 exist. Sticky enough I don't want to think about it now. */
2432 next = then_bb;
2433 if (else_bb && (next = next->next_bb) != else_bb)
2434 return FALSE;
2435 if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
2436 {
2437 if (else_bb)
2438 join_bb = NULL;
2439 else
2440 return FALSE;
2441 }
2442
2443 /* Do the real work. */
2444 ce_info->else_bb = else_bb;
2445 ce_info->join_bb = join_bb;
2446
2447 return process_if_block (ce_info);
2448 }
2449
2450 /* Convert a branch over a trap, or a branch
2451 to a trap, into a conditional trap. */
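/* For example (an illustrative sketch):

	if (test) goto over;
	trap;
     over:

   becomes a conditional trap on (! test), and, symmetrically, a
   conditional branch into a block containing only a trap becomes a
   conditional trap on TEST itself; the trap block is deleted when it
   has no remaining predecessors.  */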
2452
2453 static int
2454 find_cond_trap (test_bb, then_edge, else_edge)
2455 basic_block test_bb;
2456 edge then_edge, else_edge;
2457 {
2458 basic_block then_bb = then_edge->dest;
2459 basic_block else_bb = else_edge->dest;
2460 basic_block other_bb, trap_bb;
2461 rtx trap, jump, cond, cond_earliest, seq;
2462 enum rtx_code code;
2463
2464 /* Locate the block with the trap instruction. */
2465 /* ??? While we look for no successors, we really ought to allow
2466 EH successors. Need to fix merge_if_block for that to work. */
2467 if ((trap = block_has_only_trap (then_bb)) != NULL)
2468 trap_bb = then_bb, other_bb = else_bb;
2469 else if ((trap = block_has_only_trap (else_bb)) != NULL)
2470 trap_bb = else_bb, other_bb = then_bb;
2471 else
2472 return FALSE;
2473
2474 if (rtl_dump_file)
2475 {
2476 fprintf (rtl_dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
2477 test_bb->index, trap_bb->index);
2478 }
2479
2480 /* If this is not a standard conditional jump, we can't parse it. */
2481 jump = test_bb->end;
2482 cond = noce_get_condition (jump, &cond_earliest);
2483 if (! cond)
2484 return FALSE;
2485
2486 /* If the conditional jump is more than just a conditional jump, then
2487 we can not do if-conversion on this block. */
2488 if (! onlyjump_p (jump))
2489 return FALSE;
2490
2491 /* We must be comparing objects whose modes imply the size. */
2492 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
2493 return FALSE;
2494
2495 /* Reverse the comparison code, if necessary. */
2496 code = GET_CODE (cond);
2497 if (then_bb == trap_bb)
2498 {
2499 code = reversed_comparison_code (cond, jump);
2500 if (code == UNKNOWN)
2501 return FALSE;
2502 }
2503
2504 /* Attempt to generate the conditional trap. */
2505 seq = gen_cond_trap (code, XEXP (cond, 0), XEXP (cond, 1),
2506 TRAP_CODE (PATTERN (trap)));
2507 if (seq == NULL)
2508 return FALSE;
2509
2510 /* Emit the new insns before cond_earliest. */
2511 emit_insn_before_scope (seq, cond_earliest, INSN_SCOPE (trap));
2512
2513 /* Delete the trap block if possible. */
2514 remove_edge (trap_bb == then_bb ? then_edge : else_edge);
2515 if (trap_bb->pred == NULL)
2516 {
2517 if (post_dominators)
2518 delete_from_dominance_info (post_dominators, trap_bb);
2519 flow_delete_block (trap_bb);
2520 num_removed_blocks++;
2521 }
2522
2523 /* If the non-trap block and the test are now adjacent, merge them.
2524 Otherwise we must insert a direct branch. */
2525 if (test_bb->next_bb == other_bb)
2526 {
2527 struct ce_if_block new_ce_info;
2528 delete_insn (jump);
2529 memset ((PTR) &new_ce_info, '\0', sizeof (new_ce_info));
2530 new_ce_info.test_bb = test_bb;
2531 new_ce_info.then_bb = NULL;
2532 new_ce_info.else_bb = NULL;
2533 new_ce_info.join_bb = other_bb;
2534 merge_if_block (&new_ce_info);
2535 }
2536 else
2537 {
2538 rtx lab, newjump;
2539
2540 lab = JUMP_LABEL (jump);
2541 newjump = emit_jump_insn_after (gen_jump (lab), jump);
2542 LABEL_NUSES (lab) += 1;
2543 JUMP_LABEL (newjump) = lab;
2544 emit_barrier_after (newjump);
2545
2546 delete_insn (jump);
2547 }
2548
2549 return TRUE;
2550 }
2551
2552 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
2553 return it. */
2554
2555 static rtx
2556 block_has_only_trap (bb)
2557 basic_block bb;
2558 {
2559 rtx trap;
2560
2561 /* We're not the exit block. */
2562 if (bb == EXIT_BLOCK_PTR)
2563 return NULL_RTX;
2564
2565 /* The block must have no successors. */
2566 if (bb->succ)
2567 return NULL_RTX;
2568
2569 /* The only instruction in the block must be the trap. */
2570 trap = first_active_insn (bb);
2571 if (! (trap == bb->end
2572 && GET_CODE (PATTERN (trap)) == TRAP_IF
2573 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
2574 return NULL_RTX;
2575
2576 return trap;
2577 }
2578
2579 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
2580 transformable, but not necessarily the other. There need be no
2581 JOIN block.
2582
2583 Return TRUE if we were successful at converting the block.
2584
2585 Cases we'd like to look at:
2586
2587 (1)
2588 if (test) goto over; // x not live
2589 x = a;
2590 goto label;
2591 over:
2592
2593 becomes
2594
2595 x = a;
2596 if (! test) goto label;
2597
2598 (2)
2599 if (test) goto E; // x not live
2600 x = big();
2601 goto L;
2602 E:
2603 x = b;
2604 goto M;
2605
2606 becomes
2607
2608 x = b;
2609 if (test) goto M;
2610 x = big();
2611 goto L;
2612
2613 (3) // This one's really only interesting for targets that can do
2614 // multiway branching, e.g. IA-64 BBB bundles. For other targets
2615 // it results in multiple branches on a cache line, which often
2616 // does not sit well with predictors.
2617
2618 if (test1) goto E; // predicted not taken
2619 x = a;
2620 if (test2) goto F;
2621 ...
2622 E:
2623 x = b;
2624 J:
2625
2626 becomes
2627
2628 x = a;
2629 if (test1) goto E;
2630 if (test2) goto F;
2631
2632 Notes:
2633
2634 (A) Don't do (2) if the branch is predicted against the block we're
2635 eliminating. Do it anyway if we can eliminate a branch; this requires
2636 that the sole successor of the eliminated block postdominate the other
2637 side of the if.
2638
2639 (B) With CE, on (3) we can steal from both sides of the if, creating
2640
2641 if (test1) x = a;
2642 if (!test1) x = b;
2643 if (test1) goto J;
2644 if (test2) goto F;
2645 ...
2646 J:
2647
2648 Again, this is most useful if J postdominates.
2649
2650 (C) CE substitutes for helpful life information.
2651
2652 (D) These heuristics need a lot of work. */
2653
2654 /* Test for case 1 above. */
2655
2656 static int
2657 find_if_case_1 (test_bb, then_edge, else_edge)
2658 basic_block test_bb;
2659 edge then_edge, else_edge;
2660 {
2661 basic_block then_bb = then_edge->dest;
2662 basic_block else_bb = else_edge->dest, new_bb;
2663 edge then_succ = then_bb->succ;
2664 int then_bb_index;
2665
2666 /* THEN has one successor. */
2667 if (!then_succ || then_succ->succ_next != NULL)
2668 return FALSE;
2669
2670 /* THEN does not fall through, but is not strange either. */
2671 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
2672 return FALSE;
2673
2674 /* THEN has one predecessor. */
2675 if (then_bb->pred->pred_next != NULL)
2676 return FALSE;
2677
2678 /* THEN must do something. */
2679 if (forwarder_block_p (then_bb))
2680 return FALSE;
2681
2682 num_possible_if_blocks++;
2683 if (rtl_dump_file)
2684 fprintf (rtl_dump_file,
2685 "\nIF-CASE-1 found, start %d, then %d\n",
2686 test_bb->index, then_bb->index);
2687
2688 /* THEN is small. */
2689 if (count_bb_insns (then_bb) > BRANCH_COST)
2690 return FALSE;
2691
2692 /* Registers set are dead, or are predicable. */
2693 if (! dead_or_predicable (test_bb, then_bb, else_bb,
2694 then_bb->succ->dest, 1))
2695 return FALSE;
2696
2697 /* Conversion went ok, including moving the insns and fixing up the
2698 jump. Adjust the CFG to match. */
2699
2700 bitmap_operation (test_bb->global_live_at_end,
2701 else_bb->global_live_at_start,
2702 then_bb->global_live_at_end, BITMAP_IOR);
2703
2704 new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb), else_bb);
2705 then_bb_index = then_bb->index;
2706 if (post_dominators)
2707 delete_from_dominance_info (post_dominators, then_bb);
2708 flow_delete_block (then_bb);
2709
2710 /* Make the rest of the code believe that the newly created block is the THEN_BB
2711 block we removed. */
2712 if (new_bb)
2713 {
2714 new_bb->index = then_bb_index;
2715 BASIC_BLOCK (then_bb_index) = new_bb;
2716 }
2717 /* We've possibly created a jump to the next insn; cleanup_cfg will fix
2718 that up later. */
2719
2720 num_removed_blocks++;
2721 num_updated_if_blocks++;
2722
2723 return TRUE;
2724 }
2725
2726 /* Test for case 2 above. */
2727
2728 static int
2729 find_if_case_2 (test_bb, then_edge, else_edge)
2730 basic_block test_bb;
2731 edge then_edge, else_edge;
2732 {
2733 basic_block then_bb = then_edge->dest;
2734 basic_block else_bb = else_edge->dest;
2735 edge else_succ = else_bb->succ;
2736 rtx note;
2737
2738 /* ELSE has one successor. */
2739 if (!else_succ || else_succ->succ_next != NULL)
2740 return FALSE;
2741
2742 /* ELSE outgoing edge is not complex. */
2743 if (else_succ->flags & EDGE_COMPLEX)
2744 return FALSE;
2745
2746 /* ELSE has one predecessor. */
2747 if (else_bb->pred->pred_next != NULL)
2748 return FALSE;
2749
2750 /* THEN is not EXIT. */
2751 if (then_bb->index < 0)
2752 return FALSE;
2753
2754 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
2755 note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
2756 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
2757 ;
2758 else if (else_succ->dest->index < 0
2759 || dominated_by_p (post_dominators, then_bb,
2760 else_succ->dest))
2761 ;
2762 else
2763 return FALSE;
2764
2765 num_possible_if_blocks++;
2766 if (rtl_dump_file)
2767 fprintf (rtl_dump_file,
2768 "\nIF-CASE-2 found, start %d, else %d\n",
2769 test_bb->index, else_bb->index);
2770
2771 /* ELSE is small. */
2772 if (count_bb_insns (else_bb) > BRANCH_COST)
2773 return FALSE;
2774
2775 /* Registers set are dead, or are predicable. */
2776 if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
2777 return FALSE;
2778
2779 /* Conversion went ok, including moving the insns and fixing up the
2780 jump. Adjust the CFG to match. */
2781
2782 bitmap_operation (test_bb->global_live_at_end,
2783 then_bb->global_live_at_start,
2784 else_bb->global_live_at_end, BITMAP_IOR);
2785
2786 if (post_dominators)
2787 delete_from_dominance_info (post_dominators, else_bb);
2788 flow_delete_block (else_bb);
2789
2790 num_removed_blocks++;
2791 num_updated_if_blocks++;
2792
2793 /* ??? We may now fallthru from one of THEN's successors into a join
2794 block. Rerun cleanup_cfg? Examine things manually? Wait? */
2795
2796 return TRUE;
2797 }
2798
2799 /* A subroutine of dead_or_predicable called through for_each_rtx.
2800 Return 1 if a memory reference is found. */
2801
2802 static int
2803 find_memory (px, data)
2804 rtx *px;
2805 void *data ATTRIBUTE_UNUSED;
2806 {
2807 return GET_CODE (*px) == MEM;
2808 }
2809
2810 /* Used by the code above to perform the actual rtl transformations.
2811 Return TRUE if successful.
2812
2813 TEST_BB is the block containing the conditional branch. MERGE_BB
2814 is the block containing the code to manipulate. NEW_DEST is the
2815 label TEST_BB should be branching to after the conversion.
2816 REVERSEP is true if the sense of the branch should be reversed. */
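/* As an illustrative sketch, using case (1) from the comment above: for

	if (test) goto over;	-- TEST_BB ends with this branch
	x = a;			-- MERGE_BB
	goto label;
     over:			-- OTHER_BB

   find_if_case_1 calls this with MERGE_BB = the THEN block, OTHER_BB =
   the ELSE block, NEW_DEST = label and REVERSEP nonzero, yielding

	x = a;
	if (! test) goto label;
     over:

   assuming, as case (1) requires, that x is not live at OVER or that the
   moved insns can instead be predicated.  */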
2817
2818 static int
2819 dead_or_predicable (test_bb, merge_bb, other_bb, new_dest, reversep)
2820 basic_block test_bb, merge_bb, other_bb;
2821 basic_block new_dest;
2822 int reversep;
2823 {
2824 rtx head, end, jump, earliest, old_dest, new_label = NULL_RTX;
2825
2826 jump = test_bb->end;
2827
2828 /* Find the extent of the real code in the merge block. */
2829 head = merge_bb->head;
2830 end = merge_bb->end;
2831
2832 if (GET_CODE (head) == CODE_LABEL)
2833 head = NEXT_INSN (head);
2834 if (GET_CODE (head) == NOTE)
2835 {
2836 if (head == end)
2837 {
2838 head = end = NULL_RTX;
2839 goto no_body;
2840 }
2841 head = NEXT_INSN (head);
2842 }
2843
2844 if (GET_CODE (end) == JUMP_INSN)
2845 {
2846 if (head == end)
2847 {
2848 head = end = NULL_RTX;
2849 goto no_body;
2850 }
2851 end = PREV_INSN (end);
2852 }
2853
2854 /* Disable handling dead code by conditional execution if the machine needs
2855 to do anything funny with the tests, etc. */
2856 #ifndef IFCVT_MODIFY_TESTS
2857 if (HAVE_conditional_execution)
2858 {
2859 /* In the conditional execution case, we have things easy. We know
2860 the condition is reversible. We don't have to check life info,
2861 because we're going to conditionally execute the code anyway.
2862 All that's left is making sure the insns involved can actually
2863 be predicated. */
2864
2865 rtx cond, prob_val;
2866
2867 cond = cond_exec_get_condition (jump);
2868 if (! cond)
2869 return FALSE;
2870
2871 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2872 if (prob_val)
2873 prob_val = XEXP (prob_val, 0);
2874
2875 if (reversep)
2876 {
2877 enum rtx_code rev = reversed_comparison_code (cond, jump);
2878 if (rev == UNKNOWN)
2879 return FALSE;
2880 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
2881 XEXP (cond, 1));
2882 if (prob_val)
2883 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
2884 }
2885
2886 if (! cond_exec_process_insns ((ce_if_block_t *)0, head, end, cond,
2887 prob_val, 0))
2888 goto cancel;
2889
2890 earliest = jump;
2891 }
2892 else
2893 #endif
2894 {
2895 /* In the non-conditional execution case, we have to verify that there
2896 are no trapping operations, no calls, no references to memory, and
2897 that any registers modified are dead at the branch site. */
2898
2899 rtx insn, cond, prev;
2900 regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
2901 regset merge_set, tmp, test_live, test_set;
2902 struct propagate_block_info *pbi;
2903 int i, fail = 0;
2904
2905 /* Check for no calls or trapping operations. */
2906 for (insn = head; ; insn = NEXT_INSN (insn))
2907 {
2908 if (GET_CODE (insn) == CALL_INSN)
2909 return FALSE;
2910 if (INSN_P (insn))
2911 {
2912 if (may_trap_p (PATTERN (insn)))
2913 return FALSE;
2914
2915 /* ??? Even non-trapping memories such as stack frame
2916 references must be avoided. For stores, we collect
2917 no lifetime info; for reads, we'd have to assert
2918 true_dependence false against every store in the
2919 TEST range. */
2920 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
2921 return FALSE;
2922 }
2923 if (insn == end)
2924 break;
2925 }
2926
2927 if (! any_condjump_p (jump))
2928 return FALSE;
2929
2930 /* Find the extent of the conditional. */
2931 cond = noce_get_condition (jump, &earliest);
2932 if (! cond)
2933 return FALSE;
2934
2935 /* Collect:
2936 MERGE_SET = set of registers set in MERGE_BB
2937 TEST_LIVE = set of registers live at EARLIEST
2938 TEST_SET = set of registers set between EARLIEST and the
2939 end of the block. */
2940
2941 tmp = INITIALIZE_REG_SET (tmp_head);
2942 merge_set = INITIALIZE_REG_SET (merge_set_head);
2943 test_live = INITIALIZE_REG_SET (test_live_head);
2944 test_set = INITIALIZE_REG_SET (test_set_head);
2945
2946 /* ??? bb->local_set is only valid during calculate_global_regs_live,
2947 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
2948 since we've already asserted that MERGE_BB is small. */
2949 propagate_block (merge_bb, tmp, merge_set, merge_set, 0);
2950
2951 /* For small register class machines, don't lengthen lifetimes of
2952 hard registers before reload. */
2953 if (SMALL_REGISTER_CLASSES && ! reload_completed)
2954 {
2955 EXECUTE_IF_SET_IN_BITMAP
2956 (merge_set, 0, i,
2957 {
2958 if (i < FIRST_PSEUDO_REGISTER
2959 && ! fixed_regs[i]
2960 && ! global_regs[i])
2961 fail = 1;
2962 });
2963 }
2964
2965 /* For TEST, we're interested in a range of insns, not a whole block.
2966 Moreover, we're interested in the registers live on entry to OTHER_BB. */
2967
2968 COPY_REG_SET (test_live, other_bb->global_live_at_start);
2969 pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
2970 0);
2971
2972 for (insn = jump; ; insn = prev)
2973 {
2974 prev = propagate_one_insn (pbi, insn);
2975 if (insn == earliest)
2976 break;
2977 }
2978
2979 free_propagate_block_info (pbi);
2980
2981 /* We can perform the transformation if
2982 MERGE_SET & (TEST_SET | TEST_LIVE)
2983 and
2984 TEST_SET & merge_bb->global_live_at_start
2985 are empty. */
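      /* A concrete illustration (hypothetical registers): if MERGE_BB sets
	 r5, hoisting it above EARLIEST is wrong when r5 is live at EARLIEST
	 (the other path still needs the old r5) or when the TEST range also
	 sets r5 (the hoisted store would be overwritten).  Similarly, if the
	 TEST range sets a register such as r6 that is live on entry to
	 MERGE_BB, the hoisted code would read a stale r6.  In any of these
	 cases we must give up.  */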
2986
2987 bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
2988 bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
2989 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2990
2991 bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
2992 BITMAP_AND);
2993 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2994
2995 FREE_REG_SET (tmp);
2996 FREE_REG_SET (merge_set);
2997 FREE_REG_SET (test_live);
2998 FREE_REG_SET (test_set);
2999
3000 if (fail)
3001 return FALSE;
3002 }
3003
3004 no_body:
3005 /* We don't want to use normal invert_jump or redirect_jump because
3006 we don't want delete_insn called. Also, we want to do our own
3007 change group management. */
3008
3009 old_dest = JUMP_LABEL (jump);
3010 if (other_bb != new_dest)
3011 {
3012 new_label = block_label (new_dest);
3013 if (reversep
3014 ? ! invert_jump_1 (jump, new_label)
3015 : ! redirect_jump_1 (jump, new_label))
3016 goto cancel;
3017 }
3018
3019 if (! apply_change_group ())
3020 return FALSE;
3021
3022 if (other_bb != new_dest)
3023 {
3024 if (old_dest)
3025 LABEL_NUSES (old_dest) -= 1;
3026 if (new_label)
3027 LABEL_NUSES (new_label) += 1;
3028 JUMP_LABEL (jump) = new_label;
3029 if (reversep)
3030 invert_br_probabilities (jump);
3031
3032 redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
3033 if (reversep)
3034 {
3035 gcov_type count, probability;
3036 count = BRANCH_EDGE (test_bb)->count;
3037 BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
3038 FALLTHRU_EDGE (test_bb)->count = count;
3039 probability = BRANCH_EDGE (test_bb)->probability;
3040 BRANCH_EDGE (test_bb)->probability
3041 = FALLTHRU_EDGE (test_bb)->probability;
3042 FALLTHRU_EDGE (test_bb)->probability = probability;
3043 update_br_prob_note (test_bb);
3044 }
3045 }
3046
3047 /* Move the insns out of MERGE_BB to before the branch. */
3048 if (head != NULL)
3049 {
3050 if (end == merge_bb->end)
3051 merge_bb->end = PREV_INSN (head);
3052
3053 if (squeeze_notes (&head, &end))
3054 return TRUE;
3055
3056 reorder_insns (head, end, PREV_INSN (earliest));
3057 }
3058
3059 /* Remove the jump and edge if we can. */
3060 if (other_bb == new_dest)
3061 {
3062 delete_insn (jump);
3063 remove_edge (BRANCH_EDGE (test_bb));
3064 /* ??? Can't merge blocks here, as then_bb is still in use.
3065 At minimum, the merge will get done just before bb-reorder. */
3066 }
3067
3068 return TRUE;
3069
3070 cancel:
3071 cancel_changes (0);
3072 return FALSE;
3073 }
3074 \f
3075 /* Main entry point for all if-conversion. */
3076
3077 void
3078 if_convert (x_life_data_ok)
3079 int x_life_data_ok;
3080 {
3081 basic_block bb;
3082 int pass;
3083
3084 num_possible_if_blocks = 0;
3085 num_updated_if_blocks = 0;
3086 num_removed_blocks = 0;
3087 life_data_ok = (x_life_data_ok != 0);
3088
3089 /* Free up basic_block_for_insn so that we don't have to keep it
3090 up to date, either here or in merge_blocks_nomove. */
3091 free_basic_block_vars (1);
3092
3093 /* Compute postdominators if we think we'll use them. */
3094 post_dominators = NULL;
3095 if (HAVE_conditional_execution || life_data_ok)
3096 {
3097 post_dominators = calculate_dominance_info (CDI_POST_DOMINATORS);
3098 }
3099 if (life_data_ok)
3100 clear_bb_flags ();
3101
3102 /* Go through each of the basic blocks looking for things to convert. If we
3103 have conditional execution, we make multiple passes to allow us to handle
3104 IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks. */
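  /* For example (an illustrative sketch): with conditional execution,

	if (a) { if (b) x = 1; y = 2; }

     the first pass can convert only the inner IF into a predicated
     "(b) x = 1", which leaves a simple outer IF-THEN block that a second
     pass can then convert as well.  */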
3105 pass = 0;
3106 do
3107 {
3108 cond_exec_changed_p = FALSE;
3109 pass++;
3110
3111 #ifdef IFCVT_MULTIPLE_DUMPS
3112 if (rtl_dump_file && pass > 1)
3113 fprintf (rtl_dump_file, "\n\n========== Pass %d ==========\n", pass);
3114 #endif
3115
3116 FOR_EACH_BB (bb)
3117 {
3118 basic_block new_bb = find_if_header (bb, pass);
3119 if (new_bb)
3120 bb = new_bb;
3121 }
3122
3123 #ifdef IFCVT_MULTIPLE_DUMPS
3124 if (rtl_dump_file && cond_exec_changed_p)
3125 print_rtl_with_bb (rtl_dump_file, get_insns ());
3126 #endif
3127 }
3128 while (cond_exec_changed_p);
3129
3130 #ifdef IFCVT_MULTIPLE_DUMPS
3131 if (rtl_dump_file)
3132 fprintf (rtl_dump_file, "\n\n========== no more changes\n");
3133 #endif
3134
3135 if (post_dominators)
3136 free_dominance_info (post_dominators);
3137
3138 if (rtl_dump_file)
3139 fflush (rtl_dump_file);
3140
3141 clear_aux_for_blocks ();
3142
3143 /* Rebuild life info for basic blocks that require it. */
3144 if (num_removed_blocks && life_data_ok)
3145 {
3146 /* If we allocated new pseudos, we must resize the array for sched1. */
3147 if (max_regno < max_reg_num ())
3148 {
3149 max_regno = max_reg_num ();
3150 allocate_reg_info (max_regno, FALSE, FALSE);
3151 }
3152 update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
3153 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
3154 | PROP_KILL_DEAD_CODE);
3155 }
3156
3157 /* Write the final stats. */
3158 if (rtl_dump_file && num_possible_if_blocks > 0)
3159 {
3160 fprintf (rtl_dump_file,
3161 "\n%d possible IF blocks searched.\n",
3162 num_possible_if_blocks);
3163 fprintf (rtl_dump_file,
3164 "%d IF blocks converted.\n",
3165 num_updated_if_blocks);
3166 fprintf (rtl_dump_file,
3167 "%d basic blocks deleted.\n\n\n",
3168 num_removed_blocks);
3169 }
3170
3171 #ifdef ENABLE_CHECKING
3172 verify_flow_info ();
3173 #endif
3174 }