1 /* If-conversion support.
2 Copyright (C) 2000-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "cfghooks.h"
28 #include "df.h"
29 #include "tm_p.h"
30 #include "expmed.h"
31 #include "optabs.h"
32 #include "regs.h"
33 #include "emit-rtl.h"
34 #include "recog.h"
35
36 #include "cfgrtl.h"
37 #include "cfganal.h"
38 #include "cfgcleanup.h"
39 #include "expr.h"
40 #include "output.h"
41 #include "cfgloop.h"
42 #include "tree-pass.h"
43 #include "dbgcnt.h"
44 #include "shrink-wrap.h"
45 #include "rtl-iter.h"
46 #include "ifcvt.h"
47
48 #ifndef MAX_CONDITIONAL_EXECUTE
49 #define MAX_CONDITIONAL_EXECUTE \
50 (BRANCH_COST (optimize_function_for_speed_p (cfun), false) \
51 + 1)
52 #endif
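
/* For illustration (the cost value is assumed, not from this file): on a
   target whose BRANCH_COST is 3 when optimizing for speed, the default
   above expands to

       MAX_CONDITIONAL_EXECUTE == BRANCH_COST (true, false) + 1 == 4

   so up to four insns may be converted (twice that when both a THEN and
   an ELSE arm are present; see cond_exec_process_if_block).  */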
53
54 #define IFCVT_MULTIPLE_DUMPS 1
55
56 #define NULL_BLOCK ((basic_block) NULL)
57
58 /* True if after combine pass. */
59 static bool ifcvt_after_combine;
60
61 /* True if the target has the cbranchcc4 optab. */
62 static bool have_cbranchcc4;
63
64 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at */
65 static int num_possible_if_blocks;
66
67 /* # of IF-THEN or IF-THEN-ELSE blocks that were converted to
68 conditional execution. */
69 static int num_updated_if_blocks;
70
71 /* # of changes made. */
72 static int num_true_changes;
73
74 /* Whether conditional execution changes were made. */
75 static int cond_exec_changed_p;
76
77 /* Forward references. */
78 static int count_bb_insns (const_basic_block);
79 static bool cheap_bb_rtx_cost_p (const_basic_block, int, int);
80 static rtx_insn *first_active_insn (basic_block);
81 static rtx_insn *last_active_insn (basic_block, int);
82 static rtx_insn *find_active_insn_before (basic_block, rtx_insn *);
83 static rtx_insn *find_active_insn_after (basic_block, rtx_insn *);
84 static basic_block block_fallthru (basic_block);
85 static int cond_exec_process_insns (ce_if_block *, rtx_insn *, rtx, rtx, int,
86 int);
87 static rtx cond_exec_get_condition (rtx_insn *);
88 static rtx noce_get_condition (rtx_insn *, rtx_insn **, bool);
89 static int noce_operand_ok (const_rtx);
90 static void merge_if_block (ce_if_block *);
91 static int find_cond_trap (basic_block, edge, edge);
92 static basic_block find_if_header (basic_block, int);
93 static int block_jumps_and_fallthru_p (basic_block, basic_block);
94 static int noce_find_if_block (basic_block, edge, edge, int);
95 static int cond_exec_find_if_block (ce_if_block *);
96 static int find_if_case_1 (basic_block, edge, edge);
97 static int find_if_case_2 (basic_block, edge, edge);
98 static int dead_or_predicable (basic_block, basic_block, basic_block,
99 edge, int);
100 static void noce_emit_move_insn (rtx, rtx);
101 static rtx_insn *block_has_only_trap (basic_block);
102 \f
103 /* Count the number of non-jump active insns in BB. */
104
105 static int
106 count_bb_insns (const_basic_block bb)
107 {
108 int count = 0;
109 rtx_insn *insn = BB_HEAD (bb);
110
111 while (1)
112 {
113 if (active_insn_p (insn) && !JUMP_P (insn))
114 count++;
115
116 if (insn == BB_END (bb))
117 break;
118 insn = NEXT_INSN (insn);
119 }
120
121 return count;
122 }
123
124 /* Determine whether the total insn_rtx_cost on non-jump insns in
125 basic block BB is less than MAX_COST. This function returns
126 false if the cost of any instruction could not be estimated.
127
128 The cost of the non-jump insns in BB is scaled by REG_BR_PROB_BASE
129 as those insns are being speculated. MAX_COST is scaled with SCALE
130 plus a small fudge factor. */
131
132 static bool
133 cheap_bb_rtx_cost_p (const_basic_block bb, int scale, int max_cost)
134 {
135 int count = 0;
136 rtx_insn *insn = BB_HEAD (bb);
137 bool speed = optimize_bb_for_speed_p (bb);
138
139 /* Set scale to REG_BR_PROB_BASE to cancel out the identical scaling
140 applied to insn_rtx_cost when optimizing for size. Only do
141 this after combine because if-conversion might interfere with
142 passes before combine.
143
144 Use optimize_function_for_speed_p instead of the pre-defined
145 variable speed to make sure it is set to same value for all
146 basic blocks in one if-conversion transformation. */
147 if (!optimize_function_for_speed_p (cfun) && ifcvt_after_combine)
148 scale = REG_BR_PROB_BASE;
149 /* Our branch probability/scaling factors are just estimates and don't
150 account for cases where we can get speculation for free and other
151 secondary benefits. So we fudge the scale factor to make speculating
152 appear a little more profitable when optimizing for performance. */
153 else
154 scale += REG_BR_PROB_BASE / 8;
155
156
157 max_cost *= scale;
158
159 while (1)
160 {
161 if (NONJUMP_INSN_P (insn))
162 {
163 int cost = insn_rtx_cost (PATTERN (insn), speed) * REG_BR_PROB_BASE;
164 if (cost == 0)
165 return false;
166
167 /* If this instruction is the load or set of a "stack" register,
168 such as a floating point register on x87, then the cost of
169 speculatively executing this insn may need to include
170 the additional cost of popping its result off of the
171 register stack. Unfortunately, correctly recognizing and
172 accounting for this additional overhead is tricky, so for
173 now we simply prohibit such speculative execution. */
174 #ifdef STACK_REGS
175 {
176 rtx set = single_set (insn);
177 if (set && STACK_REG_P (SET_DEST (set)))
178 return false;
179 }
180 #endif
181
182 count += cost;
183 if (count >= max_cost)
184 return false;
185 }
186 else if (CALL_P (insn))
187 return false;
188
189 if (insn == BB_END (bb))
190 break;
191 insn = NEXT_INSN (insn);
192 }
193
194 return true;
195 }
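
/* A worked instance of the scaling above, assuming the usual
   REG_BR_PROB_BASE of 10000 and a caller-supplied SCALE of 4000
   (i.e. the branch is taken with probability 0.4): the fudge factor
   raises the scale to 4000 + 10000/8 == 5250 and MAX_COST is multiplied
   by it, while every insn contributes insn_rtx_cost * 10000 to COUNT.
   The block is therefore judged "cheap" as long as its summed raw cost
   stays below 0.525 * MAX_COST.  */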
196
197 /* Return the first non-jump active insn in the basic block. */
198
199 static rtx_insn *
200 first_active_insn (basic_block bb)
201 {
202 rtx_insn *insn = BB_HEAD (bb);
203
204 if (LABEL_P (insn))
205 {
206 if (insn == BB_END (bb))
207 return NULL;
208 insn = NEXT_INSN (insn);
209 }
210
211 while (NOTE_P (insn) || DEBUG_INSN_P (insn))
212 {
213 if (insn == BB_END (bb))
214 return NULL;
215 insn = NEXT_INSN (insn);
216 }
217
218 if (JUMP_P (insn))
219 return NULL;
220
221 return insn;
222 }
223
224 /* Return the last active (non-jump) insn in the basic block. */
225
226 static rtx_insn *
227 last_active_insn (basic_block bb, int skip_use_p)
228 {
229 rtx_insn *insn = BB_END (bb);
230 rtx_insn *head = BB_HEAD (bb);
231
232 while (NOTE_P (insn)
233 || JUMP_P (insn)
234 || DEBUG_INSN_P (insn)
235 || (skip_use_p
236 && NONJUMP_INSN_P (insn)
237 && GET_CODE (PATTERN (insn)) == USE))
238 {
239 if (insn == head)
240 return NULL;
241 insn = PREV_INSN (insn);
242 }
243
244 if (LABEL_P (insn))
245 return NULL;
246
247 return insn;
248 }
249
250 /* Return the active insn before INSN inside basic block CURR_BB. */
251
252 static rtx_insn *
253 find_active_insn_before (basic_block curr_bb, rtx_insn *insn)
254 {
255 if (!insn || insn == BB_HEAD (curr_bb))
256 return NULL;
257
258 while ((insn = PREV_INSN (insn)) != NULL_RTX)
259 {
260 if (NONJUMP_INSN_P (insn) || JUMP_P (insn) || CALL_P (insn))
261 break;
262
263 /* No other active insn all the way to the start of the basic block. */
264 if (insn == BB_HEAD (curr_bb))
265 return NULL;
266 }
267
268 return insn;
269 }
270
271 /* Return the active insn after INSN inside basic block CURR_BB. */
272
273 static rtx_insn *
274 find_active_insn_after (basic_block curr_bb, rtx_insn *insn)
275 {
276 if (!insn || insn == BB_END (curr_bb))
277 return NULL;
278
279 while ((insn = NEXT_INSN (insn)) != NULL_RTX)
280 {
281 if (NONJUMP_INSN_P (insn) || JUMP_P (insn) || CALL_P (insn))
282 break;
283
284 /* No other active insn all the way to the end of the basic block. */
285 if (insn == BB_END (curr_bb))
286 return NULL;
287 }
288
289 return insn;
290 }
291
292 /* Return the basic block reached by falling through basic block BB. */
293
294 static basic_block
295 block_fallthru (basic_block bb)
296 {
297 edge e = find_fallthru_edge (bb->succs);
298
299 return (e) ? e->dest : NULL_BLOCK;
300 }
301
302 /* Return true if RTXs A and B can be safely interchanged. */
303
304 static bool
305 rtx_interchangeable_p (const_rtx a, const_rtx b)
306 {
307 if (!rtx_equal_p (a, b))
308 return false;
309
310 if (GET_CODE (a) != MEM)
311 return true;
312
313 /* A dead type-unsafe memory reference is legal, but a live type-unsafe memory
314 reference is not. Interchanging a dead type-unsafe memory reference with
315 a live type-safe one creates a live type-unsafe memory reference, in other
316 words, it makes the program illegal.
317 We check here conservatively whether the two memory references have equal
318 memory attributes. */
319
320 return mem_attrs_eq_p (get_mem_attrs (a), get_mem_attrs (b));
321 }
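
/* Sketch of the hazard checked above, with assumed types: on an LP64
   target, a "long" access and a "long long" access to the same slot
   both produce DImode MEMs at the same address, so the two MEMs can
   satisfy rtx_equal_p while carrying different MEM_ALIAS_SET
   attributes.  Such a pair is not interchangeable, which is why
   mem_attrs_eq_p must agree as well.  */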
322
323 \f
324 /* Go through a bunch of insns, converting them to conditional
325 execution format if possible. Return TRUE if all of the non-note
326 insns were processed. */
327
328 static int
329 cond_exec_process_insns (ce_if_block *ce_info ATTRIBUTE_UNUSED,
330 /* if block information */rtx_insn *start,
331 /* first insn to look at */rtx end,
332 /* last insn to look at */rtx test,
333 /* conditional execution test */int prob_val,
334 /* probability of branch taken. */int mod_ok)
335 {
336 int must_be_last = FALSE;
337 rtx_insn *insn;
338 rtx xtest;
339 rtx pattern;
340
341 if (!start || !end)
342 return FALSE;
343
344 for (insn = start; ; insn = NEXT_INSN (insn))
345 {
346 /* dwarf2out can't cope with conditional prologues. */
347 if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
348 return FALSE;
349
350 if (NOTE_P (insn) || DEBUG_INSN_P (insn))
351 goto insn_done;
352
353 gcc_assert (NONJUMP_INSN_P (insn) || CALL_P (insn));
354
355 /* dwarf2out can't cope with conditional unwind info. */
356 if (RTX_FRAME_RELATED_P (insn))
357 return FALSE;
358
359 /* Remove USE insns that get in the way. */
360 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
361 {
362 /* ??? Ug. Actually unlinking the thing is problematic,
363 given what we'd have to coordinate with our callers. */
364 SET_INSN_DELETED (insn);
365 goto insn_done;
366 }
367
368 /* Last insn wasn't last? */
369 if (must_be_last)
370 return FALSE;
371
372 if (modified_in_p (test, insn))
373 {
374 if (!mod_ok)
375 return FALSE;
376 must_be_last = TRUE;
377 }
378
379 /* Now build the conditional form of the instruction. */
380 pattern = PATTERN (insn);
381 xtest = copy_rtx (test);
382
383 /* If this is already a COND_EXEC, rewrite the test to be an AND of the
384 two conditions. */
385 if (GET_CODE (pattern) == COND_EXEC)
386 {
387 if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
388 return FALSE;
389
390 xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
391 COND_EXEC_TEST (pattern));
392 pattern = COND_EXEC_CODE (pattern);
393 }
394
395 pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);
396
397 /* If the machine needs to modify the insn being conditionally executed,
398 say for example to force a constant integer operand into a temp
399 register, do so here. */
400 #ifdef IFCVT_MODIFY_INSN
401 IFCVT_MODIFY_INSN (ce_info, pattern, insn);
402 if (! pattern)
403 return FALSE;
404 #endif
405
406 validate_change (insn, &PATTERN (insn), pattern, 1);
407
408 if (CALL_P (insn) && prob_val >= 0)
409 validate_change (insn, &REG_NOTES (insn),
410 gen_rtx_INT_LIST ((machine_mode) REG_BR_PROB,
411 prob_val, REG_NOTES (insn)), 1);
412
413 insn_done:
414 if (insn == end)
415 break;
416 }
417
418 return TRUE;
419 }
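
/* Example of the rewrite performed above (register numbers and the CC
   test are schematic): an insn in the THEN block such as

       (set (reg:SI 0) (plus:SI (reg:SI 1) (reg:SI 2)))

   is wrapped into

       (cond_exec (ne (reg:CC 24) (const_int 0))
                  (set (reg:SI 0) (plus:SI (reg:SI 1) (reg:SI 2))))

   while an insn that is already a COND_EXEC gets an ANDed test
   instead.  */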
420
421 /* Return the condition for a jump. Do not do any special processing. */
422
423 static rtx
424 cond_exec_get_condition (rtx_insn *jump)
425 {
426 rtx test_if, cond;
427
428 if (any_condjump_p (jump))
429 test_if = SET_SRC (pc_set (jump));
430 else
431 return NULL_RTX;
432 cond = XEXP (test_if, 0);
433
434 /* If this branches to JUMP_LABEL when the condition is false,
435 reverse the condition. */
436 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
437 && LABEL_REF_LABEL (XEXP (test_if, 2)) == JUMP_LABEL (jump))
438 {
439 enum rtx_code rev = reversed_comparison_code (cond, jump);
440 if (rev == UNKNOWN)
441 return NULL_RTX;
442
443 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
444 XEXP (cond, 1));
445 }
446
447 return cond;
448 }
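
/* Example (register and label numbers schematic): a jump whose pattern is

       (set (pc) (if_then_else (eq (reg:CC 24) (const_int 0))
                               (pc)
                               (label_ref 23)))

   branches to label 23 when EQ is *false*, so this function returns the
   reversed condition (ne (reg:CC 24) (const_int 0)).  */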
449
450 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
451 to conditional execution. Return TRUE if we were successful at
452 converting the block. */
453
454 static int
455 cond_exec_process_if_block (ce_if_block * ce_info,
456 /* if block information */int do_multiple_p)
457 {
458 basic_block test_bb = ce_info->test_bb; /* last test block */
459 basic_block then_bb = ce_info->then_bb; /* THEN */
460 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
461 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
462 rtx_insn *then_start; /* first insn in THEN block */
463 rtx_insn *then_end; /* last insn + 1 in THEN block */
464 rtx_insn *else_start = NULL; /* first insn in ELSE block or NULL */
465 rtx_insn *else_end = NULL; /* last insn + 1 in ELSE block */
466 int max; /* max # of insns to convert. */
467 int then_mod_ok; /* whether conditional mods are ok in THEN */
468 rtx true_expr; /* test for else block insns */
469 rtx false_expr; /* test for then block insns */
470 int true_prob_val; /* probability of else block */
471 int false_prob_val; /* probability of then block */
472 rtx_insn *then_last_head = NULL; /* Last match at the head of THEN */
473 rtx_insn *else_last_head = NULL; /* Last match at the head of ELSE */
474 rtx_insn *then_first_tail = NULL; /* First match at the tail of THEN */
475 rtx_insn *else_first_tail = NULL; /* First match at the tail of ELSE */
476 int then_n_insns, else_n_insns, n_insns;
477 enum rtx_code false_code;
478 rtx note;
479
480 /* If test is composed of && or || elements, and we've failed at handling
481 all of them together, just use the last test if it is the special case of
482 && elements without an ELSE block. */
483 if (!do_multiple_p && ce_info->num_multiple_test_blocks)
484 {
485 if (else_bb || ! ce_info->and_and_p)
486 return FALSE;
487
488 ce_info->test_bb = test_bb = ce_info->last_test_bb;
489 ce_info->num_multiple_test_blocks = 0;
490 ce_info->num_and_and_blocks = 0;
491 ce_info->num_or_or_blocks = 0;
492 }
493
494 /* Find the conditional jump to the ELSE or JOIN part, and isolate
495 the test. */
496 test_expr = cond_exec_get_condition (BB_END (test_bb));
497 if (! test_expr)
498 return FALSE;
499
500 /* If the conditional jump is more than just a conditional jump,
501 then we cannot do conditional execution conversion on this block. */
502 if (! onlyjump_p (BB_END (test_bb)))
503 return FALSE;
504
505 /* Collect the bounds of where we're to search, skipping any labels, jumps
506 and notes at the beginning and end of the block. Then count the total
507 number of insns and see if it is small enough to convert. */
508 then_start = first_active_insn (then_bb);
509 then_end = last_active_insn (then_bb, TRUE);
510 then_n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
511 n_insns = then_n_insns;
512 max = MAX_CONDITIONAL_EXECUTE;
513
514 if (else_bb)
515 {
516 int n_matching;
517
518 max *= 2;
519 else_start = first_active_insn (else_bb);
520 else_end = last_active_insn (else_bb, TRUE);
521 else_n_insns = ce_info->num_else_insns = count_bb_insns (else_bb);
522 n_insns += else_n_insns;
523
524 /* Look for matching sequences at the head and tail of the two blocks,
525 and limit the range of insns to be converted if possible. */
526 n_matching = flow_find_cross_jump (then_bb, else_bb,
527 &then_first_tail, &else_first_tail,
528 NULL);
529 if (then_first_tail == BB_HEAD (then_bb))
530 then_start = then_end = NULL;
531 if (else_first_tail == BB_HEAD (else_bb))
532 else_start = else_end = NULL;
533
534 if (n_matching > 0)
535 {
536 if (then_end)
537 then_end = find_active_insn_before (then_bb, then_first_tail);
538 if (else_end)
539 else_end = find_active_insn_before (else_bb, else_first_tail);
540 n_insns -= 2 * n_matching;
541 }
542
543 if (then_start
544 && else_start
545 && then_n_insns > n_matching
546 && else_n_insns > n_matching)
547 {
548 int longest_match = MIN (then_n_insns - n_matching,
549 else_n_insns - n_matching);
550 n_matching
551 = flow_find_head_matching_sequence (then_bb, else_bb,
552 &then_last_head,
553 &else_last_head,
554 longest_match);
555
556 if (n_matching > 0)
557 {
558 rtx_insn *insn;
559
560 /* We won't pass the insns in the head sequence to
561 cond_exec_process_insns, so we need to test them here
562 to make sure that they don't clobber the condition. */
563 for (insn = BB_HEAD (then_bb);
564 insn != NEXT_INSN (then_last_head);
565 insn = NEXT_INSN (insn))
566 if (!LABEL_P (insn) && !NOTE_P (insn)
567 && !DEBUG_INSN_P (insn)
568 && modified_in_p (test_expr, insn))
569 return FALSE;
570 }
571
572 if (then_last_head == then_end)
573 then_start = then_end = NULL;
574 if (else_last_head == else_end)
575 else_start = else_end = NULL;
576
577 if (n_matching > 0)
578 {
579 if (then_start)
580 then_start = find_active_insn_after (then_bb, then_last_head);
581 if (else_start)
582 else_start = find_active_insn_after (else_bb, else_last_head);
583 n_insns -= 2 * n_matching;
584 }
585 }
586 }
587
588 if (n_insns > max)
589 return FALSE;
590
591 /* Map test_expr/test_jump into the appropriate MD tests to use on
592 the conditionally executed code. */
593
594 true_expr = test_expr;
595
596 false_code = reversed_comparison_code (true_expr, BB_END (test_bb));
597 if (false_code != UNKNOWN)
598 false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
599 XEXP (true_expr, 0), XEXP (true_expr, 1));
600 else
601 false_expr = NULL_RTX;
602
603 #ifdef IFCVT_MODIFY_TESTS
604 /* If the machine description needs to modify the tests, such as setting a
605 conditional execution register from a comparison, it can do so here. */
606 IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);
607
608 /* See if the conversion failed. */
609 if (!true_expr || !false_expr)
610 goto fail;
611 #endif
612
613 note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
614 if (note)
615 {
616 true_prob_val = XINT (note, 0);
617 false_prob_val = REG_BR_PROB_BASE - true_prob_val;
618 }
619 else
620 {
621 true_prob_val = -1;
622 false_prob_val = -1;
623 }
624
625 /* If we have && or || tests, do them here. These tests are in the adjacent
626 blocks after the first block containing the test. */
627 if (ce_info->num_multiple_test_blocks > 0)
628 {
629 basic_block bb = test_bb;
630 basic_block last_test_bb = ce_info->last_test_bb;
631
632 if (! false_expr)
633 goto fail;
634
635 do
636 {
637 rtx_insn *start, *end;
638 rtx t, f;
639 enum rtx_code f_code;
640
641 bb = block_fallthru (bb);
642 start = first_active_insn (bb);
643 end = last_active_insn (bb, TRUE);
644 if (start
645 && ! cond_exec_process_insns (ce_info, start, end, false_expr,
646 false_prob_val, FALSE))
647 goto fail;
648
649 /* If the conditional jump is more than just a conditional jump, then
650 we cannot do conditional execution conversion on this block. */
651 if (! onlyjump_p (BB_END (bb)))
652 goto fail;
653
654 /* Find the conditional jump and isolate the test. */
655 t = cond_exec_get_condition (BB_END (bb));
656 if (! t)
657 goto fail;
658
659 f_code = reversed_comparison_code (t, BB_END (bb));
660 if (f_code == UNKNOWN)
661 goto fail;
662
663 f = gen_rtx_fmt_ee (f_code, GET_MODE (t), XEXP (t, 0), XEXP (t, 1));
664 if (ce_info->and_and_p)
665 {
666 t = gen_rtx_AND (GET_MODE (t), true_expr, t);
667 f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
668 }
669 else
670 {
671 t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
672 f = gen_rtx_AND (GET_MODE (t), false_expr, f);
673 }
674
675 /* If the machine description needs to modify the tests, such as
676 setting a conditional execution register from a comparison, it can
677 do so here. */
678 #ifdef IFCVT_MODIFY_MULTIPLE_TESTS
679 IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);
680
681 /* See if the conversion failed. */
682 if (!t || !f)
683 goto fail;
684 #endif
685
686 true_expr = t;
687 false_expr = f;
688 }
689 while (bb != last_test_bb);
690 }
691
692 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
693 in the THEN block. */
694 then_mod_ok = (else_bb == NULL_BLOCK);
695
696 /* Go through the THEN and ELSE blocks converting the insns if possible
697 to conditional execution. */
698
699 if (then_end
700 && (! false_expr
701 || ! cond_exec_process_insns (ce_info, then_start, then_end,
702 false_expr, false_prob_val,
703 then_mod_ok)))
704 goto fail;
705
706 if (else_bb && else_end
707 && ! cond_exec_process_insns (ce_info, else_start, else_end,
708 true_expr, true_prob_val, TRUE))
709 goto fail;
710
711 /* If we cannot apply the changes, fail. Do not go through the normal fail
712 processing, since apply_change_group will call cancel_changes. */
713 if (! apply_change_group ())
714 {
715 #ifdef IFCVT_MODIFY_CANCEL
716 /* Cancel any machine dependent changes. */
717 IFCVT_MODIFY_CANCEL (ce_info);
718 #endif
719 return FALSE;
720 }
721
722 #ifdef IFCVT_MODIFY_FINAL
723 /* Do any machine dependent final modifications. */
724 IFCVT_MODIFY_FINAL (ce_info);
725 #endif
726
727 /* Conversion succeeded. */
728 if (dump_file)
729 fprintf (dump_file, "%d insn%s converted to conditional execution.\n",
730 n_insns, (n_insns == 1) ? " was" : "s were");
731
732 /* Merge the blocks! If we had matching sequences, make sure to delete one
733 copy at the appropriate location first: delete the copy in the THEN branch
734 for a tail sequence so that the remaining one is executed last for both
735 branches, and delete the copy in the ELSE branch for a head sequence so
736 that the remaining one is executed first for both branches. */
737 if (then_first_tail)
738 {
739 rtx_insn *from = then_first_tail;
740 if (!INSN_P (from))
741 from = find_active_insn_after (then_bb, from);
742 delete_insn_chain (from, BB_END (then_bb), false);
743 }
744 if (else_last_head)
745 delete_insn_chain (first_active_insn (else_bb), else_last_head, false);
746
747 merge_if_block (ce_info);
748 cond_exec_changed_p = TRUE;
749 return TRUE;
750
751 fail:
752 #ifdef IFCVT_MODIFY_CANCEL
753 /* Cancel any machine dependent changes. */
754 IFCVT_MODIFY_CANCEL (ce_info);
755 #endif
756
757 cancel_changes (0);
758 return FALSE;
759 }
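
/* Schematic overall effect of a successful conversion, assuming a
   predicated target such as ARM in ARM mode:

       cmp   r3, #0              cmp    r3, #0
       beq   .Lelse              movne  r0, #1
       mov   r0, #1        =>    moveq  r0, #2
       b     .Ljoin
     .Lelse:
       mov   r0, #2
     .Ljoin:

   Both arms become predicated insns and the branches disappear.  */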
760 \f
761 /* Used by noce_process_if_block to communicate with its subroutines.
762
763 The subroutines know that A and B may be evaluated freely. They
764 know that X is a register. They should insert new instructions
765 before cond_earliest. */
766
767 struct noce_if_info
768 {
769 /* The basic blocks that make up the IF-THEN-{ELSE-,}JOIN block. */
770 basic_block test_bb, then_bb, else_bb, join_bb;
771
772 /* The jump that ends TEST_BB. */
773 rtx_insn *jump;
774
775 /* The jump condition. */
776 rtx cond;
777
778 /* New insns should be inserted before this one. */
779 rtx_insn *cond_earliest;
780
781 /* Insns in the THEN and ELSE block. There is always just
782 one insn in each of those blocks, and it is a single_set insn.
783 If there was no ELSE block, INSN_B is the last insn before
784 COND_EARLIEST, or NULL_RTX. In the former case, the insn
785 operands are still valid, as if INSN_B was moved down below
786 the jump. */
787 rtx_insn *insn_a, *insn_b;
788
789 /* The SET_SRC of INSN_A and INSN_B. */
790 rtx a, b;
791
792 /* The SET_DEST of INSN_A. */
793 rtx x;
794
795 /* True if this if block is not canonical. In the canonical form of
796 if blocks, the THEN_BB is the block reached via the fallthru edge
797 from TEST_BB. For the noce transformations, we allow the symmetric
798 form as well. */
799 bool then_else_reversed;
800
801 /* True if the contents of then_bb and else_bb are a
802 simple single set instruction. */
803 bool then_simple;
804 bool else_simple;
805
806 /* The total rtx cost of the instructions in then_bb and else_bb. */
807 unsigned int then_cost;
808 unsigned int else_cost;
809
810 /* Estimated cost of the particular branch instruction. */
811 unsigned int branch_cost;
812 };
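
/* For a diamond such as "if (c) x = a; else x = b;", INSN_A is the
   single set in the THEN block with A its SET_SRC, INSN_B likewise for
   the ELSE block, X is the shared SET_DEST, JUMP is the conditional
   branch ending TEST_BB and COND its (possibly normalized) condition.
   This mapping is only a sketch; the exact rules are described field by
   field above.  */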
813
814 static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
815 static int noce_try_move (struct noce_if_info *);
816 static int noce_try_store_flag (struct noce_if_info *);
817 static int noce_try_addcc (struct noce_if_info *);
818 static int noce_try_store_flag_constants (struct noce_if_info *);
819 static int noce_try_store_flag_mask (struct noce_if_info *);
820 static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
821 rtx, rtx, rtx);
822 static int noce_try_cmove (struct noce_if_info *);
823 static int noce_try_cmove_arith (struct noce_if_info *);
824 static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx_insn **);
825 static int noce_try_minmax (struct noce_if_info *);
826 static int noce_try_abs (struct noce_if_info *);
827 static int noce_try_sign_mask (struct noce_if_info *);
828
829 /* Helper function for noce_try_store_flag*. */
830
831 static rtx
832 noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
833 int normalize)
834 {
835 rtx cond = if_info->cond;
836 int cond_complex;
837 enum rtx_code code;
838
839 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
840 || ! general_operand (XEXP (cond, 1), VOIDmode));
841
842 /* If earliest == jump, or when the condition is complex, try to
843 build the store_flag insn directly. */
844
845 if (cond_complex)
846 {
847 rtx set = pc_set (if_info->jump);
848 cond = XEXP (SET_SRC (set), 0);
849 if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
850 && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (if_info->jump))
851 reversep = !reversep;
852 if (if_info->then_else_reversed)
853 reversep = !reversep;
854 }
855
856 if (reversep)
857 code = reversed_comparison_code (cond, if_info->jump);
858 else
859 code = GET_CODE (cond);
860
861 if ((if_info->cond_earliest == if_info->jump || cond_complex)
862 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
863 {
864 rtx src = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
865 XEXP (cond, 1));
866 rtx set = gen_rtx_SET (x, src);
867
868 start_sequence ();
869 rtx_insn *insn = emit_insn (set);
870
871 if (recog_memoized (insn) >= 0)
872 {
873 rtx_insn *seq = get_insns ();
874 end_sequence ();
875 emit_insn (seq);
876
877 if_info->cond_earliest = if_info->jump;
878
879 return x;
880 }
881
882 end_sequence ();
883 }
884
885 /* Don't even try if the comparison operands or the mode of X are weird. */
886 if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
887 return NULL_RTX;
888
889 return emit_store_flag (x, code, XEXP (cond, 0),
890 XEXP (cond, 1), VOIDmode,
891 (code == LTU || code == LEU
892 || code == GEU || code == GTU), normalize);
893 }
894
895 /* Emit instruction to move an rtx, possibly into STRICT_LOW_PART.
896 X is the destination/target and Y is the value to copy. */
897
898 static void
899 noce_emit_move_insn (rtx x, rtx y)
900 {
901 machine_mode outmode;
902 rtx outer, inner;
903 int bitpos;
904
905 if (GET_CODE (x) != STRICT_LOW_PART)
906 {
907 rtx_insn *seq, *insn;
908 rtx target;
909 optab ot;
910
911 start_sequence ();
912 /* Check that the SET_SRC is reasonable before calling emit_move_insn,
913 otherwise construct a suitable SET pattern ourselves. */
914 insn = (OBJECT_P (y) || CONSTANT_P (y) || GET_CODE (y) == SUBREG)
915 ? emit_move_insn (x, y)
916 : emit_insn (gen_rtx_SET (x, y));
917 seq = get_insns ();
918 end_sequence ();
919
920 if (recog_memoized (insn) <= 0)
921 {
922 if (GET_CODE (x) == ZERO_EXTRACT)
923 {
924 rtx op = XEXP (x, 0);
925 unsigned HOST_WIDE_INT size = INTVAL (XEXP (x, 1));
926 unsigned HOST_WIDE_INT start = INTVAL (XEXP (x, 2));
927
928 /* store_bit_field expects START to be relative to
929 BYTES_BIG_ENDIAN and adjusts this value for machines with
930 BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN. In order to be able to
931 invoke store_bit_field again it is necessary to have the START
932 value from the first call. */
933 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
934 {
935 if (MEM_P (op))
936 start = BITS_PER_UNIT - start - size;
937 else
938 {
939 gcc_assert (REG_P (op));
940 start = BITS_PER_WORD - start - size;
941 }
942 }
943
944 gcc_assert (start < (MEM_P (op) ? BITS_PER_UNIT : BITS_PER_WORD));
945 store_bit_field (op, size, start, 0, 0, GET_MODE (x), y, false);
946 return;
947 }
948
949 switch (GET_RTX_CLASS (GET_CODE (y)))
950 {
951 case RTX_UNARY:
952 ot = code_to_optab (GET_CODE (y));
953 if (ot)
954 {
955 start_sequence ();
956 target = expand_unop (GET_MODE (y), ot, XEXP (y, 0), x, 0);
957 if (target != NULL_RTX)
958 {
959 if (target != x)
960 emit_move_insn (x, target);
961 seq = get_insns ();
962 }
963 end_sequence ();
964 }
965 break;
966
967 case RTX_BIN_ARITH:
968 case RTX_COMM_ARITH:
969 ot = code_to_optab (GET_CODE (y));
970 if (ot)
971 {
972 start_sequence ();
973 target = expand_binop (GET_MODE (y), ot,
974 XEXP (y, 0), XEXP (y, 1),
975 x, 0, OPTAB_DIRECT);
976 if (target != NULL_RTX)
977 {
978 if (target != x)
979 emit_move_insn (x, target);
980 seq = get_insns ();
981 }
982 end_sequence ();
983 }
984 break;
985
986 default:
987 break;
988 }
989 }
990
991 emit_insn (seq);
992 return;
993 }
994
995 outer = XEXP (x, 0);
996 inner = XEXP (outer, 0);
997 outmode = GET_MODE (outer);
998 bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
999 store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos,
1000 0, 0, outmode, y, false);
1001 }
1002
1003 /* Return the CC reg if it is used in COND. */
1004
1005 static rtx
1006 cc_in_cond (rtx cond)
1007 {
1008 if (have_cbranchcc4 && cond
1009 && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_CC)
1010 return XEXP (cond, 0);
1011
1012 return NULL_RTX;
1013 }
1014
1015 /* Return the sequence of instructions generated by if-conversion. This
1016 function calls end_sequence () to end the current stream, and ensures
1017 that the instructions are unshared, recognizable non-jump insns.
1018 On failure, this function returns NULL. */
1019
1020 static rtx_insn *
1021 end_ifcvt_sequence (struct noce_if_info *if_info)
1022 {
1023 rtx_insn *insn;
1024 rtx_insn *seq = get_insns ();
1025 rtx cc = cc_in_cond (if_info->cond);
1026
1027 set_used_flags (if_info->x);
1028 set_used_flags (if_info->cond);
1029 set_used_flags (if_info->a);
1030 set_used_flags (if_info->b);
1031
1032 for (insn = seq; insn; insn = NEXT_INSN (insn))
1033 set_used_flags (insn);
1034
1035 unshare_all_rtl_in_chain (seq);
1036 end_sequence ();
1037
1038 /* Make sure that all of the instructions emitted are recognizable,
1039 and that we haven't introduced a new jump instruction.
1040 As an exercise for the reader, build a general mechanism that
1041 allows proper placement of required clobbers. */
1042 for (insn = seq; insn; insn = NEXT_INSN (insn))
1043 if (JUMP_P (insn)
1044 || recog_memoized (insn) == -1
1045 /* Make sure new generated code does not clobber CC. */
1046 || (cc && set_of (cc, insn)))
1047 return NULL;
1048
1049 return seq;
1050 }
1051
1052 /* Return true iff the THEN block and the ELSE block (if it exists)
1053 each consist of a single simple set instruction. */
1054
1055 static bool
1056 noce_simple_bbs (struct noce_if_info *if_info)
1057 {
1058 if (!if_info->then_simple)
1059 return false;
1060
1061 if (if_info->else_bb)
1062 return if_info->else_simple;
1063
1064 return true;
1065 }
1066
1067 /* Convert "if (a != b) x = a; else x = b" into "x = a" and
1068 "if (a == b) x = a; else x = b" into "x = b". */
1069
1070 static int
1071 noce_try_move (struct noce_if_info *if_info)
1072 {
1073 rtx cond = if_info->cond;
1074 enum rtx_code code = GET_CODE (cond);
1075 rtx y;
1076 rtx_insn *seq;
1077
1078 if (code != NE && code != EQ)
1079 return FALSE;
1080
1081 if (!noce_simple_bbs (if_info))
1082 return FALSE;
1083
1084 /* This optimization isn't valid if either A or B could be a NaN
1085 or a signed zero. */
1086 if (HONOR_NANS (if_info->x)
1087 || HONOR_SIGNED_ZEROS (if_info->x))
1088 return FALSE;
1089
1090 /* Check whether the operands of the comparison are A and B in
1091 either order. */
1092 if ((rtx_equal_p (if_info->a, XEXP (cond, 0))
1093 && rtx_equal_p (if_info->b, XEXP (cond, 1)))
1094 || (rtx_equal_p (if_info->a, XEXP (cond, 1))
1095 && rtx_equal_p (if_info->b, XEXP (cond, 0))))
1096 {
1097 if (!rtx_interchangeable_p (if_info->a, if_info->b))
1098 return FALSE;
1099
1100 y = (code == EQ) ? if_info->a : if_info->b;
1101
1102 /* Avoid generating the move if the source is the destination. */
1103 if (! rtx_equal_p (if_info->x, y))
1104 {
1105 start_sequence ();
1106 noce_emit_move_insn (if_info->x, y);
1107 seq = end_ifcvt_sequence (if_info);
1108 if (!seq)
1109 return FALSE;
1110
1111 emit_insn_before_setloc (seq, if_info->jump,
1112 INSN_LOCATION (if_info->insn_a));
1113 }
1114 return TRUE;
1115 }
1116 return FALSE;
1117 }
1118
1119 /* Convert "if (test) x = 1; else x = 0".
1120
1121 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
1122 tried in noce_try_store_flag_constants after noce_try_cmove has had
1123 a go at the conversion. */
1124
1125 static int
1126 noce_try_store_flag (struct noce_if_info *if_info)
1127 {
1128 int reversep;
1129 rtx target;
1130 rtx_insn *seq;
1131
1132 if (!noce_simple_bbs (if_info))
1133 return FALSE;
1134
1135 if (CONST_INT_P (if_info->b)
1136 && INTVAL (if_info->b) == STORE_FLAG_VALUE
1137 && if_info->a == const0_rtx)
1138 reversep = 0;
1139 else if (if_info->b == const0_rtx
1140 && CONST_INT_P (if_info->a)
1141 && INTVAL (if_info->a) == STORE_FLAG_VALUE
1142 && (reversed_comparison_code (if_info->cond, if_info->jump)
1143 != UNKNOWN))
1144 reversep = 1;
1145 else
1146 return FALSE;
1147
1148 start_sequence ();
1149
1150 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
1151 if (target)
1152 {
1153 if (target != if_info->x)
1154 noce_emit_move_insn (if_info->x, target);
1155
1156 seq = end_ifcvt_sequence (if_info);
1157 if (! seq)
1158 return FALSE;
1159
1160 emit_insn_before_setloc (seq, if_info->jump,
1161 INSN_LOCATION (if_info->insn_a));
1162 return TRUE;
1163 }
1164 else
1165 {
1166 end_sequence ();
1167 return FALSE;
1168 }
1169 }
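
/* Example, assuming STORE_FLAG_VALUE == 1: when one arm stores 0 and
   the other stores 1, the whole diamond collapses to the single
   store-flag insn "x = (cond);", with the comparison reversed when
   necessary so that the arm holding 1 corresponds to the condition
   being true.  */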
1170
1171
1172 /* Convert "if (test) x = -A; else x = A" into
1173 "x = A; if (test) x = -x;" if the machine can do the
1174 conditional negate form of this cheaply.
1175 Try this before noce_try_cmove that will just load the
1176 immediates into two registers and do a conditional select
1177 between them. If the target has a conditional negate or
1178 conditional invert operation we can save a potentially
1179 expensive constant synthesis. */
1180
1181 static bool
1182 noce_try_inverse_constants (struct noce_if_info *if_info)
1183 {
1184 if (!noce_simple_bbs (if_info))
1185 return false;
1186
1187 if (!CONST_INT_P (if_info->a)
1188 || !CONST_INT_P (if_info->b)
1189 || !REG_P (if_info->x))
1190 return false;
1191
1192 machine_mode mode = GET_MODE (if_info->x);
1193
1194 HOST_WIDE_INT val_a = INTVAL (if_info->a);
1195 HOST_WIDE_INT val_b = INTVAL (if_info->b);
1196
1197 rtx cond = if_info->cond;
1198
1199 rtx x = if_info->x;
1200 rtx target;
1201
1202 start_sequence ();
1203
1204 rtx_code code;
1205 if (val_b != HOST_WIDE_INT_MIN && val_a == -val_b)
1206 code = NEG;
1207 else if (val_a == ~val_b)
1208 code = NOT;
1209 else
1210 {
1211 end_sequence ();
1212 return false;
1213 }
1214
1215 rtx tmp = gen_reg_rtx (mode);
1216 noce_emit_move_insn (tmp, if_info->a);
1217
1218 target = emit_conditional_neg_or_complement (x, code, mode, cond, tmp, tmp);
1219
1220 if (target)
1221 {
1222 rtx_insn *seq = get_insns ();
1223
1224 if (!seq)
1225 {
1226 end_sequence ();
1227 return false;
1228 }
1229
1230 if (target != if_info->x)
1231 noce_emit_move_insn (if_info->x, target);
1232
1233 seq = end_ifcvt_sequence (if_info);
1234
1235 if (!seq)
1236 return false;
1237
1238 emit_insn_before_setloc (seq, if_info->jump,
1239 INSN_LOCATION (if_info->insn_a));
1240 return true;
1241 }
1242
1243 end_sequence ();
1244 return false;
1245 }
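
/* Example of the two accepted shapes, with assumed constants: for
   "x = test ? -5 : 5" the two arm values are negations of each other,
   so the sequence loads 5 once and conditionally negates it; for
   "x = test ? -6 : 5" they satisfy -6 == ~5, so a conditional bitwise
   complement is used instead.  Either way only one constant needs to
   be synthesized.  */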
1246
1247
1248 /* Convert "if (test) x = a; else x = b", for A and B constant.
1249 Also allow A = y + c1, B = y + c2, with a common y between A
1250 and B. */
1251
1252 static int
1253 noce_try_store_flag_constants (struct noce_if_info *if_info)
1254 {
1255 rtx target;
1256 rtx_insn *seq;
1257 bool reversep;
1258 HOST_WIDE_INT itrue, ifalse, diff, tmp;
1259 int normalize;
1260 bool can_reverse;
1261 machine_mode mode = GET_MODE (if_info->x);
1262 rtx common = NULL_RTX;
1263
1264 rtx a = if_info->a;
1265 rtx b = if_info->b;
1266
1267 /* Handle cases like x := test ? y + 3 : y + 4. */
1268 if (GET_CODE (a) == PLUS
1269 && GET_CODE (b) == PLUS
1270 && CONST_INT_P (XEXP (a, 1))
1271 && CONST_INT_P (XEXP (b, 1))
1272 && rtx_equal_p (XEXP (a, 0), XEXP (b, 0))
1273 && noce_operand_ok (XEXP (a, 0))
1274 && if_info->branch_cost >= 2)
1275 {
1276 common = XEXP (a, 0);
1277 a = XEXP (a, 1);
1278 b = XEXP (b, 1);
1279 }
1280
1281 if (!noce_simple_bbs (if_info))
1282 return FALSE;
1283
1284 if (CONST_INT_P (a)
1285 && CONST_INT_P (b))
1286 {
1287 ifalse = INTVAL (a);
1288 itrue = INTVAL (b);
1289 bool subtract_flag_p = false;
1290
1291 diff = (unsigned HOST_WIDE_INT) itrue - ifalse;
1292 /* Make sure we can represent the difference between the two values. */
1293 if ((diff > 0)
1294 != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
1295 return FALSE;
1296
1297 diff = trunc_int_for_mode (diff, mode);
1298
1299 can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
1300 != UNKNOWN);
1301
1302 reversep = false;
1303 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
1304 {
1305 normalize = 0;
1306 /* We could collapse these cases but it is easier to follow the
1307 diff/STORE_FLAG_VALUE combinations when they are listed
1308 explicitly. */
1309
1310 /* test ? 3 : 4
1311 => 4 + (test != 0). */
1312 if (diff < 0 && STORE_FLAG_VALUE < 0)
1313 reversep = false;
1314 /* test ? 4 : 3
1315 => can_reverse | 4 + (test == 0)
1316 !can_reverse | 3 - (test != 0). */
1317 else if (diff > 0 && STORE_FLAG_VALUE < 0)
1318 {
1319 reversep = can_reverse;
1320 subtract_flag_p = !can_reverse;
1321 /* If we need to subtract the flag and we have PLUS-immediate
1322 A and B then it is unlikely to be beneficial to play tricks
1323 here. */
1324 if (subtract_flag_p && common)
1325 return FALSE;
1326 }
1327 /* test ? 3 : 4
1328 => can_reverse | 3 + (test == 0)
1329 !can_reverse | 4 - (test != 0). */
1330 else if (diff < 0 && STORE_FLAG_VALUE > 0)
1331 {
1332 reversep = can_reverse;
1333 subtract_flag_p = !can_reverse;
1334 /* If we need to subtract the flag and we have PLUS-immediate
1335 A and B then it is unlikely to be beneficial to play tricks
1336 here. */
1337 if (subtract_flag_p && common)
1338 return FALSE;
1339 }
1340 /* test ? 4 : 3
1341 => 4 + (test != 0). */
1342 else if (diff > 0 && STORE_FLAG_VALUE > 0)
1343 reversep = false;
1344 else
1345 gcc_unreachable ();
1346 }
1347 else if (ifalse == 0 && exact_log2 (itrue) >= 0
1348 && (STORE_FLAG_VALUE == 1
1349 || if_info->branch_cost >= 2))
1350 normalize = 1;
1351 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
1352 && (STORE_FLAG_VALUE == 1 || if_info->branch_cost >= 2))
1353 {
1354 normalize = 1;
1355 reversep = true;
1356 }
1357 else if (itrue == -1
1358 && (STORE_FLAG_VALUE == -1
1359 || if_info->branch_cost >= 2))
1360 normalize = -1;
1361 else if (ifalse == -1 && can_reverse
1362 && (STORE_FLAG_VALUE == -1 || if_info->branch_cost >= 2))
1363 {
1364 normalize = -1;
1365 reversep = true;
1366 }
1367 else
1368 return FALSE;
1369
1370 if (reversep)
1371 {
1372 std::swap (itrue, ifalse);
1373 diff = trunc_int_for_mode (-(unsigned HOST_WIDE_INT) diff, mode);
1374 }
1375
1376 start_sequence ();
1377
1378 /* If we have x := test ? x + 3 : x + 4 then move the original
1379 x out of the way while we store flags. */
1380 if (common && rtx_equal_p (common, if_info->x))
1381 {
1382 common = gen_reg_rtx (mode);
1383 noce_emit_move_insn (common, if_info->x);
1384 }
1385
1386 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
1387 if (! target)
1388 {
1389 end_sequence ();
1390 return FALSE;
1391 }
1392
1393 /* if (test) x = 3; else x = 4;
1394 => x = 3 + (test == 0); */
1395 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
1396 {
1397 /* Add the common part now. This may allow combine to merge this
1398 with the store flag operation earlier into some sort of conditional
1399 increment/decrement if the target allows it. */
1400 if (common)
1401 target = expand_simple_binop (mode, PLUS,
1402 target, common,
1403 target, 0, OPTAB_WIDEN);
1404
1405 /* Always use ifalse here. It should have been swapped with itrue
1406 when appropriate when reversep is true. */
1407 target = expand_simple_binop (mode, subtract_flag_p ? MINUS : PLUS,
1408 gen_int_mode (ifalse, mode), target,
1409 if_info->x, 0, OPTAB_WIDEN);
1410 }
1411 /* Other cases are not beneficial when the original A and B are PLUS
1412 expressions. */
1413 else if (common)
1414 {
1415 end_sequence ();
1416 return FALSE;
1417 }
1418 /* if (test) x = 8; else x = 0;
1419 => x = (test != 0) << 3; */
1420 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
1421 {
1422 target = expand_simple_binop (mode, ASHIFT,
1423 target, GEN_INT (tmp), if_info->x, 0,
1424 OPTAB_WIDEN);
1425 }
1426
1427 /* if (test) x = -1; else x = b;
1428 => x = -(test != 0) | b; */
1429 else if (itrue == -1)
1430 {
1431 target = expand_simple_binop (mode, IOR,
1432 target, gen_int_mode (ifalse, mode),
1433 if_info->x, 0, OPTAB_WIDEN);
1434 }
1435 else
1436 {
1437 end_sequence ();
1438 return FALSE;
1439 }
1440
1441 if (! target)
1442 {
1443 end_sequence ();
1444 return FALSE;
1445 }
1446
1447 if (target != if_info->x)
1448 noce_emit_move_insn (if_info->x, target);
1449
1450 seq = end_ifcvt_sequence (if_info);
1451 if (!seq)
1452 return FALSE;
1453
1454 emit_insn_before_setloc (seq, if_info->jump,
1455 INSN_LOCATION (if_info->insn_a));
1456 return TRUE;
1457 }
1458
1459 return FALSE;
1460 }
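
/* Two worked instances of the above, assuming STORE_FLAG_VALUE == 1:
   for arm values 3 and 4 (difference 1) the diamond becomes

       x = (test != 0);        (0 or 1)
       x = x + 3;              (3 when false, 4 when true)

   and for arm values 0 and 8 the shift form is chosen instead:

       x = (test != 0) << 3;   (0 or 8)
   */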
1461
1462 /* Convert "if (test) foo++" into "foo += (test != 0)", and
1463 similarly for "foo--". */
1464
1465 static int
1466 noce_try_addcc (struct noce_if_info *if_info)
1467 {
1468 rtx target;
1469 rtx_insn *seq;
1470 int subtract, normalize;
1471
1472 if (!noce_simple_bbs (if_info))
1473 return FALSE;
1474
1475 if (GET_CODE (if_info->a) == PLUS
1476 && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
1477 && (reversed_comparison_code (if_info->cond, if_info->jump)
1478 != UNKNOWN))
1479 {
1480 rtx cond = if_info->cond;
1481 enum rtx_code code = reversed_comparison_code (cond, if_info->jump);
1482
1483 /* First try to use addcc pattern. */
1484 if (general_operand (XEXP (cond, 0), VOIDmode)
1485 && general_operand (XEXP (cond, 1), VOIDmode))
1486 {
1487 start_sequence ();
1488 target = emit_conditional_add (if_info->x, code,
1489 XEXP (cond, 0),
1490 XEXP (cond, 1),
1491 VOIDmode,
1492 if_info->b,
1493 XEXP (if_info->a, 1),
1494 GET_MODE (if_info->x),
1495 (code == LTU || code == GEU
1496 || code == LEU || code == GTU));
1497 if (target)
1498 {
1499 if (target != if_info->x)
1500 noce_emit_move_insn (if_info->x, target);
1501
1502 seq = end_ifcvt_sequence (if_info);
1503 if (!seq)
1504 return FALSE;
1505
1506 emit_insn_before_setloc (seq, if_info->jump,
1507 INSN_LOCATION (if_info->insn_a));
1508 return TRUE;
1509 }
1510 end_sequence ();
1511 }
1512
1513 /* If that fails, construct conditional increment or decrement using
1514 setcc. */
1515 if (if_info->branch_cost >= 2
1516 && (XEXP (if_info->a, 1) == const1_rtx
1517 || XEXP (if_info->a, 1) == constm1_rtx))
1518 {
1519 start_sequence ();
1520 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
1521 subtract = 0, normalize = 0;
1522 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
1523 subtract = 1, normalize = 0;
1524 else
1525 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
1526
1527
1528 target = noce_emit_store_flag (if_info,
1529 gen_reg_rtx (GET_MODE (if_info->x)),
1530 1, normalize);
1531
1532 if (target)
1533 target = expand_simple_binop (GET_MODE (if_info->x),
1534 subtract ? MINUS : PLUS,
1535 if_info->b, target, if_info->x,
1536 0, OPTAB_WIDEN);
1537 if (target)
1538 {
1539 if (target != if_info->x)
1540 noce_emit_move_insn (if_info->x, target);
1541
1542 seq = end_ifcvt_sequence (if_info);
1543 if (!seq)
1544 return FALSE;
1545
1546 emit_insn_before_setloc (seq, if_info->jump,
1547 INSN_LOCATION (if_info->insn_a));
1548 return TRUE;
1549 }
1550 end_sequence ();
1551 }
1552 }
1553
1554 return FALSE;
1555 }
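
/* Expanded form of the setcc fallback above, assuming
   STORE_FLAG_VALUE == 1 and no addcc pattern: "if (test) foo++;"
   becomes

       t = (test != 0);        (flag materialized into a fresh pseudo)
       foo = foo + t;          (unconditional add)

   trading the branch for a store-flag and an addition.  */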
1556
1557 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
1558
1559 static int
1560 noce_try_store_flag_mask (struct noce_if_info *if_info)
1561 {
1562 rtx target;
1563 rtx_insn *seq;
1564 int reversep;
1565
1566 if (!noce_simple_bbs (if_info))
1567 return FALSE;
1568
1569 reversep = 0;
1570 if ((if_info->branch_cost >= 2
1571 || STORE_FLAG_VALUE == -1)
1572 && ((if_info->a == const0_rtx
1573 && rtx_equal_p (if_info->b, if_info->x))
1574 || ((reversep = (reversed_comparison_code (if_info->cond,
1575 if_info->jump)
1576 != UNKNOWN))
1577 && if_info->b == const0_rtx
1578 && rtx_equal_p (if_info->a, if_info->x))))
1579 {
1580 start_sequence ();
1581 target = noce_emit_store_flag (if_info,
1582 gen_reg_rtx (GET_MODE (if_info->x)),
1583 reversep, -1);
1584 if (target)
1585 target = expand_simple_binop (GET_MODE (if_info->x), AND,
1586 if_info->x,
1587 target, if_info->x, 0,
1588 OPTAB_WIDEN);
1589
1590 if (target)
1591 {
1592 int old_cost, new_cost, insn_cost;
1593 int speed_p;
1594
1595 if (target != if_info->x)
1596 noce_emit_move_insn (if_info->x, target);
1597
1598 seq = end_ifcvt_sequence (if_info);
1599 if (!seq)
1600 return FALSE;
1601
1602 speed_p = optimize_bb_for_speed_p (BLOCK_FOR_INSN (if_info->insn_a));
1603 insn_cost = insn_rtx_cost (PATTERN (if_info->insn_a), speed_p);
1604 old_cost = COSTS_N_INSNS (if_info->branch_cost) + insn_cost;
1605 new_cost = seq_cost (seq, speed_p);
1606
1607 if (new_cost > old_cost)
1608 return FALSE;
1609
1610 emit_insn_before_setloc (seq, if_info->jump,
1611 INSN_LOCATION (if_info->insn_a));
1612 return TRUE;
1613 }
1614
1615 end_sequence ();
1616 }
1617
1618 return FALSE;
1619 }
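
/* Expanded form of the transformation above: "if (test) x = 0;"
   becomes

       t = -(test == 0);       (all-ones mask when x is kept, else 0)
       x = x & t;

   and the sequence is kept only when its cost does not exceed that of
   the branchy original, per the cost comparison above.  */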
1620
1621 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
1622
1623 static rtx
1624 noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
1625 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
1626 {
1627 rtx target ATTRIBUTE_UNUSED;
1628 int unsignedp ATTRIBUTE_UNUSED;
1629
1630 /* If earliest == jump, try to build the cmove insn directly.
1631 This is helpful when combine has created some complex condition
1632 (like for alpha's cmovlbs) that we can't hope to regenerate
1633 through the normal interface. */
1634
1635 if (if_info->cond_earliest == if_info->jump)
1636 {
1637 rtx cond = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
1638 rtx if_then_else = gen_rtx_IF_THEN_ELSE (GET_MODE (x),
1639 cond, vtrue, vfalse);
1640 rtx set = gen_rtx_SET (x, if_then_else);
1641
1642 start_sequence ();
1643 rtx_insn *insn = emit_insn (set);
1644
1645 if (recog_memoized (insn) >= 0)
1646 {
1647 rtx_insn *seq = get_insns ();
1648 end_sequence ();
1649 emit_insn (seq);
1650
1651 return x;
1652 }
1653
1654 end_sequence ();
1655 }
1656
1657 /* Don't even try if the comparison operands are weird, unless this is
1658 a MODE_CC comparison against zero and the target has cbranchcc4. */
1659 if (! general_operand (cmp_a, GET_MODE (cmp_a))
1660 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
1661 {
1662 if (!have_cbranchcc4
1663 || GET_MODE_CLASS (GET_MODE (cmp_a)) != MODE_CC
1664 || cmp_b != const0_rtx)
1665 return NULL_RTX;
1666 }
1667
1668 unsignedp = (code == LTU || code == GEU
1669 || code == LEU || code == GTU);
1670
1671 target = emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
1672 vtrue, vfalse, GET_MODE (x),
1673 unsignedp);
1674 if (target)
1675 return target;
1676
1677 /* We might be faced with a situation like:
1678
1679 x = (reg:M TARGET)
1680 vtrue = (subreg:M (reg:N VTRUE) BYTE)
1681 vfalse = (subreg:M (reg:N VFALSE) BYTE)
1682
1683 We can't do a conditional move in mode M, but it's possible that we
1684 could do a conditional move in mode N instead and take a subreg of
1685 the result.
1686
1687 If we can't create new pseudos, though, don't bother. */
1688 if (reload_completed)
1689 return NULL_RTX;
1690
1691 if (GET_CODE (vtrue) == SUBREG && GET_CODE (vfalse) == SUBREG)
1692 {
1693 rtx reg_vtrue = SUBREG_REG (vtrue);
1694 rtx reg_vfalse = SUBREG_REG (vfalse);
1695 unsigned int byte_vtrue = SUBREG_BYTE (vtrue);
1696 unsigned int byte_vfalse = SUBREG_BYTE (vfalse);
1697 rtx promoted_target;
1698
1699 if (GET_MODE (reg_vtrue) != GET_MODE (reg_vfalse)
1700 || byte_vtrue != byte_vfalse
1701 || (SUBREG_PROMOTED_VAR_P (vtrue)
1702 != SUBREG_PROMOTED_VAR_P (vfalse))
1703 || (SUBREG_PROMOTED_GET (vtrue)
1704 != SUBREG_PROMOTED_GET (vfalse)))
1705 return NULL_RTX;
1706
1707 promoted_target = gen_reg_rtx (GET_MODE (reg_vtrue));
1708
1709 target = emit_conditional_move (promoted_target, code, cmp_a, cmp_b,
1710 VOIDmode, reg_vtrue, reg_vfalse,
1711 GET_MODE (reg_vtrue), unsignedp);
1712 /* Nope, couldn't do it in that mode either. */
1713 if (!target)
1714 return NULL_RTX;
1715
1716 target = gen_rtx_SUBREG (GET_MODE (vtrue), promoted_target, byte_vtrue);
1717 SUBREG_PROMOTED_VAR_P (target) = SUBREG_PROMOTED_VAR_P (vtrue);
1718 SUBREG_PROMOTED_SET (target, SUBREG_PROMOTED_GET (vtrue));
1719 emit_move_insn (x, target);
1720 return x;
1721 }
1722 else
1723 return NULL_RTX;
1724 }
1725
1726 /* Try only simple constants and registers here. More complex cases
1727 are handled in noce_try_cmove_arith after the noce_try_store_flag*
1728 transformations have had a go at the conversion. */
1729
1730 static int
1731 noce_try_cmove (struct noce_if_info *if_info)
1732 {
1733 enum rtx_code code;
1734 rtx target;
1735 rtx_insn *seq;
1736
1737 if (!noce_simple_bbs (if_info))
1738 return FALSE;
1739
1740 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
1741 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
1742 {
1743 start_sequence ();
1744
1745 code = GET_CODE (if_info->cond);
1746 target = noce_emit_cmove (if_info, if_info->x, code,
1747 XEXP (if_info->cond, 0),
1748 XEXP (if_info->cond, 1),
1749 if_info->a, if_info->b);
1750
1751 if (target)
1752 {
1753 if (target != if_info->x)
1754 noce_emit_move_insn (if_info->x, target);
1755
1756 seq = end_ifcvt_sequence (if_info);
1757 if (!seq)
1758 return FALSE;
1759
1760 emit_insn_before_setloc (seq, if_info->jump,
1761 INSN_LOCATION (if_info->insn_a));
1762 return TRUE;
1763 }
1764 /* If both a and b are constants try a last-ditch transformation:
1765 if (test) x = a; else x = b;
1766 => x = (-(test != 0) & (b - a)) + a;
1767 Try this only if the target-specific expansion above has failed.
1768 The target-specific expander may want to generate sequences that
1769 we don't know about, so give them a chance before trying this
1770 approach. */
1771 else if (!targetm.have_conditional_execution ()
1772 && CONST_INT_P (if_info->a) && CONST_INT_P (if_info->b)
1773 && ((if_info->branch_cost >= 2 && STORE_FLAG_VALUE == -1)
1774 || if_info->branch_cost >= 3))
1775 {
1776 machine_mode mode = GET_MODE (if_info->x);
1777 HOST_WIDE_INT ifalse = INTVAL (if_info->a);
1778 HOST_WIDE_INT itrue = INTVAL (if_info->b);
1779 rtx target = noce_emit_store_flag (if_info, if_info->x, false, -1);
1780 if (!target)
1781 {
1782 end_sequence ();
1783 return FALSE;
1784 }
1785
1786 HOST_WIDE_INT diff = (unsigned HOST_WIDE_INT) itrue - ifalse;
1787 /* Make sure we can represent the difference
1788 between the two values. */
1789 if ((diff > 0)
1790 != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
1791 {
1792 end_sequence ();
1793 return FALSE;
1794 }
1795
1796 diff = trunc_int_for_mode (diff, mode);
1797 target = expand_simple_binop (mode, AND,
1798 target, gen_int_mode (diff, mode),
1799 if_info->x, 0, OPTAB_WIDEN);
1800 if (target)
1801 target = expand_simple_binop (mode, PLUS,
1802 target, gen_int_mode (ifalse, mode),
1803 if_info->x, 0, OPTAB_WIDEN);
1804 if (target)
1805 {
1806 if (target != if_info->x)
1807 noce_emit_move_insn (if_info->x, target);
1808
1809 seq = end_ifcvt_sequence (if_info);
1810 if (!seq)
1811 return FALSE;
1812
1813 emit_insn_before_setloc (seq, if_info->jump,
1814 INSN_LOCATION (if_info->insn_a));
1815 return TRUE;
1816 }
1817 else
1818 {
1819 end_sequence ();
1820 return FALSE;
1821 }
1822 }
1823 else
1824 end_sequence ();
1825 }
1826
1827 return FALSE;
1828 }
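
/* A worked instance of the last-ditch constant form above, with
   assumed arm values 2 and 7:

       t = -(test != 0);       (0 or all ones)
       t = t & (7 - 2);        (0 or 5)
       x = t + 2;              (2 when false, 7 when true)

   which is why it is only tried when the branch is expensive enough to
   pay for three insns (see the branch_cost test above).  */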
1829
1830 /* Return true if X contains a condition-code (MODE_CC) rtx. */
1831
1832 static bool
1833 contains_ccmode_rtx_p (rtx x)
1834 {
1835 subrtx_iterator::array_type array;
1836 FOR_EACH_SUBRTX (iter, array, x, ALL)
1837 if (GET_MODE_CLASS (GET_MODE (*iter)) == MODE_CC)
1838 return true;
1839
1840 return false;
1841 }
1842
1843 /* Helper for bb_valid_for_noce_process_p. Validate that
1844 the rtx insn INSN is a single set that does not set
1845 the condition register CC and is in general valid for
1846 if-conversion. */
1847
1848 static bool
1849 insn_valid_noce_process_p (rtx_insn *insn, rtx cc)
1850 {
1851 if (!insn
1852 || !NONJUMP_INSN_P (insn)
1853 || (cc && set_of (cc, insn)))
1854 return false;
1855
1856 rtx sset = single_set (insn);
1857
1858 /* Currently support only simple single sets in test_bb. */
1859 if (!sset
1860 || !noce_operand_ok (SET_DEST (sset))
1861 || contains_ccmode_rtx_p (SET_DEST (sset))
1862 || !noce_operand_ok (SET_SRC (sset)))
1863 return false;
1864
1865 return true;
1866 }
1867
1868
1869 /* Return true iff the registers that the insns in BB_A set do not
1870 get used in BB_B. */
1871
1872 static bool
1873 bbs_ok_for_cmove_arith (basic_block bb_a, basic_block bb_b)
1874 {
1875 rtx_insn *a_insn;
1876 bitmap bba_sets = BITMAP_ALLOC (&reg_obstack);
1877
1878 df_ref def;
1879 df_ref use;
1880
1881 FOR_BB_INSNS (bb_a, a_insn)
1882 {
1883 if (!active_insn_p (a_insn))
1884 continue;
1885
1886 rtx sset_a = single_set (a_insn);
1887
1888 if (!sset_a)
1889 {
1890 BITMAP_FREE (bba_sets);
1891 return false;
1892 }
1893
1894 /* Record all registers that BB_A sets. */
1895 FOR_EACH_INSN_DEF (def, a_insn)
1896 bitmap_set_bit (bba_sets, DF_REF_REGNO (def));
1897 }
1898
1899 rtx_insn *b_insn;
1900
1901 FOR_BB_INSNS (bb_b, b_insn)
1902 {
1903 if (!active_insn_p (b_insn))
1904 continue;
1905
1906 rtx sset_b = single_set (b_insn);
1907
1908 if (!sset_b)
1909 {
1910 BITMAP_FREE (bba_sets);
1911 return false;
1912 }
1913
1914 /* Make sure this is a REG and not some instance
1915 of ZERO_EXTRACT or SUBREG or other dangerous stuff. */
1916 if (!REG_P (SET_DEST (sset_b)))
1917 {
1918 BITMAP_FREE (bba_sets);
1919 return false;
1920 }
1921
1922 /* If the insn uses a reg set in BB_A return false. */
1923 FOR_EACH_INSN_USE (use, b_insn)
1924 {
1925 if (bitmap_bit_p (bba_sets, DF_REF_REGNO (use)))
1926 {
1927 BITMAP_FREE (bba_sets);
1928 return false;
1929 }
1930 }
1931
1932 }
1933
1934 BITMAP_FREE (bba_sets);
1935 return true;
1936 }
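
/* Example of the ordering hazard this check rejects (register names
   schematic):

       THEN:  r1 = r2 + 1;
       ELSE:  r3 = r1 * 2;

   noce_try_cmove_arith emits both bodies unconditionally, so the ELSE
   computation would observe the THEN-modified r1 instead of the
   original value.  */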
1937
1938 /* Emit copies of all the active instructions in BB except the last.
1939 This is a helper for noce_try_cmove_arith. */
1940
1941 static void
1942 noce_emit_all_but_last (basic_block bb)
1943 {
1944 rtx_insn *last = last_active_insn (bb, FALSE);
1945 rtx_insn *insn;
1946 FOR_BB_INSNS (bb, insn)
1947 {
1948 if (insn != last && active_insn_p (insn))
1949 {
1950 rtx_insn *to_emit = as_a <rtx_insn *> (copy_rtx (insn));
1951
1952 emit_insn (PATTERN (to_emit));
1953 }
1954 }
1955 }
1956
1957 /* Helper for noce_try_cmove_arith. Emit the pattern TO_EMIT and return
1958 the resulting insn or NULL if it's not a valid insn. */
1959
1960 static rtx_insn *
1961 noce_emit_insn (rtx to_emit)
1962 {
1963 gcc_assert (to_emit);
1964 rtx_insn *insn = emit_insn (to_emit);
1965
1966 if (recog_memoized (insn) < 0)
1967 return NULL;
1968
1969 return insn;
1970 }
1971
1972 /* Helper for noce_try_cmove_arith. Emit a copy of the insns up to
1973 and including the penultimate one in BB if it is not simple
1974 (as indicated by SIMPLE). Then emit LAST_INSN as the last
1975 insn in the block. The reason for that is that LAST_INSN may
1976 have been modified by the preparation in noce_try_cmove_arith. */
1977
1978 static bool
1979 noce_emit_bb (rtx last_insn, basic_block bb, bool simple)
1980 {
1981 if (bb && !simple)
1982 noce_emit_all_but_last (bb);
1983
1984 if (last_insn && !noce_emit_insn (last_insn))
1985 return false;
1986
1987 return true;
1988 }
1989
1990 /* Try more complex cases involving conditional_move. */
1991
1992 static int
1993 noce_try_cmove_arith (struct noce_if_info *if_info)
1994 {
1995 rtx a = if_info->a;
1996 rtx b = if_info->b;
1997 rtx x = if_info->x;
1998 rtx orig_a, orig_b;
1999 rtx_insn *insn_a, *insn_b;
2000 bool a_simple = if_info->then_simple;
2001 bool b_simple = if_info->else_simple;
2002 basic_block then_bb = if_info->then_bb;
2003 basic_block else_bb = if_info->else_bb;
2004 rtx target;
2005 int is_mem = 0;
2006 enum rtx_code code;
2007 rtx_insn *ifcvt_seq;
2008
2009 /* A conditional move from two memory sources is equivalent to a
2010 conditional move on their addresses followed by a load. Don't do this
2011 early because it would interfere with alias analysis. Note that we've
2012 already checked for no side effects. */
2013 /* ??? FIXME: Magic number 5. */
2014 if (cse_not_expected
2015 && MEM_P (a) && MEM_P (b)
2016 && MEM_ADDR_SPACE (a) == MEM_ADDR_SPACE (b)
2017 && if_info->branch_cost >= 5)
2018 {
2019 machine_mode address_mode = get_address_mode (a);
2020
2021 a = XEXP (a, 0);
2022 b = XEXP (b, 0);
2023 x = gen_reg_rtx (address_mode);
2024 is_mem = 1;
2025 }
2026
2027 /* ??? We could handle this if we knew that a load from A or B could
2028 not trap or fault. This is also true if we've already loaded
2029 from the address along the path from ENTRY. */
2030 else if (may_trap_or_fault_p (a) || may_trap_or_fault_p (b))
2031 return FALSE;
2032
2033 /* if (test) x = a + b; else x = c - d;
2034 => y = a + b;
2035 x = c - d;
2036 if (test)
2037 x = y;
2038 */
2039
2040 code = GET_CODE (if_info->cond);
2041 insn_a = if_info->insn_a;
2042 insn_b = if_info->insn_b;
2043
2044 machine_mode x_mode = GET_MODE (x);
2045
2046 if (!can_conditionally_move_p (x_mode))
2047 return FALSE;
2048
2049 unsigned int then_cost;
2050 unsigned int else_cost;
2051 if (insn_a)
2052 then_cost = if_info->then_cost;
2053 else
2054 then_cost = 0;
2055
2056 if (insn_b)
2057 else_cost = if_info->else_cost;
2058 else
2059 else_cost = 0;
2060
2061 /* We're going to execute one of the basic blocks anyway, so
2062 bail out if the most expensive of the two blocks is unacceptable. */
2063 if (MAX (then_cost, else_cost) > COSTS_N_INSNS (if_info->branch_cost))
2064 return FALSE;
2065
2066 /* Possibly rearrange operands to make things come out more naturally. */
2067 if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
2068 {
2069 int reversep = 0;
2070 if (rtx_equal_p (b, x))
2071 reversep = 1;
2072 else if (general_operand (b, GET_MODE (b)))
2073 reversep = 1;
2074
2075 if (reversep)
2076 {
2077 code = reversed_comparison_code (if_info->cond, if_info->jump);
2078 std::swap (a, b);
2079 std::swap (insn_a, insn_b);
2080 std::swap (a_simple, b_simple);
2081 std::swap (then_bb, else_bb);
2082 }
2083 }
2084
2085 if (then_bb && else_bb && !a_simple && !b_simple
2086 && (!bbs_ok_for_cmove_arith (then_bb, else_bb)
2087 || !bbs_ok_for_cmove_arith (else_bb, then_bb)))
2088 return FALSE;
2089
2090 start_sequence ();
2091
2092 /* If one of the blocks is empty then the corresponding B or A value
2093 came from the test block. The non-empty complex block that we will
2094 emit might clobber the register used by B or A, so move it to a pseudo
2095 first. */
2096
2097 rtx tmp_a = NULL_RTX;
2098 rtx tmp_b = NULL_RTX;
2099
2100 if (b_simple || !else_bb)
2101 tmp_b = gen_reg_rtx (x_mode);
2102
2103 if (a_simple || !then_bb)
2104 tmp_a = gen_reg_rtx (x_mode);
2105
2106 orig_a = a;
2107 orig_b = b;
2108
2109 rtx emit_a = NULL_RTX;
2110 rtx emit_b = NULL_RTX;
2111 rtx_insn *tmp_insn = NULL;
2112 bool modified_in_a = false;
2113 bool modified_in_b = false;
2114 /* If either operand is complex, load it into a register first.
2115 The best way to do this is to copy the original insn. In this
2116 way we preserve any clobbers etc that the insn may have had.
2117 This is of course not possible in the IS_MEM case. */
2118
2119 if (! general_operand (a, GET_MODE (a)) || tmp_a)
2120 {
2121
2122 if (is_mem)
2123 {
2124 rtx reg = gen_reg_rtx (GET_MODE (a));
2125 emit_a = gen_rtx_SET (reg, a);
2126 }
2127 else
2128 {
2129 if (insn_a)
2130 {
2131 a = tmp_a ? tmp_a : gen_reg_rtx (GET_MODE (a));
2132
2133 rtx_insn *copy_of_a = as_a <rtx_insn *> (copy_rtx (insn_a));
2134 rtx set = single_set (copy_of_a);
2135 SET_DEST (set) = a;
2136
2137 emit_a = PATTERN (copy_of_a);
2138 }
2139 else
2140 {
2141 rtx tmp_reg = tmp_a ? tmp_a : gen_reg_rtx (GET_MODE (a));
2142 emit_a = gen_rtx_SET (tmp_reg, a);
2143 a = tmp_reg;
2144 }
2145 }
2146 }
2147
2148 if (! general_operand (b, GET_MODE (b)) || tmp_b)
2149 {
2150 if (is_mem)
2151 {
2152 rtx reg = gen_reg_rtx (GET_MODE (b));
2153 emit_b = gen_rtx_SET (reg, b);
2154 }
2155 else
2156 {
2157 if (insn_b)
2158 {
2159 b = tmp_b ? tmp_b : gen_reg_rtx (GET_MODE (b));
2160 rtx_insn *copy_of_b = as_a <rtx_insn *> (copy_rtx (insn_b));
2161 rtx set = single_set (copy_of_b);
2162
2163 SET_DEST (set) = b;
2164 emit_b = PATTERN (copy_of_b);
2165 }
2166 else
2167 {
2168 rtx tmp_reg = tmp_b ? tmp_b : gen_reg_rtx (GET_MODE (b));
2169 emit_b = gen_rtx_SET (tmp_reg, b);
2170 b = tmp_reg;
2171 }
2172 }
2173 }
2174
2175 /* If the insn to set up A clobbers any registers B depends on, try to
2176 swap the insn that sets up A with the one that sets up B. If even
2177 that doesn't help, punt. */
2178
2179 modified_in_a = emit_a != NULL_RTX && modified_in_p (orig_b, emit_a);
2180 if (tmp_b && then_bb)
2181 {
2182 FOR_BB_INSNS (then_bb, tmp_insn)
2183 /* Don't check inside insn_a. We will have changed it to emit_a
2184 with a destination that doesn't conflict. */
2185 if (!(insn_a && tmp_insn == insn_a)
2186 && modified_in_p (orig_b, tmp_insn))
2187 {
2188 modified_in_a = true;
2189 break;
2190 }
2191
2192 }
2193 if (emit_a || modified_in_a)
2194 {
2195 modified_in_b = emit_b != NULL_RTX && modified_in_p (orig_a, emit_b);
2196 if (tmp_b && else_bb)
2197 {
2198 FOR_BB_INSNS (else_bb, tmp_insn)
2199 /* Don't check inside insn_b. We will have changed it to emit_b
2200 with a destination that doesn't conflict. */
2201 if (!(insn_b && tmp_insn == insn_b)
2202 && modified_in_p (orig_a, tmp_insn))
2203 {
2204 modified_in_b = true;
2205 break;
2206 }
2207 }
2208 if (modified_in_b)
2209 goto end_seq_and_fail;
2210
2211 if (!noce_emit_bb (emit_b, else_bb, b_simple))
2212 goto end_seq_and_fail;
2213
2214 if (!noce_emit_bb (emit_a, then_bb, a_simple))
2215 goto end_seq_and_fail;
2216 }
2217 else
2218 {
2219 if (!noce_emit_bb (emit_a, then_bb, a_simple))
2220 goto end_seq_and_fail;
2221
2222 if (!noce_emit_bb (emit_b, else_bb, b_simple))
2223 goto end_seq_and_fail;
2224 }
2225
2226 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
2227 XEXP (if_info->cond, 1), a, b);
2228
2229 if (! target)
2230 goto end_seq_and_fail;
2231
2232 /* If we're handling memory operands from above, emit the load now. */
2233 if (is_mem)
2234 {
2235 rtx mem = gen_rtx_MEM (GET_MODE (if_info->x), target);
2236
2237 /* Copy over flags as appropriate. */
2238 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
2239 MEM_VOLATILE_P (mem) = 1;
2240 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
2241 set_mem_alias_set (mem, MEM_ALIAS_SET (if_info->a));
2242 set_mem_align (mem,
2243 MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));
2244
2245 gcc_assert (MEM_ADDR_SPACE (if_info->a) == MEM_ADDR_SPACE (if_info->b));
2246 set_mem_addr_space (mem, MEM_ADDR_SPACE (if_info->a));
2247
2248 noce_emit_move_insn (if_info->x, mem);
2249 }
2250 else if (target != x)
2251 noce_emit_move_insn (x, target);
2252
2253 ifcvt_seq = end_ifcvt_sequence (if_info);
2254 if (!ifcvt_seq)
2255 return FALSE;
2256
2257 emit_insn_before_setloc (ifcvt_seq, if_info->jump,
2258 INSN_LOCATION (if_info->insn_a));
2259 return TRUE;
2260
2261 end_seq_and_fail:
2262 end_sequence ();
2263 return FALSE;
2264 }
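
/* At the source level, a successful noce_try_cmove_arith conversion
   behaves roughly like this (illustrative sketch; tmp1/tmp2 stand for
   fresh pseudos):

     if (test) x = a * b; else x = c + d;
       =>
     tmp1 = a * b;
     tmp2 = c + d;
     x = test ? tmp1 : tmp2;

   i.e. both arms are computed unconditionally into temporaries and a
   single conditional move selects the result.  */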
2265
2266 /* For most cases, the simplified condition we found is the best
2267 choice, but this is not the case for the min/max/abs transforms.
2268 For these we need the condition to mention A or B itself. */
2269
2270 static rtx
2271 noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
2272 rtx_insn **earliest)
2273 {
2274 rtx cond, set;
2275 rtx_insn *insn;
2276 int reverse;
2277
2278 /* If target is already mentioned in the known condition, return it. */
2279 if (reg_mentioned_p (target, if_info->cond))
2280 {
2281 *earliest = if_info->cond_earliest;
2282 return if_info->cond;
2283 }
2284
2285 set = pc_set (if_info->jump);
2286 cond = XEXP (SET_SRC (set), 0);
2287 reverse
2288 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
2289 && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (if_info->jump);
2290 if (if_info->then_else_reversed)
2291 reverse = !reverse;
2292
2293 /* If we're looking for a constant, try to make the conditional
2294 have that constant in it. There are two reasons why it may
2295 not have the constant we want:
2296
2297 1. GCC may have needed to put the constant in a register, because
2298 the target can't compare directly against that constant. For
2299 this case, we look for a SET immediately before the comparison
2300 that puts a constant in that register.
2301
2302 2. GCC may have canonicalized the conditional, for example
2303 replacing "if x < 4" with "if x <= 3". We can undo that (or
2304 make equivalent types of changes) to get the constants we need
2305 if they're off by one in the right direction. */
2306
2307 if (CONST_INT_P (target))
2308 {
2309 enum rtx_code code = GET_CODE (if_info->cond);
2310 rtx op_a = XEXP (if_info->cond, 0);
2311 rtx op_b = XEXP (if_info->cond, 1);
2312 rtx_insn *prev_insn;
2313
2314 /* First, look to see if we put a constant in a register. */
2315 prev_insn = prev_nonnote_insn (if_info->cond_earliest);
2316 if (prev_insn
2317 && BLOCK_FOR_INSN (prev_insn)
2318 == BLOCK_FOR_INSN (if_info->cond_earliest)
2319 && INSN_P (prev_insn)
2320 && GET_CODE (PATTERN (prev_insn)) == SET)
2321 {
2322 rtx src = find_reg_equal_equiv_note (prev_insn);
2323 if (!src)
2324 src = SET_SRC (PATTERN (prev_insn));
2325 if (CONST_INT_P (src))
2326 {
2327 if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
2328 op_a = src;
2329 else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
2330 op_b = src;
2331
2332 if (CONST_INT_P (op_a))
2333 {
2334 std::swap (op_a, op_b);
2335 code = swap_condition (code);
2336 }
2337 }
2338 }
2339
2340 /* Now, look to see if we can get the right constant by
2341 adjusting the conditional. */
2342 if (CONST_INT_P (op_b))
2343 {
2344 HOST_WIDE_INT desired_val = INTVAL (target);
2345 HOST_WIDE_INT actual_val = INTVAL (op_b);
2346
2347 switch (code)
2348 {
2349 case LT:
2350 if (actual_val == desired_val + 1)
2351 {
2352 code = LE;
2353 op_b = GEN_INT (desired_val);
2354 }
2355 break;
2356 case LE:
2357 if (actual_val == desired_val - 1)
2358 {
2359 code = LT;
2360 op_b = GEN_INT (desired_val);
2361 }
2362 break;
2363 case GT:
2364 if (actual_val == desired_val - 1)
2365 {
2366 code = GE;
2367 op_b = GEN_INT (desired_val);
2368 }
2369 break;
2370 case GE:
2371 if (actual_val == desired_val + 1)
2372 {
2373 code = GT;
2374 op_b = GEN_INT (desired_val);
2375 }
2376 break;
2377 default:
2378 break;
2379 }
2380 }
2381
2382 /* If we made any changes, generate a new conditional that is
2383 equivalent to what we started with, but has the right
2384 constants in it. */
2385 if (code != GET_CODE (if_info->cond)
2386 || op_a != XEXP (if_info->cond, 0)
2387 || op_b != XEXP (if_info->cond, 1))
2388 {
2389 cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
2390 *earliest = if_info->cond_earliest;
2391 return cond;
2392 }
2393 }
2394
2395 cond = canonicalize_condition (if_info->jump, cond, reverse,
2396 earliest, target, have_cbranchcc4, true);
2397 if (! cond || ! reg_mentioned_p (target, cond))
2398 return NULL;
2399
2400 /* We almost certainly searched back to a different place.
2401 Need to re-verify correct lifetimes. */
2402
2403 /* X may not be mentioned in the range (cond_earliest, jump]. */
2404 for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
2405 if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
2406 return NULL;
2407
2408 /* A and B may not be modified in the range [cond_earliest, jump). */
2409 for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
2410 if (INSN_P (insn)
2411 && (modified_in_p (if_info->a, insn)
2412 || modified_in_p (if_info->b, insn)))
2413 return NULL;
2414
2415 return cond;
2416 }
2417
2418 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
2419
2420 static int
2421 noce_try_minmax (struct noce_if_info *if_info)
2422 {
2423 rtx cond, target;
2424 rtx_insn *earliest, *seq;
2425 enum rtx_code code, op;
2426 int unsignedp;
2427
2428 if (!noce_simple_bbs (if_info))
2429 return FALSE;
2430
2431 /* ??? Reject modes with NaNs or signed zeros since we don't know how
2432 an SMIN/SMAX would resolve them. It wouldn't be too hard
2433 to get the target to tell us... */
2434 if (HONOR_SIGNED_ZEROS (if_info->x)
2435 || HONOR_NANS (if_info->x))
2436 return FALSE;
2437
2438 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
2439 if (!cond)
2440 return FALSE;
2441
2442 /* Verify the condition is of the form we expect, and canonicalize
2443 the comparison code. */
2444 code = GET_CODE (cond);
2445 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
2446 {
2447 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
2448 return FALSE;
2449 }
2450 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
2451 {
2452 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
2453 return FALSE;
2454 code = swap_condition (code);
2455 }
2456 else
2457 return FALSE;
2458
2459 /* Determine what sort of operation this is. Note that the code is for
2460 a taken branch, so the code->operation mapping appears backwards. */
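/* For example (illustrative): if the branch is "if (a < b) goto over",
   the fallthrough assigns x = a, and the branch target assigns x = b,
   then x receives b exactly when a < b, i.e. always the larger value,
   so a condition code of LT maps to SMAX rather than SMIN.  */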
2461 switch (code)
2462 {
2463 case LT:
2464 case LE:
2465 case UNLT:
2466 case UNLE:
2467 op = SMAX;
2468 unsignedp = 0;
2469 break;
2470 case GT:
2471 case GE:
2472 case UNGT:
2473 case UNGE:
2474 op = SMIN;
2475 unsignedp = 0;
2476 break;
2477 case LTU:
2478 case LEU:
2479 op = UMAX;
2480 unsignedp = 1;
2481 break;
2482 case GTU:
2483 case GEU:
2484 op = UMIN;
2485 unsignedp = 1;
2486 break;
2487 default:
2488 return FALSE;
2489 }
2490
2491 start_sequence ();
2492
2493 target = expand_simple_binop (GET_MODE (if_info->x), op,
2494 if_info->a, if_info->b,
2495 if_info->x, unsignedp, OPTAB_WIDEN);
2496 if (! target)
2497 {
2498 end_sequence ();
2499 return FALSE;
2500 }
2501 if (target != if_info->x)
2502 noce_emit_move_insn (if_info->x, target);
2503
2504 seq = end_ifcvt_sequence (if_info);
2505 if (!seq)
2506 return FALSE;
2507
2508 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
2509 if_info->cond = cond;
2510 if_info->cond_earliest = earliest;
2511
2512 return TRUE;
2513 }
2514
2515 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);",
2516 "if (a < 0) x = ~a; else x = a;" to "x = one_cmpl_abs(a);",
2517 etc. */
2518
2519 static int
2520 noce_try_abs (struct noce_if_info *if_info)
2521 {
2522 rtx cond, target, a, b, c;
2523 rtx_insn *earliest, *seq;
2524 int negate;
2525 bool one_cmpl = false;
2526
2527 if (!noce_simple_bbs (if_info))
2528 return FALSE;
2529
2530 /* Reject modes with signed zeros. */
2531 if (HONOR_SIGNED_ZEROS (if_info->x))
2532 return FALSE;
2533
2534 /* Recognize A and B as constituting an ABS or NABS. The canonical
2535 form is a branch around the negation, taken when the object is the
2536 first operand of a comparison against 0 that evaluates to true. */
2537 a = if_info->a;
2538 b = if_info->b;
2539 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
2540 negate = 0;
2541 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
2542 {
2543 std::swap (a, b);
2544 negate = 1;
2545 }
2546 else if (GET_CODE (a) == NOT && rtx_equal_p (XEXP (a, 0), b))
2547 {
2548 negate = 0;
2549 one_cmpl = true;
2550 }
2551 else if (GET_CODE (b) == NOT && rtx_equal_p (XEXP (b, 0), a))
2552 {
2553 std::swap (a, b);
2554 negate = 1;
2555 one_cmpl = true;
2556 }
2557 else
2558 return FALSE;
2559
2560 cond = noce_get_alt_condition (if_info, b, &earliest);
2561 if (!cond)
2562 return FALSE;
2563
2564 /* Verify the condition is of the form we expect. */
2565 if (rtx_equal_p (XEXP (cond, 0), b))
2566 c = XEXP (cond, 1);
2567 else if (rtx_equal_p (XEXP (cond, 1), b))
2568 {
2569 c = XEXP (cond, 0);
2570 negate = !negate;
2571 }
2572 else
2573 return FALSE;
2574
2575 /* Verify that C is zero. Search one step backward for a
2576 REG_EQUAL note or a simple source if necessary. */
2577 if (REG_P (c))
2578 {
2579 rtx set;
2580 rtx_insn *insn = prev_nonnote_insn (earliest);
2581 if (insn
2582 && BLOCK_FOR_INSN (insn) == BLOCK_FOR_INSN (earliest)
2583 && (set = single_set (insn))
2584 && rtx_equal_p (SET_DEST (set), c))
2585 {
2586 rtx note = find_reg_equal_equiv_note (insn);
2587 if (note)
2588 c = XEXP (note, 0);
2589 else
2590 c = SET_SRC (set);
2591 }
2592 else
2593 return FALSE;
2594 }
2595 if (MEM_P (c)
2596 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
2597 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
2598 c = get_pool_constant (XEXP (c, 0));
2599
2600 /* Work around funny ideas get_condition has wrt canonicalization.
2601 Note that these rtx constants are known to be CONST_INT, and
2602 therefore imply integer comparisons.
2603 The one_cmpl case is more complicated, as we want to handle
2604 only x < 0 ? ~x : x or x >= 0 ? ~x : x but not
2605 x <= 0 ? ~x : x or x > 0 ? ~x : x, as the latter two
2606 have different result for x == 0. */
2607 if (c == constm1_rtx && GET_CODE (cond) == GT)
2608 {
2609 if (one_cmpl && negate)
2610 return FALSE;
2611 }
2612 else if (c == const1_rtx && GET_CODE (cond) == LT)
2613 {
2614 if (one_cmpl && !negate)
2615 return FALSE;
2616 }
2617 else if (c == CONST0_RTX (GET_MODE (b)))
2618 {
2619 if (one_cmpl)
2620 switch (GET_CODE (cond))
2621 {
2622 case GT:
2623 if (!negate)
2624 return FALSE;
2625 break;
2626 case GE:
2627 /* >= 0 is the same case as > -1 above. */
2628 if (negate)
2629 return FALSE;
2630 break;
2631 case LT:
2632 if (negate)
2633 return FALSE;
2634 break;
2635 case LE:
2636 /* <= 0 is the same case as < 1 above. */
2637 if (!negate)
2638 return FALSE;
2639 break;
2640 default:
2641 return FALSE;
2642 }
2643 }
2644 else
2645 return FALSE;
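
/* Note on the constm1/const1 cases above (illustrative): for integer
   comparisons get_condition may canonicalize "x >= 0" into "x > -1"
   and "x <= 0" into "x < 1", so e.g.

     if (a >= 0) x = a; else x = -a;

   can reach this function as a GT comparison against const_int -1 and
   is still recognized as an ABS.  */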
2646
2647 /* Determine what sort of operation this is. */
2648 switch (GET_CODE (cond))
2649 {
2650 case LT:
2651 case LE:
2652 case UNLT:
2653 case UNLE:
2654 negate = !negate;
2655 break;
2656 case GT:
2657 case GE:
2658 case UNGT:
2659 case UNGE:
2660 break;
2661 default:
2662 return FALSE;
2663 }
2664
2665 start_sequence ();
2666 if (one_cmpl)
2667 target = expand_one_cmpl_abs_nojump (GET_MODE (if_info->x), b,
2668 if_info->x);
2669 else
2670 target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);
2671
2672 /* ??? It's a quandary whether cmove would be better here, especially
2673 for integers. Perhaps combine will clean things up. */
2674 if (target && negate)
2675 {
2676 if (one_cmpl)
2677 target = expand_simple_unop (GET_MODE (target), NOT, target,
2678 if_info->x, 0);
2679 else
2680 target = expand_simple_unop (GET_MODE (target), NEG, target,
2681 if_info->x, 0);
2682 }
2683
2684 if (! target)
2685 {
2686 end_sequence ();
2687 return FALSE;
2688 }
2689
2690 if (target != if_info->x)
2691 noce_emit_move_insn (if_info->x, target);
2692
2693 seq = end_ifcvt_sequence (if_info);
2694 if (!seq)
2695 return FALSE;
2696
2697 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
2698 if_info->cond = cond;
2699 if_info->cond_earliest = earliest;
2700
2701 return TRUE;
2702 }
2703
2704 /* Convert "if (m < 0) x = b; else x = 0;" to "x = (m >> C) & b;". */
2705
2706 static int
2707 noce_try_sign_mask (struct noce_if_info *if_info)
2708 {
2709 rtx cond, t, m, c;
2710 rtx_insn *seq;
2711 machine_mode mode;
2712 enum rtx_code code;
2713 bool t_unconditional;
2714
2715 if (!noce_simple_bbs (if_info))
2716 return FALSE;
2717
2718 cond = if_info->cond;
2719 code = GET_CODE (cond);
2720 m = XEXP (cond, 0);
2721 c = XEXP (cond, 1);
2722
2723 t = NULL_RTX;
2724 if (if_info->a == const0_rtx)
2725 {
2726 if ((code == LT && c == const0_rtx)
2727 || (code == LE && c == constm1_rtx))
2728 t = if_info->b;
2729 }
2730 else if (if_info->b == const0_rtx)
2731 {
2732 if ((code == GE && c == const0_rtx)
2733 || (code == GT && c == constm1_rtx))
2734 t = if_info->a;
2735 }
2736
2737 if (! t || side_effects_p (t))
2738 return FALSE;
2739
2740 /* We currently don't handle different modes. */
2741 mode = GET_MODE (t);
2742 if (GET_MODE (m) != mode)
2743 return FALSE;
2744
2745 /* This is only profitable if T is unconditionally executed/evaluated in the
2746 original insn sequence or T is cheap. The former happens if B is the
2747 non-zero (T) value and if INSN_B was taken from TEST_BB, or if there was
2748 no INSN_B, which can happen e.g. for conditional stores to memory. For the
2749 cost computation use the block TEST_BB, where the evaluation will end up
2750 after the transformation. */
2751 t_unconditional =
2752 (t == if_info->b
2753 && (if_info->insn_b == NULL_RTX
2754 || BLOCK_FOR_INSN (if_info->insn_b) == if_info->test_bb));
2755 if (!(t_unconditional
2756 || (set_src_cost (t, mode, optimize_bb_for_speed_p (if_info->test_bb))
2757 < COSTS_N_INSNS (2))))
2758 return FALSE;
2759
2760 start_sequence ();
2761 /* Use emit_store_flag to generate "m < 0 ? -1 : 0" instead of expanding
2762 "(signed) m >> 31" directly. This benefits targets with specialized
2763 insns to obtain the signmask, but still uses ashr_optab otherwise. */
2764 m = emit_store_flag (gen_reg_rtx (mode), LT, m, const0_rtx, mode, 0, -1);
2765 t = m ? expand_binop (mode, and_optab, m, t, NULL_RTX, 0, OPTAB_DIRECT)
2766 : NULL_RTX;
2767
2768 if (!t)
2769 {
2770 end_sequence ();
2771 return FALSE;
2772 }
2773
2774 noce_emit_move_insn (if_info->x, t);
2775
2776 seq = end_ifcvt_sequence (if_info);
2777 if (!seq)
2778 return FALSE;
2779
2780 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
2781 return TRUE;
2782 }
2783
2784
2785 /* Optimize away "if (x & C) x |= C" and similar bit manipulation
2786 transformations. */
2787
2788 static int
2789 noce_try_bitop (struct noce_if_info *if_info)
2790 {
2791 rtx cond, x, a, result;
2792 rtx_insn *seq;
2793 machine_mode mode;
2794 enum rtx_code code;
2795 int bitnum;
2796
2797 x = if_info->x;
2798 cond = if_info->cond;
2799 code = GET_CODE (cond);
2800
2801 if (!noce_simple_bbs (if_info))
2802 return FALSE;
2803
2804 /* Check that there is no real ELSE arm, i.e. B is just X itself. */
2805 if (! rtx_equal_p (x, if_info->b))
2806 return FALSE;
2807
2808 /* Check for a suitable condition. */
2809 if (code != NE && code != EQ)
2810 return FALSE;
2811 if (XEXP (cond, 1) != const0_rtx)
2812 return FALSE;
2813 cond = XEXP (cond, 0);
2814
2815 /* ??? We could also handle AND here. */
2816 if (GET_CODE (cond) == ZERO_EXTRACT)
2817 {
2818 if (XEXP (cond, 1) != const1_rtx
2819 || !CONST_INT_P (XEXP (cond, 2))
2820 || ! rtx_equal_p (x, XEXP (cond, 0)))
2821 return FALSE;
2822 bitnum = INTVAL (XEXP (cond, 2));
2823 mode = GET_MODE (x);
2824 if (BITS_BIG_ENDIAN)
2825 bitnum = GET_MODE_BITSIZE (mode) - 1 - bitnum;
2826 if (bitnum < 0 || bitnum >= HOST_BITS_PER_WIDE_INT)
2827 return FALSE;
2828 }
2829 else
2830 return FALSE;
2831
2832 a = if_info->a;
2833 if (GET_CODE (a) == IOR || GET_CODE (a) == XOR)
2834 {
2835 /* Check for "if (X & C) x = x op C". */
2836 if (! rtx_equal_p (x, XEXP (a, 0))
2837 || !CONST_INT_P (XEXP (a, 1))
2838 || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
2839 != (unsigned HOST_WIDE_INT) 1 << bitnum)
2840 return FALSE;
2841
2842 /* if ((x & C) == 0) x |= C; is transformed to x |= C. */
2843 /* if ((x & C) != 0) x |= C; is transformed to nothing. */
2844 if (GET_CODE (a) == IOR)
2845 result = (code == NE) ? a : NULL_RTX;
2846 else if (code == NE)
2847 {
2848 /* if ((x & C) == 0) x ^= C; is transformed to x |= C. */
2849 result = gen_int_mode ((HOST_WIDE_INT) 1 << bitnum, mode);
2850 result = simplify_gen_binary (IOR, mode, x, result);
2851 }
2852 else
2853 {
2854 /* if ((x & C) != 0) x ^= C; is transformed to x &= ~C. */
2855 result = gen_int_mode (~((HOST_WIDE_INT) 1 << bitnum), mode);
2856 result = simplify_gen_binary (AND, mode, x, result);
2857 }
2858 }
2859 else if (GET_CODE (a) == AND)
2860 {
2861 /* Check for "if (X & C) x &= ~C". */
2862 if (! rtx_equal_p (x, XEXP (a, 0))
2863 || !CONST_INT_P (XEXP (a, 1))
2864 || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
2865 != (~((HOST_WIDE_INT) 1 << bitnum) & GET_MODE_MASK (mode)))
2866 return FALSE;
2867
2868 /* if ((x & C) == 0) x &= ~C; is transformed to nothing. */
2869 /* if ((x & C) != 0) x &= ~C; is transformed to x &= ~C. */
2870 result = (code == EQ) ? a : NULL_RTX;
2871 }
2872 else
2873 return FALSE;
2874
2875 if (result)
2876 {
2877 start_sequence ();
2878 noce_emit_move_insn (x, result);
2879 seq = end_ifcvt_sequence (if_info);
2880 if (!seq)
2881 return FALSE;
2882
2883 emit_insn_before_setloc (seq, if_info->jump,
2884 INSN_LOCATION (if_info->insn_a));
2885 }
2886 return TRUE;
2887 }
2888
2889
2890 /* Similar to get_condition, only the resulting condition must be
2891 valid at JUMP, instead of at EARLIEST.
2892
2893 If THEN_ELSE_REVERSED is true, the fallthrough does not go to the
2894 THEN block of the caller, and we have to reverse the condition. */
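
/* For example (illustrative), a jump whose pc_set looks like

     (set (pc) (if_then_else (eq (reg:SI 90) (const_int 0))
                             (pc)
                             (label_ref L)))

   branches to L when the EQ test is false, so REVERSE is set and the
   returned condition is the NE comparison instead.  */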
2895
2896 static rtx
2897 noce_get_condition (rtx_insn *jump, rtx_insn **earliest, bool then_else_reversed)
2898 {
2899 rtx cond, set, tmp;
2900 bool reverse;
2901
2902 if (! any_condjump_p (jump))
2903 return NULL_RTX;
2904
2905 set = pc_set (jump);
2906
2907 /* If this branches to JUMP_LABEL when the condition is false,
2908 reverse the condition. */
2909 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
2910 && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (jump));
2911
2912 /* We may have to reverse because the caller's if block is not canonical,
2913 i.e. the THEN block isn't the fallthrough block for the TEST block
2914 (see find_if_header). */
2915 if (then_else_reversed)
2916 reverse = !reverse;
2917
2918 /* If the condition variable is a register and is MODE_INT, accept it. */
2919
2920 cond = XEXP (SET_SRC (set), 0);
2921 tmp = XEXP (cond, 0);
2922 if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT
2923 && (GET_MODE (tmp) != BImode
2924 || !targetm.small_register_classes_for_mode_p (BImode)))
2925 {
2926 *earliest = jump;
2927
2928 if (reverse)
2929 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
2930 GET_MODE (cond), tmp, XEXP (cond, 1));
2931 return cond;
2932 }
2933
2934 /* Otherwise, fall back on canonicalize_condition to do the dirty
2935 work of manipulating MODE_CC values and COMPARE rtx codes. */
2936 tmp = canonicalize_condition (jump, cond, reverse, earliest,
2937 NULL_RTX, have_cbranchcc4, true);
2938
2939 /* We don't handle side effects in the condition; doing so would require
2940 handling REG_INC notes and making sure no duplicate conditions are emitted. */
2941 if (tmp != NULL_RTX && side_effects_p (tmp))
2942 return NULL_RTX;
2943
2944 return tmp;
2945 }
2946
2947 /* Return true if OP is ok for if-then-else processing. */
2948
2949 static int
2950 noce_operand_ok (const_rtx op)
2951 {
2952 if (side_effects_p (op))
2953 return FALSE;
2954
2955 /* We special-case memories, so handle any of them with
2956 no address side effects. */
2957 if (MEM_P (op))
2958 return ! side_effects_p (XEXP (op, 0));
2959
2960 return ! may_trap_p (op);
2961 }
2962
2963 /* Return true if X contains a MEM subrtx. */
2964
2965 static bool
2966 contains_mem_rtx_p (rtx x)
2967 {
2968 subrtx_iterator::array_type array;
2969 FOR_EACH_SUBRTX (iter, array, x, ALL)
2970 if (MEM_P (*iter))
2971 return true;
2972
2973 return false;
2974 }
2975
2976 /* Return true iff basic block TEST_BB is valid for noce if-conversion.
2977 The condition used in this if-conversion is in COND.
2978 In practice, check that TEST_BB ends with a single set
2979 x := a and all previous computations
2980 in TEST_BB don't produce any values that are live after TEST_BB.
2981 In other words, all the insns in TEST_BB are there only
2982 to compute a value for x. Put the rtx cost of the insns
2983 in TEST_BB into COST. Record whether TEST_BB is a single simple
2984 set instruction in SIMPLE_P. */
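
/* For example (illustrative pseudos), a TEST_BB of

     r100 = a + b;
     r101 = r100 * 4;
     x    = r101 - c;

   is acceptable provided r100 and r101 are not live after TEST_BB and
   no insn in the block clobbers the condition being tested.  */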
2985
2986 static bool
2987 bb_valid_for_noce_process_p (basic_block test_bb, rtx cond,
2988 unsigned int *cost, bool *simple_p)
2989 {
2990 if (!test_bb)
2991 return false;
2992
2993 rtx_insn *last_insn = last_active_insn (test_bb, FALSE);
2994 rtx last_set = NULL_RTX;
2995
2996 rtx cc = cc_in_cond (cond);
2997
2998 if (!insn_valid_noce_process_p (last_insn, cc))
2999 return false;
3000 last_set = single_set (last_insn);
3001
3002 rtx x = SET_DEST (last_set);
3003 rtx_insn *first_insn = first_active_insn (test_bb);
3004 rtx first_set = single_set (first_insn);
3005
3006 if (!first_set)
3007 return false;
3008
3009 /* We have a single simple set, that's okay. */
3010 bool speed_p = optimize_bb_for_speed_p (test_bb);
3011
3012 if (first_insn == last_insn)
3013 {
3014 *simple_p = noce_operand_ok (SET_DEST (first_set));
3015 *cost = insn_rtx_cost (first_set, speed_p);
3016 return *simple_p;
3017 }
3018
3019 rtx_insn *prev_last_insn = PREV_INSN (last_insn);
3020 gcc_assert (prev_last_insn);
3021
3022 /* For now, disallow setting x multiple times in test_bb. */
3023 if (REG_P (x) && reg_set_between_p (x, first_insn, prev_last_insn))
3024 return false;
3025
3026 bitmap test_bb_temps = BITMAP_ALLOC (&reg_obstack);
3027
3028 /* The regs that are live out of test_bb. */
3029 bitmap test_bb_live_out = df_get_live_out (test_bb);
3030
3031 int potential_cost = insn_rtx_cost (last_set, speed_p);
3032 rtx_insn *insn;
3033 FOR_BB_INSNS (test_bb, insn)
3034 {
3035 if (insn != last_insn)
3036 {
3037 if (!active_insn_p (insn))
3038 continue;
3039
3040 if (!insn_valid_noce_process_p (insn, cc))
3041 goto free_bitmap_and_fail;
3042
3043 rtx sset = single_set (insn);
3044 gcc_assert (sset);
3045
3046 if (contains_mem_rtx_p (SET_SRC (sset))
3047 || !REG_P (SET_DEST (sset))
3048 || reg_overlap_mentioned_p (SET_DEST (sset), cond))
3049 goto free_bitmap_and_fail;
3050
3051 potential_cost += insn_rtx_cost (sset, speed_p);
3052 bitmap_set_bit (test_bb_temps, REGNO (SET_DEST (sset)));
3053 }
3054 }
3055
3056 /* If any of the intermediate results in test_bb are live after test_bb
3057 then fail. */
3058 if (bitmap_intersect_p (test_bb_live_out, test_bb_temps))
3059 goto free_bitmap_and_fail;
3060
3061 BITMAP_FREE (test_bb_temps);
3062 *cost = potential_cost;
3063 *simple_p = false;
3064 return true;
3065
3066 free_bitmap_and_fail:
3067 BITMAP_FREE (test_bb_temps);
3068 return false;
3069 }
3070
3071 /* We have something like:
3072
3073 if (x > y)
3074 { i = a; j = b; k = c; }
3075
3076 Make it:
3077
3078 tmp_i = (x > y) ? a : i;
3079 tmp_j = (x > y) ? b : j;
3080 tmp_k = (x > y) ? c : k;
3081 i = tmp_i;
3082 j = tmp_j;
3083 k = tmp_k;
3084
3085 Subsequent passes are expected to clean up the extra moves.
3086
3087 Look for special cases such as writes to one register which are
3088 read back in another SET, as might occur in a swap idiom or
3089 similar.
3090
3091 These look like:
3092
3093 if (x > y)
3094 i = a;
3095 j = i;
3096
3097 Which we want to rewrite to:
3098
3099 tmp_i = (x > y) ? a : i;
3100 tmp_j = (x > y) ? tmp_i : j;
3101 i = tmp_i;
3102 j = tmp_j;
3103
3104 We can catch these when looking at (SET x y) by keeping a list of the
3105 registers we would have targeted before if-conversion and looking back
3106 through it for an overlap with Y. If we find one, we rewire the
3107 conditional set to use the temporary we introduced earlier.
3108
3109 IF_INFO contains the useful information about the block structure and
3110 jump instructions. */
3111
3112 static int
3113 noce_convert_multiple_sets (struct noce_if_info *if_info)
3114 {
3115 basic_block test_bb = if_info->test_bb;
3116 basic_block then_bb = if_info->then_bb;
3117 basic_block join_bb = if_info->join_bb;
3118 rtx_insn *jump = if_info->jump;
3119 rtx_insn *cond_earliest;
3120 rtx_insn *insn;
3121
3122 start_sequence ();
3123
3124 /* Decompose the condition attached to the jump. */
3125 rtx cond = noce_get_condition (jump, &cond_earliest, false);
3126 rtx x = XEXP (cond, 0);
3127 rtx y = XEXP (cond, 1);
3128 rtx_code cond_code = GET_CODE (cond);
3129
3130 /* The true targets for a conditional move. */
3131 auto_vec<rtx> targets;
3132 /* The temporaries introduced so that we do not have to consider
3133 register overlap. */
3134 auto_vec<rtx> temporaries;
3135 /* The insns we've emitted. */
3136 auto_vec<rtx_insn *> unmodified_insns;
3137 int count = 0;
3138
3139 FOR_BB_INSNS (then_bb, insn)
3140 {
3141 /* Skip over non-insns. */
3142 if (!active_insn_p (insn))
3143 continue;
3144
3145 rtx set = single_set (insn);
3146 gcc_checking_assert (set);
3147
3148 rtx target = SET_DEST (set);
3149 rtx temp = gen_reg_rtx (GET_MODE (target));
3150 rtx new_val = SET_SRC (set);
3151 rtx old_val = target;
3152
3153 /* If we were supposed to read from an earlier write in this block,
3154 we've changed the register allocation. Rewire the read. While
3155 we are looking, also try to catch a swap idiom. */
3156 for (int i = count - 1; i >= 0; --i)
3157 if (reg_overlap_mentioned_p (new_val, targets[i]))
3158 {
3159 /* Catch a "swap" style idiom. */
3160 if (find_reg_note (insn, REG_DEAD, new_val) != NULL_RTX)
3161 /* The write to targets[i] is only live until the read
3162 here. As the condition codes match, we can propagate
3163 the set to here. */
3164 new_val = SET_SRC (single_set (unmodified_insns[i]));
3165 else
3166 new_val = temporaries[i];
3167 break;
3168 }
3169
3170 /* If we had a non-canonical conditional jump (i.e. one where
3171 the fallthrough is to the "else" case) we need to reverse
3172 the conditional select. */
3173 if (if_info->then_else_reversed)
3174 std::swap (old_val, new_val);
3175
3176 /* Actually emit the conditional move. */
3177 rtx temp_dest = noce_emit_cmove (if_info, temp, cond_code,
3178 x, y, new_val, old_val);
3179
3180 /* If we failed to expand the conditional move, drop out and don't
3181 try to continue. */
3182 if (temp_dest == NULL_RTX)
3183 {
3184 end_sequence ();
3185 return FALSE;
3186 }
3187
3188 /* Bookkeeping. */
3189 count++;
3190 targets.safe_push (target);
3191 temporaries.safe_push (temp_dest);
3192 unmodified_insns.safe_push (insn);
3193 }
3194
3195 /* We must have seen some sort of insn to insert, otherwise we were
3196 given an empty BB to convert, and we can't handle that. */
3197 gcc_assert (!unmodified_insns.is_empty ());
3198
3199 /* Now fixup the assignments. */
3200 for (int i = 0; i < count; i++)
3201 noce_emit_move_insn (targets[i], temporaries[i]);
3202
3203 /* Actually emit the sequence. */
3204 rtx_insn *seq = get_insns ();
3205
3206 for (insn = seq; insn; insn = NEXT_INSN (insn))
3207 set_used_flags (insn);
3208
3209 /* Mark all our temporaries and targets as used. */
3210 for (int i = 0; i < count; i++)
3211 {
3212 set_used_flags (temporaries[i]);
3213 set_used_flags (targets[i]);
3214 }
3215
3216 set_used_flags (cond);
3217 set_used_flags (x);
3218 set_used_flags (y);
3219
3220 unshare_all_rtl_in_chain (seq);
3221 end_sequence ();
3222
3223 if (!seq)
3224 return FALSE;
3225
3226 for (insn = seq; insn; insn = NEXT_INSN (insn))
3227 if (JUMP_P (insn)
3228 || recog_memoized (insn) == -1)
3229 return FALSE;
3230
3231 emit_insn_before_setloc (seq, if_info->jump,
3232 INSN_LOCATION (unmodified_insns.last ()));
3233
3234 /* Clean up THEN_BB and the edges in and out of it. */
3235 remove_edge (find_edge (test_bb, join_bb));
3236 remove_edge (find_edge (then_bb, join_bb));
3237 redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
3238 delete_basic_block (then_bb);
3239 num_true_changes++;
3240
3241 /* Maybe merge blocks now that the jump is simple enough. */
3242 if (can_merge_blocks_p (test_bb, join_bb))
3243 {
3244 merge_blocks (test_bb, join_bb);
3245 num_true_changes++;
3246 }
3247
3248 num_updated_if_blocks++;
3249 return TRUE;
3250 }
3251
3252 /* Return true iff basic block TEST_BB is comprised of only
3253 (SET (REG) (REG)) insns suitable for conversion to a series
3254 of conditional moves. FORNOW: Use II to find the expected cost of
3255 the branch into/over TEST_BB.
3256
3257 TODO: This creates an implicit "magic number" for branch_cost.
3258 II->branch_cost now guides the maximum number of set instructions in
3259 a basic block which is considered profitable to completely
3260 if-convert. */
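
/* For instance (illustrative), a block consisting only of

     i = a;
     j = b;

   (all REG-to-REG moves) qualifies, whereas a block containing a load
   such as "i = *p" does not.  */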
3261
3262 static bool
3263 bb_ok_for_noce_convert_multiple_sets (basic_block test_bb,
3264 struct noce_if_info *ii)
3265 {
3266 rtx_insn *insn;
3267 unsigned count = 0;
3268
3269 FOR_BB_INSNS (test_bb, insn)
3270 {
3271 /* Skip over notes etc. */
3272 if (!active_insn_p (insn))
3273 continue;
3274
3275 /* We only handle SET insns. */
3276 rtx set = single_set (insn);
3277 if (set == NULL_RTX)
3278 return false;
3279
3280 rtx dest = SET_DEST (set);
3281 rtx src = SET_SRC (set);
3282
3283 /* We can possibly relax this, but for now only handle REG to REG
3284 moves. This avoids any issues that might come from introducing
3285 loads/stores that could violate data-race-freedom guarantees. */
3286 if (!(REG_P (src) && REG_P (dest)))
3287 return false;
3288
3289 /* Destination must be appropriate for a conditional write. */
3290 if (!noce_operand_ok (dest))
3291 return false;
3292
3293 /* We must be able to conditionally move in this mode. */
3294 if (!can_conditionally_move_p (GET_MODE (dest)))
3295 return false;
3296
3297 ++count;
3298 }
3299
3300 /* FORNOW: Our cost model is a count of the number of instructions we
3301 would if-convert. This is suboptimal, and should be improved as part
3302 of a wider rework of branch_cost. */
3303 if (count > ii->branch_cost)
3304 return FALSE;
3305
3306 return count > 0;
3307 }
3308
3309 /* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
3310 it without using conditional execution. Return TRUE if we were successful
3311 at converting the block. */
3312
3313 static int
3314 noce_process_if_block (struct noce_if_info *if_info)
3315 {
3316 basic_block test_bb = if_info->test_bb; /* test block */
3317 basic_block then_bb = if_info->then_bb; /* THEN */
3318 basic_block else_bb = if_info->else_bb; /* ELSE or NULL */
3319 basic_block join_bb = if_info->join_bb; /* JOIN */
3320 rtx_insn *jump = if_info->jump;
3321 rtx cond = if_info->cond;
3322 rtx_insn *insn_a, *insn_b;
3323 rtx set_a, set_b;
3324 rtx orig_x, x, a, b;
3325
3326 /* We're looking for patterns of the form
3327
3328 (1) if (...) x = a; else x = b;
3329 (2) x = b; if (...) x = a;
3330 (3) if (...) x = a; // as if with an initial x = x.
3331 (4) if (...) { x = a; y = b; z = c; } // Like 3, for multiple SETS.
3332 The later patterns are only applied when branches are sufficiently expensive.
3333 For the if (...) x = a; else x = b; case we allow multiple insns
3334 inside the then and else blocks as long as their only effect is
3335 to calculate a value for x.
3336 ??? For future expansion, further expand the "multiple X" rules. */
3337
3338 /* First look for multiple SETS. */
3339 if (!else_bb
3340 && HAVE_conditional_move
3341 && !HAVE_cc0
3342 && bb_ok_for_noce_convert_multiple_sets (then_bb, if_info))
3343 {
3344 if (noce_convert_multiple_sets (if_info))
3345 return TRUE;
3346 }
3347
3348 if (! bb_valid_for_noce_process_p (then_bb, cond, &if_info->then_cost,
3349 &if_info->then_simple))
3350 return false;
3351
3352 if (else_bb
3353 && ! bb_valid_for_noce_process_p (else_bb, cond, &if_info->else_cost,
3354 &if_info->else_simple))
3355 return false;
3356
3357 insn_a = last_active_insn (then_bb, FALSE);
3358 set_a = single_set (insn_a);
3359 gcc_assert (set_a);
3360
3361 x = SET_DEST (set_a);
3362 a = SET_SRC (set_a);
3363
3364 /* Look for the other potential set. Make sure we've got equivalent
3365 destinations. */
3366 /* ??? This is overconservative. Storing to two different mems is
3367 as easy as conditionally computing the address. Storing to a
3368 single mem merely requires a scratch memory to use as one of the
3369 destination addresses; often the memory immediately below the
3370 stack pointer is available for this. */
3371 set_b = NULL_RTX;
3372 if (else_bb)
3373 {
3374 insn_b = last_active_insn (else_bb, FALSE);
3375 set_b = single_set (insn_b);
3376 gcc_assert (set_b);
3377
3378 if (!rtx_interchangeable_p (x, SET_DEST (set_b)))
3379 return FALSE;
3380 }
3381 else
3382 {
3383 insn_b = prev_nonnote_nondebug_insn (if_info->cond_earliest);
3384 /* We're going to be moving the evaluation of B down from above
3385 COND_EARLIEST to JUMP. Make sure the relevant data is still
3386 intact. */
3387 if (! insn_b
3388 || BLOCK_FOR_INSN (insn_b) != BLOCK_FOR_INSN (if_info->cond_earliest)
3389 || !NONJUMP_INSN_P (insn_b)
3390 || (set_b = single_set (insn_b)) == NULL_RTX
3391 || ! rtx_interchangeable_p (x, SET_DEST (set_b))
3392 || ! noce_operand_ok (SET_SRC (set_b))
3393 || reg_overlap_mentioned_p (x, SET_SRC (set_b))
3394 || modified_between_p (SET_SRC (set_b), insn_b, jump)
3395 /* Avoid extending the lifetime of hard registers on small
3396 register class machines. */
3397 || (REG_P (SET_SRC (set_b))
3398 && HARD_REGISTER_P (SET_SRC (set_b))
3399 && targetm.small_register_classes_for_mode_p
3400 (GET_MODE (SET_SRC (set_b))))
3401 /* Likewise with X. In particular this can happen when
3402 noce_get_condition looks farther back in the instruction
3403 stream than one might expect. */
3404 || reg_overlap_mentioned_p (x, cond)
3405 || reg_overlap_mentioned_p (x, a)
3406 || modified_between_p (x, insn_b, jump))
3407 {
3408 insn_b = NULL;
3409 set_b = NULL_RTX;
3410 }
3411 }
3412
3413 /* If x has side effects then only the if-then-else form is safe to
3414 convert. But even in that case we would need to restore any notes
3415 (such as REG_INC) at the end. That can be tricky if
3416 noce_emit_move_insn expands to more than one insn, so disable the
3417 optimization entirely for now if there are side effects. */
3418 if (side_effects_p (x))
3419 return FALSE;
3420
3421 b = (set_b ? SET_SRC (set_b) : x);
3422
3423 /* Only operate on register destinations, and even then avoid extending
3424 the lifetime of hard registers on small register class machines. */
3425 orig_x = x;
3426 if (!REG_P (x)
3427 || (HARD_REGISTER_P (x)
3428 && targetm.small_register_classes_for_mode_p (GET_MODE (x))))
3429 {
3430 if (GET_MODE (x) == BLKmode)
3431 return FALSE;
3432
3433 if (GET_CODE (x) == ZERO_EXTRACT
3434 && (!CONST_INT_P (XEXP (x, 1))
3435 || !CONST_INT_P (XEXP (x, 2))))
3436 return FALSE;
3437
3438 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
3439 ? XEXP (x, 0) : x));
3440 }
3441
3442 /* Don't operate on sources that may trap or are volatile. */
3443 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
3444 return FALSE;
3445
3446 retry:
3447 /* Set up the info block for our subroutines. */
3448 if_info->insn_a = insn_a;
3449 if_info->insn_b = insn_b;
3450 if_info->x = x;
3451 if_info->a = a;
3452 if_info->b = b;
3453
3454 /* Try optimizations in some approximation of a useful order. */
3455 /* ??? Should first look to see if X is live incoming at all. If it
3456 isn't, we don't need anything but an unconditional set. */
3457
3458 /* Look and see if A and B are really the same. Avoid creating silly
3459 cmove constructs that no one will fix up later. */
3460 if (noce_simple_bbs (if_info)
3461 && rtx_interchangeable_p (a, b))
3462 {
3463 /* If we have an INSN_B, we don't have to create any new rtl. Just
3464 move the instruction that we already have. If we don't have an
3465 INSN_B, that means that A == X, and we've got a noop move. In
3466 that case don't do anything and let the code below delete INSN_A. */
3467 if (insn_b && else_bb)
3468 {
3469 rtx note;
3470
3471 if (else_bb && insn_b == BB_END (else_bb))
3472 BB_END (else_bb) = PREV_INSN (insn_b);
3473 reorder_insns (insn_b, insn_b, PREV_INSN (jump));
3474
3475 /* If there was a REG_EQUAL note, delete it since it may have been
3476 true due to this insn being after a jump. */
3477 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
3478 remove_note (insn_b, note);
3479
3480 insn_b = NULL;
3481 }
3482 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
3483 x must be executed twice. */
3484 else if (insn_b && side_effects_p (orig_x))
3485 return FALSE;
3486
3487 x = orig_x;
3488 goto success;
3489 }
3490
3491 if (!set_b && MEM_P (orig_x))
3492 /* We must avoid store speculation, which would be unsafe in cases like
3493 if (pthread_mutex_trylock(mutex))
3494 ++global_variable;
3495 Rather than go to much effort here, we rely on the SSA optimizers,
3496 which do a good enough job these days. */
3497 return FALSE;
3498
3499 if (noce_try_move (if_info))
3500 goto success;
3501 if (noce_try_store_flag (if_info))
3502 goto success;
3503 if (noce_try_bitop (if_info))
3504 goto success;
3505 if (noce_try_minmax (if_info))
3506 goto success;
3507 if (noce_try_abs (if_info))
3508 goto success;
3509 if (noce_try_inverse_constants (if_info))
3510 goto success;
3511 if (!targetm.have_conditional_execution ()
3512 && noce_try_store_flag_constants (if_info))
3513 goto success;
3514 if (HAVE_conditional_move
3515 && noce_try_cmove (if_info))
3516 goto success;
3517 if (! targetm.have_conditional_execution ())
3518 {
3519 if (noce_try_addcc (if_info))
3520 goto success;
3521 if (noce_try_store_flag_mask (if_info))
3522 goto success;
3523 if (HAVE_conditional_move
3524 && noce_try_cmove_arith (if_info))
3525 goto success;
3526 if (noce_try_sign_mask (if_info))
3527 goto success;
3528 }
3529
3530 if (!else_bb && set_b)
3531 {
3532 insn_b = NULL;
3533 set_b = NULL_RTX;
3534 b = orig_x;
3535 goto retry;
3536 }
3537
3538 return FALSE;
3539
3540 success:
3541
3542 /* If we used a temporary, fix it up now. */
3543 if (orig_x != x)
3544 {
3545 rtx_insn *seq;
3546
3547 start_sequence ();
3548 noce_emit_move_insn (orig_x, x);
3549 seq = get_insns ();
3550 set_used_flags (orig_x);
3551 unshare_all_rtl_in_chain (seq);
3552 end_sequence ();
3553
3554 emit_insn_before_setloc (seq, BB_END (test_bb), INSN_LOCATION (insn_a));
3555 }
3556
3557 /* The original THEN and ELSE blocks may now be removed. The test block
3558 must now jump to the join block. If the test block and the join block
3559 can be merged, do so. */
3560 if (else_bb)
3561 {
3562 delete_basic_block (else_bb);
3563 num_true_changes++;
3564 }
3565 else
3566 remove_edge (find_edge (test_bb, join_bb));
3567
3568 remove_edge (find_edge (then_bb, join_bb));
3569 redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
3570 delete_basic_block (then_bb);
3571 num_true_changes++;
3572
3573 if (can_merge_blocks_p (test_bb, join_bb))
3574 {
3575 merge_blocks (test_bb, join_bb);
3576 num_true_changes++;
3577 }
3578
3579 num_updated_if_blocks++;
3580 return TRUE;
3581 }
3582
3583 /* Check whether a block is suitable for conditional move conversion.
3584 Every insn must be a simple set of a register to a constant or a
3585 register. For each assignment, store the value in the pointer map
3586 VALS, keyed by register pointer, then store the register
3587 pointer in REGS. COND is the condition we will test. */
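
/* A block such as (illustrative)

     i = 5;
     j = k;

   passes the checks below: each insn is a single set of a register to
   a constant or to another register, and nothing traps, has side
   effects, or clobbers the condition.  */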
3588
3589 static int
3590 check_cond_move_block (basic_block bb,
3591 hash_map<rtx, rtx> *vals,
3592 vec<rtx> *regs,
3593 rtx cond)
3594 {
3595 rtx_insn *insn;
3596 rtx cc = cc_in_cond (cond);
3597
3598 /* We can only handle simple jumps at the end of the basic block.
3599 It is almost impossible to update the CFG otherwise. */
3600 insn = BB_END (bb);
3601 if (JUMP_P (insn) && !onlyjump_p (insn))
3602 return FALSE;
3603
3604 FOR_BB_INSNS (bb, insn)
3605 {
3606 rtx set, dest, src;
3607
3608 if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
3609 continue;
3610 set = single_set (insn);
3611 if (!set)
3612 return FALSE;
3613
3614 dest = SET_DEST (set);
3615 src = SET_SRC (set);
3616 if (!REG_P (dest)
3617 || (HARD_REGISTER_P (dest)
3618 && targetm.small_register_classes_for_mode_p (GET_MODE (dest))))
3619 return FALSE;
3620
3621 if (!CONSTANT_P (src) && !register_operand (src, VOIDmode))
3622 return FALSE;
3623
3624 if (side_effects_p (src) || side_effects_p (dest))
3625 return FALSE;
3626
3627 if (may_trap_p (src) || may_trap_p (dest))
3628 return FALSE;
3629
3630 /* Don't try to handle this if the source register was
3631 modified earlier in the block. */
3632 if ((REG_P (src)
3633 && vals->get (src))
3634 || (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src))
3635 && vals->get (SUBREG_REG (src))))
3636 return FALSE;
3637
3638 /* Don't try to handle this if the destination register was
3639 modified earlier in the block. */
3640 if (vals->get (dest))
3641 return FALSE;
3642
3643 /* Don't try to handle this if the condition uses the
3644 destination register. */
3645 if (reg_overlap_mentioned_p (dest, cond))
3646 return FALSE;
3647
3648 /* Don't try to handle this if the source register is modified
3649 later in the block. */
3650 if (!CONSTANT_P (src)
3651 && modified_between_p (src, insn, NEXT_INSN (BB_END (bb))))
3652 return FALSE;
3653
3654 /* Skip it if the instruction to be moved might clobber CC. */
3655 if (cc && set_of (cc, insn))
3656 return FALSE;
3657
3658 vals->put (dest, src);
3659
3660 regs->safe_push (dest);
3661 }
3662
3663 return TRUE;
3664 }
3665
3666 /* Given a basic block BB suitable for conditional move conversion,
3667 a condition COND, and pointer maps THEN_VALS and ELSE_VALS containing
3668 the register values depending on COND, emit the insns in the block as
3669 conditional moves. If ELSE_BLOCK is true, THEN_BB was already
3670 processed. The caller has started a sequence for the conversion.
3671 Return true if successful, false if something goes wrong. */
3672
3673 static bool
3674 cond_move_convert_if_block (struct noce_if_info *if_infop,
3675 basic_block bb, rtx cond,
3676 hash_map<rtx, rtx> *then_vals,
3677 hash_map<rtx, rtx> *else_vals,
3678 bool else_block_p)
3679 {
3680 enum rtx_code code;
3681 rtx_insn *insn;
3682 rtx cond_arg0, cond_arg1;
3683
3684 code = GET_CODE (cond);
3685 cond_arg0 = XEXP (cond, 0);
3686 cond_arg1 = XEXP (cond, 1);
3687
3688 FOR_BB_INSNS (bb, insn)
3689 {
3690 rtx set, target, dest, t, e;
3691
3692 /* ??? Maybe emit conditional debug insn? */
3693 if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
3694 continue;
3695 set = single_set (insn);
3696 gcc_assert (set && REG_P (SET_DEST (set)));
3697
3698 dest = SET_DEST (set);
3699
3700 rtx *then_slot = then_vals->get (dest);
3701 rtx *else_slot = else_vals->get (dest);
3702 t = then_slot ? *then_slot : NULL_RTX;
3703 e = else_slot ? *else_slot : NULL_RTX;
3704
3705 if (else_block_p)
3706 {
3707 /* If this register was set in the then block, we already
3708 handled this case there. */
3709 if (t)
3710 continue;
3711 t = dest;
3712 gcc_assert (e);
3713 }
3714 else
3715 {
3716 gcc_assert (t);
3717 if (!e)
3718 e = dest;
3719 }
3720
3721 target = noce_emit_cmove (if_infop, dest, code, cond_arg0, cond_arg1,
3722 t, e);
3723 if (!target)
3724 return false;
3725
3726 if (target != dest)
3727 noce_emit_move_insn (dest, target);
3728 }
3729
3730 return true;
3731 }
3732
3733 /* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
3734 it using only conditional moves. Return TRUE if we were successful at
3735 converting the block. */
3736
3737 static int
3738 cond_move_process_if_block (struct noce_if_info *if_info)
3739 {
3740 basic_block test_bb = if_info->test_bb;
3741 basic_block then_bb = if_info->then_bb;
3742 basic_block else_bb = if_info->else_bb;
3743 basic_block join_bb = if_info->join_bb;
3744 rtx_insn *jump = if_info->jump;
3745 rtx cond = if_info->cond;
3746 rtx_insn *seq, *loc_insn;
3747 rtx reg;
3748 int c;
3749 vec<rtx> then_regs = vNULL;
3750 vec<rtx> else_regs = vNULL;
3751 unsigned int i;
3752 int success_p = FALSE;
3753
3754 /* Build, for each block, a mapping from each register set there to
3755 the value assigned to it. */
3756 hash_map<rtx, rtx> then_vals;
3757 hash_map<rtx, rtx> else_vals;
3758
3759 /* Make sure the blocks are suitable. */
3760 if (!check_cond_move_block (then_bb, &then_vals, &then_regs, cond)
3761 || (else_bb
3762 && !check_cond_move_block (else_bb, &else_vals, &else_regs, cond)))
3763 goto done;
3764
3765 /* Make sure the blocks can be used together. If the same register
3766 is set in both blocks, and is not set to a constant in both
3767 cases, then both blocks must set it to the same register. We
3768 have already verified that if it is set to a register, the
3769 source register does not change after the assignment. Also count
3770 the number of registers set in only one of the blocks. */
3771 c = 0;
3772 FOR_EACH_VEC_ELT (then_regs, i, reg)
3773 {
3774 rtx *then_slot = then_vals.get (reg);
3775 rtx *else_slot = else_vals.get (reg);
3776
3777 gcc_checking_assert (then_slot);
3778 if (!else_slot)
3779 ++c;
3780 else
3781 {
3782 rtx then_val = *then_slot;
3783 rtx else_val = *else_slot;
3784 if (!CONSTANT_P (then_val) && !CONSTANT_P (else_val)
3785 && !rtx_equal_p (then_val, else_val))
3786 goto done;
3787 }
3788 }
3789
3790 /* Finish counting C for the MAX_CONDITIONAL_EXECUTE check below. */
3791 FOR_EACH_VEC_ELT (else_regs, i, reg)
3792 {
3793 gcc_checking_assert (else_vals.get (reg));
3794 if (!then_vals.get (reg))
3795 ++c;
3796 }
3797
3798 /* Make sure it is reasonable to convert this block. What matters
3799 is the number of assignments currently made in only one of the
3800 branches, since if we convert we are going to always execute
3801 them. */
3802 if (c > MAX_CONDITIONAL_EXECUTE)
3803 goto done;
3804
3805 /* Try to emit the conditional moves. First do the then block,
3806 then do anything left in the else block. */
3807 start_sequence ();
3808 if (!cond_move_convert_if_block (if_info, then_bb, cond,
3809 &then_vals, &else_vals, false)
3810 || (else_bb
3811 && !cond_move_convert_if_block (if_info, else_bb, cond,
3812 &then_vals, &else_vals, true)))
3813 {
3814 end_sequence ();
3815 goto done;
3816 }
3817 seq = end_ifcvt_sequence (if_info);
3818 if (!seq)
3819 goto done;
3820
3821 loc_insn = first_active_insn (then_bb);
3822 if (!loc_insn)
3823 {
3824 loc_insn = first_active_insn (else_bb);
3825 gcc_assert (loc_insn);
3826 }
3827 emit_insn_before_setloc (seq, jump, INSN_LOCATION (loc_insn));
3828
3829 if (else_bb)
3830 {
3831 delete_basic_block (else_bb);
3832 num_true_changes++;
3833 }
3834 else
3835 remove_edge (find_edge (test_bb, join_bb));
3836
3837 remove_edge (find_edge (then_bb, join_bb));
3838 redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
3839 delete_basic_block (then_bb);
3840 num_true_changes++;
3841
3842 if (can_merge_blocks_p (test_bb, join_bb))
3843 {
3844 merge_blocks (test_bb, join_bb);
3845 num_true_changes++;
3846 }
3847
3848 num_updated_if_blocks++;
3849
3850 success_p = TRUE;
3851
3852 done:
3853 then_regs.release ();
3854 else_regs.release ();
3855 return success_p;
3856 }
3857
3858 \f
3859 /* Determine if a given basic block heads a simple IF-THEN-JOIN or an
3860 IF-THEN-ELSE-JOIN block.
3861
3862 If so, we'll try to convert the insns to not require the branch,
3863 using only transformations that do not require conditional execution.
3864
3865 Return TRUE if we were successful at converting the block. */
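
/* The first two shapes recognized below look like this (illustrative):

       IF-THEN-ELSE-JOIN         IF-THEN-JOIN

            TEST                     TEST
           /    \                   /    |
        THEN    ELSE             THEN    |
           \    /                   \    |
            JOIN                     JOIN

   plus an IF-ELSE-JOIN variant, handled by swapping the roles of the
   THEN and ELSE blocks and reversing the jump condition.  */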
3866
3867 static int
3868 noce_find_if_block (basic_block test_bb, edge then_edge, edge else_edge,
3869 int pass)
3870 {
3871 basic_block then_bb, else_bb, join_bb;
3872 bool then_else_reversed = false;
3873 rtx_insn *jump;
3874 rtx cond;
3875 rtx_insn *cond_earliest;
3876 struct noce_if_info if_info;
3877
3878 /* We only ever should get here before reload. */
3879 gcc_assert (!reload_completed);
3880
3881 /* Recognize an IF-THEN-ELSE-JOIN block. */
3882 if (single_pred_p (then_edge->dest)
3883 && single_succ_p (then_edge->dest)
3884 && single_pred_p (else_edge->dest)
3885 && single_succ_p (else_edge->dest)
3886 && single_succ (then_edge->dest) == single_succ (else_edge->dest))
3887 {
3888 then_bb = then_edge->dest;
3889 else_bb = else_edge->dest;
3890 join_bb = single_succ (then_bb);
3891 }
3892 /* Recognize an IF-THEN-JOIN block. */
3893 else if (single_pred_p (then_edge->dest)
3894 && single_succ_p (then_edge->dest)
3895 && single_succ (then_edge->dest) == else_edge->dest)
3896 {
3897 then_bb = then_edge->dest;
3898 else_bb = NULL_BLOCK;
3899 join_bb = else_edge->dest;
3900 }
3901 /* Recognize an IF-ELSE-JOIN block. We can have those because the order
3902 of basic blocks in cfglayout mode does not matter, so the fallthrough
3903 edge can go to any basic block (and not just to bb->next_bb, like in
3904 cfgrtl mode). */
3905 else if (single_pred_p (else_edge->dest)
3906 && single_succ_p (else_edge->dest)
3907 && single_succ (else_edge->dest) == then_edge->dest)
3908 {
3909 /* The noce transformations do not apply directly to IF-ELSE-JOIN
3910 blocks. To handle one, we invert the THEN and ELSE blocks
3911 and reverse the jump condition. */
3912 then_bb = else_edge->dest;
3913 else_bb = NULL_BLOCK;
3914 join_bb = single_succ (then_bb);
3915 then_else_reversed = true;
3916 }
3917 else
3918 /* Not a form we can handle. */
3919 return FALSE;
3920
3921 /* The outgoing edges of the THEN and ELSE blocks must not be complex. */
3922 if (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
3923 return FALSE;
3924 if (else_bb
3925 && single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
3926 return FALSE;
3927
3928 num_possible_if_blocks++;
3929
3930 if (dump_file)
3931 {
3932 fprintf (dump_file,
3933 "\nIF-THEN%s-JOIN block found, pass %d, test %d, then %d",
3934 (else_bb) ? "-ELSE" : "",
3935 pass, test_bb->index, then_bb->index);
3936
3937 if (else_bb)
3938 fprintf (dump_file, ", else %d", else_bb->index);
3939
3940 fprintf (dump_file, ", join %d\n", join_bb->index);
3941 }
3942
3943 /* If the conditional jump is more than just a conditional
3944 jump, then we cannot do if-conversion on this block. */
3945 jump = BB_END (test_bb);
3946 if (! onlyjump_p (jump))
3947 return FALSE;
3948
3949 /* If this is not a standard conditional jump, we can't parse it. */
3950 cond = noce_get_condition (jump, &cond_earliest, then_else_reversed);
3951 if (!cond)
3952 return FALSE;
3953
3954 /* We must be comparing objects whose modes imply the size. */
3955 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
3956 return FALSE;
3957
3958 /* Initialize an IF_INFO struct to pass around. */
3959 memset (&if_info, 0, sizeof if_info);
3960 if_info.test_bb = test_bb;
3961 if_info.then_bb = then_bb;
3962 if_info.else_bb = else_bb;
3963 if_info.join_bb = join_bb;
3964 if_info.cond = cond;
3965 if_info.cond_earliest = cond_earliest;
3966 if_info.jump = jump;
3967 if_info.then_else_reversed = then_else_reversed;
3968 if_info.branch_cost = BRANCH_COST (optimize_bb_for_speed_p (test_bb),
3969 predictable_edge_p (then_edge));
3970
3971 /* Do the real work. */
3972
3973 if (noce_process_if_block (&if_info))
3974 return TRUE;
3975
3976 if (HAVE_conditional_move
3977 && cond_move_process_if_block (&if_info))
3978 return TRUE;
3979
3980 return FALSE;
3981 }
3982 \f
3983
3984 /* Merge the blocks and mark for local life update. */
3985
3986 static void
3987 merge_if_block (struct ce_if_block * ce_info)
3988 {
3989 basic_block test_bb = ce_info->test_bb; /* last test block */
3990 basic_block then_bb = ce_info->then_bb; /* THEN */
3991 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
3992 basic_block join_bb = ce_info->join_bb; /* join block */
3993 basic_block combo_bb;
3994
3995 /* All block merging is done into the lower block numbers. */
3996
3997 combo_bb = test_bb;
3998 df_set_bb_dirty (test_bb);
3999
4000 /* Merge any basic blocks to handle && and || subtests. Each of
4001 the blocks is on the fallthru path from the predecessor block. */
4002 if (ce_info->num_multiple_test_blocks > 0)
4003 {
4004 basic_block bb = test_bb;
4005 basic_block last_test_bb = ce_info->last_test_bb;
4006 basic_block fallthru = block_fallthru (bb);
4007
4008 do
4009 {
4010 bb = fallthru;
4011 fallthru = block_fallthru (bb);
4012 merge_blocks (combo_bb, bb);
4013 num_true_changes++;
4014 }
4015 while (bb != last_test_bb);
4016 }
4017
4018 /* Merge the THEN block into the combo (TEST) block. Normally the THEN
4019 block won't have a label, but it might if there were || tests. That
4020 label's count should be zero, and it normally should be removed. */
4021
4022 if (then_bb)
4023 {
4024 /* If THEN_BB has no successors, then there's a BARRIER after it.
4025 If COMBO_BB has more than one successor (THEN_BB), then that BARRIER
4026 is no longer needed, and in fact it is incorrect to leave it in
4027 the insn stream. */
4028 if (EDGE_COUNT (then_bb->succs) == 0
4029 && EDGE_COUNT (combo_bb->succs) > 1)
4030 {
4031 rtx_insn *end = NEXT_INSN (BB_END (then_bb));
4032 while (end && NOTE_P (end) && !NOTE_INSN_BASIC_BLOCK_P (end))
4033 end = NEXT_INSN (end);
4034
4035 if (end && BARRIER_P (end))
4036 delete_insn (end);
4037 }
4038 merge_blocks (combo_bb, then_bb);
4039 num_true_changes++;
4040 }
4041
4042 /* The ELSE block, if it existed, had a label. That label count
4043 will almost always be zero, but odd things can happen when labels
4044 get their addresses taken. */
4045 if (else_bb)
4046 {
4047 /* If ELSE_BB has no successors, then there's a BARRIER after it.
4048 If COMBO_BB has more than one successor (ELSE_BB), then that BARRIER
4049 is no longer needed, and in fact it is incorrect to leave it in
4050 the insn stream. */
4051 if (EDGE_COUNT (else_bb->succs) == 0
4052 && EDGE_COUNT (combo_bb->succs) > 1)
4053 {
4054 rtx_insn *end = NEXT_INSN (BB_END (else_bb));
4055 while (end && NOTE_P (end) && !NOTE_INSN_BASIC_BLOCK_P (end))
4056 end = NEXT_INSN (end);
4057
4058 if (end && BARRIER_P (end))
4059 delete_insn (end);
4060 }
4061 merge_blocks (combo_bb, else_bb);
4062 num_true_changes++;
4063 }
4064
4065 /* If there was no join block reported, that means it was not adjacent
4066 to the others, and so we cannot merge them. */
4067
4068 if (! join_bb)
4069 {
4070 rtx_insn *last = BB_END (combo_bb);
4071
4072 /* The outgoing edge for the current COMBO block should already
4073 be correct. Verify this. */
4074 if (EDGE_COUNT (combo_bb->succs) == 0)
4075 gcc_assert (find_reg_note (last, REG_NORETURN, NULL)
4076 || (NONJUMP_INSN_P (last)
4077 && GET_CODE (PATTERN (last)) == TRAP_IF
4078 && (TRAP_CONDITION (PATTERN (last))
4079 == const_true_rtx)));
4080
4081 else
4082 /* There should still be something at the end of the THEN or ELSE
4083 blocks taking us to our final destination. */
4084 gcc_assert (JUMP_P (last)
4085 || (EDGE_SUCC (combo_bb, 0)->dest
4086 == EXIT_BLOCK_PTR_FOR_FN (cfun)
4087 && CALL_P (last)
4088 && SIBLING_CALL_P (last))
4089 || ((EDGE_SUCC (combo_bb, 0)->flags & EDGE_EH)
4090 && can_throw_internal (last)));
4091 }
4092
4093 /* The JOIN block may have had quite a number of other predecessors too.
4094 Since we've already merged the TEST, THEN and ELSE blocks, we should
4095 have only one remaining edge from our if-then-else diamond. If there
4096 is more than one remaining edge, it must come from elsewhere. There
4097 may be zero incoming edges if the THEN block didn't actually join
4098 back up (as with a call to a non-return function). */
4099 else if (EDGE_COUNT (join_bb->preds) < 2
4100 && join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4101 {
4102 /* We can merge the JOIN cleanly, update the dataflow, and try
4103 again on this pass. */
4104 merge_blocks (combo_bb, join_bb);
4105 num_true_changes++;
4106 }
4107 else
4108 {
4109 /* We cannot merge the JOIN. */
4110
4111 /* The outgoing edge for the current COMBO block should already
4112 be correct. Verify this. */
4113 gcc_assert (single_succ_p (combo_bb)
4114 && single_succ (combo_bb) == join_bb);
4115
4116 /* Remove the jump and cruft from the end of the COMBO block. */
4117 if (join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4118 tidy_fallthru_edge (single_succ_edge (combo_bb));
4119 }
4120
4121 num_updated_if_blocks++;
4122 }
4123 \f
4124 /* Find a block ending in a simple IF condition and try to transform it
4125 in some way. When converting a multi-block condition, put the new code
4126 in the first such block and delete the rest. Return a pointer to this
4127 first block if some transformation was done. Return NULL otherwise. */
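
/* For reference, a summary of the strategies tried below, in order (a
   restatement of this function's body, not an external contract):

     1. noce_find_if_block - branchless RTL; tried before reload only.
     2. cond_exec_find_if_block - predication; tried after reload, and
	only on targets with conditional execution.
     3. find_cond_trap - conditional trap instructions.
     4. find_if_case_1 / find_if_case_2 - speculation, guided by
	postdominator information.  */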
4128
4129 static basic_block
4130 find_if_header (basic_block test_bb, int pass)
4131 {
4132 ce_if_block ce_info;
4133 edge then_edge;
4134 edge else_edge;
4135
4136 /* The kind of block we're looking for has exactly two successors. */
4137 if (EDGE_COUNT (test_bb->succs) != 2)
4138 return NULL;
4139
4140 then_edge = EDGE_SUCC (test_bb, 0);
4141 else_edge = EDGE_SUCC (test_bb, 1);
4142
4143 if (df_get_bb_dirty (then_edge->dest))
4144 return NULL;
4145 if (df_get_bb_dirty (else_edge->dest))
4146 return NULL;
4147
4148 /* Neither edge should be abnormal. */
4149 if ((then_edge->flags & EDGE_COMPLEX)
4150 || (else_edge->flags & EDGE_COMPLEX))
4151 return NULL;
4152
4153 /* Nor should either edge exit a loop. */
4154 if ((then_edge->flags & EDGE_LOOP_EXIT)
4155 || (else_edge->flags & EDGE_LOOP_EXIT))
4156 return NULL;
4157
4158 /* The THEN edge is canonically the one that falls through. */
4159 if (then_edge->flags & EDGE_FALLTHRU)
4160 ;
4161 else if (else_edge->flags & EDGE_FALLTHRU)
4162 std::swap (then_edge, else_edge);
4163 else
4164 /* Otherwise this must be a multiway branch of some sort. */
4165 return NULL;
4166
4167 memset (&ce_info, 0, sizeof (ce_info));
4168 ce_info.test_bb = test_bb;
4169 ce_info.then_bb = then_edge->dest;
4170 ce_info.else_bb = else_edge->dest;
4171 ce_info.pass = pass;
4172
4173 #ifdef IFCVT_MACHDEP_INIT
4174 IFCVT_MACHDEP_INIT (&ce_info);
4175 #endif
4176
4177 if (!reload_completed
4178 && noce_find_if_block (test_bb, then_edge, else_edge, pass))
4179 goto success;
4180
4181 if (reload_completed
4182 && targetm.have_conditional_execution ()
4183 && cond_exec_find_if_block (&ce_info))
4184 goto success;
4185
4186 if (targetm.have_trap ()
4187 && optab_handler (ctrap_optab, word_mode) != CODE_FOR_nothing
4188 && find_cond_trap (test_bb, then_edge, else_edge))
4189 goto success;
4190
4191 if (dom_info_state (CDI_POST_DOMINATORS) >= DOM_NO_FAST_QUERY
4192 && (reload_completed || !targetm.have_conditional_execution ()))
4193 {
4194 if (find_if_case_1 (test_bb, then_edge, else_edge))
4195 goto success;
4196 if (find_if_case_2 (test_bb, then_edge, else_edge))
4197 goto success;
4198 }
4199
4200 return NULL;
4201
4202 success:
4203 if (dump_file)
4204 fprintf (dump_file, "Conversion succeeded on pass %d.\n", pass);
4205 /* Set this so we continue looking. */
4206 cond_exec_changed_p = TRUE;
4207 return ce_info.test_bb;
4208 }
4209
4210 /* Determine whether CUR_BB has exactly two successor edges, one falling
4211 through to the next block and the other jumping to TARGET_BB, so we can
4212 tell whether the block is part of an && or an || test. Return -1 if it
4213 does not, otherwise the number of non-note, non-jump, non-USE/CLOBBER insns in the block. */
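
/* For illustration (hypothetical source): for "if (a && b) x = 1;" the
   front end typically emits two test blocks; the first falls through to
   the second test when A is true and jumps past the IF (to TARGET_BB)
   when A is false.  That first block is what this helper recognizes as
   part of an && chain.  */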
4214
4215 static int
4216 block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
4217 {
4218 edge cur_edge;
4219 int fallthru_p = FALSE;
4220 int jump_p = FALSE;
4221 rtx_insn *insn;
4222 rtx_insn *end;
4223 int n_insns = 0;
4224 edge_iterator ei;
4225
4226 if (!cur_bb || !target_bb)
4227 return -1;
4228
4229 /* If there are no edges, the block obviously neither jumps nor falls thru. */
4230 if (EDGE_COUNT (cur_bb->succs) == 0)
4231 return -1;
4232
4233 FOR_EACH_EDGE (cur_edge, ei, cur_bb->succs)
4234 {
4235 if (cur_edge->flags & EDGE_COMPLEX)
4236 /* Anything complex isn't what we want. */
4237 return -1;
4238
4239 else if (cur_edge->flags & EDGE_FALLTHRU)
4240 fallthru_p = TRUE;
4241
4242 else if (cur_edge->dest == target_bb)
4243 jump_p = TRUE;
4244
4245 else
4246 return -1;
4247 }
4248
4249 if ((jump_p & fallthru_p) == 0)
4250 return -1;
4251
4252 /* Don't allow calls in the block, since this is used to group && and ||
4253 together for conditional execution support. ??? We should support
4254 conditional execution across calls for IA-64 some day, but for now
4255 excluding them keeps the code simpler. */
4256 end = BB_END (cur_bb);
4257 insn = BB_HEAD (cur_bb);
4258
4259 while (insn != NULL_RTX)
4260 {
4261 if (CALL_P (insn))
4262 return -1;
4263
4264 if (INSN_P (insn)
4265 && !JUMP_P (insn)
4266 && !DEBUG_INSN_P (insn)
4267 && GET_CODE (PATTERN (insn)) != USE
4268 && GET_CODE (PATTERN (insn)) != CLOBBER)
4269 n_insns++;
4270
4271 if (insn == end)
4272 break;
4273
4274 insn = NEXT_INSN (insn);
4275 }
4276
4277 return n_insns;
4278 }
4279
4280 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
4281 block. If so, we'll try to convert the insns to not require the branch.
4282 Return TRUE if we were successful at converting the block. */
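
/* For illustration (hypothetical source and RTL, not from this file):
   on a target with conditional execution, "if (test) x = a; else x = b;"
   can become two predicated instructions, roughly

	(cond_exec (test)     (set (reg x) (reg a)))
	(cond_exec (not test) (set (reg x) (reg b)))

   with no branch at all; a successful match here hands the block to
   cond_exec_process_if_block to arrange exactly that.  */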
4283
4284 static int
4285 cond_exec_find_if_block (struct ce_if_block * ce_info)
4286 {
4287 basic_block test_bb = ce_info->test_bb;
4288 basic_block then_bb = ce_info->then_bb;
4289 basic_block else_bb = ce_info->else_bb;
4290 basic_block join_bb = NULL_BLOCK;
4291 edge cur_edge;
4292 basic_block next;
4293 edge_iterator ei;
4294
4295 ce_info->last_test_bb = test_bb;
4296
4297 /* We only ever should get here after reload,
4298 and if we have conditional execution. */
4299 gcc_assert (reload_completed && targetm.have_conditional_execution ());
4300
4301 /* Discover whether any fallthru predecessors of the current test basic block
4302 were && tests (which jump to the else block) or || tests (which jump to
4303 the then block). */
4304 if (single_pred_p (test_bb)
4305 && single_pred_edge (test_bb)->flags == EDGE_FALLTHRU)
4306 {
4307 basic_block bb = single_pred (test_bb);
4308 basic_block target_bb;
4309 int max_insns = MAX_CONDITIONAL_EXECUTE;
4310 int n_insns;
4311
4312 /* Determine if the preceding block is an && or || block. */
4313 if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
4314 {
4315 ce_info->and_and_p = TRUE;
4316 target_bb = else_bb;
4317 }
4318 else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
4319 {
4320 ce_info->and_and_p = FALSE;
4321 target_bb = then_bb;
4322 }
4323 else
4324 target_bb = NULL_BLOCK;
4325
4326 if (target_bb && n_insns <= max_insns)
4327 {
4328 int total_insns = 0;
4329 int blocks = 0;
4330
4331 ce_info->last_test_bb = test_bb;
4332
4333 /* Found at least one && or || block, look for more. */
4334 do
4335 {
4336 ce_info->test_bb = test_bb = bb;
4337 total_insns += n_insns;
4338 blocks++;
4339
4340 if (!single_pred_p (bb))
4341 break;
4342
4343 bb = single_pred (bb);
4344 n_insns = block_jumps_and_fallthru_p (bb, target_bb);
4345 }
4346 while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);
4347
4348 ce_info->num_multiple_test_blocks = blocks;
4349 ce_info->num_multiple_test_insns = total_insns;
4350
4351 if (ce_info->and_and_p)
4352 ce_info->num_and_and_blocks = blocks;
4353 else
4354 ce_info->num_or_or_blocks = blocks;
4355 }
4356 }
4357
4358 /* The THEN block of an IF-THEN combo must have exactly one predecessor,
4359 other than any || blocks which jump to the THEN block. */
4360 if ((EDGE_COUNT (then_bb->preds) - ce_info->num_or_or_blocks) != 1)
4361 return FALSE;
4362
4363 /* The incoming edges of the THEN and ELSE blocks must not be complex. */
4364 FOR_EACH_EDGE (cur_edge, ei, then_bb->preds)
4365 {
4366 if (cur_edge->flags & EDGE_COMPLEX)
4367 return FALSE;
4368 }
4369
4370 FOR_EACH_EDGE (cur_edge, ei, else_bb->preds)
4371 {
4372 if (cur_edge->flags & EDGE_COMPLEX)
4373 return FALSE;
4374 }
4375
4376 /* The THEN block of an IF-THEN combo must have zero or one successor. */
4377 if (EDGE_COUNT (then_bb->succs) > 0
4378 && (!single_succ_p (then_bb)
4379 || (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
4380 || (epilogue_completed
4381 && tablejump_p (BB_END (then_bb), NULL, NULL))))
4382 return FALSE;
4383
4384 /* If the THEN block has no successors, conditional execution can still
4385 make a conditional call. Don't do this unless the ELSE block has
4386 only one incoming edge -- the CFG manipulation is too ugly otherwise.
4387 Check for the last insn of the THEN block being an indirect jump, which
4388 is listed as not having any successors, but confuses the rest of the CE
4389 code processing. ??? We should fix this in the future. */
4390 if (EDGE_COUNT (then_bb->succs) == 0)
4391 {
4392 if (single_pred_p (else_bb) && else_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4393 {
4394 rtx_insn *last_insn = BB_END (then_bb);
4395
4396 while (last_insn
4397 && NOTE_P (last_insn)
4398 && last_insn != BB_HEAD (then_bb))
4399 last_insn = PREV_INSN (last_insn);
4400
4401 if (last_insn
4402 && JUMP_P (last_insn)
4403 && ! simplejump_p (last_insn))
4404 return FALSE;
4405
4406 join_bb = else_bb;
4407 else_bb = NULL_BLOCK;
4408 }
4409 else
4410 return FALSE;
4411 }
4412
4413 /* If the THEN block's successor is the destination of the other edge out
4414 of the TEST block, then we have an IF-THEN combo without an ELSE. */
4415 else if (single_succ (then_bb) == else_bb)
4416 {
4417 join_bb = else_bb;
4418 else_bb = NULL_BLOCK;
4419 }
4420
4421 /* If the THEN and ELSE blocks meet in a subsequent block, and the ELSE
4422 has exactly one predecessor and one successor, and the outgoing edge
4423 is not complex, then we have an IF-THEN-ELSE combo. */
4424 else if (single_succ_p (else_bb)
4425 && single_succ (then_bb) == single_succ (else_bb)
4426 && single_pred_p (else_bb)
4427 && !(single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
4428 && !(epilogue_completed
4429 && tablejump_p (BB_END (else_bb), NULL, NULL)))
4430 join_bb = single_succ (else_bb);
4431
4432 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
4433 else
4434 return FALSE;
4435
4436 num_possible_if_blocks++;
4437
4438 if (dump_file)
4439 {
4440 fprintf (dump_file,
4441 "\nIF-THEN%s block found, pass %d, start block %d "
4442 "[insn %d], then %d [%d]",
4443 (else_bb) ? "-ELSE" : "",
4444 ce_info->pass,
4445 test_bb->index,
4446 BB_HEAD (test_bb) ? (int)INSN_UID (BB_HEAD (test_bb)) : -1,
4447 then_bb->index,
4448 BB_HEAD (then_bb) ? (int)INSN_UID (BB_HEAD (then_bb)) : -1);
4449
4450 if (else_bb)
4451 fprintf (dump_file, ", else %d [%d]",
4452 else_bb->index,
4453 BB_HEAD (else_bb) ? (int)INSN_UID (BB_HEAD (else_bb)) : -1);
4454
4455 fprintf (dump_file, ", join %d [%d]",
4456 join_bb->index,
4457 BB_HEAD (join_bb) ? (int)INSN_UID (BB_HEAD (join_bb)) : -1);
4458
4459 if (ce_info->num_multiple_test_blocks > 0)
4460 fprintf (dump_file, ", %d %s block%s last test %d [%d]",
4461 ce_info->num_multiple_test_blocks,
4462 (ce_info->and_and_p) ? "&&" : "||",
4463 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
4464 ce_info->last_test_bb->index,
4465 ((BB_HEAD (ce_info->last_test_bb))
4466 ? (int)INSN_UID (BB_HEAD (ce_info->last_test_bb))
4467 : -1));
4468
4469 fputc ('\n', dump_file);
4470 }
4471
4472 /* Make sure the IF, THEN, and ELSE blocks are adjacent. Actually, we get the
4473 first condition for free, since we've already asserted that there's a
4474 fallthru edge from IF to THEN. Likewise for the && and || blocks, since
4475 we checked the FALLTHRU flag, those are already adjacent to the last IF
4476 block. */
4477 /* ??? As an enhancement, move the ELSE block. Have to deal with
4478 BLOCK notes, if by no other means than backing out the merge if they
4479 exist. Sticky enough I don't want to think about it now. */
4480 next = then_bb;
4481 if (else_bb && (next = next->next_bb) != else_bb)
4482 return FALSE;
4483 if ((next = next->next_bb) != join_bb
4484 && join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4485 {
4486 if (else_bb)
4487 join_bb = NULL;
4488 else
4489 return FALSE;
4490 }
4491
4492 /* Do the real work. */
4493
4494 ce_info->else_bb = else_bb;
4495 ce_info->join_bb = join_bb;
4496
4497 /* If we have && and || tests, first try to combine them into the
4498 conditional code, and if that fails, go back and handle the block
4499 without the && and ||, which at present handles the && case only if
4500 there was no ELSE block. */
4501 if (cond_exec_process_if_block (ce_info, TRUE))
4502 return TRUE;
4503
4504 if (ce_info->num_multiple_test_blocks)
4505 {
4506 cancel_changes (0);
4507
4508 if (cond_exec_process_if_block (ce_info, FALSE))
4509 return TRUE;
4510 }
4511
4512 return FALSE;
4513 }
4514
4515 /* Convert a branch over a trap, or a branch
4516 to a trap, into a conditional trap. */
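
/* For illustration (hypothetical source): a guard such as

	if (p == 0)
	  __builtin_trap ();

   compiles to a conditional branch around (or to) a block containing
   only a trap insn.  On targets with a conditional trap instruction,
   the branch and the trap block collapse into a single insn that traps
   when P == 0.  */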
4517
4518 static int
4519 find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
4520 {
4521 basic_block then_bb = then_edge->dest;
4522 basic_block else_bb = else_edge->dest;
4523 basic_block other_bb, trap_bb;
4524 rtx_insn *trap, *jump;
4525 rtx cond;
4526 rtx_insn *cond_earliest;
4527 enum rtx_code code;
4528
4529 /* Locate the block with the trap instruction. */
4530 /* ??? While we look for no successors, we really ought to allow
4531 EH successors. Need to fix merge_if_block for that to work. */
4532 if ((trap = block_has_only_trap (then_bb)) != NULL)
4533 trap_bb = then_bb, other_bb = else_bb;
4534 else if ((trap = block_has_only_trap (else_bb)) != NULL)
4535 trap_bb = else_bb, other_bb = then_bb;
4536 else
4537 return FALSE;
4538
4539 if (dump_file)
4540 {
4541 fprintf (dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
4542 test_bb->index, trap_bb->index);
4543 }
4544
4545 /* If this is not a standard conditional jump, we can't parse it. */
4546 jump = BB_END (test_bb);
4547 cond = noce_get_condition (jump, &cond_earliest, false);
4548 if (! cond)
4549 return FALSE;
4550
4551 /* If the conditional jump is more than just a conditional jump, then
4552 we cannot do if-conversion on this block. */
4553 if (! onlyjump_p (jump))
4554 return FALSE;
4555
4556 /* We must be comparing objects whose modes imply the size. */
4557 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
4558 return FALSE;
4559
4560 /* Reverse the comparison code, if necessary. */
4561 code = GET_CODE (cond);
4562 if (then_bb == trap_bb)
4563 {
4564 code = reversed_comparison_code (cond, jump);
4565 if (code == UNKNOWN)
4566 return FALSE;
4567 }
4568
4569 /* Attempt to generate the conditional trap. */
4570 rtx_insn *seq = gen_cond_trap (code, copy_rtx (XEXP (cond, 0)),
4571 copy_rtx (XEXP (cond, 1)),
4572 TRAP_CODE (PATTERN (trap)));
4573 if (seq == NULL)
4574 return FALSE;
4575
4576 /* Emit the new insns before cond_earliest. */
4577 emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATION (trap));
4578
4579 /* Delete the trap block if possible. */
4580 remove_edge (trap_bb == then_bb ? then_edge : else_edge);
4581 df_set_bb_dirty (test_bb);
4582 df_set_bb_dirty (then_bb);
4583 df_set_bb_dirty (else_bb);
4584
4585 if (EDGE_COUNT (trap_bb->preds) == 0)
4586 {
4587 delete_basic_block (trap_bb);
4588 num_true_changes++;
4589 }
4590
4591 /* Wire together the blocks again. */
4592 if (current_ir_type () == IR_RTL_CFGLAYOUT)
4593 single_succ_edge (test_bb)->flags |= EDGE_FALLTHRU;
4594 else if (trap_bb == then_bb)
4595 {
4596 rtx lab = JUMP_LABEL (jump);
4597 rtx_insn *seq = targetm.gen_jump (lab);
4598 rtx_jump_insn *newjump = emit_jump_insn_after (seq, jump);
4599 LABEL_NUSES (lab) += 1;
4600 JUMP_LABEL (newjump) = lab;
4601 emit_barrier_after (newjump);
4602 }
4603 delete_insn (jump);
4604
4605 if (can_merge_blocks_p (test_bb, other_bb))
4606 {
4607 merge_blocks (test_bb, other_bb);
4608 num_true_changes++;
4609 }
4610
4611 num_updated_if_blocks++;
4612 return TRUE;
4613 }
4614
4615 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
4616 return it. */
4617
4618 static rtx_insn *
4619 block_has_only_trap (basic_block bb)
4620 {
4621 rtx_insn *trap;
4622
4623 /* We're not the exit block. */
4624 if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
4625 return NULL;
4626
4627 /* The block must have no successors. */
4628 if (EDGE_COUNT (bb->succs) > 0)
4629 return NULL;
4630
4631 /* The only instruction in the THEN block must be the trap. */
4632 trap = first_active_insn (bb);
4633 if (! (trap == BB_END (bb)
4634 && GET_CODE (PATTERN (trap)) == TRAP_IF
4635 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
4636 return NULL;
4637
4638 return trap;
4639 }
4640
4641 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
4642 transformable, but not necessarily the other. There need be no
4643 JOIN block.
4644
4645 Return TRUE if we were successful at converting the block.
4646
4647 Cases we'd like to look at:
4648
4649 (1)
4650 if (test) goto over; // x not live
4651 x = a;
4652 goto label;
4653 over:
4654
4655 becomes
4656
4657 x = a;
4658 if (! test) goto label;
4659
4660 (2)
4661 if (test) goto E; // x not live
4662 x = big();
4663 goto L;
4664 E:
4665 x = b;
4666 goto M;
4667
4668 becomes
4669
4670 x = b;
4671 if (test) goto M;
4672 x = big();
4673 goto L;
4674
4675 (3) // This one's really only interesting for targets that can do
4676 // multiway branching, e.g. IA-64 BBB bundles. For other targets
4677 // it results in multiple branches on a cache line, which often
4678 // does not sit well with predictors.
4679
4680 if (test1) goto E; // predicted not taken
4681 x = a;
4682 if (test2) goto F;
4683 ...
4684 E:
4685 x = b;
4686 J:
4687
4688 becomes
4689
4690 x = a;
4691 if (test1) goto E;
4692 if (test2) goto F;
4693
4694 Notes:
4695
4696 (A) Don't do (2) if the branch is predicted against the block we're
4697 eliminating. Do it anyway if we can eliminate a branch; this requires
4698 that the sole successor of the eliminated block postdominate the other
4699 side of the if.
4700
4701 (B) With CE, on (3) we can steal from both sides of the if, creating
4702
4703 if (test1) x = a;
4704 if (!test1) x = b;
4705 if (test1) goto J;
4706 if (test2) goto F;
4707 ...
4708 J:
4709
4710 Again, this is most useful if J postdominates.
4711
4712 (C) CE substitutes for helpful life information.
4713
4714 (D) These heuristics need a lot of work. */
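
/* A compilable restatement of case (2) above; the names are hypothetical
   and the function is for illustration only, hence the #if 0.  */
#if 0
extern int big (void);

int
case_2 (int test, int b)
{
  int x;
  if (test)
    x = b;		/* Cheap side: find_if_case_2 speculates this.  */
  else
    x = big ();		/* Expensive side; stays behind the branch.  */
  return x;
}
/* After conversion the RTL is effectively "x = b; if (test) goto join;
   x = big (); ...", removing one jump from the predicted path.  */
#endif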
4715
4716 /* Tests for case 1 above. */
4717
4718 static int
4719 find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
4720 {
4721 basic_block then_bb = then_edge->dest;
4722 basic_block else_bb = else_edge->dest;
4723 basic_block new_bb;
4724 int then_bb_index, then_prob;
4725 rtx else_target = NULL_RTX;
4726
4727 /* If we are partitioning hot/cold basic blocks, we don't want to
4728 mess up unconditional or indirect jumps that cross between hot
4729 and cold sections.
4730
4731 Basic block partitioning may result in some jumps that appear to
4732 be optimizable (or blocks that appear to be mergeable), but which really
4733 must be left untouched (they are required to make it safely across
4734 partition boundaries). See the comments at the top of
4735 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
4736
4737 if ((BB_END (then_bb)
4738 && JUMP_P (BB_END (then_bb))
4739 && CROSSING_JUMP_P (BB_END (then_bb)))
4740 || (BB_END (test_bb)
4741 && JUMP_P (BB_END (test_bb))
4742 && CROSSING_JUMP_P (BB_END (test_bb)))
4743 || (BB_END (else_bb)
4744 && JUMP_P (BB_END (else_bb))
4745 && CROSSING_JUMP_P (BB_END (else_bb))))
4746 return FALSE;
4747
4748 /* THEN has one successor. */
4749 if (!single_succ_p (then_bb))
4750 return FALSE;
4751
4752 /* THEN does not fall through, but is not strange either. */
4753 if (single_succ_edge (then_bb)->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
4754 return FALSE;
4755
4756 /* THEN has one predecessor. */
4757 if (!single_pred_p (then_bb))
4758 return FALSE;
4759
4760 /* THEN must do something. */
4761 if (forwarder_block_p (then_bb))
4762 return FALSE;
4763
4764 num_possible_if_blocks++;
4765 if (dump_file)
4766 fprintf (dump_file,
4767 "\nIF-CASE-1 found, start %d, then %d\n",
4768 test_bb->index, then_bb->index);
4769
4770 if (then_edge->probability)
4771 then_prob = REG_BR_PROB_BASE - then_edge->probability;
4772 else
4773 then_prob = REG_BR_PROB_BASE / 2;
4774
4775 /* We're speculating from the THEN path, we want to make sure the cost
4776 of speculation is within reason. */
4777 if (! cheap_bb_rtx_cost_p (then_bb, then_prob,
4778 COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (then_edge->src),
4779 predictable_edge_p (then_edge)))))
4780 return FALSE;
4781
4782 if (else_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
4783 {
4784 rtx_insn *jump = BB_END (else_edge->src);
4785 gcc_assert (JUMP_P (jump));
4786 else_target = JUMP_LABEL (jump);
4787 }
4788
4789 /* Registers set are dead, or are predicable. */
4790 if (! dead_or_predicable (test_bb, then_bb, else_bb,
4791 single_succ_edge (then_bb), 1))
4792 return FALSE;
4793
4794 /* Conversion went ok, including moving the insns and fixing up the
4795 jump. Adjust the CFG to match. */
4796
4797 /* We can avoid creating a new basic block if then_bb is immediately
4798 followed by else_bb, i.e. deleting then_bb allows test_bb to fall
4799 through to else_bb. */
4800
4801 if (then_bb->next_bb == else_bb
4802 && then_bb->prev_bb == test_bb
4803 && else_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4804 {
4805 redirect_edge_succ (FALLTHRU_EDGE (test_bb), else_bb);
4806 new_bb = 0;
4807 }
4808 else if (else_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
4809 new_bb = force_nonfallthru_and_redirect (FALLTHRU_EDGE (test_bb),
4810 else_bb, else_target);
4811 else
4812 new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb),
4813 else_bb);
4814
4815 df_set_bb_dirty (test_bb);
4816 df_set_bb_dirty (else_bb);
4817
4818 then_bb_index = then_bb->index;
4819 delete_basic_block (then_bb);
4820
4821 /* Make the rest of the code believe that the newly created block is the
4822 THEN_BB block we removed. */
4823 if (new_bb)
4824 {
4825 df_bb_replace (then_bb_index, new_bb);
4826 /* This should have been done above via force_nonfallthru_and_redirect
4827 (possibly called from redirect_edge_and_branch_force). */
4828 gcc_checking_assert (BB_PARTITION (new_bb) == BB_PARTITION (test_bb));
4829 }
4830
4831 num_true_changes++;
4832 num_updated_if_blocks++;
4833
4834 return TRUE;
4835 }
4836
4837 /* Test for case 2 above. */
4838
4839 static int
4840 find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
4841 {
4842 basic_block then_bb = then_edge->dest;
4843 basic_block else_bb = else_edge->dest;
4844 edge else_succ;
4845 int then_prob, else_prob;
4846
4847 /* We do not want to speculate (empty) loop latches. */
4848 if (current_loops
4849 && else_bb->loop_father->latch == else_bb)
4850 return FALSE;
4851
4852 /* If we are partitioning hot/cold basic blocks, we don't want to
4853 mess up unconditional or indirect jumps that cross between hot
4854 and cold sections.
4855
4856 Basic block partitioning may result in some jumps that appear to
4857 be optimizable (or blocks that appear to be mergeable), but which really
4858 must be left untouched (they are required to make it safely across
4859 partition boundaries). See the comments at the top of
4860 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
4861
4862 if ((BB_END (then_bb)
4863 && JUMP_P (BB_END (then_bb))
4864 && CROSSING_JUMP_P (BB_END (then_bb)))
4865 || (BB_END (test_bb)
4866 && JUMP_P (BB_END (test_bb))
4867 && CROSSING_JUMP_P (BB_END (test_bb)))
4868 || (BB_END (else_bb)
4869 && JUMP_P (BB_END (else_bb))
4870 && CROSSING_JUMP_P (BB_END (else_bb))))
4871 return FALSE;
4872
4873 /* ELSE has one successor. */
4874 if (!single_succ_p (else_bb))
4875 return FALSE;
4876 else
4877 else_succ = single_succ_edge (else_bb);
4878
4879 /* ELSE outgoing edge is not complex. */
4880 if (else_succ->flags & EDGE_COMPLEX)
4881 return FALSE;
4882
4883 /* ELSE has one predecessor. */
4884 if (!single_pred_p (else_bb))
4885 return FALSE;
4886
4887 /* THEN is not EXIT. */
4888 if (then_bb->index < NUM_FIXED_BLOCKS)
4889 return FALSE;
4890
4891 if (else_edge->probability)
4892 {
4893 else_prob = else_edge->probability;
4894 then_prob = REG_BR_PROB_BASE - else_prob;
4895 }
4896 else
4897 {
4898 else_prob = REG_BR_PROB_BASE / 2;
4899 then_prob = REG_BR_PROB_BASE / 2;
4900 }
4901
4902 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
4903 if (else_prob > then_prob)
4904 ;
4905 else if (else_succ->dest->index < NUM_FIXED_BLOCKS
4906 || dominated_by_p (CDI_POST_DOMINATORS, then_bb,
4907 else_succ->dest))
4908 ;
4909 else
4910 return FALSE;
4911
4912 num_possible_if_blocks++;
4913 if (dump_file)
4914 fprintf (dump_file,
4915 "\nIF-CASE-2 found, start %d, else %d\n",
4916 test_bb->index, else_bb->index);
4917
4918 /* We're speculating from the ELSE path, we want to make sure the cost
4919 of speculation is within reason. */
4920 if (! cheap_bb_rtx_cost_p (else_bb, else_prob,
4921 COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (else_edge->src),
4922 predictable_edge_p (else_edge)))))
4923 return FALSE;
4924
4925 /* Registers set are dead, or are predicable. */
4926 if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ, 0))
4927 return FALSE;
4928
4929 /* Conversion went ok, including moving the insns and fixing up the
4930 jump. Adjust the CFG to match. */
4931
4932 df_set_bb_dirty (test_bb);
4933 df_set_bb_dirty (then_bb);
4934 delete_basic_block (else_bb);
4935
4936 num_true_changes++;
4937 num_updated_if_blocks++;
4938
4939 /* ??? We may now fallthru from one of THEN's successors into a join
4940 block. Rerun cleanup_cfg? Examine things manually? Wait? */
4941
4942 return TRUE;
4943 }
4944
4945 /* Used by the code above to perform the actual rtl transformations.
4946 Return TRUE if successful.
4947
4948 TEST_BB is the block containing the conditional branch. MERGE_BB
4949 is the block containing the code to manipulate. DEST_EDGE is an
4950 edge representing a jump to the join block; after the conversion,
4951 TEST_BB should be branching to its destination.
4952 REVERSEP is true if the sense of the branch should be reversed. */
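
/* For illustration (hypothetical source): in case (1) of the comment
   above, TEST_BB ends in "if (test) goto over", MERGE_BB holds
   "x = a; goto label", OTHER_BB is the block at OVER, and DEST_EDGE
   leads to LABEL.  The conversion succeeds when either the registers
   MERGE_BB sets are dead in OTHER_BB, so its insns can simply be moved
   above the branch, or the target can predicate them.  */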
4953
4954 static int
4955 dead_or_predicable (basic_block test_bb, basic_block merge_bb,
4956 basic_block other_bb, edge dest_edge, int reversep)
4957 {
4958 basic_block new_dest = dest_edge->dest;
4959 rtx_insn *head, *end, *jump;
4960 rtx_insn *earliest = NULL;
4961 rtx old_dest;
4962 bitmap merge_set = NULL;
4963 /* Number of pending changes. */
4964 int n_validated_changes = 0;
4965 rtx new_dest_label = NULL_RTX;
4966
4967 jump = BB_END (test_bb);
4968
4969 /* Find the extent of the real code in the merge block. */
4970 head = BB_HEAD (merge_bb);
4971 end = BB_END (merge_bb);
4972
4973 while (DEBUG_INSN_P (end) && end != head)
4974 end = PREV_INSN (end);
4975
4976 /* If merge_bb ends with a tablejump, predicating/moving its insns
4977 into test_bb and then deleting merge_bb would remove the jumptable
4978 that follows merge_bb along with merge_bb itself, leaving an
4979 unresolved reference to the jumptable. */
4980 if (tablejump_p (end, NULL, NULL))
4981 return FALSE;
4982
4983 if (LABEL_P (head))
4984 head = NEXT_INSN (head);
4985 while (DEBUG_INSN_P (head) && head != end)
4986 head = NEXT_INSN (head);
4987 if (NOTE_P (head))
4988 {
4989 if (head == end)
4990 {
4991 head = end = NULL;
4992 goto no_body;
4993 }
4994 head = NEXT_INSN (head);
4995 while (DEBUG_INSN_P (head) && head != end)
4996 head = NEXT_INSN (head);
4997 }
4998
4999 if (JUMP_P (end))
5000 {
5001 if (!onlyjump_p (end))
5002 return FALSE;
5003 if (head == end)
5004 {
5005 head = end = NULL;
5006 goto no_body;
5007 }
5008 end = PREV_INSN (end);
5009 while (DEBUG_INSN_P (end) && end != head)
5010 end = PREV_INSN (end);
5011 }
5012
5013 /* Don't move frame-related insns across the conditional branch. This
5014 can leave one of the paths of the branch with wrong unwind info. */
5015 if (epilogue_completed)
5016 {
5017 rtx_insn *insn = head;
5018 while (1)
5019 {
5020 if (INSN_P (insn) && RTX_FRAME_RELATED_P (insn))
5021 return FALSE;
5022 if (insn == end)
5023 break;
5024 insn = NEXT_INSN (insn);
5025 }
5026 }
5027
5028 /* Disable handling dead code by conditional execution if the machine needs
5029 to do anything funny with the tests, etc. */
5030 #ifndef IFCVT_MODIFY_TESTS
5031 if (targetm.have_conditional_execution ())
5032 {
5033 /* In the conditional execution case, we have things easy. We know
5034 the condition is reversible. We don't have to check life info
5035 because we're going to conditionally execute the code anyway.
5036 All that's left is making sure the insns involved can actually
5037 be predicated. */
5038
5039 rtx cond;
5040
5041 cond = cond_exec_get_condition (jump);
5042 if (! cond)
5043 return FALSE;
5044
5045 rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
5046 int prob_val = (note ? XINT (note, 0) : -1);
5047
5048 if (reversep)
5049 {
5050 enum rtx_code rev = reversed_comparison_code (cond, jump);
5051 if (rev == UNKNOWN)
5052 return FALSE;
5053 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
5054 XEXP (cond, 1));
5055 if (prob_val >= 0)
5056 prob_val = REG_BR_PROB_BASE - prob_val;
5057 }
5058
5059 if (cond_exec_process_insns (NULL, head, end, cond, prob_val, 0)
5060 && verify_changes (0))
5061 n_validated_changes = num_validated_changes ();
5062 else
5063 cancel_changes (0);
5064
5065 earliest = jump;
5066 }
5067 #endif
5068
5069 /* If we allocated new pseudos (e.g. in the conditional move
5070 expander called from noce_emit_cmove), we must resize the
5071 array first. */
5072 if (max_regno < max_reg_num ())
5073 max_regno = max_reg_num ();
5074
5075 /* Try the NCE path if the CE path did not result in any changes. */
5076 if (n_validated_changes == 0)
5077 {
5078 rtx cond;
5079 rtx_insn *insn;
5080 regset live;
5081 bool success;
5082
5083 /* In the non-conditional execution case, we have to verify that there
5084 are no trapping operations, no calls, no references to memory, and
5085 that any registers modified are dead at the branch site. */
5086
5087 if (!any_condjump_p (jump))
5088 return FALSE;
5089
5090 /* Find the extent of the conditional. */
5091 cond = noce_get_condition (jump, &earliest, false);
5092 if (!cond)
5093 return FALSE;
5094
5095 live = BITMAP_ALLOC (&reg_obstack);
5096 simulate_backwards_to_point (merge_bb, live, end);
5097 success = can_move_insns_across (head, end, earliest, jump,
5098 merge_bb, live,
5099 df_get_live_in (other_bb), NULL);
5100 BITMAP_FREE (live);
5101 if (!success)
5102 return FALSE;
5103
5104 /* Collect the set of registers set in MERGE_BB. */
5105 merge_set = BITMAP_ALLOC (&reg_obstack);
5106
5107 FOR_BB_INSNS (merge_bb, insn)
5108 if (NONDEBUG_INSN_P (insn))
5109 df_simulate_find_defs (insn, merge_set);
5110
5111 /* If shrink-wrapping, disable this optimization when test_bb is
5112 the first basic block and merge_bb exits. The idea is not to
5113 move code setting up a return register, as that may clobber a
5114 register used to pass function parameters, which then must be
5115 saved in callee-saved regs. A callee-saved reg requires the
5116 prologue, killing a shrink-wrap opportunity. */
5117 if ((SHRINK_WRAPPING_ENABLED && !epilogue_completed)
5118 && ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == test_bb
5119 && single_succ_p (new_dest)
5120 && single_succ (new_dest) == EXIT_BLOCK_PTR_FOR_FN (cfun)
5121 && bitmap_intersect_p (df_get_live_in (new_dest), merge_set))
5122 {
5123 regset return_regs;
5124 unsigned int i;
5125
5126 return_regs = BITMAP_ALLOC (&reg_obstack);
5127
5128 /* Start off with the intersection of regs used to pass
5129 params and regs used to return values. */
5130 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5131 if (FUNCTION_ARG_REGNO_P (i)
5132 && targetm.calls.function_value_regno_p (i))
5133 bitmap_set_bit (return_regs, INCOMING_REGNO (i));
5134
5135 bitmap_and_into (return_regs,
5136 df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5137 bitmap_and_into (return_regs,
5138 df_get_live_in (EXIT_BLOCK_PTR_FOR_FN (cfun)));
5139 if (!bitmap_empty_p (return_regs))
5140 {
5141 FOR_BB_INSNS_REVERSE (new_dest, insn)
5142 if (NONDEBUG_INSN_P (insn))
5143 {
5144 df_ref def;
5145
5146 /* If this insn sets any reg in return_regs, add all
5147 reg uses to the set of regs we're interested in. */
5148 FOR_EACH_INSN_DEF (def, insn)
5149 if (bitmap_bit_p (return_regs, DF_REF_REGNO (def)))
5150 {
5151 df_simulate_uses (insn, return_regs);
5152 break;
5153 }
5154 }
5155 if (bitmap_intersect_p (merge_set, return_regs))
5156 {
5157 BITMAP_FREE (return_regs);
5158 BITMAP_FREE (merge_set);
5159 return FALSE;
5160 }
5161 }
5162 BITMAP_FREE (return_regs);
5163 }
5164 }
5165
5166 no_body:
5167 /* We don't want to use normal invert_jump or redirect_jump because
5168 we don't want delete_insn to be called. Also, we want to do our own
5169 change group management. */
5170
5171 old_dest = JUMP_LABEL (jump);
5172 if (other_bb != new_dest)
5173 {
5174 if (!any_condjump_p (jump))
5175 goto cancel;
5176
5177 if (JUMP_P (BB_END (dest_edge->src)))
5178 new_dest_label = JUMP_LABEL (BB_END (dest_edge->src));
5179 else if (new_dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
5180 new_dest_label = ret_rtx;
5181 else
5182 new_dest_label = block_label (new_dest);
5183
5184 rtx_jump_insn *jump_insn = as_a <rtx_jump_insn *> (jump);
5185 if (reversep
5186 ? ! invert_jump_1 (jump_insn, new_dest_label)
5187 : ! redirect_jump_1 (jump_insn, new_dest_label))
5188 goto cancel;
5189 }
5190
5191 if (verify_changes (n_validated_changes))
5192 confirm_change_group ();
5193 else
5194 goto cancel;
5195
5196 if (other_bb != new_dest)
5197 {
5198 redirect_jump_2 (as_a <rtx_jump_insn *> (jump), old_dest, new_dest_label,
5199 0, reversep);
5200
5201 redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
5202 if (reversep)
5203 {
5204 std::swap (BRANCH_EDGE (test_bb)->count,
5205 FALLTHRU_EDGE (test_bb)->count);
5206 std::swap (BRANCH_EDGE (test_bb)->probability,
5207 FALLTHRU_EDGE (test_bb)->probability);
5208 update_br_prob_note (test_bb);
5209 }
5210 }
5211
5212 /* Move the insns out of MERGE_BB to before the branch. */
5213 if (head != NULL)
5214 {
5215 rtx_insn *insn;
5216
5217 if (end == BB_END (merge_bb))
5218 BB_END (merge_bb) = PREV_INSN (head);
5219
5220 /* PR 21767: when moving insns above a conditional branch, the REG_EQUAL
5221 notes being moved might become invalid. */
5222 insn = head;
5223 do
5224 {
5225 rtx note;
5226
5227 if (! INSN_P (insn))
5228 continue;
5229 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5230 if (! note)
5231 continue;
5232 remove_note (insn, note);
5233 } while (insn != end && (insn = NEXT_INSN (insn)));
5234
5235 /* PR46315: when moving insns above a conditional branch, the REG_EQUAL
5236 notes referring to the registers being set might become invalid. */
5237 if (merge_set)
5238 {
5239 unsigned i;
5240 bitmap_iterator bi;
5241
5242 EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi)
5243 remove_reg_equal_equiv_notes_for_regno (i);
5244
5245 BITMAP_FREE (merge_set);
5246 }
5247
5248 reorder_insns (head, end, PREV_INSN (earliest));
5249 }
5250
5251 /* Remove the jump and edge if we can. */
5252 if (other_bb == new_dest)
5253 {
5254 delete_insn (jump);
5255 remove_edge (BRANCH_EDGE (test_bb));
5256 /* ??? Can't merge blocks here, as then_bb is still in use.
5257 At minimum, the merge will get done just before bb-reorder. */
5258 }
5259
5260 return TRUE;
5261
5262 cancel:
5263 cancel_changes (0);
5264
5265 if (merge_set)
5266 BITMAP_FREE (merge_set);
5267
5268 return FALSE;
5269 }
5270 \f
5271 /* Main entry point for all if-conversion. AFTER_COMBINE is true if
5272 we are after combine pass. */
5273
5274 static void
5275 if_convert (bool after_combine)
5276 {
5277 basic_block bb;
5278 int pass;
5279
5280 if (optimize == 1)
5281 {
5282 df_live_add_problem ();
5283 df_live_set_all_dirty ();
5284 }
5285
5286 /* Record whether we are after combine pass. */
5287 ifcvt_after_combine = after_combine;
5288 have_cbranchcc4 = (direct_optab_handler (cbranch_optab, CCmode)
5289 != CODE_FOR_nothing);
5290 num_possible_if_blocks = 0;
5291 num_updated_if_blocks = 0;
5292 num_true_changes = 0;
5293
5294 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5295 mark_loop_exit_edges ();
5296 loop_optimizer_finalize ();
5297 free_dominance_info (CDI_DOMINATORS);
5298
5299 /* Compute postdominators. */
5300 calculate_dominance_info (CDI_POST_DOMINATORS);
5301
5302 df_set_flags (DF_LR_RUN_DCE);
5303
5304 /* Go through each of the basic blocks looking for things to convert. If we
5305 have conditional execution, we make multiple passes to allow us to handle
5306 IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks. */
5307 pass = 0;
5308 do
5309 {
5310 df_analyze ();
5311 /* Only need to do DCE on the first pass. */
5312 df_clear_flags (DF_LR_RUN_DCE);
5313 cond_exec_changed_p = FALSE;
5314 pass++;
5315
5316 #ifdef IFCVT_MULTIPLE_DUMPS
5317 if (dump_file && pass > 1)
5318 fprintf (dump_file, "\n\n========== Pass %d ==========\n", pass);
5319 #endif
5320
5321 FOR_EACH_BB_FN (bb, cfun)
5322 {
5323 basic_block new_bb;
5324 while (!df_get_bb_dirty (bb)
5325 && (new_bb = find_if_header (bb, pass)) != NULL)
5326 bb = new_bb;
5327 }
5328
5329 #ifdef IFCVT_MULTIPLE_DUMPS
5330 if (dump_file && cond_exec_changed_p)
5331 print_rtl_with_bb (dump_file, get_insns (), dump_flags);
5332 #endif
5333 }
5334 while (cond_exec_changed_p);
5335
5336 #ifdef IFCVT_MULTIPLE_DUMPS
5337 if (dump_file)
5338 fprintf (dump_file, "\n\n========== no more changes\n");
5339 #endif
5340
5341 free_dominance_info (CDI_POST_DOMINATORS);
5342
5343 if (dump_file)
5344 fflush (dump_file);
5345
5346 clear_aux_for_blocks ();
5347
5348 /* If we allocated new pseudos, we must resize the array for sched1. */
5349 if (max_regno < max_reg_num ())
5350 max_regno = max_reg_num ();
5351
5352 /* Write the final stats. */
5353 if (dump_file && num_possible_if_blocks > 0)
5354 {
5355 fprintf (dump_file,
5356 "\n%d possible IF blocks searched.\n",
5357 num_possible_if_blocks);
5358 fprintf (dump_file,
5359 "%d IF blocks converted.\n",
5360 num_updated_if_blocks);
5361 fprintf (dump_file,
5362 "%d true changes made.\n\n\n",
5363 num_true_changes);
5364 }
5365
5366 if (optimize == 1)
5367 df_remove_problem (df_live);
5368
5369 checking_verify_flow_info ();
5370 }
5371 \f
5372 /* If-conversion and CFG cleanup. */
5373 static unsigned int
5374 rest_of_handle_if_conversion (void)
5375 {
5376 if (flag_if_conversion)
5377 {
5378 if (dump_file)
5379 {
5380 dump_reg_info (dump_file);
5381 dump_flow_info (dump_file, dump_flags);
5382 }
5383 cleanup_cfg (CLEANUP_EXPENSIVE);
5384 if_convert (false);
5385 }
5386
5387 cleanup_cfg (0);
5388 return 0;
5389 }
5390
5391 namespace {
5392
5393 const pass_data pass_data_rtl_ifcvt =
5394 {
5395 RTL_PASS, /* type */
5396 "ce1", /* name */
5397 OPTGROUP_NONE, /* optinfo_flags */
5398 TV_IFCVT, /* tv_id */
5399 0, /* properties_required */
5400 0, /* properties_provided */
5401 0, /* properties_destroyed */
5402 0, /* todo_flags_start */
5403 TODO_df_finish, /* todo_flags_finish */
5404 };
5405
5406 class pass_rtl_ifcvt : public rtl_opt_pass
5407 {
5408 public:
5409 pass_rtl_ifcvt (gcc::context *ctxt)
5410 : rtl_opt_pass (pass_data_rtl_ifcvt, ctxt)
5411 {}
5412
5413 /* opt_pass methods: */
5414 virtual bool gate (function *)
5415 {
5416 return (optimize > 0) && dbg_cnt (if_conversion);
5417 }
5418
5419 virtual unsigned int execute (function *)
5420 {
5421 return rest_of_handle_if_conversion ();
5422 }
5423
5424 }; // class pass_rtl_ifcvt
5425
5426 } // anon namespace
5427
5428 rtl_opt_pass *
5429 make_pass_rtl_ifcvt (gcc::context *ctxt)
5430 {
5431 return new pass_rtl_ifcvt (ctxt);
5432 }
5433
5434
5435 /* Rerun if-conversion, as combine may have simplified things enough
5436 to now meet sequence length restrictions. */
5437
5438 namespace {
5439
5440 const pass_data pass_data_if_after_combine =
5441 {
5442 RTL_PASS, /* type */
5443 "ce2", /* name */
5444 OPTGROUP_NONE, /* optinfo_flags */
5445 TV_IFCVT, /* tv_id */
5446 0, /* properties_required */
5447 0, /* properties_provided */
5448 0, /* properties_destroyed */
5449 0, /* todo_flags_start */
5450 TODO_df_finish, /* todo_flags_finish */
5451 };
5452
5453 class pass_if_after_combine : public rtl_opt_pass
5454 {
5455 public:
5456 pass_if_after_combine (gcc::context *ctxt)
5457 : rtl_opt_pass (pass_data_if_after_combine, ctxt)
5458 {}
5459
5460 /* opt_pass methods: */
5461 virtual bool gate (function *)
5462 {
5463 return optimize > 0 && flag_if_conversion
5464 && dbg_cnt (if_after_combine);
5465 }
5466
5467 virtual unsigned int execute (function *)
5468 {
5469 if_convert (true);
5470 return 0;
5471 }
5472
5473 }; // class pass_if_after_combine
5474
5475 } // anon namespace
5476
5477 rtl_opt_pass *
5478 make_pass_if_after_combine (gcc::context *ctxt)
5479 {
5480 return new pass_if_after_combine (ctxt);
5481 }
5482
5483
5484 namespace {
5485
5486 const pass_data pass_data_if_after_reload =
5487 {
5488 RTL_PASS, /* type */
5489 "ce3", /* name */
5490 OPTGROUP_NONE, /* optinfo_flags */
5491 TV_IFCVT2, /* tv_id */
5492 0, /* properties_required */
5493 0, /* properties_provided */
5494 0, /* properties_destroyed */
5495 0, /* todo_flags_start */
5496 TODO_df_finish, /* todo_flags_finish */
5497 };
5498
5499 class pass_if_after_reload : public rtl_opt_pass
5500 {
5501 public:
5502 pass_if_after_reload (gcc::context *ctxt)
5503 : rtl_opt_pass (pass_data_if_after_reload, ctxt)
5504 {}
5505
5506 /* opt_pass methods: */
5507 virtual bool gate (function *)
5508 {
5509 return optimize > 0 && flag_if_conversion2
5510 && dbg_cnt (if_after_reload);
5511 }
5512
5513 virtual unsigned int execute (function *)
5514 {
5515 if_convert (true);
5516 return 0;
5517 }
5518
5519 }; // class pass_if_after_reload
5520
5521 } // anon namespace
5522
5523 rtl_opt_pass *
5524 make_pass_if_after_reload (gcc::context *ctxt)
5525 {
5526 return new pass_if_after_reload (ctxt);
5527 }