1 /* An SH specific RTL pass that tries to combine comparisons and redundant
2 condition code register stores across multiple basic blocks.
3 Copyright (C) 2013-2014 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
25 #include "basic-block.h"
28 #include "insn-config.h"
29 #include "insn-codes.h"
32 #include "tree-pass.h"
41 This pass tries to optimize for example this:
48 into something simpler:
53 Such sequences can be identified by looking for conditional branches and
54 checking whether the ccreg is set before the conditional branch
55 by testing another register for != 0, which was set by a ccreg store.
56 This can be optimized by eliminating the redundant comparison and
57 inverting the branch condition. There can be multiple comparisons in
58 different basic blocks that all end up in the redundant test insn before the
59 conditional branch. Some example RTL ...
65 (set (reg:SI 147 t) (eq:SI (reg:SI 173) (const_int 0)))
66 (set (reg:SI 167) (xor:SI (reg:SI 147 t) (const_int 1)))
70 (set (reg:SI 147 t) (eq:SI (reg:SI 177) (const_int 0)))
71 (set (reg:SI 167) (reg:SI 147 t))
75 (set (reg:SI 147 t) (eq:SI (reg:SI 167) (const_int 0)))
76 (set (pc) (if_then_else (ne (reg:SI 147 t) (const_int 0))
77 (label_ref:SI 50) (pc)))
79 In [bb 4] elimination of the comparison would require inversion of the branch
80 condition and compensation of other BBs.
81 Instead an inverting reg-move can be used:
84 (set (reg:SI 167) (reg:SI 173))
88 (set (reg:SI 167) (not:SI (reg:SI 177)))
92 (set (reg:SI 147 t) (eq:SI (reg:SI 167) (const_int 0)))
93 (set (pc) (if_then_else (ne (reg:SI 147 t) (const_int 0))
94 (label_ref:SI 50) (pc)))
101 (set (reg:SI 147 t) (gt:SI (reg:SI 173) (reg:SI 175)))
102 (set (reg:SI 167) (reg:SI 147 t))
106 (set (reg:SI 147 t) (gt:SI (reg:SI 177) (reg:SI 179)))
107 (set (reg:SI 167) (reg:SI 147 t))
111 (set (reg:SI 147 t) (eq:SI (reg:SI 167) (const_int 0)))
112 (set (pc) (if_then_else (ne (reg:SI 147 t) (const_int 0))
113 (label_ref:SI 51) (pc)))
115 The common comparison is factored out and the branch condition is inverted:
118 (set (reg:SI 167) (reg:SI 173))
119 (set (reg:SI 200) (reg:SI 175))
123 (set (reg:SI 167) (reg:SI 177))
124 (set (reg:SI 200) (reg:SI 179))
128 (set (reg:SI 147 t) (gt:SI (reg:SI 167) (reg:SI 200)))
129 (set (pc) (if_then_else (eq (reg:SI 147 t) (const_int 0))
130 (label_ref:SI 51) (pc)))
137 (set (reg:SI 147 t) (gt:SI (reg:SI 173) (reg:SI 175)))
138 (set (reg:SI 167) (reg:SI 147 t))
142 (set (reg:SI 147 t) (ge:SI (reg:SI 179) (reg:SI 177)))
143 (set (reg:SI 167) (reg:SI 147 t))
147 (set (reg:SI 147 t) (eq:SI (reg:SI 167) (const_int 0)))
148 (set (pc) (if_then_else (ne (reg:SI 147 t) (const_int 0))
149 (label_ref:SI 51) (pc)))
151 The T bit lifetime is extended and the branch condition is inverted:
154 (set (reg:SI 147 t) (gt:SI (reg:SI 173) (reg:SI 175)))
158 (set (reg:SI 147 t) (ge:SI (reg:SI 179) (reg:SI 177)))
162 (set (pc) (if_then_else (eq (reg:SI 147 t) (const_int 0))
163 (label_ref:SI 51) (pc)))
170 (set (reg:SI 147 t) (eq:SI (reg:SI 173) (const_int 5)))
171 (set (reg:SI 167) (reg:SI 147 t))
175 (set (reg:SI 147 t) (eq:SI (reg:SI 176) (const_int 5)))
176 (set (reg:SI 167) (xor:SI (reg:SI 147 t) (const_int 1)))
180 (set (reg:SI 147 t) (eq:SI (reg:SI 167) (const_int 0)))
181 (set (pc) (if_then_else (ne (reg:SI 147 t) (const_int 0))
182 (label_ref:SI 50) (pc)))
184 In this case the comparisons are the same and could be combined, but the
185 branch condition is different for [bb 3] and [bb 5]. Since the comparison
186 is not a zero comparison, we can't negate one of the operands. The best thing
187 we can do here is to eliminate the comparison before the cbranch and invert
188 the ccreg in one of the BBs. On SH2A this will utilize the 'nott' instruction.
191 (set (reg:SI 147 t) (eq:SI (reg:SI 173) (const_int 5)))
195 (set (reg:SI 147 t) (eq:SI (reg:SI 176) (const_int 5)))
196 (set (reg:SI 147 t) (xor:SI (reg:SI 147 t) (const_int 1)))
200 (set (pc) (if_then_else (eq (reg:SI 147 t) (const_int 0)) // inverted
201 (label_ref:SI 50) (pc)))
204 In order to handle cases such as above the RTL pass does the following:
206 - Find the ccreg sets (comparisons) and ccreg stores
207 (inverting and non-inverting) in all related BBs.
209 - If the comparison types in the BBs are all the same, try to combine the
210 comparisons in the BBs and replace the zero comparison before the cbranch
211 with the common comparison.
213 - If the cstores are the same, move the comparison before the cbranch
214 and replace the comparisons in the BBs with reg-reg copies to get the
215 operands in place (create new pseudo regs).
217 - If the cstores differ, try to apply the special case
218 (eq (reg) (const_int 0)) -> inverted = (not (reg)).
219 for the subordinate cstore types and eliminate the dominating ones.
221 - If the comparison types in the BBs are not the same, or the first approach
222 doesn't work out for some reason, try to eliminate the comparison before the
223 cbranch by extending the lifetime of the ccreg by leaving the individual
224 comparisons but eliminating the cstores.
225 If the cstores are all the same this is straightforward.
226 If they're not, try to reverse the ccreg for the subordinate cstore type
227 and eliminate the dominating one.
230 // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
233 #define log_msg(...)\
234 do { if (dump_file != NULL) fprintf (dump_file, __VA_ARGS__); } while (0)
237 do { if (dump_file != NULL) print_rtl_single (dump_file, \
238 (const_rtx)i); } while (0)
241 do { if (dump_file != NULL) print_rtl (dump_file, (const_rtx)r); } while (0)
243 #define log_return(retval, ...)\
244 do { if (dump_file != NULL) fprintf (dump_file, __VA_ARGS__); \
245 return retval; } while (0)
247 #define log_return_void(...)\
248 do { if (dump_file != NULL) fprintf (dump_file, __VA_ARGS__); \
253 // The insn where the search stopped or NULL_RTX.
256 // The set rtx of the specified reg if found, NULL_RTX otherwise.
257 // Notice that the set rtx can also be in a parallel.
260 // The set source operand rtx if found, NULL_RTX otherwise.
264 return set_rtx
== NULL_RTX
? NULL_RTX
: XEXP (set_rtx
, 1);
267 // The set destination operand rtx if found, NULL_RTX otherwise.
271 return set_rtx
== NULL_RTX
? NULL_RTX
: XEXP (set_rtx
, 0);
277 return insn
== NULL_RTX
|| set_rtx
== NULL_RTX
;
281 // Given a reg rtx and a start insn find the insn (in the same basic block)
282 // that sets the reg.
284 find_set_of_reg_bb (rtx reg
, rtx insn
)
286 set_of_reg result
= { insn
, NULL_RTX
};
288 if (!REG_P (reg
) || insn
== NULL_RTX
)
291 for (result
.insn
= insn
; result
.insn
!= NULL_RTX
;
292 result
.insn
= prev_nonnote_insn_bb (result
.insn
))
294 if (BARRIER_P (result
.insn
))
296 if (!NONJUMP_INSN_P (result
.insn
))
298 if (reg_set_p (reg
, result
.insn
))
300 result
.set_rtx
= set_of (reg
, result
.insn
);
301 if (result
.set_rtx
== NULL_RTX
|| GET_CODE (result
.set_rtx
) != SET
)
302 result
.set_rtx
= NULL_RTX
;
311 reg_dead_after_insn (const_rtx reg
, const_rtx insn
)
313 return find_regno_note (insn
, REG_DEAD
, REGNO (reg
)) != NULL_RTX
;
317 reg_unused_after_insn (const_rtx reg
, const_rtx insn
)
319 return find_regno_note (insn
, REG_UNUSED
, REGNO (reg
)) != NULL_RTX
;
322 // Check whether the two specified basic blocks are adjacent, i.e. there's no
323 // other basic block in between them.
325 is_adjacent_bb (basic_block a
, basic_block b
)
327 basic_block bb0
[] = { a
, b
};
328 basic_block bb1
[] = { b
, a
};
330 for (int i
= 0; i
< 2; ++i
)
331 for (edge_iterator ei
= ei_start (bb0
[i
]->succs
);
332 !ei_end_p (ei
); ei_next (&ei
))
333 if (ei_edge (ei
)->dest
== bb1
[i
])
339 // Internal function of trace_reg_uses.
341 trace_reg_uses_1 (rtx reg
, rtx start_insn
, basic_block bb
, int& count
,
342 std::vector
<basic_block
>& visited_bb
, rtx abort_at_insn
)
347 if (std::find (visited_bb
.begin (), visited_bb
.end (), bb
)
348 != visited_bb
.end ())
349 log_return_void ("[bb %d] already visited\n", bb
->index
);
351 visited_bb
.push_back (bb
);
353 if (BB_END (bb
) == NULL_RTX
)
354 log_return_void ("[bb %d] BB_END is null\n", bb
->index
);
356 if (start_insn
== NULL_RTX
)
357 log_return_void ("[bb %d] start_insn is null\n", bb
->index
);
359 rtx end_insn
= NEXT_INSN (BB_END (bb
));
360 if (end_insn
== NULL_RTX
)
361 log_return_void ("[bb %d] end_insn is null\n", bb
->index
);
363 for (rtx i
= NEXT_INSN (start_insn
); i
!= end_insn
; i
= NEXT_INSN (i
))
367 if (NONDEBUG_INSN_P (i
)
368 && (reg_overlap_mentioned_p (reg
, PATTERN (i
))
369 || (CALL_P (i
) && find_reg_fusage (i
, USE
, reg
))))
371 log_msg ("found use in [bb %d] at insn:\n", bb
->index
);
377 // Stop following this BB if the reg is set or dies along the way.
378 if (reg_set_p (reg
, i
) || reg_dead_after_insn (reg
, i
))
382 if (abort_at_insn
!= NULL_RTX
&& abort_at_insn
== i
)
386 for (edge_iterator ei
= ei_start (bb
->succs
); !ei_end_p (ei
); ei_next (&ei
))
388 basic_block succ_bb
= ei_edge (ei
)->dest
;
389 trace_reg_uses_1 (reg
, BB_HEAD (succ_bb
), succ_bb
, count
, visited_bb
,
394 // Trace uses of the specified reg in all basic blocks that are reachable from
395 // the specified insn. If 'abort_at_insn' is not null, abort the trace at
396 // that insn. If the insn 'abort_at_insn' uses the specified reg, it is also
399 trace_reg_uses (rtx reg
, rtx start_insn
, rtx abort_at_insn
)
401 log_msg ("\ntrace_reg_uses\nreg = ");
403 log_msg ("\nstart_insn = ");
404 log_insn (start_insn
);
407 std::vector
<basic_block
> visited_bb
;
408 visited_bb
.reserve (32);
410 trace_reg_uses_1 (reg
, start_insn
, BLOCK_FOR_INSN (start_insn
),
411 count
, visited_bb
, abort_at_insn
);
415 // FIXME: Remove dependency on SH predicate function somehow.
416 extern int t_reg_operand (rtx
, machine_mode
);
417 extern int negt_reg_operand (rtx
, machine_mode
);
419 // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
422 class sh_treg_combine
: public rtl_opt_pass
425 sh_treg_combine (gcc::context
* ctx
, bool split_insns
, const char* name
);
426 virtual ~sh_treg_combine (void);
427 virtual bool gate (void);
428 virtual unsigned int execute (void);
431 // Type of ccreg store that is supported.
439 // Type of branch condition that is supported.
440 enum branch_condition_type_t
444 unknown_branch_condition
= -1
447 // For each basic block there can be a trace entry which consists of an
448 // insn that sets the ccreg (usually a comparison) and a ccreg store.
454 cstore_type_t cstore_type
;
455 std::vector
<set_of_reg
> cstore_reg_reg_copies
;
457 bb_entry (basic_block b
)
458 : bb (b
), setcc (), cstore (), cstore_type (cstore_unknown
) { }
460 rtx
comparison_rtx (void) const { return setcc
.set_src (); }
463 // A ccreg trace for a conditional branch.
467 branch_condition_type_t cbranch_type
;
469 // The comparison against zero right before the conditional branch.
472 // All BBs that are related to the cbranch. The last BB in the list is
473 // the BB of the cbranch itself and might be empty.
474 std::list
<bb_entry
> bb_entries
;
476 cbranch_trace (rtx insn
)
477 : cbranch_insn (insn
),
478 cbranch_type (unknown_branch_condition
),
483 basic_block
bb (void) const { return BLOCK_FOR_INSN (cbranch_insn
); }
486 branch_condition_rtx (void) const
488 rtx x
= pc_set (cbranch_insn
);
489 return x
== NULL_RTX
? NULL_RTX
: XEXP (XEXP (x
, 1), 0);
493 can_invert_condition (void) const
495 // The branch condition can be inverted safely only if the condition
496 // reg is dead after the cbranch.
497 return reg_dead_after_insn (XEXP (branch_condition_rtx (), 0),
502 static const pass_data default_pass_data
;
504 // Tells whether modified or newly added insns are to be split at the end
506 const bool m_split_insns
;
508 // rtx of the ccreg that is obtained from the target.
511 // Newly added or modified insns.
512 std::vector
<rtx
> m_touched_insns
;
514 // Given an rtx determine whether it's a comparison with a constant zero.
515 static bool is_cmp_eq_zero (const_rtx i
);
517 // Update the stored mode of the ccreg from the given branch condition rtx.
518 void update_ccreg_mode (const_rtx cond
);
520 // Given an rtx, figure out the branch condition, assuming that it is
521 // in canonical form:
522 // (ne (reg) (const_int 0))
523 // (eq (reg) (const_int 0))
524 branch_condition_type_t
branch_condition_type (const_rtx cond
) const;
526 // Return true if the specified rtx is either a normal ccreg or
527 // a negated form of the ccreg.
528 bool is_normal_ccreg (const_rtx x
) const;
529 bool is_inverted_ccreg (const_rtx x
) const;
531 // Given a reg rtx and a start insn rtx, try to find the insn in the same
532 // basic block that sets the specified reg.
533 // Return how the search ended and the insn where it stopped or NULL_RTX.
540 record_return_t
record_set_of_reg (rtx reg
, rtx start_insn
, bb_entry
& e
);
542 // Tells whether the cstore insn of the specified bb_entry can be removed
543 // safely without triggering any side effects.
544 bool can_remove_cstore (const bb_entry
& e
,
545 const cbranch_trace
& trace
) const;
547 // Tells whether the setcc insn of the specified bb_entry can be removed
548 // safely without triggering any side effects.
549 bool can_remove_comparison (const bb_entry
& e
,
550 const cbranch_trace
& trace
) const;
552 // Tells whether the two specified comparison rtx can be combined into a
553 // single comparison.
554 bool can_combine_comparisons (const_rtx x
, const_rtx y
) const;
556 // Tells whether the ccreg usage can be extended from the bb_entry on until
557 // the final cbranch of the trace.
558 bool can_extend_ccreg_usage (const bb_entry
& e
,
559 const cbranch_trace
& trace
) const;
561 // Create an insn rtx that is a negating reg move (not operation).
562 rtx
make_not_reg_insn (rtx dst_reg
, rtx src_reg
) const;
564 // Create an insn rtx that inverts the ccreg.
565 rtx
make_inv_ccreg_insn (void) const;
567 // Adds the specified insn to the set of modified or newly added insns that
568 // might need splitting at the end of the pass.
569 rtx
touched_insn (rtx i
);
571 // Try to invert the branch condition of the specified trace.
572 bool try_invert_branch_condition (cbranch_trace
& trace
);
574 // Try to optimize a cbranch trace by combining comparisons in BBs and
575 // eliminate the cstores.
576 bool try_combine_comparisons (cbranch_trace
& trace
,
577 int cstore_count
, int inv_cstore_count
,
578 cstore_type_t dominating_cstore
);
580 // Try to optimize a cbranch trace by eliminating the cstores in BBs only.
581 bool try_eliminate_cstores (cbranch_trace
& trace
,
582 int cstore_count
, int inv_cstore_count
,
583 cstore_type_t dominating_cstore
);
585 // Given a branch insn, try to optimize its branch condition.
586 // If any insns are modified or added they are added to 'm_touched_insns'.
587 void try_optimize_cbranch (rtx i
);
591 const pass_data
sh_treg_combine::default_pass_data
=
594 "", // name (overwritten by the constructor)
595 OPTGROUP_NONE
, // optinfo_flags
598 TV_OPTIMIZE
, // tv_id
599 0, // properties_required
600 0, // properties_provided
601 0, // properties_destroyed
602 0, // todo_flags_start
603 TODO_df_finish
| TODO_df_verify
// todo_flags_finish
604 | TODO_verify_rtl_sharing
607 sh_treg_combine::sh_treg_combine (gcc::context
* ctx
, bool split_insns
,
609 : rtl_opt_pass (default_pass_data
, ctx
),
610 m_split_insns (split_insns
),
613 // Overwrite default name in pass_data base class.
617 sh_treg_combine::~sh_treg_combine (void)
621 void sh_treg_combine::update_ccreg_mode (const_rtx cond
)
623 if (REG_P (XEXP (cond
, 0)) && REGNO (XEXP (cond
, 0)) != REGNO (m_ccreg
))
626 machine_mode m
= GET_MODE (XEXP (cond
, 0));
627 if (m
== GET_MODE (m_ccreg
))
630 PUT_MODE (m_ccreg
, m
);
631 log_msg ("updated ccreg mode: ");
637 sh_treg_combine::is_cmp_eq_zero (const_rtx i
)
639 return i
!= NULL_RTX
&& GET_CODE (i
) == EQ
640 && REG_P (XEXP (i
, 0)) && XEXP (i
, 1) == const0_rtx
;
643 sh_treg_combine::branch_condition_type_t
644 sh_treg_combine::branch_condition_type (const_rtx cond
) const
646 if (cond
== NULL_RTX
)
647 return unknown_branch_condition
;
649 if (GET_CODE (cond
) == NE
650 && REG_P (XEXP (cond
, 0)) && REGNO (XEXP (cond
, 0)) == REGNO (m_ccreg
)
651 && XEXP (cond
, 1) == const0_rtx
)
652 return branch_if_true
;
654 else if (GET_CODE (cond
) == EQ
655 && REG_P (XEXP (cond
, 0)) && REGNO (XEXP (cond
, 0)) == REGNO (m_ccreg
)
656 && XEXP (cond
, 1) == const0_rtx
)
657 return branch_if_false
;
660 return unknown_branch_condition
;
664 sh_treg_combine::is_normal_ccreg (const_rtx x
) const
666 return t_reg_operand (const_cast<rtx
> (x
), VOIDmode
);
670 sh_treg_combine::is_inverted_ccreg (const_rtx x
) const
672 return negt_reg_operand (const_cast<rtx
> (x
), VOIDmode
);
675 sh_treg_combine::record_return_t
676 sh_treg_combine::record_set_of_reg (rtx reg
, rtx start_insn
,
679 log_msg ("\n[bb %d]\n", new_entry
.bb
->index
);
681 if (start_insn
== NULL_RTX
)
682 log_return (set_not_found
, "set of reg not found. empty BB?\n");
684 new_entry
.cstore_type
= cstore_unknown
;
686 for (rtx i
= start_insn
; i
!= NULL_RTX
; )
688 new_entry
.cstore
= find_set_of_reg_bb (reg
, i
);
690 if (new_entry
.cstore
.set_src () == NULL_RTX
)
691 log_return (set_not_found
, "set of reg not found (cstore)\n");
693 log_insn (new_entry
.cstore
.insn
);
696 if (is_normal_ccreg (new_entry
.cstore
.set_src ()))
698 log_msg ("normal condition store\n");
699 new_entry
.cstore_type
= cstore_normal
;
701 else if (is_inverted_ccreg (new_entry
.cstore
.set_src ()))
703 log_msg ("inverted condition store\n");
704 new_entry
.cstore_type
= cstore_inverted
;
706 else if (REG_P (new_entry
.cstore
.set_src ()))
708 // If it's a reg-reg copy follow the copied reg.
709 new_entry
.cstore_reg_reg_copies
.push_back (new_entry
.cstore
);
710 reg
= new_entry
.cstore
.set_src ();
711 i
= new_entry
.cstore
.insn
;
713 log_msg ("reg-reg copy. tracing ");
719 log_return (other_set_found
, "not a condition store\n");
721 gcc_assert (new_entry
.cstore_type
!= cstore_unknown
);
723 // Now see how the ccreg was set.
724 // For now it must be in the same BB.
725 log_msg ("tracing ccreg\n");
727 find_set_of_reg_bb (m_ccreg
,
728 prev_nonnote_insn_bb (new_entry
.cstore
.insn
));
730 // If cstore was found but setcc was not found continue anyway, as
731 // for some of the optimization types the setcc is irrelevant.
732 if (new_entry
.setcc
.set_src () == NULL_RTX
)
733 log_return (set_found
, "set of ccreg not found\n");
735 else if (GET_CODE (new_entry
.setcc
.set_rtx
) == SET
)
737 // Also allow insns that set the ccreg, but are not true comparison
738 // insns, as long as they are sets and not e.g. clobbers.
739 log_insn (new_entry
.setcc
.insn
);
744 // If cstore was found but setcc was not found continue anyway, as
745 // for some of the optimization types the setcc is irrelevant.
746 log_return (set_found
, "unknown set of ccreg\n");
749 log_return (set_not_found
, "set of reg not found\n");
753 sh_treg_combine::can_remove_cstore (const bb_entry
& e
,
754 const cbranch_trace
& trace
) const
756 if (volatile_insn_p (PATTERN (e
.cstore
.insn
)))
758 log_msg ("can't remove insn\n");
759 log_insn (e
.cstore
.insn
);
760 log_return (false, "\nbecause it's volatile\n");
763 // On SH there are parallel patterns which store the ccreg multiple times.
764 // In this case it's not safe.
765 rtx cstore_pat
= PATTERN (e
.cstore
.insn
);
766 if (GET_CODE (cstore_pat
) == PARALLEL
)
767 for (int i
= 0; i
< XVECLEN (cstore_pat
, 0); ++i
)
769 rtx x
= XVECEXP (cstore_pat
, 0, i
);
771 // It's the cstore set that we're referring to, ignore that one.
772 if (x
!= e
.cstore
.set_rtx
773 && GET_CODE (x
) == SET
&& reg_referenced_p (m_ccreg
, x
))
775 log_msg ("can't remove insn\n");
776 log_insn (e
.cstore
.insn
);
777 log_return (false, "\nbecause it's a multiple ccreg store\n");
781 // If the cstore sets the ccreg (e.g. negc) and the ccreg is used afterwards
783 if (modified_in_p (m_ccreg
, e
.cstore
.insn
)
784 && !(reg_dead_after_insn (m_ccreg
, e
.cstore
.insn
)
785 || reg_unused_after_insn (m_ccreg
, e
.cstore
.insn
)))
787 log_msg ("can't remove insn\n");
788 log_insn (e
.cstore
.insn
);
789 log_return (false, "\nbecause it sets the ccreg\n");
792 // If the cstore destination reg is copied around check the reg-reg
793 // copies. At every reg-reg copy the copied reg must be dead and there
794 // must not be a usage of the copied regs between the reg-reg copies.
795 // Otherwise we assume that the result of the cstore is used in some
797 rtx prev_insn
= e
.cstore
.insn
;
798 for (std::vector
<set_of_reg
>::const_reverse_iterator i
=
799 e
.cstore_reg_reg_copies
.rbegin ();
800 i
!= e
.cstore_reg_reg_copies
.rend (); ++i
)
802 if (!reg_dead_after_insn (i
->set_src (), i
->insn
))
804 log_msg ("can't remove insn\n");
806 log_return (false, "\nbecause source of reg-reg copy doesn't die\n");
809 if (reg_used_between_p (i
->set_src (), prev_insn
, i
->insn
))
811 log_msg ("can't remove insn\n");
813 log_return (false, "\nbecause reg %d is otherwise used\n",
814 REGNO (i
->set_src ()));
820 // The cstore_dst reg must die after the test before the cbranch, otherwise
821 // it's not safe to remove the cstore.
822 // If the cstore destination reg is copied around check the effective
823 // destination reg of the cstore. The reg-reg copies are recorded in
824 // reverse order, i.e. the most recent reg-reg copy in the insn list
826 rtx cstore_dst
= e
.cstore_reg_reg_copies
.empty ()
827 ? e
.cstore
.set_dst ()
828 : e
.cstore_reg_reg_copies
.front ().set_dst ();
830 if (!reg_dead_after_insn (cstore_dst
, trace
.setcc
.insn
))
832 log_msg ("can't remove insn\n");
833 log_insn (e
.cstore
.insn
);
834 log_return (false, "\nbecause its effective target reg %d doesn't die "
835 "after trace.setcc.insn\n", REGNO (cstore_dst
));
838 // Also check that the cstore_dst reg is not used in other reachable code
839 // paths before it dies.
840 // Count the uses of the effective cstore_dst reg (i.e. the last known reg
841 // that holds the cstore value after reg-reg copies) in all BBs that can be
842 // reached from bb_entry's BB including the BB of the cstore insn.
843 // If we get more than 1 uses we assume that it's used somewhere else and is
844 // not safe to be removed.
845 int cstore_dst_use_count
= trace_reg_uses (cstore_dst
, e
.cstore
.insn
,
847 if (cstore_dst_use_count
> 1)
849 log_msg ("can't remove insn\n");
850 log_insn (e
.cstore
.insn
);
851 log_return (false, "\nbecause its effective target reg %d is used "
852 "in %d other places\n", REGNO (cstore_dst
),
853 cstore_dst_use_count
- 1);
860 sh_treg_combine::can_remove_comparison (const bb_entry
& e
,
861 const cbranch_trace
&/* trace*/) const
863 // If the ccreg is used otherwise between the comparison and the cstore,
865 if (reg_used_between_p (m_ccreg
, e
.setcc
.insn
, e
.cstore
.insn
))
867 log_msg ("can't remove insn\n");
868 log_insn (e
.setcc
.insn
);
869 log_return (false, "\nbecause the ccreg is used otherwise\n");
872 if (!reg_dead_after_insn (m_ccreg
, e
.cstore
.insn
)
873 && !reg_unused_after_insn (m_ccreg
, e
.cstore
.insn
))
875 log_msg ("can't remove insn\n");
876 log_insn (e
.cstore
.insn
);
877 log_return (false, "\nbecause ccreg is not dead or unused afterwards\n");
880 // On SH there are also multiple set patterns that can be used for
881 // comparisons, such as "shll". It's not safe to remove those.
882 if (multiple_sets (e
.setcc
.insn
))
884 log_msg ("can't remove insn\n");
885 log_insn (e
.cstore
.insn
);
886 log_return (false, "\nbecause it's a multiple set\n");
893 sh_treg_combine::make_not_reg_insn (rtx dst_reg
, rtx src_reg
) const
895 // This will go through expanders and may output multiple insns
896 // for multi-word regs.
898 expand_simple_unop (GET_MODE (dst_reg
), NOT
, src_reg
, dst_reg
, 0);
899 rtx i
= get_insns ();
905 sh_treg_combine::make_inv_ccreg_insn (void) const
908 rtx i
= emit_insn (gen_rtx_SET (VOIDmode
, m_ccreg
,
909 gen_rtx_fmt_ee (XOR
, GET_MODE (m_ccreg
),
910 m_ccreg
, const1_rtx
)));
916 sh_treg_combine::touched_insn (rtx i
)
918 m_touched_insns
.push_back (i
);
923 sh_treg_combine::can_combine_comparisons (const_rtx x
, const_rtx y
) const
925 if (GET_CODE (x
) != GET_CODE (y
))
928 rtx x_op0
= XEXP (x
, 0);
929 rtx x_op1
= XEXP (x
, 1);
931 rtx y_op0
= XEXP (y
, 0);
932 rtx y_op1
= XEXP (y
, 1);
934 if (!REG_P (x_op0
) || !REG_P (y_op0
))
937 if (GET_MODE (x_op0
) != GET_MODE (y_op0
))
940 // rtx_equal_p also compares the reg numbers which we do not care about
941 // here, as long as both are regs and the modes are the same.
943 return REG_P (y_op1
) && GET_MODE (x_op1
) == GET_MODE (y_op1
);
945 return rtx_equal_p (x_op1
, y_op1
);
949 sh_treg_combine::can_extend_ccreg_usage (const bb_entry
& e
,
950 const cbranch_trace
& trace
) const
952 // Check if the ccreg is not modified by other insns in the BB path until
953 // the final cbranch of the trace.
954 // Start checking after the cstore that follows the setcc, assuming that
955 // the cstore will be removed.
957 // The assumption here is that the specified bb_entry's BB is a direct
958 // predecessor of the trace.cbranch_insn's BB.
959 if (e
.bb
!= trace
.bb () && !is_adjacent_bb (e
.bb
, trace
.bb ()))
961 "can't extend ccreg usage -- [bb %d] and [bb %d] are not adjacent\n",
962 e
.bb
->index
, trace
.bb ()->index
);
964 if (e
.cstore
.empty ())
965 log_return (false, "can't extend ccreg usage -- no cstore\n");
967 // The entry's cstore is in the same BB as the final cbranch.
968 if (e
.bb
== trace
.bb ())
970 if (reg_set_between_p (m_ccreg
, e
.cstore
.insn
, trace
.setcc
.insn
))
972 "can't extend ccreg usage -- it's modified between e.cstore.insn "
973 "and trace.setcc.insn");
978 // The entry's cstore and the final cbranch are in different BBs.
979 if (reg_set_between_p (m_ccreg
, e
.cstore
.insn
, NEXT_INSN (BB_END (e
.bb
))))
981 "can't extend ccreg usage -- it's modified in [bb %d]", e
.bb
->index
);
983 if (reg_set_between_p (m_ccreg
, PREV_INSN (BB_HEAD (trace
.bb ())),
986 "can't extend ccreg usage -- it's modified in [bb %d]",
993 sh_treg_combine::try_invert_branch_condition (cbranch_trace
& trace
)
995 log_msg ("inverting branch condition\n");
997 if (!invert_jump_1 (trace
.cbranch_insn
, JUMP_LABEL (trace
.cbranch_insn
)))
998 log_return (false, "invert_jump_1 failed\n");
1000 if (verify_changes (num_validated_changes ()))
1001 confirm_change_group ();
1003 log_return (false, "verify_changed failed\n");
1005 touched_insn (trace
.cbranch_insn
);
1010 sh_treg_combine::try_combine_comparisons (cbranch_trace
& trace
,
1012 int inv_cstore_count
,
1013 cstore_type_t dominating_cstore
)
1015 log_msg ("\ntry_combine_comparisons\n");
1017 // This function will always try to create new pseudos.
1018 if (!can_create_pseudo_p ())
1019 log_return (false, "can't create pseudos\n");
1021 // Check that all ccset insns are comparisons and all comparison types in
1022 // all BBs are the same and could be combined into one single comparison.
1023 rtx comp
= NULL_RTX
;
1024 rtx comp_insn
= NULL_RTX
;
1026 for (std::list
<bb_entry
>::const_iterator i
= trace
.bb_entries
.begin ();
1027 i
!= trace
.bb_entries
.end (); ++i
)
1029 int i_empty_count
= i
->setcc
.empty () + i
->cstore
.empty ();
1031 // A completely empty entry is OK (could be the BB of the cbranch).
1032 if (i_empty_count
== 2)
1035 // Otherwise we need both, the setcc and the cstore.
1036 if (i_empty_count
!= 0)
1037 log_return (false, "bb entry is not a setcc cstore pair\n");
1039 rtx other_comp
= i
->comparison_rtx ();
1041 if (!COMPARISON_P (other_comp
))
1043 log_msg ("setcc is not a comparison:\n");
1044 log_rtx (other_comp
);
1045 log_return (false, "\n");
1048 if (comp_insn
== NULL_RTX
)
1051 comp_insn
= i
->setcc
.insn
;
1053 else if (!can_combine_comparisons (comp
, other_comp
))
1056 // The goal here is to eliminate all cstores and comparisons in the BBs.
1057 // Thus check if every cstore can actually be removed safely.
1058 if (!can_remove_cstore (*i
, trace
) || !can_remove_comparison (*i
, trace
))
1062 // FIXME: The first operand of the comparison must be a simple reg.
1063 // This effectively prohibits combining div0s comparisons such as
1064 // (lt:SI (xor:SI (reg:SI) (reg:SI)))
1065 if (!REG_P (XEXP (comp
, 0)))
1067 log_msg ("comparison operand 0\n");
1068 log_rtx (XEXP (comp
, 0));
1069 log_return (false, "\nis not a reg\n");
1072 rtx comp_op0
= gen_reg_rtx (GET_MODE (XEXP (comp
, 0)));
1073 rtx comp_op1
= REG_P (XEXP (comp
, 1))
1074 ? gen_reg_rtx (GET_MODE (XEXP (comp
, 1)))
1077 // If there are both, inverting and non-inverting cstores, they can only
1078 // be eliminated if the comparison can be inverted. We assume that the
1079 // comparison insns that we find are already minimal and canonicalized.
1080 // There is one special case though, where an integer comparison
1081 // (eq (reg) (const_int 0))
1082 // can be inverted with a sequence
1083 // (eq (not (reg)) (const_int 0))
1084 if (inv_cstore_count
!= 0 && cstore_count
!= 0)
1086 if (make_not_reg_insn (comp_op0
, comp_op0
) == NULL_RTX
)
1087 log_return (false, "make_not_reg_insn failed.\n");
1089 for (std::list
<bb_entry
>::const_iterator i
= trace
.bb_entries
.begin ();
1090 i
!= trace
.bb_entries
.end (); ++i
)
1092 if (i
->setcc
.empty () || i
->cstore
.empty ())
1095 if (i
->cstore_type
!= dominating_cstore
1096 && !is_cmp_eq_zero (i
->comparison_rtx ()))
1098 log_msg ("can't invert comparison in insn\n");
1099 log_insn (i
->setcc
.insn
);
1101 "\nbecause it's not a (eq (reg) (const_int 0))\n");
1106 if (dominating_cstore
== cstore_normal
1107 && !try_invert_branch_condition (trace
))
1110 // Replace the test insn before the cbranch with the common comparison.
1111 // Instead of creating a new insn from scratch we copy the common comparison
1112 // pattern. This simplifies handling parallel comparison patterns, such as
1113 // FP comparisons on SH, which have an extra use on FPSCR.
1114 log_msg ("installing common comparison in [bb %d]\n", trace
.bb ()->index
);
1116 rtx common_comp_pat
= copy_rtx (PATTERN (comp_insn
));
1117 rtx common_comp
= const_cast<rtx
> (set_of (m_ccreg
, common_comp_pat
));
1119 gcc_assert (common_comp
!= NULL_RTX
);
1121 XEXP (XEXP (common_comp
, 1), 0) = comp_op0
;
1122 XEXP (XEXP (common_comp
, 1), 1) = comp_op1
;
1124 log_rtx (common_comp_pat
);
1127 rtx common_comp_insn
= touched_insn (emit_insn_after (common_comp_pat
,
1130 if (REG_P (comp_op0
))
1131 add_reg_note (common_comp_insn
, REG_DEAD
, copy_rtx (comp_op0
));
1132 if (REG_P (comp_op1
))
1133 add_reg_note (common_comp_insn
, REG_DEAD
, copy_rtx (comp_op1
));
1135 delete_insn (trace
.setcc
.insn
);
1137 // Replace comparison and cstore insns with reg-reg moves in all BBs.
1138 for (std::list
<bb_entry
>::const_iterator i
= trace
.bb_entries
.begin ();
1139 i
!= trace
.bb_entries
.end (); ++i
)
1141 if (i
->setcc
.empty () || i
->cstore
.empty ())
1144 rtx i_comp_op0
= XEXP (i
->comparison_rtx (), 0);
1145 rtx i_comp_op1
= XEXP (i
->comparison_rtx (), 1);
1147 if (i
->cstore_type
== dominating_cstore
)
1149 log_msg ("replacing comparison and cstore with reg move "
1150 "in [bb %d]\n", i
->bb
->index
);
1152 rtx new_i
= touched_insn (
1153 emit_insn_after (gen_move_insn (comp_op0
, i_comp_op0
),
1156 if (REG_P (i_comp_op0
)
1157 && reg_dead_after_insn (i_comp_op0
, i
->setcc
.insn
))
1158 add_reg_note (new_i
, REG_DEAD
, copy_rtx (i_comp_op0
));
1160 // If the second operand is a reg, have to emit a move insn.
1161 // Otherwise assume it's a const_int and just reference it.
1162 if (REG_P (comp_op1
))
1164 new_i
= touched_insn (
1165 emit_insn_after (gen_move_insn (comp_op1
, i_comp_op1
),
1168 if (reg_dead_after_insn (i_comp_op1
, i
->setcc
.insn
))
1169 add_reg_note (new_i
, REG_DEAD
, copy_rtx (i_comp_op1
));
1174 log_msg ("replacing comparison and cstore with inverting reg move "
1175 "in [bb %d]\n", i
->bb
->index
);
1177 rtx new_i
= make_not_reg_insn (comp_op0
, i_comp_op0
);
1178 if (REG_P (i_comp_op0
)
1179 && reg_dead_after_insn (i_comp_op0
, i
->setcc
.insn
))
1180 add_reg_note (new_i
, REG_DEAD
, copy_rtx (i_comp_op0
));
1182 touched_insn (emit_insn_after (new_i
, i
->setcc
.insn
));
1185 delete_insn (i
->cstore
.insn
);
1186 delete_insn (i
->setcc
.insn
);
1193 sh_treg_combine::try_eliminate_cstores (cbranch_trace
& trace
,
1194 int cstore_count
, int inv_cstore_count
,
1195 cstore_type_t dominating_cstore
)
1197 log_msg ("\ntry_eliminate_cstores\n");
1199 for (std::list
<bb_entry
>::const_iterator i
= trace
.bb_entries
.begin ();
1200 i
!= trace
.bb_entries
.end (); ++i
)
1202 // A completly empty entry is OK (could be the BB of the cbranch).
1203 if (i
->setcc
.empty () && i
->cstore
.empty ())
1206 // We're going to eliminate cstores, but for that they have to be
1207 // there. We don't care about the setcc in this case.
1208 if (i
->cstore
.empty ())
1209 log_return (false, "bb entry cstore empty -- aborting\n");
1211 // The goal here is to eliminate all cstores in the BBs and extend the
1213 if (!can_extend_ccreg_usage (*i
, trace
))
1216 // If the cstore can't be removed we can keep it around as long as
1217 // it doesn't modify the ccreg.
1218 if (!can_remove_cstore (*i
, trace
)
1219 && modified_in_p (m_ccreg
, i
->cstore
.insn
))
1220 log_return (false, "cstore sets ccreg -- aborting\n");
1223 // If there are both, inverting and non-inverting cstores, we'll have to
1224 // invert the ccreg as a replacement for one of them.
1225 if (cstore_count
!= 0 && inv_cstore_count
!= 0)
1227 rtx i
= make_inv_ccreg_insn ();
1228 if (recog_memoized (i
) < 0)
1230 log_msg ("failed to match ccreg inversion insn:\n");
1231 log_rtx (PATTERN (i
));
1232 log_return (false, "\naborting\n");
1236 if (dominating_cstore
== cstore_normal
1237 && !try_invert_branch_condition (trace
))
1240 // Eliminate cstores in all BBs.
1241 for (std::list
<bb_entry
>::const_iterator i
= trace
.bb_entries
.begin ();
1242 i
!= trace
.bb_entries
.end (); ++i
)
1244 if (i
->cstore
.empty ())
1247 if (i
->cstore_type
== dominating_cstore
)
1248 log_msg ("removing cstore in [bb %d]\n", i
->bb
->index
);
1251 log_msg ("replacing cstore with ccreg inversion in [bb %d]\n",
1255 emit_insn_after (make_inv_ccreg_insn (), i
->cstore
.insn
));
1258 if (can_remove_cstore (*i
, trace
))
1259 delete_insn (i
->cstore
.insn
);
1262 log_msg ("removing test insn before cbranch\n");
1263 delete_insn (trace
.setcc
.insn
);
1268 sh_treg_combine::try_optimize_cbranch (rtx insn
)
1270 cbranch_trace
trace (insn
);
1272 log_msg ("\n\n--------------------------------------\n");
1273 log_msg ("found cbranch insn in [bb %d]:\n", trace
.bb ()->index
);
1276 trace
.cbranch_type
= branch_condition_type (trace
.branch_condition_rtx ());
1278 if (trace
.cbranch_type
== branch_if_true
)
1279 log_msg ("condition: branch if true\n");
1280 else if (trace
.cbranch_type
== branch_if_false
)
1281 log_msg ("condition: branch if false\n");
1284 log_msg ("unknown branch condition\n");
1285 log_rtx (trace
.branch_condition_rtx ());
1286 log_return_void ("\n");
1289 update_ccreg_mode (trace
.branch_condition_rtx ());
1291 // Scan the insns backwards for an insn that sets the ccreg by testing a
1292 // reg against zero like
1293 // (set (reg ccreg) (eq (reg) (const_int 0)))
1294 // The testing insn could also be outside of the current basic block, but
1295 // for now we limit the search to the current basic block.
1296 trace
.setcc
= find_set_of_reg_bb (m_ccreg
, prev_nonnote_insn_bb (insn
));
1298 if (!is_cmp_eq_zero (trace
.setcc
.set_src ()))
1299 log_return_void ("could not find set of ccreg in current BB\n");
1301 rtx trace_reg
= XEXP (trace
.setcc
.set_src (), 0);
1303 log_msg ("set of ccreg:\n");
1304 log_insn (trace
.setcc
.insn
);
1306 // See if we can remove the trace.setcc insn safely.
1307 if (reg_used_between_p (m_ccreg
, trace
.setcc
.insn
, trace
.cbranch_insn
))
1308 log_return_void ("ccreg used between testing insn and branch insn\n");
1310 if (volatile_insn_p (PATTERN (trace
.setcc
.insn
)))
1312 log_msg ("can't remove insn\n");
1313 log_insn (trace
.setcc
.insn
);
1314 log_return_void ("\nbecause it's volatile\n");
1317 // Now that we have an insn which tests some reg and sets the condition
1318 // reg before the conditional branch, try to figure out how that tested
1319 // reg was formed, i.e. find all the insns that set the tested reg in
1321 // The tested reg might be set in multiple basic blocks so we need to
1322 // check all basic blocks which can reach this current basic block.
1323 // If the set of reg is an inverting or non-inverting store of the condition
1324 // register, check how the ccreg value was obtained.
1325 log_msg ("\ntracing ");
1326 log_rtx (trace_reg
);
1330 // First check the basic block where the conditional branch is in.
1331 // If we find it here there's no point in checking other BBs.
1332 trace
.bb_entries
.push_front (bb_entry (trace
.bb ()));
1334 record_return_t res
=
1335 record_set_of_reg (trace_reg
, prev_nonnote_insn_bb (trace
.setcc
.insn
),
1336 trace
.bb_entries
.front ());
1338 if (res
== other_set_found
)
1339 log_return_void ("other set found - aborting trace\n");
1340 else if (res
== set_not_found
)
1342 // It seems the initial search in the BB of the conditional branch
1343 // didn't find anything. Now look in all predecessor BBs.
1344 for (edge_iterator ei
= ei_start (trace
.bb ()->preds
);
1345 !ei_end_p (ei
); ei_next (&ei
))
1347 edge e
= ei_edge (ei
);
1348 trace
.bb_entries
.push_front (bb_entry (e
->src
));
1350 res
= record_set_of_reg (trace_reg
, BB_END (e
->src
),
1351 trace
.bb_entries
.front ());
1352 if (res
!= set_found
)
1353 log_return_void ("set not found - aborting trace\n");
1357 if (dump_file
!= NULL
)
1359 log_msg ("\ncbranch trace summary:\n");
1360 for (std::list
<bb_entry
>::const_iterator i
= trace
.bb_entries
.begin ();
1361 i
!= trace
.bb_entries
.end (); ++i
)
1363 log_msg ("\n[bb %d]\n", i
->bb
->index
);
1364 if (!i
->setcc
.empty ())
1366 log_rtx (i
->setcc
.set_rtx
);
1369 if (!i
->cstore
.empty ())
1371 log_rtx (i
->cstore
.set_rtx
);
1375 for (std::vector
<set_of_reg
>::const_reverse_iterator j
=
1376 i
->cstore_reg_reg_copies
.rbegin ();
1377 j
!= i
->cstore_reg_reg_copies
.rend (); ++j
)
1379 log_rtx (j
->set_rtx
);
1384 log_rtx (trace
.setcc
.set_rtx
);
1386 log_rtx (PATTERN (trace
.cbranch_insn
));
1390 // Check that we don't have any empty BBs.
1391 // Only the BB with the cbranch may be empty.
1392 for (std::list
<bb_entry
>::const_iterator i
= trace
.bb_entries
.begin ();
1393 i
!= trace
.bb_entries
.end (); ++i
)
1394 if (i
->setcc
.empty () && i
->cstore
.empty () && i
->bb
!= trace
.bb ())
1395 log_return_void ("\n[bb %d] is empty - aborting.\n", i
->bb
->index
);
1397 // Determine the dominating cstore type
1398 // FIXME: Try to take the probabilities of the BBs into account somehow.
1399 int cstore_count
= 0;
1400 int inv_cstore_count
= 0;
1402 for (std::list
<bb_entry
>::const_iterator i
= trace
.bb_entries
.begin ();
1403 i
!= trace
.bb_entries
.end (); ++i
)
1405 if (i
->cstore_type
== cstore_normal
)
1407 else if (i
->cstore_type
== cstore_inverted
)
1408 inv_cstore_count
+= 1;
1411 log_msg ("cstore count = %d inverted cstore count = %d\n",
1412 cstore_count
, inv_cstore_count
);
1414 // This puts a priority on inverting cstores.
1415 cstore_type_t dominating_cstore
= inv_cstore_count
>= cstore_count
1419 if (dominating_cstore
== cstore_inverted
)
1420 log_msg ("will try to eliminate inverted cstore\n");
1421 else if (dominating_cstore
== cstore_normal
)
1423 log_msg ("will try to eliminate normal cstore\n");
1424 if (!trace
.can_invert_condition ())
1425 log_return_void ("branch condition can't be inverted - aborting\n");
1430 if (try_combine_comparisons (trace
, cstore_count
, inv_cstore_count
,
1434 try_eliminate_cstores (trace
, cstore_count
, inv_cstore_count
,
1439 sh_treg_combine::gate (void)
1441 return optimize
> 0;
1445 sh_treg_combine::execute (void)
1447 unsigned int ccr0
= INVALID_REGNUM
;
1448 unsigned int ccr1
= INVALID_REGNUM
;
1450 if (targetm
.fixed_condition_code_regs (&ccr0
, &ccr1
)
1451 && ccr0
!= INVALID_REGNUM
)
1453 // Initially create a reg rtx with VOIDmode.
1454 // When the first conditional branch is discovered, the mode is changed
1455 // to the mode that is actually used by the target.
1456 m_ccreg
= gen_rtx_REG (VOIDmode
, ccr0
);
1459 if (m_ccreg
== NULL_RTX
)
1460 log_return (0, "no ccreg.\n\n");
1462 if (STORE_FLAG_VALUE
!= 1)
1463 log_return (0, "unsupported STORE_FLAG_VALUE %d", STORE_FLAG_VALUE
);
1465 log_msg ("ccreg: ");
1467 log_msg (" STORE_FLAG_VALUE = %d\n", STORE_FLAG_VALUE
);
1469 // Look for basic blocks that end with a conditional branch and try to
1472 FOR_EACH_BB_FN (bb
, cfun
)
1474 rtx i
= BB_END (bb
);
1475 if (any_condjump_p (i
) && onlyjump_p (i
))
1476 try_optimize_cbranch (i
);
1481 // If new insns are created and this pass is executed after all insns
1482 // have been split already, we must split the insns we've changed or added
1484 // FIXME: Multi-word operations (which emit multiple insns) are not handled
1485 // properly here, since only one insn will end up in 'm_touched_insns'.
1486 // On SH this is not a problem though.
1488 for (std::vector
<rtx
>::const_iterator i
= m_touched_insns
.begin ();
1489 i
!= m_touched_insns
.end (); ++i
)
1491 log_msg ("trying to split insn:\n");
1494 try_split (PATTERN (*i
), *i
, 0);
1497 m_touched_insns
.clear ();
1498 log_return (0, "\n\n");
1501 // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
1502 // This allows instantiating the pass somewhere else without having to pull
1503 // in a header file.
1505 make_pass_sh_treg_combine (gcc::context
* ctx
, bool split_insns
,
1508 return new sh_treg_combine (ctx
, split_insns
, name
);