]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/config/sh/sh_treg_combine.cc
remove has_execute
[thirdparty/gcc.git] / gcc / config / sh / sh_treg_combine.cc
1 /* An SH specific RTL pass that tries to combine comparisons and redundant
2 condition code register stores across multiple basic blocks.
3 Copyright (C) 2013-2014 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "machmode.h"
25 #include "basic-block.h"
26 #include "df.h"
27 #include "rtl.h"
28 #include "insn-config.h"
29 #include "insn-codes.h"
30 #include "emit-rtl.h"
31 #include "recog.h"
32 #include "tree-pass.h"
33 #include "target.h"
34 #include "expr.h"
35
36 #include <algorithm>
37 #include <list>
38 #include <vector>
39
40 /*
41 This pass tries to optimize for example this:
42 mov.l @(4,r4),r1
43 tst r1,r1
44 movt r1
45 tst r1,r1
46 bt/s .L5
47
48 into something simpler:
49 mov.l @(4,r4),r1
50 tst r1,r1
51 bf/s .L5
52
53 Such sequences can be identified by looking for conditional branches and
54 checking whether the ccreg is set before the conditional branch
55 by testing another register for != 0, which was set by a ccreg store.
56 This can be optimized by eliminating the redundant comparison and
57 inverting the branch condition. There can be multiple comparisons in
58 different basic blocks that all end up in the redundant test insn before the
59 conditional branch. Some example RTL ...
60
61 Example 1)
62 ----------
63
64 [bb 3]
65 (set (reg:SI 147 t) (eq:SI (reg:SI 173) (const_int 0)))
66 (set (reg:SI 167) (xor:SI (reg:SI 147 t) (const_int 1)))
67 -> bb 5
68
69 [bb 4]
70 (set (reg:SI 147 t) (eq:SI (reg:SI 177) (const_int 0)))
71 (set (reg:SI 167) (reg:SI 147 t))
72 -> bb 5
73
74 [bb 5]
75 (set (reg:SI 147 t) (eq:SI (reg:SI 167) (const_int 0)))
76 (set (pc) (if_then_else (ne (reg:SI 147 t) (const_int 0))
77 (label_ref:SI 50) (pc)))
78
79 In [bb 4] elimination of the comparison would require inversion of the branch
80 condition and compensation of other BBs.
81 Instead an inverting reg-move can be used:
82
83 [bb 3]
84 (set (reg:SI 167) (reg:SI 173))
85 -> bb 5
86
87 [BB 4]
88 (set (reg:SI 167) (not:SI (reg:SI 177)))
89 -> bb 5
90
91 [bb 5]
92 (set (reg:SI 147 t) (eq:SI (reg:SI 167) (const_int 0)))
93   (set (pc) (if_then_else (ne (reg:SI 147 t) (const_int 0))
94 (label_ref:SI 50) (pc)))
95
96
97 Example 2)
98 ----------
99
100 [bb 3]
101 (set (reg:SI 147 t) (gt:SI (reg:SI 173) (reg:SI 175)))
102 (set (reg:SI 167) (reg:SI 147 t))
103 -> bb 5
104
105 [bb 4]
106 (set (reg:SI 147 t) (gt:SI (reg:SI 177) (reg:SI 179)))
107 (set (reg:SI 167) (reg:SI 147 t))
108 -> bb 5
109
110 [bb 5]
111 (set (reg:SI 147 t) (eq:SI (reg:SI 167) (const_int 0)))
112 (set (pc) (if_then_else (ne (reg:SI 147 t) (const_int 0))
113 (label_ref:SI 51) (pc)))
114
115 The common comparison is factored out and the branch condition is inverted:
116
117 [bb 3]
118 (set (reg:SI 167) (reg:SI 173))
119 (set (reg:SI 200) (reg:SI 175))
120 -> bb 5
121
122 [bb 4]
123 (set (reg:SI 167) (reg:SI 177))
124 (set (reg:SI 200) (reg:SI 179))
125 -> bb 5
126
127 [bb 5]
128 (set (reg:SI 147 t) (gt:SI (reg:SI 167) (reg:SI 200)))
129 (set (pc) (if_then_else (eq (reg:SI 147 t) (const_int 0))
130 (label_ref:SI 51) (pc)))
131
132
133 Example 3)
134 ----------
135
136 [bb 3]
137 (set (reg:SI 147 t) (gt:SI (reg:SI 173) (reg:SI 175)))
138 (set (reg:SI 167) (reg:SI 147 t))
139 -> bb 5
140
141 [bb 4]
142 (set (reg:SI 147 t) (ge:SI (reg:SI 179) (reg:SI 177)))
143 (set (reg:SI 167) (reg:SI 147 t))
144 -> bb 5
145
146 [bb 5]
147 (set (reg:SI 147 t) (eq:SI (reg:SI 167) (const_int 0)))
148 (set (pc) (if_then_else (ne (reg:SI 147 t) (const_int 0))
149 (label_ref:SI 51) (pc)))
150
151 The T bit lifetime is extended and the branch condition is inverted:
152
153 [bb 3]
154 (set (reg:SI 147 t) (gt:SI (reg:SI 173) (reg:SI 175)))
155 -> bb 5
156
157 [bb 4]
158 (set (reg:SI 147 t) (ge:SI (reg:SI 179) (reg:SI 177)))
159 -> bb 5
160
161 [bb 5]
162 (set (pc) (if_then_else (eq (reg:SI 147 t) (const_int 0))
163 (label_ref:SI 51) (pc)))
164
165
166 Example 4)
167 ----------
168
169 [bb 3]
170 (set (reg:SI 147 t) (eq:SI (reg:SI 173) (const_int 5)))
171 (set (reg:SI 167) (reg:SI 147 t))
172 -> bb 5
173
174 [bb 4]
175 (set (reg:SI 147 t) (eq:SI (reg:SI 176) (const_int 5)))
176 (set (reg:SI 167) (xor:SI (reg:SI 147 t) (const_int 1)))
177 -> bb 5
178
179 [bb 5]
180 (set (reg:SI 147 t) (eq:SI (reg:SI 167) (const_int 0)))
181 (set (pc) (if_then_else (ne (reg:SI 147 t) (const_int 0))
182 (label_ref:SI 50) (pc)))
183
184 In this case the comparisons are the same and could be combined, but the
185 branch condition is different for [bb 3] and [bb 5]. Since the comparison
186 is not a zero comparison, we can't negate one of the operands. The best thing
187 we can do here is to eliminate the comparison before the cbranch and invert
188 the ccreg in one of the BBs. On SH2A this will utilize the 'nott' instruction.
189
190 [bb 3]
191 (set (reg:SI 147 t) (eq:SI (reg:SI 173) (const_int 5)))
192 -> bb 5
193
194 [bb 4]
195 (set (reg:SI 147 t) (eq:SI (reg:SI 176) (const_int 5)))
196 (set (reg:SI 147 t) (xor:SI (reg:SI 147 t) (const_int 1)))
197 -> bb 5
198
199 [bb 5]
200 (set (pc) (if_then_else (eq (reg:SI 147 t) (const_int 0)) // inverted
201 (label_ref:SI 50) (pc)))
202
203
204 In order to handle cases such as above the RTL pass does the following:
205
206 - Find the ccreg sets (comparisons) and ccreg stores
207 (inverting and non-inverting) in all related BBs.
208
209 - If the comparison types in the BBs are all the same, try to combine the
210 comparisons in the BBs and replace the zero comparison before the cbranch
211 with the common comparison.
212
213 - If the cstores are the same, move the comparison before the cbranch
214 and replace the comparisons in the BBs with reg-reg copies to get the
215 operands in place (create new pseudo regs).
216
217 - If the cstores differ, try to apply the special case
218 (eq (reg) (const_int 0)) -> inverted = (not (reg)).
219 for the subordinate cstore types and eliminate the dominating ones.
220
221 - If the comparison types in the BBs are not the same, or the first approach
222 doesn't work out for some reason, try to eliminate the comparison before the
223 cbranch by extending the lifetime of the ccreg by leaving the individual
224 comparisons but eliminating the cstores.
225 If the cstores are all the same this is straight forward.
226 If they're not, try to reverse the ccreg for the subordinate cstore type
227 and eliminate the dominating one.
228 */
229
230 // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
231 // Helper functions
232
// All logging goes to the pass dump file (-fdump-rtl-*) and is a no-op
// when no dump file is active.
#define log_msg(...)\
  do { if (dump_file != NULL) fprintf (dump_file, __VA_ARGS__); } while (0)

// Dump a single insn.
#define log_insn(i)\
  do { if (dump_file != NULL) print_rtl_single (dump_file, \
					       (const_rtx)i); } while (0)

// Dump an arbitrary rtx (possibly multiple insns).
#define log_rtx(r)\
  do { if (dump_file != NULL) print_rtl (dump_file, (const_rtx)r); } while (0)

// Dump a message and return 'retval' from the enclosing function.
#define log_return(retval, ...)\
  do { if (dump_file != NULL) fprintf (dump_file, __VA_ARGS__); \
       return retval; } while (0)

// Like log_return, but for functions returning void.
#define log_return_void(...)\
  do { if (dump_file != NULL) fprintf (dump_file, __VA_ARGS__); \
       return; } while (0)
250
251 struct set_of_reg
252 {
253 // The insn where the search stopped or NULL_RTX.
254 rtx insn;
255
256 // The set rtx of the specified reg if found, NULL_RTX otherwise.
257 // Notice that the set rtx can also be in a parallel.
258 const_rtx set_rtx;
259
260 // The set source operand rtx if found, NULL_RTX otherwise.
261 rtx
262 set_src (void) const
263 {
264 return set_rtx == NULL_RTX ? NULL_RTX : XEXP (set_rtx, 1);
265 }
266
267 // The set destination operand rtx if found, NULL_RTX otherwise.
268 rtx
269 set_dst (void) const
270 {
271 return set_rtx == NULL_RTX ? NULL_RTX : XEXP (set_rtx, 0);
272 }
273
274 bool
275 empty (void) const
276 {
277 return insn == NULL_RTX || set_rtx == NULL_RTX;
278 }
279 };
280
281 // Given a reg rtx and a start insn find the insn (in the same basic block)
282 // that sets the reg.
283 static set_of_reg
284 find_set_of_reg_bb (rtx reg, rtx insn)
285 {
286 set_of_reg result = { insn, NULL_RTX };
287
288 if (!REG_P (reg) || insn == NULL_RTX)
289 return result;
290
291 for (result.insn = insn; result.insn != NULL_RTX;
292 result.insn = prev_nonnote_insn_bb (result.insn))
293 {
294 if (BARRIER_P (result.insn))
295 return result;
296 if (!NONJUMP_INSN_P (result.insn))
297 continue;
298 if (reg_set_p (reg, result.insn))
299 {
300 result.set_rtx = set_of (reg, result.insn);
301 if (result.set_rtx == NULL_RTX || GET_CODE (result.set_rtx) != SET)
302 result.set_rtx = NULL_RTX;
303 return result;
304 }
305 }
306
307 return result;
308 }
309
310 static bool
311 reg_dead_after_insn (const_rtx reg, const_rtx insn)
312 {
313 return find_regno_note (insn, REG_DEAD, REGNO (reg)) != NULL_RTX;
314 }
315
316 static bool
317 reg_unused_after_insn (const_rtx reg, const_rtx insn)
318 {
319 return find_regno_note (insn, REG_UNUSED, REGNO (reg)) != NULL_RTX;
320 }
321
322 // Check whether the two specified basic blocks are adjacent, i.e. there's no
323 // other basic block in between them.
324 static bool
325 is_adjacent_bb (basic_block a, basic_block b)
326 {
327 basic_block bb0[] = { a, b };
328 basic_block bb1[] = { b, a };
329
330 for (int i = 0; i < 2; ++i)
331 for (edge_iterator ei = ei_start (bb0[i]->succs);
332 !ei_end_p (ei); ei_next (&ei))
333 if (ei_edge (ei)->dest == bb1[i])
334 return true;
335
336 return false;
337 }
338
// Internal function of trace_reg_uses.
// Counts uses of 'reg' in the insns after 'start_insn' inside 'bb' and,
// recursively, in all successor BBs, accumulating into 'count'.  BBs already
// present in 'visited_bb' are skipped.  Walking a path stops when the reg is
// set or dies, or when 'abort_at_insn' is reached (that insn itself is still
// inspected for a use first).
static void
trace_reg_uses_1 (rtx reg, rtx start_insn, basic_block bb, int& count,
		  std::vector<basic_block>& visited_bb, rtx abort_at_insn)
{
  if (bb == NULL)
    return;

  if (std::find (visited_bb.begin (), visited_bb.end (), bb)
      != visited_bb.end ())
    log_return_void ("[bb %d] already visited\n", bb->index);

  visited_bb.push_back (bb);

  if (BB_END (bb) == NULL_RTX)
    log_return_void ("[bb %d] BB_END is null\n", bb->index);

  if (start_insn == NULL_RTX)
    log_return_void ("[bb %d] start_insn is null\n", bb->index);

  // Scan up to and including BB_END (bb); the walk below starts at the
  // insn after start_insn.
  rtx end_insn = NEXT_INSN (BB_END (bb));
  if (end_insn == NULL_RTX)
    log_return_void ("[bb %d] end_insn is null\n", bb->index);

  for (rtx i = NEXT_INSN (start_insn); i != end_insn; i = NEXT_INSN (i))
    {
      if (INSN_P (i))
	{
	  // Count non-debug insns that mention the reg, including uses
	  // recorded in call fusage.
	  if (NONDEBUG_INSN_P (i)
	      && (reg_overlap_mentioned_p (reg, PATTERN (i))
		  || (CALL_P (i) && find_reg_fusage (i, USE, reg))))
	    {
	      log_msg ("found use in [bb %d] at insn:\n", bb->index);
	      log_insn (i);
	      log_msg ("\n");
	      count += 1;
	    }

	  // Stop following this BB if the reg is set or dies along the way.
	  if (reg_set_p (reg, i) || reg_dead_after_insn (reg, i))
	    return;
	}

      if (abort_at_insn != NULL_RTX && abort_at_insn == i)
	return;
    }

  // Reached the end of the BB with the reg still live: continue in all
  // successor BBs.
  for (edge_iterator ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
    {
      basic_block succ_bb = ei_edge (ei)->dest;
      trace_reg_uses_1 (reg, BB_HEAD (succ_bb), succ_bb, count, visited_bb,
			abort_at_insn);
    }
}
393
394 // Trace uses of the specified reg in all basic blocks that are reachable from
395 // the specified insn. If 'abort_at_insn' is not null, abort the trace at
396 // that insn. If the insn 'abort_at_insn' uses the specified reg, it is also
397 // counted.
398 static int
399 trace_reg_uses (rtx reg, rtx start_insn, rtx abort_at_insn)
400 {
401 log_msg ("\ntrace_reg_uses\nreg = ");
402 log_rtx (reg);
403 log_msg ("\nstart_insn = ");
404 log_insn (start_insn);
405
406 int count = 0;
407 std::vector<basic_block> visited_bb;
408 visited_bb.reserve (32);
409
410 trace_reg_uses_1 (reg, start_insn, BLOCK_FOR_INSN (start_insn),
411 count, visited_bb, abort_at_insn);
412 return count;
413 }
414
415 // FIXME: Remove dependency on SH predicate function somehow.
416 extern int t_reg_operand (rtx, machine_mode);
417 extern int negt_reg_operand (rtx, machine_mode);
418
419 // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
420 // RTL pass class
421
// RTL pass that combines ccreg (T bit) comparisons and redundant condition
// code register stores across multiple basic blocks, as described in the
// comment at the top of the file.  Instantiated by the SH backend with
// 'split_insns' controlling whether touched insns are split at the end.
class sh_treg_combine : public rtl_opt_pass
{
public:
  sh_treg_combine (gcc::context* ctx, bool split_insns, const char* name);
  virtual ~sh_treg_combine (void);
  virtual bool gate (function *);
  virtual unsigned int execute (function *);

private:
  // Type of ccreg store that is supported.
  enum cstore_type_t
  {
    cstore_normal = 0,
    cstore_inverted = 1,
    cstore_unknown = -1
  };

  // Type of branch condition that is supported.
  enum branch_condition_type_t
  {
    branch_if_true = 1,
    branch_if_false = 0,
    unknown_branch_condition = -1
  };

  // For each basic block there can be a trace entry which consists of an
  // insn that sets the ccreg (usually a comparison) and a ccreg store.
  struct bb_entry
  {
    basic_block bb;
    set_of_reg setcc;
    set_of_reg cstore;
    cstore_type_t cstore_type;
    // Reg-reg copies of the cstore result, recorded in reverse insn order
    // (most recent copy in the insn stream first).
    std::vector<set_of_reg> cstore_reg_reg_copies;

    bb_entry (basic_block b)
    : bb (b), setcc (), cstore (), cstore_type (cstore_unknown) { }

    // The comparison rtx that sets the ccreg, or NULL_RTX if none found.
    rtx comparison_rtx (void) const { return setcc.set_src (); }
  };

  // A ccreg trace for a conditional branch.
  struct cbranch_trace
  {
    rtx cbranch_insn;
    branch_condition_type_t cbranch_type;

    // The comparison against zero right before the conditional branch.
    set_of_reg setcc;

    // All BBs that are related to the cbranch.  The last BB in the list is
    // the BB of the cbranch itself and might be empty.
    std::list<bb_entry> bb_entries;

    cbranch_trace (rtx insn)
    : cbranch_insn (insn),
      cbranch_type (unknown_branch_condition),
      setcc ()
    {
    }

    // The basic block that contains the conditional branch insn.
    basic_block bb (void) const { return BLOCK_FOR_INSN (cbranch_insn); }

    // The condition rtx of the cbranch, or NULL_RTX if the cbranch is not
    // a pc set.
    rtx
    branch_condition_rtx (void) const
    {
      rtx x = pc_set (cbranch_insn);
      return x == NULL_RTX ? NULL_RTX : XEXP (XEXP (x, 1), 0);
    }

    bool
    can_invert_condition (void) const
    {
      // The branch condition can be inverted safely only if the condition
      // reg is dead after the cbranch.
      return reg_dead_after_insn (XEXP (branch_condition_rtx (), 0),
				  cbranch_insn);
    }
  };

  static const pass_data default_pass_data;

  // Tells whether modified or newly added insns are to be split at the end
  // of the pass.
  const bool m_split_insns;

  // rtx of the ccreg that is obtained from the target.
  rtx m_ccreg;

  // Newly added or modified insns.
  std::vector<rtx> m_touched_insns;

  // Given an rtx determine whether it's a comparison with a constant zero.
  static bool is_cmp_eq_zero (const_rtx i);

  // Update the stored mode of the ccreg from the given branch condition rtx.
  void update_ccreg_mode (const_rtx cond);

  // Given an rtx, figure out the branch condition, assuming that it is
  // in canonical form:
  //   (ne (reg) (const_int 0))
  //   (eq (reg) (const_int 0))
  branch_condition_type_t branch_condition_type (const_rtx cond) const;

  // Return true if the specified rtx is either a normal ccreg or
  // a negated form of the ccreg.
  bool is_normal_ccreg (const_rtx x) const;
  bool is_inverted_ccreg (const_rtx x) const;

  // Given a reg rtx and a start insn rtx, try to find the insn in the same
  // basic block that sets the specified reg.
  // Return how the search ended and the insn where it stopped or NULL_RTX.
  enum record_return_t
  {
    set_found,
    set_not_found,
    other_set_found
  };
  record_return_t record_set_of_reg (rtx reg, rtx start_insn, bb_entry& e);

  // Tells whether the cstore insn of the specified bb_entry can be removed
  // safely without triggering any side effects.
  bool can_remove_cstore (const bb_entry& e,
			  const cbranch_trace& trace) const;

  // Tells whether the setcc insn of the specified bb_entry can be removed
  // safely without triggering any side effects.
  bool can_remove_comparison (const bb_entry& e,
			      const cbranch_trace& trace) const;

  // Tells whether the two specified comparison rtx can be combined into a
  // single comparison.
  bool can_combine_comparisons (const_rtx x, const_rtx y) const;

  // Tells whether the ccreg usage can be extended from the bb_entry on until
  // the final cbranch of the trace.
  bool can_extend_ccreg_usage (const bb_entry& e,
			       const cbranch_trace& trace) const;

  // Create an insn rtx that is a negating reg move (not operation).
  rtx make_not_reg_insn (rtx dst_reg, rtx src_reg) const;

  // Create an insn rtx that inverts the ccreg.
  rtx make_inv_ccreg_insn (void) const;

  // Adds the specified insn to the set of modified or newly added insns that
  // might need splitting at the end of the pass.
  rtx touched_insn (rtx i);

  // Try to invert the branch condition of the specified trace.
  bool try_invert_branch_condition (cbranch_trace& trace);

  // Try to optimize a cbranch trace by combining comparisons in BBs and
  // eliminate the cstores.
  bool try_combine_comparisons (cbranch_trace& trace,
				int cstore_count, int inv_cstore_count,
				cstore_type_t dominating_cstore);

  // Try to optimize a cbranch trace by eliminating the cstores in BBs only.
  bool try_eliminate_cstores (cbranch_trace& trace,
			      int cstore_count, int inv_cstore_count,
			      cstore_type_t dominating_cstore);

  // Given a branch insn, try to optimize its branch condition.
  // If any insns are modified or added they are added to 'm_touched_insns'.
  void try_optimize_cbranch (rtx i);
};
589
590
// Pass metadata.  The empty name is overwritten by the constructor argument.
const pass_data sh_treg_combine::default_pass_data =
{
  RTL_PASS,		// type
  "",			// name (overwritten by the constructor)
  OPTGROUP_NONE,	// optinfo_flags
  TV_OPTIMIZE,		// tv_id
  0,			// properties_required
  0,			// properties_provided
  0,			// properties_destroyed
  0,			// todo_flags_start
  TODO_df_finish | TODO_df_verify	// todo_flags_finish
};
603
// Construct the pass.  'split_insns' tells whether modified or newly added
// insns are split at the end of the pass; 'name' becomes the pass name.
sh_treg_combine::sh_treg_combine (gcc::context* ctx, bool split_insns,
				  const char* name)
: rtl_opt_pass (default_pass_data, ctx),
  m_split_insns (split_insns),
  m_ccreg (NULL_RTX)
{
  // Overwrite default name in pass_data base class.
  this->name = name;
}
613
// Nothing to tear down explicitly; members clean up themselves.
sh_treg_combine::~sh_treg_combine (void)
{
}
617
// Update the stored mode of the ccreg from the given branch condition rtx,
// so that subsequently generated ccreg rtx carry the mode in which the
// ccreg is actually being used.
void sh_treg_combine::update_ccreg_mode (const_rtx cond)
{
  // Bail out only if op 0 is a reg other than the ccreg.
  // NOTE(review): a non-reg op 0 falls through to the mode update below --
  // presumably conditions passed in here always have the ccreg as op 0;
  // confirm against the callers.
  if (REG_P (XEXP (cond, 0)) && REGNO (XEXP (cond, 0)) != REGNO (m_ccreg))
    return;

  machine_mode m = GET_MODE (XEXP (cond, 0));
  if (m == GET_MODE (m_ccreg))
    return;

  PUT_MODE (m_ccreg, m);
  log_msg ("updated ccreg mode: ");
  log_rtx (m_ccreg);
  log_msg ("\n");
}
632
633 bool
634 sh_treg_combine::is_cmp_eq_zero (const_rtx i)
635 {
636 return i != NULL_RTX && GET_CODE (i) == EQ
637 && REG_P (XEXP (i, 0)) && XEXP (i, 1) == const0_rtx;
638 }
639
640 sh_treg_combine::branch_condition_type_t
641 sh_treg_combine::branch_condition_type (const_rtx cond) const
642 {
643 if (cond == NULL_RTX)
644 return unknown_branch_condition;
645
646 if (GET_CODE (cond) == NE
647 && REG_P (XEXP (cond, 0)) && REGNO (XEXP (cond, 0)) == REGNO (m_ccreg)
648 && XEXP (cond, 1) == const0_rtx)
649 return branch_if_true;
650
651 else if (GET_CODE (cond) == EQ
652 && REG_P (XEXP (cond, 0)) && REGNO (XEXP (cond, 0)) == REGNO (m_ccreg)
653 && XEXP (cond, 1) == const0_rtx)
654 return branch_if_false;
655
656 else
657 return unknown_branch_condition;
658 }
659
660 bool
661 sh_treg_combine::is_normal_ccreg (const_rtx x) const
662 {
663 return t_reg_operand (const_cast<rtx> (x), VOIDmode);
664 }
665
666 bool
667 sh_treg_combine::is_inverted_ccreg (const_rtx x) const
668 {
669 return negt_reg_operand (const_cast<rtx> (x), VOIDmode);
670 }
671
// Given a reg rtx and a start insn, try to find the cstore (the set of
// 'reg') in new_entry's BB, following reg-reg copies of the reg backwards,
// and then the setcc (the set of the ccreg) that feeds it.  Fills in
// 'new_entry' and returns how the search ended.
sh_treg_combine::record_return_t
sh_treg_combine::record_set_of_reg (rtx reg, rtx start_insn,
				    bb_entry& new_entry)
{
  log_msg ("\n[bb %d]\n", new_entry.bb->index);

  if (start_insn == NULL_RTX)
    log_return (set_not_found, "set of reg not found. empty BB?\n");

  new_entry.cstore_type = cstore_unknown;

  // Each iteration either classifies the found set as a cstore and leaves,
  // or follows one reg-reg copy backwards and retries.
  for (rtx i = start_insn; i != NULL_RTX; )
    {
      new_entry.cstore = find_set_of_reg_bb (reg, i);

      if (new_entry.cstore.set_src () == NULL_RTX)
	log_return (set_not_found, "set of reg not found (cstore)\n");

      log_insn (new_entry.cstore.insn);
      log_msg ("\n");

      if (is_normal_ccreg (new_entry.cstore.set_src ()))
	{
	  log_msg ("normal condition store\n");
	  new_entry.cstore_type = cstore_normal;
	}
      else if (is_inverted_ccreg (new_entry.cstore.set_src ()))
	{
	  log_msg ("inverted condition store\n");
	  new_entry.cstore_type = cstore_inverted;
	}
      else if (REG_P (new_entry.cstore.set_src ()))
	{
	  // If it's a reg-reg copy follow the copied reg.
	  new_entry.cstore_reg_reg_copies.push_back (new_entry.cstore);
	  reg = new_entry.cstore.set_src ();
	  i = new_entry.cstore.insn;

	  log_msg ("reg-reg copy. tracing ");
	  log_rtx (reg);
	  log_msg ("\n");
	  continue;
	}
      else
	log_return (other_set_found, "not a condition store\n");

      gcc_assert (new_entry.cstore_type != cstore_unknown);

      // Now see how the ccreg was set.
      // For now it must be in the same BB.
      log_msg ("tracing ccreg\n");
      new_entry.setcc =
	  find_set_of_reg_bb (m_ccreg,
			      prev_nonnote_insn_bb (new_entry.cstore.insn));

      // If cstore was found but setcc was not found continue anyway, as
      // for some of the optimization types the setcc is irrelevant.
      if (new_entry.setcc.set_src () == NULL_RTX)
	log_return (set_found, "set of ccreg not found\n");

      else if (GET_CODE (new_entry.setcc.set_rtx) == SET)
	{
	  // Also allow insns that set the ccreg, but are not true comparison
	  // insns, as long as they are sets and not e.g. clobbers.
	  log_insn (new_entry.setcc.insn);
	  log_msg ("\n");
	  return set_found;
	}
      else
	// If cstore was found but setcc was not found continue anyway, as
	// for some of the optimization types the setcc is irrelevant.
	// NOTE(review): find_set_of_reg_bb records set_rtx only for SET
	// codes, so this branch looks unreachable -- confirm.
	log_return (set_found, "unknown set of ccreg\n");
    }

  log_return (set_not_found, "set of reg not found\n");
}
748
// Tells whether the cstore insn of the specified bb_entry can be removed
// safely without triggering any side effects, i.e. whether nothing except
// the final ccreg test before the cbranch consumes its result.
bool
sh_treg_combine::can_remove_cstore (const bb_entry& e,
				    const cbranch_trace& trace) const
{
  if (volatile_insn_p (PATTERN (e.cstore.insn)))
    {
      log_msg ("can't remove insn\n");
      log_insn (e.cstore.insn);
      log_return (false, "\nbecause it's volatile\n");
    }

  // On SH there are parallel patterns which store the ccreg multiple times.
  // In this case it's not safe.
  rtx cstore_pat = PATTERN (e.cstore.insn);
  if (GET_CODE (cstore_pat) == PARALLEL)
    for (int i = 0; i < XVECLEN (cstore_pat, 0); ++i)
      {
	rtx x = XVECEXP (cstore_pat, 0, i);

	// It's the cstore set that we're referring to, ignore that one.
	if (x != e.cstore.set_rtx
	    && GET_CODE (x) == SET && reg_referenced_p (m_ccreg, x))
	  {
	    log_msg ("can't remove insn\n");
	    log_insn (e.cstore.insn);
	    log_return (false, "\nbecause it's a multiple ccreg store\n");
	  }
      }

  // If the cstore sets the ccreg (e.g. negc) and the ccreg is used afterwards
  // it's not safe.
  if (modified_in_p (m_ccreg, e.cstore.insn)
      && !(reg_dead_after_insn (m_ccreg, e.cstore.insn)
	   || reg_unused_after_insn (m_ccreg, e.cstore.insn)))
    {
      log_msg ("can't remove insn\n");
      log_insn (e.cstore.insn);
      log_return (false, "\nbecause it sets the ccreg\n");
    }

  // If the cstore destination reg is copied around check the reg-reg
  // copies.  At every reg-reg copy the copied reg must be dead and there
  // must not be a usage of the copied regs between the reg-reg copies.
  // Otherwise we assume that the result of the cstore is used in some
  // other way.
  rtx prev_insn = e.cstore.insn;
  for (std::vector<set_of_reg>::const_reverse_iterator i =
	   e.cstore_reg_reg_copies.rbegin ();
       i != e.cstore_reg_reg_copies.rend (); ++i)
    {
      if (!reg_dead_after_insn (i->set_src (), i->insn))
	{
	  log_msg ("can't remove insn\n");
	  log_insn (i->insn);
	  log_return (false, "\nbecause source of reg-reg copy doesn't die\n");
	}

      if (reg_used_between_p (i->set_src (), prev_insn, i->insn))
	{
	  log_msg ("can't remove insn\n");
	  log_insn (i->insn);
	  log_return (false, "\nbecause reg %d is otherwise used\n",
		      REGNO (i->set_src ()));
	}

      prev_insn = i->insn;
    }

  // The cstore_dst reg must die after the test before the cbranch, otherwise
  // it's not safe to remove the cstore.
  // If the cstore destination reg is copied around check the effective
  // destination reg of the cstore.  The reg-reg copies are recorded in
  // reverse order, i.e. the most recent reg-reg copy in the insn list
  // comes first.
  rtx cstore_dst = e.cstore_reg_reg_copies.empty ()
		   ? e.cstore.set_dst ()
		   : e.cstore_reg_reg_copies.front ().set_dst ();

  if (!reg_dead_after_insn (cstore_dst, trace.setcc.insn))
    {
      log_msg ("can't remove insn\n");
      log_insn (e.cstore.insn);
      log_return (false, "\nbecause its effective target reg %d doesn't die "
		  "after trace.setcc.insn\n", REGNO (cstore_dst));
    }

  // Also check that the cstore_dst reg is not used in other reachable code
  // paths before it dies.
  // Count the uses of the effective cstore_dst reg (i.e. the last known reg
  // that holds the cstore value after reg-reg copies) in all BBs that can be
  // reached from bb_entry's BB including the BB of the cstore insn.
  // If we get more than 1 uses we assume that it's used somewhere else and is
  // not safe to be removed.
  int cstore_dst_use_count = trace_reg_uses (cstore_dst, e.cstore.insn,
					     trace.setcc.insn);
  if (cstore_dst_use_count > 1)
    {
      log_msg ("can't remove insn\n");
      log_insn (e.cstore.insn);
      log_return (false, "\nbecause its effective target reg %d is used "
		  "in %d other places\n", REGNO (cstore_dst),
		  cstore_dst_use_count - 1);
    }

  return true;
}
855
856 bool
857 sh_treg_combine::can_remove_comparison (const bb_entry& e,
858 const cbranch_trace&/* trace*/) const
859 {
860 // If the ccreg is used otherwise between the comparison and the cstore,
861 // it's not safe.
862 if (reg_used_between_p (m_ccreg, e.setcc.insn, e.cstore.insn))
863 {
864 log_msg ("can't remove insn\n");
865 log_insn (e.setcc.insn);
866 log_return (false, "\nbecause the ccreg is used otherwise\n");
867 }
868
869 if (!reg_dead_after_insn (m_ccreg, e.cstore.insn)
870 && !reg_unused_after_insn (m_ccreg, e.cstore.insn))
871 {
872 log_msg ("can't remove insn\n");
873 log_insn (e.cstore.insn);
874 log_return (false, "\nbecause ccreg is not dead or unused afterwards\n");
875 }
876
877 // On SH there are also multiple set patterns that can be used for
878 // comparisons, such as "shll". It's not safe to remove those.
879 if (multiple_sets (e.setcc.insn))
880 {
881 log_msg ("can't remove insn\n");
882 log_insn (e.cstore.insn);
883 log_return (false, "\nbecause it's a multiple set\n");
884 }
885
886 return true;
887 }
888
// Create an insn rtx that is a negating reg move (not operation),
// i.e. dst_reg = ~src_reg.  Returns the emitted insn sequence.
rtx
sh_treg_combine::make_not_reg_insn (rtx dst_reg, rtx src_reg) const
{
  // This will go through expanders and may output multiple insns
  // for multi-word regs.
  start_sequence ();
  expand_simple_unop (GET_MODE (dst_reg), NOT, src_reg, dst_reg, 0);
  rtx i = get_insns ();
  end_sequence ();
  return i;
}
900
// Create an insn rtx that inverts the ccreg, i.e. ccreg = ccreg ^ 1.
// On SH2A this can be matched by the 'nott' instruction.
rtx
sh_treg_combine::make_inv_ccreg_insn (void) const
{
  start_sequence ();
  rtx i = emit_insn (gen_rtx_SET (VOIDmode, m_ccreg,
				  gen_rtx_fmt_ee (XOR, GET_MODE (m_ccreg),
						  m_ccreg, const1_rtx)));
  end_sequence ();
  return i;
}
911
// Adds the specified insn to the set of modified or newly added insns that
// might need splitting at the end of the pass.  Returns the insn unchanged
// so calls can be chained.
rtx
sh_treg_combine::touched_insn (rtx i)
{
  m_touched_insns.push_back (i);
  return i;
}
918
919 bool
920 sh_treg_combine::can_combine_comparisons (const_rtx x, const_rtx y) const
921 {
922 if (GET_CODE (x) != GET_CODE (y))
923 return false;
924
925 rtx x_op0 = XEXP (x, 0);
926 rtx x_op1 = XEXP (x, 1);
927
928 rtx y_op0 = XEXP (y, 0);
929 rtx y_op1 = XEXP (y, 1);
930
931 if (!REG_P (x_op0) || !REG_P (y_op0))
932 return false;
933
934 if (GET_MODE (x_op0) != GET_MODE (y_op0))
935 return false;
936
937 // rtx_equal_p also compares the reg numbers which we do not care about
938 // here, as long as both are regs and the modes are the same.
939 if (REG_P (x_op1))
940 return REG_P (y_op1) && GET_MODE (x_op1) == GET_MODE (y_op1);
941
942 return rtx_equal_p (x_op1, y_op1);
943 }
944
945 bool
946 sh_treg_combine::can_extend_ccreg_usage (const bb_entry& e,
947 const cbranch_trace& trace) const
948 {
949 // Check if the ccreg is not modified by other insins in the BB path until
950 // the final cbranch of the trace.
951 // Start checking after the cstore that follows the setcc, assuming that
952 // the cstore will be removed.
953
954 // The assumption here is that the specified bb_entry's BB is a direct
955 // predecessor of the trace.cbranch_insn's BB.
956 if (e.bb != trace.bb () && !is_adjacent_bb (e.bb, trace.bb ()))
957 log_return (false,
958 "can't extend ccreg usage -- [bb %d] and [bb %d] are not adjacent\n",
959 e.bb->index, trace.bb ()->index);
960
961 if (e.cstore.empty ())
962 log_return (false, "can't extend ccreg usage -- no cstore\n");
963
964 // The entry's cstore is in the same BB as the final cbranch.
965 if (e.bb == trace.bb ())
966 {
967 if (reg_set_between_p (m_ccreg, e.cstore.insn, trace.setcc.insn))
968 log_return (false,
969 "can't extend ccreg usage -- it's modified between e.cstore.insn "
970 "and trace.setcc.insn");
971 else
972 return true;
973 }
974
975 // The entry's cstore and the final cbranch are in different BBs.
976 if (reg_set_between_p (m_ccreg, e.cstore.insn, NEXT_INSN (BB_END (e.bb))))
977 log_return (false,
978 "can't extend ccreg usage -- it's modified in [bb %d]", e.bb->index);
979
980 if (reg_set_between_p (m_ccreg, PREV_INSN (BB_HEAD (trace.bb ())),
981 trace.setcc.insn))
982 log_return (false,
983 "can't extend ccreg usage -- it's modified in [bb %d]",
984 trace.bb ()->index);
985
986 return true;
987 }
988
989 bool
990 sh_treg_combine::try_invert_branch_condition (cbranch_trace& trace)
991 {
992 log_msg ("inverting branch condition\n");
993
994 if (!invert_jump_1 (trace.cbranch_insn, JUMP_LABEL (trace.cbranch_insn)))
995 log_return (false, "invert_jump_1 failed\n");
996
997 if (verify_changes (num_validated_changes ()))
998 confirm_change_group ();
999 else
1000 log_return (false, "verify_changed failed\n");
1001
1002 touched_insn (trace.cbranch_insn);
1003 return true;
1004 }
1005
// Try to replace all the setcc/cstore pairs recorded in the trace's BB
// entries with one single comparison insn placed immediately before the
// conditional branch, rewriting the per-BB comparisons into plain reg-reg
// moves into freshly created pseudos.
// cstore_count / inv_cstore_count are the number of non-inverting and
// inverting cstores found in the trace; dominating_cstore selects which
// of the two kinds is kept as the "normal" case (the other kind is
// handled by inverting, see below).
// Returns true if the transformation was carried out, false if any of the
// preconditions failed (in which case nothing has been emitted yet --
// all checks are done before the first insn is created).
bool
sh_treg_combine::try_combine_comparisons (cbranch_trace& trace,
					  int cstore_count,
					  int inv_cstore_count,
					  cstore_type_t dominating_cstore)
{
  log_msg ("\ntry_combine_comparisons\n");

  // This function will always try to create new pseudos.
  if (!can_create_pseudo_p ())
    log_return (false, "can't create pseudos\n");

  // Check that all ccset insns are comparisons and all comparison types in
  // all BBs are the same and could be combined into one single comparison.
  // 'comp' / 'comp_insn' remember the first comparison found; every other
  // entry must be combinable with it.
  rtx comp = NULL_RTX;
  rtx comp_insn = NULL_RTX;

  for (std::list<bb_entry>::const_iterator i = trace.bb_entries.begin ();
       i != trace.bb_entries.end (); ++i)
    {
      int i_empty_count = i->setcc.empty () + i->cstore.empty ();

      // A completly empty entry is OK (could be the BB of the cbranch).
      if (i_empty_count == 2)
	continue;

      // Otherwise we need both, the setcc and the cstore.
      if (i_empty_count != 0)
	log_return (false, "bb entry is not a setcc cstore pair\n");

      rtx other_comp = i->comparison_rtx ();

      if (!COMPARISON_P (other_comp))
	{
	  log_msg ("setcc is not a comparison:\n");
	  log_rtx (other_comp);
	  log_return (false, "\n");
	}

      if (comp_insn == NULL_RTX)
	{
	  comp = other_comp;
	  comp_insn = i->setcc.insn;
	}
      else if (!can_combine_comparisons (comp, other_comp))
	return false;

      // The goal here is to eliminate all cstores and comparisons in the BBs.
      // Thus check if every cstore can actually be removed safely.
      if (!can_remove_cstore (*i, trace) || !can_remove_comparison (*i, trace))
	return false;
    }

  // FIXME: The first operand of the comparison must be a simple reg.
  // This effectively prohibits combining div0s comparisons such as
  // (lt:SI (xor:SI (reg:SI) (reg:SI)))
  if (!REG_P (XEXP (comp, 0)))
    {
      log_msg ("comparison operand 0\n");
      log_rtx (XEXP (comp, 0));
      log_return (false, "\nis not a reg\n");
    }

  // Create the pseudos that will carry the comparison operands from each
  // BB to the common comparison insn.  The second operand only needs a
  // pseudo if it is a reg; otherwise (e.g. a const_int) it is used as-is.
  rtx comp_op0 = gen_reg_rtx (GET_MODE (XEXP (comp, 0)));
  rtx comp_op1 = REG_P (XEXP (comp, 1))
		 ? gen_reg_rtx (GET_MODE (XEXP (comp, 1)))
		 : XEXP (comp, 1);

  // If there are both, inverting and non-inverting cstores, they can only
  // be eliminated if the comparison can be inverted.  We assume that the
  // comparison insns that we find are already minimal and canonicalized.
  // There is one special case though, where an integer comparison
  // (eq (reg) (const_int 0))
  // can be inverted with a sequence
  // (eq (not (reg)) (const_int 0))
  if (inv_cstore_count != 0 && cstore_count != 0)
    {
      // Probe whether a not-reg insn is available on this target at all
      // before checking the individual entries.
      if (make_not_reg_insn (comp_op0, comp_op0) == NULL_RTX)
	log_return (false, "make_not_reg_insn failed.\n");

      for (std::list<bb_entry>::const_iterator i = trace.bb_entries.begin ();
	   i != trace.bb_entries.end (); ++i)
	{
	  if (i->setcc.empty () || i->cstore.empty ())
	    continue;

	  if (i->cstore_type != dominating_cstore
	      && !is_cmp_eq_zero (i->comparison_rtx ()))
	    {
	      log_msg ("can't invert comparison in insn\n");
	      log_insn (i->setcc.insn);
	      log_return (false,
			  "\nbecause it's not a (eq (reg) (const_int 0))\n");
	    }
	}
    }

  // If the kept cstore kind is the non-inverting one, the final branch
  // condition has to be flipped to compensate.
  if (dominating_cstore == cstore_normal
      && !try_invert_branch_condition (trace))
    return false;

  // Replace the test insn before the cbranch with the common comparison.
  // Instead of creating a new insn from scratch we copy the common comparison
  // pattern.  This simplifies handling parallel comparison patterns, such as
  // FP comparisons on SH, which have an extra use on FPSCR.
  log_msg ("installing common comparison in [bb %d]\n", trace.bb ()->index);

  rtx common_comp_pat = copy_rtx (PATTERN (comp_insn));
  rtx common_comp = const_cast<rtx> (set_of (m_ccreg, common_comp_pat));

  gcc_assert (common_comp != NULL_RTX);

  // Patch the copied pattern's comparison operands to the new pseudos.
  XEXP (XEXP (common_comp, 1), 0) = comp_op0;
  XEXP (XEXP (common_comp, 1), 1) = comp_op1;

  log_rtx (common_comp_pat);
  log_msg ("\n");

  rtx common_comp_insn = touched_insn (emit_insn_after (common_comp_pat,
							trace.setcc.insn));

  // The new pseudos die at the common comparison.
  if (REG_P (comp_op0))
    add_reg_note (common_comp_insn, REG_DEAD, copy_rtx (comp_op0));
  if (REG_P (comp_op1))
    add_reg_note (common_comp_insn, REG_DEAD, copy_rtx (comp_op1));

  delete_insn (trace.setcc.insn);

  // Replace comparison and cstore insns with reg-reg moves in all BBs.
  for (std::list<bb_entry>::const_iterator i = trace.bb_entries.begin ();
       i != trace.bb_entries.end (); ++i)
    {
      if (i->setcc.empty () || i->cstore.empty ())
	continue;

      rtx i_comp_op0 = XEXP (i->comparison_rtx (), 0);
      rtx i_comp_op1 = XEXP (i->comparison_rtx (), 1);

      if (i->cstore_type == dominating_cstore)
	{
	  log_msg ("replacing comparison and cstore with reg move "
		   "in [bb %d]\n", i->bb->index);

	  rtx new_i = touched_insn (
	      emit_insn_after (gen_move_insn (comp_op0, i_comp_op0),
			       i->setcc.insn));

	  if (REG_P (i_comp_op0)
	      && reg_dead_after_insn (i_comp_op0, i->setcc.insn))
	    add_reg_note (new_i, REG_DEAD, copy_rtx (i_comp_op0));

	  // If the second operand is a reg, have to emit a move insn.
	  // Otherwise assume it's a const_int and just reference it.
	  if (REG_P (comp_op1))
	    {
	      new_i = touched_insn (
		  emit_insn_after (gen_move_insn (comp_op1, i_comp_op1),
				   i->setcc.insn));

	      if (reg_dead_after_insn (i_comp_op1, i->setcc.insn))
		add_reg_note (new_i, REG_DEAD, copy_rtx (i_comp_op1));
	    }
	}
      else
	{
	  // Non-dominating kind: emit an inverting (not-reg) move instead,
	  // which was verified to be possible above.
	  log_msg ("replacing comparison and cstore with inverting reg move "
		   "in [bb %d]\n", i->bb->index);

	  rtx new_i = make_not_reg_insn (comp_op0, i_comp_op0);
	  if (REG_P (i_comp_op0)
	      && reg_dead_after_insn (i_comp_op0, i->setcc.insn))
	    add_reg_note (new_i, REG_DEAD, copy_rtx (i_comp_op0));

	  touched_insn (emit_insn_after (new_i, i->setcc.insn));
	}

      // The new moves were emitted after the setcc insn, so the original
      // cstore and setcc can now be removed.
      delete_insn (i->cstore.insn);
      delete_insn (i->setcc.insn);
    }

  return true;
}
1188
// Fallback strategy to try_combine_comparisons: instead of merging the
// comparisons, try to eliminate the ccreg stores (cstores) in all BB
// entries of the trace and extend the lifetime of the ccreg value down
// to the conditional branch, so that the redundant test insn before the
// cbranch can be deleted.
// cstore_count / inv_cstore_count / dominating_cstore have the same
// meaning as in try_combine_comparisons.
// Returns true if the transformation was carried out.  All feasibility
// checks are done in the first loop before anything is modified.
bool
sh_treg_combine::try_eliminate_cstores (cbranch_trace& trace,
					int cstore_count, int inv_cstore_count,
					cstore_type_t dominating_cstore)
{
  log_msg ("\ntry_eliminate_cstores\n");

  for (std::list<bb_entry>::const_iterator i = trace.bb_entries.begin ();
       i != trace.bb_entries.end (); ++i)
    {
      // A completly empty entry is OK (could be the BB of the cbranch).
      if (i->setcc.empty () && i->cstore.empty ())
	continue;

      // We're going to eliminate cstores, but for that they have to be
      // there.  We don't care about the setcc in this case.
      if (i->cstore.empty ())
	log_return (false, "bb entry cstore empty -- aborting\n");

      // The goal here is to eliminate all cstores in the BBs and extend the
      // ccreg usage.
      if (!can_extend_ccreg_usage (*i, trace))
	return false;

      // If the cstore can't be removed we can keep it around as long as
      // it doesn't modify the ccreg.
      if (!can_remove_cstore (*i, trace)
	  && modified_in_p (m_ccreg, i->cstore.insn))
	log_return (false, "cstore sets ccreg -- aborting\n");
    }

  // If there are both, inverting and non-inverting cstores, we'll have to
  // invert the ccreg as a replacement for one of them.
  // Check up-front that the target can actually match such an inversion
  // insn before committing to any changes.
  if (cstore_count != 0 && inv_cstore_count != 0)
    {
      rtx i = make_inv_ccreg_insn ();
      if (recog_memoized (i) < 0)
	{
	  log_msg ("failed to match ccreg inversion insn:\n");
	  log_rtx (PATTERN (i));
	  log_return (false, "\naborting\n");
	}
    }

  // Keeping the non-inverting kind means the branch polarity must flip.
  if (dominating_cstore == cstore_normal
      && !try_invert_branch_condition (trace))
    return false;

  // Eliminate cstores in all BBs.
  for (std::list<bb_entry>::const_iterator i = trace.bb_entries.begin ();
       i != trace.bb_entries.end (); ++i)
    {
      if (i->cstore.empty ())
	continue;

      if (i->cstore_type == dominating_cstore)
	log_msg ("removing cstore in [bb %d]\n", i->bb->index);
      else
	{
	  // The non-dominating kind is compensated by inverting the ccreg
	  // right after the cstore.
	  log_msg ("replacing cstore with ccreg inversion in [bb %d]\n",
		   i->bb->index);

	  touched_insn (
	      emit_insn_after (make_inv_ccreg_insn (), i->cstore.insn));
	}

      // Cstores that can't be removed (checked above) stay in place; they
      // were verified not to clobber the ccreg.
      if (can_remove_cstore (*i, trace))
	delete_insn (i->cstore.insn);
    }

  log_msg ("removing test insn before cbranch\n");
  delete_insn (trace.setcc.insn);
  return true;
}
1263
// Main per-branch entry point of the pass.  Given a conditional branch
// insn, trace backwards how the ccreg value it branches on was produced:
// find the (eq (reg) (const_int 0)) test insn in the same BB, then record
// the setcc/cstore pairs that set the tested reg -- first in the cbranch's
// own BB, otherwise in all predecessor BBs.  If a consistent trace is
// found, first try to combine all comparisons into one; if that fails,
// try to eliminate the cstores instead.  Aborts silently (with dump-file
// logging only) whenever a precondition is not met.
void
sh_treg_combine::try_optimize_cbranch (rtx insn)
{
  cbranch_trace trace (insn);

  log_msg ("\n\n--------------------------------------\n");
  log_msg ("found cbranch insn in [bb %d]:\n", trace.bb ()->index);
  log_insn (insn);

  trace.cbranch_type = branch_condition_type (trace.branch_condition_rtx ());

  if (trace.cbranch_type == branch_if_true)
    log_msg ("condition: branch if true\n");
  else if (trace.cbranch_type == branch_if_false)
    log_msg ("condition: branch if false\n");
  else
    {
      log_msg ("unknown branch condition\n");
      log_rtx (trace.branch_condition_rtx ());
      log_return_void ("\n");
    }

  // The ccreg was created with VOIDmode in execute; fix up its mode from
  // the first branch condition we actually see.
  update_ccreg_mode (trace.branch_condition_rtx ());

  // Scan the insns backwards for an insn that sets the ccreg by testing a
  // reg against zero like
  // (set (reg ccreg) (eq (reg) (const_int 0)))
  // The testing insn could also be outside of the current basic block, but
  // for now we limit the search to the current basic block.
  trace.setcc = find_set_of_reg_bb (m_ccreg, prev_nonnote_insn_bb (insn));

  if (!is_cmp_eq_zero (trace.setcc.set_src ()))
    log_return_void ("could not find set of ccreg in current BB\n");

  // The reg whose definition(s) we are going to trace.
  rtx trace_reg = XEXP (trace.setcc.set_src (), 0);

  log_msg ("set of ccreg:\n");
  log_insn (trace.setcc.insn);

  // See if we can remove the trace.setcc insn safely.
  if (reg_used_between_p (m_ccreg, trace.setcc.insn, trace.cbranch_insn))
    log_return_void ("ccreg used between testing insn and branch insn\n");

  if (volatile_insn_p (PATTERN (trace.setcc.insn)))
    {
      log_msg ("can't remove insn\n");
      log_insn (trace.setcc.insn);
      log_return_void ("\nbecause it's volatile\n");
    }

  // Now that we have an insn which tests some reg and sets the condition
  // reg before the conditional branch, try to figure out how that tested
  // reg was formed, i.e. find all the insns that set the tested reg in
  // some way.
  // The tested reg might be set in multiple basic blocks so we need to
  // check all basic blocks which can reach this current basic block.
  // If the set of reg is an inverting or non-inverting store of the condition
  // register, check how the ccreg value was obtained.
  log_msg ("\ntracing ");
  log_rtx (trace_reg);
  log_msg ("\n");


  // First check the basic block where the conditional branch is in.
  // If we find it here there's no point in checking other BBs.
  trace.bb_entries.push_front (bb_entry (trace.bb ()));

  record_return_t res =
      record_set_of_reg (trace_reg, prev_nonnote_insn_bb (trace.setcc.insn),
			 trace.bb_entries.front ());

  if (res == other_set_found)
    log_return_void ("other set found - aborting trace\n");
  else if (res == set_not_found)
    {
      // It seems the initial search in the BB of the conditional branch
      // didn't find anything.  Now look in all predecessor BBs.
      // Every predecessor must yield a usable set; otherwise the whole
      // trace is abandoned.
      for (edge_iterator ei = ei_start (trace.bb ()->preds);
	   !ei_end_p (ei); ei_next (&ei))
	{
	  edge e = ei_edge (ei);
	  trace.bb_entries.push_front (bb_entry (e->src));

	  res = record_set_of_reg (trace_reg, BB_END (e->src),
				   trace.bb_entries.front ());
	  if (res != set_found)
	    log_return_void ("set not found - aborting trace\n");
	}
    }

  // Dump-file only: summarize the recorded trace for debugging.
  if (dump_file != NULL)
    {
      log_msg ("\ncbranch trace summary:\n");
      for (std::list<bb_entry>::const_iterator i = trace.bb_entries.begin ();
	   i != trace.bb_entries.end (); ++i)
	{
	  log_msg ("\n[bb %d]\n", i->bb->index);
	  if (!i->setcc.empty ())
	    {
	      log_rtx (i->setcc.set_rtx);
	      log_msg ("\n");
	    }
	  if (!i->cstore.empty ())
	    {
	      log_rtx (i->cstore.set_rtx);
	      log_msg ("\n");
	    }

	  for (std::vector<set_of_reg>::const_reverse_iterator j =
		   i->cstore_reg_reg_copies.rbegin ();
	       j != i->cstore_reg_reg_copies.rend (); ++j)
	    {
	      log_rtx (j->set_rtx);
	      log_msg ("\n");
	    }
	}

      log_rtx (trace.setcc.set_rtx);
      log_msg ("\n");
      log_rtx (PATTERN (trace.cbranch_insn));
      log_msg ("\n");
    }

  // Check that we don't have any empty BBs.
  // Only the BB with the cbranch may be empty.
  for (std::list<bb_entry>::const_iterator i = trace.bb_entries.begin ();
       i != trace.bb_entries.end (); ++i)
    if (i->setcc.empty () && i->cstore.empty () && i->bb != trace.bb ())
      log_return_void ("\n[bb %d] is empty - aborting.\n", i->bb->index);

  // Determine the dominating cstore type
  // FIXME: Try to take the probabilities of the BBs into account somehow.
  int cstore_count = 0;
  int inv_cstore_count = 0;

  for (std::list<bb_entry>::const_iterator i = trace.bb_entries.begin ();
       i != trace.bb_entries.end (); ++i)
    {
      if (i->cstore_type == cstore_normal)
	cstore_count += 1;
      else if (i->cstore_type == cstore_inverted)
	inv_cstore_count += 1;
    }

  log_msg ("cstore count = %d inverted cstore count = %d\n",
	   cstore_count, inv_cstore_count);

  // This puts a priority on inverting cstores.
  // On a tie, the inverted kind wins (>=).
  cstore_type_t dominating_cstore = inv_cstore_count >= cstore_count
				    ? cstore_inverted
				    : cstore_normal;

  if (dominating_cstore == cstore_inverted)
    log_msg ("will try to eliminate inverted cstore\n");
  else if (dominating_cstore == cstore_normal)
    {
      log_msg ("will try to eliminate normal cstore\n");
      // Eliminating normal cstores requires flipping the branch later on,
      // so bail out early if that is not possible.
      if (!trace.can_invert_condition ())
	log_return_void ("branch condition can't be inverted - aborting\n");
    }
  else
    gcc_unreachable ();

  // Prefer combining the comparisons; fall back to cstore elimination.
  if (try_combine_comparisons (trace, cstore_count, inv_cstore_count,
			       dominating_cstore))
    return;

  try_eliminate_cstores (trace, cstore_count, inv_cstore_count,
			 dominating_cstore);
}
1434
1435 bool
1436 sh_treg_combine::gate (function *)
1437 {
1438 return optimize > 0;
1439 }
1440
// Pass entry point.  Determines the target's fixed condition code register,
// then walks all basic blocks of FUN looking for conditional-branch-only
// block ends and hands each one to try_optimize_cbranch.  Afterwards,
// if the pass runs post-split (m_split_insns), re-splits every insn that
// was created or modified.  Always returns 0 (no additional TODO flags).
unsigned int
sh_treg_combine::execute (function *fun)
{
  unsigned int ccr0 = INVALID_REGNUM;
  unsigned int ccr1 = INVALID_REGNUM;

  // Only the first fixed condition code reg is used; ccr1 is queried but
  // otherwise ignored here.
  if (targetm.fixed_condition_code_regs (&ccr0, &ccr1)
      && ccr0 != INVALID_REGNUM)
    {
      // Initially create a reg rtx with VOIDmode.
      // When the first conditional branch is discovered, the mode is changed
      // to the mode that is actually used by the target.
      m_ccreg = gen_rtx_REG (VOIDmode, ccr0);
    }

  if (m_ccreg == NULL_RTX)
    log_return (0, "no ccreg.\n\n");

  // The pass assumes a cstore writes 1 for a true condition.
  if (STORE_FLAG_VALUE != 1)
    log_return (0, "unsupported STORE_FLAG_VALUE %d", STORE_FLAG_VALUE);

  log_msg ("ccreg: ");
  log_rtx (m_ccreg);
  log_msg (" STORE_FLAG_VALUE = %d\n", STORE_FLAG_VALUE);

  // Look for basic blocks that end with a conditional branch and try to
  // optimize them.
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      rtx i = BB_END (bb);
      if (any_condjump_p (i) && onlyjump_p (i))
	try_optimize_cbranch (i);
    }

  log_msg ("\n\n");

  // If new insns are created and this pass is executed after all insns
  // have been split already, we must split the insns we've changed or added
  // ourselves here.
  // FIXME: Multi-word operations (which emit multiple insns) are not handled
  // properly here, since only one insn will end up in 'm_touched_insns'.
  // On SH this is not a problem though.
  if (m_split_insns)
    for (std::vector<rtx>::const_iterator i = m_touched_insns.begin ();
	 i != m_touched_insns.end (); ++i)
      {
	log_msg ("trying to split insn:\n");
	log_insn (*i);
	log_msg ("\n");
	try_split (PATTERN (*i), *i, 0);
      }

  m_touched_insns.clear ();
  log_return (0, "\n\n");
}
1497
1498 // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
1499 // This allows instantiating the pass somewhere else without having to pull
1500 // in a header file.
1501 opt_pass*
1502 make_pass_sh_treg_combine (gcc::context* ctx, bool split_insns,
1503 const char* name)
1504 {
1505 return new sh_treg_combine (ctx, split_insns, name);
1506 }