/* Loop unswitching for GNU compiler.
   Copyright (C) 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "cfglayout.h"
#include "params.h"
#include "output.h"
#include "expr.h"
/* This pass moves constant conditions out of loops, duplicating the loop
   in the process, i.e. this code:

   while (loop_cond)
     {
       A;
       if (cond)
         branch1;
       else
         branch2;
       B;
       if (cond)
         branch3;
       C;
     }
   where nothing inside the loop alters cond, is transformed
   into

   if (cond)
     {
       while (loop_cond)
         {
           A;
           branch1;
           B;
           branch3;
           C;
         }
     }
   else
     {
       while (loop_cond)
         {
           A;
           branch2;
           B;
           C;
         }
     }

   Duplicating the loop might lead to code growth exponential in the number
   of branches inside the loop, so we limit the number of unswitchings
   performed in a single loop to PARAM_MAX_UNSWITCH_LEVEL.  We only perform
   the transformation on innermost loops, as the benefit of doing it on
   loops containing subloops would not be very large compared to the
   complications of handling this case.  */
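
/* As a purely illustrative example (identifiers invented here, not taken
   from any testcase), a source loop such as

     for (i = 0; i < n; i++)
       {
         if (scale)        <-- "scale" is not modified inside the loop
           a[i] = b[i] * 2;
         else
           a[i] = b[i];
       }

   is turned into two specialized copies of the loop guarded by a single
   test of "scale" in front of them, so the invariant branch is no longer
   evaluated on every iteration.  */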

static struct loop *unswitch_loop (struct loops *, struct loop *,
                                   basic_block, rtx, rtx);
static void unswitch_single_loop (struct loops *, struct loop *, rtx, int);
static rtx may_unswitch_on (basic_block, struct loop *, rtx *);

/* Prepare a sequence comparing OP0 with OP1 using COMP and jumping to LABEL if
   true, with probability PROB.  If CINSN is not NULL, it is the insn to copy
   in order to create a jump.  */

rtx
compare_and_jump_seq (rtx op0, rtx op1, enum rtx_code comp, rtx label, int prob,
                      rtx cinsn)
{
  rtx seq, jump, cond;
  enum machine_mode mode;

  mode = GET_MODE (op0);
  if (mode == VOIDmode)
    mode = GET_MODE (op1);

  start_sequence ();
  if (GET_MODE_CLASS (mode) == MODE_CC)
    {
      /* A hack -- there seems to be no easy generic way to make a
         conditional jump from a ccmode comparison.  */
      if (!cinsn)
        abort ();
      cond = XEXP (SET_SRC (pc_set (cinsn)), 0);
      if (GET_CODE (cond) != comp
          || !rtx_equal_p (op0, XEXP (cond, 0))
          || !rtx_equal_p (op1, XEXP (cond, 1)))
        abort ();
      emit_jump_insn (copy_insn (PATTERN (cinsn)));
      jump = get_last_insn ();
      JUMP_LABEL (jump) = JUMP_LABEL (cinsn);
      LABEL_NUSES (JUMP_LABEL (jump))++;
      redirect_jump (jump, label, 0);
    }
  else
    {
      if (cinsn)
        abort ();

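      /* Force the operands into a form the comparison expander accepts,
         then emit an ordinary compare-and-jump to LABEL.  */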
      op0 = force_operand (op0, NULL_RTX);
      op1 = force_operand (op1, NULL_RTX);
      do_compare_rtx_and_jump (op0, op1, comp, 0,
                               mode, NULL_RTX, NULL_RTX, label);
      jump = get_last_insn ();
      JUMP_LABEL (jump) = label;
      LABEL_NUSES (label)++;
    }
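  /* Record the branch probability on the new jump as a REG_BR_PROB note.  */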
  REG_NOTES (jump) = gen_rtx_EXPR_LIST (REG_BR_PROB, GEN_INT (prob),
                                        REG_NOTES (jump));
  seq = get_insns ();
  end_sequence ();

  return seq;
}

/* Main entry point.  Perform loop unswitching on all suitable LOOPS.  */
void
unswitch_loops (struct loops *loops)
{
  int i, num;
  struct loop *loop;

  /* Go through inner loops (only original ones).  */
  num = loops->num;

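  /* Index 0 of the loop array is the dummy outermost "loop" covering the
     whole function, so the walk starts at 1.  */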
  for (i = 1; i < num; i++)
    {
      /* Removed loop?  */
      loop = loops->parray[i];
      if (!loop)
        continue;

      if (loop->inner)
        continue;

      unswitch_single_loop (loops, loop, NULL_RTX, 0);
#ifdef ENABLE_CHECKING
      verify_dominators (CDI_DOMINATORS);
      verify_loop_structure (loops);
#endif
    }

  iv_analysis_done ();
}

/* Checks whether we can unswitch LOOP on a condition at the end of BB -- one
   of its basic blocks (see the comments below for what this means).  If the
   condition compares a loop-invariant cc-mode register, return the jump in
   CINSN.  */

static rtx
may_unswitch_on (basic_block bb, struct loop *loop, rtx *cinsn)
{
  rtx test, at, insn, op[2], stest;
  struct rtx_iv iv;
  unsigned i;
  enum machine_mode mode;

  /* BB must end in a simple conditional jump.  */
  if (EDGE_COUNT (bb->succs) != 2)
    return NULL_RTX;
  if (!any_condjump_p (BB_END (bb)))
    return NULL_RTX;

  /* Both branches must lead to blocks inside the loop.  */
  if (!flow_bb_inside_loop_p (loop, EDGE_SUCC (bb, 0)->dest)
      || !flow_bb_inside_loop_p (loop, EDGE_SUCC (bb, 1)->dest))
    return NULL_RTX;

  /* It must be executed just once each iteration (because otherwise we
     are unable to update dominator/irreducible loop information correctly).  */
  if (!just_once_each_iteration_p (loop, bb))
    return NULL_RTX;

  /* Condition must be invariant.  */
  test = get_condition (BB_END (bb), &at, true, false);
  if (!test)
    return NULL_RTX;

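  /* Each operand of the condition must itself be invariant in the loop:
     either a constant, or an induction variable whose step is zero.  */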
  for (i = 0; i < 2; i++)
    {
      op[i] = XEXP (test, i);

      if (CONSTANT_P (op[i]))
        continue;

      insn = iv_get_reaching_def (at, op[i]);
      if (!iv_analyze (insn, op[i], &iv))
        return NULL_RTX;
      if (iv.step != const0_rtx
          || iv.first_special)
        return NULL_RTX;

      op[i] = get_iv_value (&iv, const0_rtx);
    }

  mode = GET_MODE (op[0]);
  if (mode == VOIDmode)
    mode = GET_MODE (op[1]);
  if (GET_MODE_CLASS (mode) == MODE_CC)
    {
      if (at != BB_END (bb))
        return NULL_RTX;

      *cinsn = BB_END (bb);
      if (!rtx_equal_p (op[0], XEXP (test, 0))
          || !rtx_equal_p (op[1], XEXP (test, 1)))
        return NULL_RTX;

      return test;
    }

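  /* Try to fold the comparison of the invariant operands to a constant;
     otherwise return the canonicalized condition to unswitch on.  */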
  stest = simplify_gen_relational (GET_CODE (test), SImode,
                                   mode, op[0], op[1]);
  if (stest == const0_rtx
      || stest == const_true_rtx)
    return stest;

  return canon_condition (gen_rtx_fmt_ee (GET_CODE (test), SImode,
                                          op[0], op[1]));
}

/* Reverses CONDition; returns NULL if we cannot.  */
rtx
reversed_condition (rtx cond)
{
  enum rtx_code reversed;
  reversed = reversed_comparison_code (cond, NULL);
  if (reversed == UNKNOWN)
    return NULL_RTX;
  else
    return gen_rtx_fmt_ee (reversed,
                           GET_MODE (cond), XEXP (cond, 0),
                           XEXP (cond, 1));
}

/* Unswitch single LOOP.  COND_CHECKED holds the list of conditions we already
   unswitched on and that are therefore known to be true in this LOOP.  NUM is
   the number of unswitchings done so far; do not allow it to grow too much,
   since it is too easy to construct an example on which the code would grow
   exponentially.  */
static void
unswitch_single_loop (struct loops *loops, struct loop *loop,
                      rtx cond_checked, int num)
{
  basic_block *bbs;
  struct loop *nloop;
  unsigned i;
  rtx cond, rcond = NULL_RTX, conds, rconds, acond, cinsn = NULL_RTX;
  int repeat;
  edge e;

  /* Do not unswitch too much.  */
  if (num > PARAM_VALUE (PARAM_MAX_UNSWITCH_LEVEL))
    {
      if (dump_file)
        fprintf (dump_file, ";; Not unswitching anymore, hit max level\n");
      return;
    }

  /* Only unswitch innermost loops.  */
  if (loop->inner)
    {
      if (dump_file)
        fprintf (dump_file, ";; Not unswitching, not innermost loop\n");
      return;
    }

  /* We must be able to duplicate loop body.  */
  if (!can_duplicate_loop_p (loop))
    {
      if (dump_file)
        fprintf (dump_file, ";; Not unswitching, can't duplicate loop\n");
      return;
    }

  /* The loop should not be too large, to limit code growth.  */
  if (num_loop_insns (loop) > PARAM_VALUE (PARAM_MAX_UNSWITCH_INSNS))
    {
      if (dump_file)
        fprintf (dump_file, ";; Not unswitching, loop too big\n");
      return;
    }

  /* Do not unswitch in cold areas.  */
  if (!maybe_hot_bb_p (loop->header))
    {
      if (dump_file)
        fprintf (dump_file, ";; Not unswitching, not hot area\n");
      return;
    }

  /* Nor if the loop usually does not roll.  */
  if (expected_loop_iterations (loop) < 1)
    {
      if (dump_file)
        fprintf (dump_file, ";; Not unswitching, loop iterations < 1\n");
      return;
    }

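  /* Look for a condition to unswitch on; if the condition folds to a
     constant, just remove the dead path and look again.  */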
  do
    {
      repeat = 0;

      /* Find a bb to unswitch on.  */
      bbs = get_loop_body (loop);
      iv_analysis_loop_init (loop);
      for (i = 0; i < loop->num_nodes; i++)
        if ((cond = may_unswitch_on (bbs[i], loop, &cinsn)))
          break;

      if (i == loop->num_nodes)
        {
          free (bbs);
          return;
        }

      if (cond != const0_rtx
          && cond != const_true_rtx)
        {
          rcond = reversed_condition (cond);
          if (rcond)
            rcond = canon_condition (rcond);

          /* Check whether the result can be predicted.  */
          for (acond = cond_checked; acond; acond = XEXP (acond, 1))
            simplify_using_condition (XEXP (acond, 0), &cond, NULL);
        }

      if (cond == const_true_rtx)
        {
          /* Remove false path.  */
          e = FALLTHRU_EDGE (bbs[i]);
          remove_path (loops, e);
          free (bbs);
          repeat = 1;
        }
      else if (cond == const0_rtx)
        {
          /* Remove true path.  */
          e = BRANCH_EDGE (bbs[i]);
          remove_path (loops, e);
          free (bbs);
          repeat = 1;
        }
    } while (repeat);

  /* We found the condition we can unswitch on.  */
  conds = alloc_EXPR_LIST (0, cond, cond_checked);
  if (rcond)
    rconds = alloc_EXPR_LIST (0, rcond, cond_checked);
  else
    rconds = cond_checked;

  if (dump_file)
    fprintf (dump_file, ";; Unswitching loop\n");

  /* Unswitch the loop on this condition.  */
  nloop = unswitch_loop (loops, loop, bbs[i], cond, cinsn);
  if (!nloop)
    abort ();

  /* Recurse into both resulting loops.  */
  unswitch_single_loop (loops, nloop, rconds, num + 1);
  unswitch_single_loop (loops, loop, conds, num + 1);

  free_EXPR_LIST_node (conds);
  if (rcond)
    free_EXPR_LIST_node (rconds);

  free (bbs);
}

/* Unswitch a LOOP with respect to the given basic block UNSWITCH_ON.  We only
   support unswitching of innermost loops.  UNSWITCH_ON must be executed in
   every iteration, i.e. it must dominate the LOOP latch.  COND is the
   condition determining which loop is entered.  Returns NULL if impossible,
   the new loop otherwise.  The new loop is entered if COND is true.  If CINSN
   is not NULL, it is the insn in which COND is compared.  */

static struct loop *
unswitch_loop (struct loops *loops, struct loop *loop, basic_block unswitch_on,
               rtx cond, rtx cinsn)
{
  edge entry, latch_edge, true_edge, false_edge, e;
  basic_block switch_bb, unswitch_on_alt;
  struct loop *nloop;
  sbitmap zero_bitmap;
  int irred_flag, prob;
  rtx seq;

  /* Some sanity checking.  */
  if (!flow_bb_inside_loop_p (loop, unswitch_on))
    abort ();
  if (EDGE_COUNT (unswitch_on->succs) != 2)
    abort ();
  if (!just_once_each_iteration_p (loop, unswitch_on))
    abort ();
  if (loop->inner)
    abort ();
  if (!flow_bb_inside_loop_p (loop, EDGE_SUCC (unswitch_on, 0)->dest))
    abort ();
  if (!flow_bb_inside_loop_p (loop, EDGE_SUCC (unswitch_on, 1)->dest))
    abort ();

  entry = loop_preheader_edge (loop);

  /* Make a copy.  */
  irred_flag = entry->flags & EDGE_IRREDUCIBLE_LOOP;
  entry->flags &= ~EDGE_IRREDUCIBLE_LOOP;
  zero_bitmap = sbitmap_alloc (2);
  sbitmap_zero (zero_bitmap);
  if (!duplicate_loop_to_header_edge (loop, entry, loops, 1,
                                      zero_bitmap, NULL, NULL, NULL, 0))
    {
      free (zero_bitmap);
      return NULL;
    }
  free (zero_bitmap);
  entry->flags |= irred_flag;

  /* Record the block with condition we unswitch on.  */
  unswitch_on_alt = unswitch_on->rbi->copy;
  true_edge = BRANCH_EDGE (unswitch_on_alt);
  false_edge = FALLTHRU_EDGE (unswitch_on);
  latch_edge = EDGE_SUCC (loop->latch->rbi->copy, 0);

  /* Create a block with the condition.  */
  prob = true_edge->probability;
  switch_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
  seq = compare_and_jump_seq (XEXP (cond, 0), XEXP (cond, 1), GET_CODE (cond),
                              block_label (true_edge->dest),
                              prob, cinsn);
  emit_insn_after (seq, BB_END (switch_bb));
  e = make_edge (switch_bb, true_edge->dest, 0);
  e->probability = prob;
  e->count = latch_edge->count * prob / REG_BR_PROB_BASE;
  e = make_edge (switch_bb, FALLTHRU_EDGE (unswitch_on)->dest, EDGE_FALLTHRU);
  e->probability = false_edge->probability;
  e->count = latch_edge->count * (false_edge->probability) / REG_BR_PROB_BASE;

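  /* Propagate the irreducible-region flag saved from the preheader edge to
     the new condition block and its outgoing edges.  */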
  if (irred_flag)
    {
      switch_bb->flags |= BB_IRREDUCIBLE_LOOP;
      EDGE_SUCC (switch_bb, 0)->flags |= EDGE_IRREDUCIBLE_LOOP;
      EDGE_SUCC (switch_bb, 1)->flags |= EDGE_IRREDUCIBLE_LOOP;
    }
  else
    {
      switch_bb->flags &= ~BB_IRREDUCIBLE_LOOP;
      EDGE_SUCC (switch_bb, 0)->flags &= ~EDGE_IRREDUCIBLE_LOOP;
      EDGE_SUCC (switch_bb, 1)->flags &= ~EDGE_IRREDUCIBLE_LOOP;
    }

  /* Loopify from the copy of LOOP body, constructing the new loop.  */
  nloop = loopify (loops, latch_edge,
                   EDGE_PRED (loop->header->rbi->copy, 0), switch_bb,
                   BRANCH_EDGE (switch_bb), FALLTHRU_EDGE (switch_bb), true);

  /* Remove branches that are now unreachable in new loops.  */
  remove_path (loops, true_edge);
  remove_path (loops, false_edge);

  /* One of the created loops may no longer be a subloop of the outer loop,
     so fix its placement in the loop data structure.  */
  fix_loop_placement (loop);
  fix_loop_placement (nloop);

  /* Preserve the simple loop preheaders.  */
  loop_split_edge_with (loop_preheader_edge (loop), NULL_RTX);
  loop_split_edge_with (loop_preheader_edge (nloop), NULL_RTX);

  return nloop;
}