/* Code sinking for trees
   Copyright (C) 2001-2016 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "cfganal.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "cfgloop.h"
#include "params.h"

/* TODO:
   1. Sinking stores using only scalar promotion (i.e. without moving
      the RHS):

   *q = p;
   p = p + 1;
   if (something)
     *q = <not p>;
   else
     y = *q;

   should become
   sinktemp = p;
   p = p + 1;
   if (something)
     *q = <not p>;
   else
     {
       *q = sinktemp;
       y = *q;
     }
   Store copy propagation will take care of the store elimination above.

   2. Sinking using Partial Dead Code Elimination.  */

static struct
{
  /* The number of statements sunk down the flowgraph by code sinking.  */
  int sunk;

} sink_stats;

/* Given a PHI and one of its arguments (DEF), find the edge for that
   argument and return its source block.  If the argument occurs more than
   once in the PHI node, we return NULL.  */

static basic_block
find_bb_for_arg (gphi *phi, tree def)
{
  size_t i;
  bool foundone = false;
  basic_block result = NULL;
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    if (PHI_ARG_DEF (phi, i) == def)
      {
        if (foundone)
          return NULL;
        foundone = true;
        result = gimple_phi_arg_edge (phi, i)->src;
      }
  return result;
}

/* Return true if all non-debug immediate uses of the variable defined by
   DEF_P occur in the same statement.  We could also handle the case where
   the first immediate use is in a phi node and all the other uses are in
   phis in the same basic block, but this requires some expensive checking
   later (you have to make sure no def/vdef in the statement occurs for
   multiple edges in the various phi nodes it's used in, so that you only
   have one place you can sink it to).  */

static bool
all_immediate_uses_same_place (def_operand_p def_p)
{
  tree var = DEF_FROM_PTR (def_p);
  imm_use_iterator imm_iter;
  use_operand_p use_p;

  gimple *firstuse = NULL;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
    {
      if (is_gimple_debug (USE_STMT (use_p)))
        continue;
      if (firstuse == NULL)
        firstuse = USE_STMT (use_p);
      else
        if (firstuse != USE_STMT (use_p))
          return false;
    }

  return true;
}

/* Find the nearest common dominator of all of the immediate uses of the
   variable defined by DEF_P.  Set *DEBUG_STMTS to true if any of those
   uses is a debug statement.  */

static basic_block
nearest_common_dominator_of_uses (def_operand_p def_p, bool *debug_stmts)
{
  tree var = DEF_FROM_PTR (def_p);
  bitmap blocks = BITMAP_ALLOC (NULL);
  basic_block commondom;
  unsigned int j;
  bitmap_iterator bi;
  imm_use_iterator imm_iter;
  use_operand_p use_p;

  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
    {
      gimple *usestmt = USE_STMT (use_p);
      basic_block useblock;

      if (gphi *phi = dyn_cast <gphi *> (usestmt))
        {
          int idx = PHI_ARG_INDEX_FROM_USE (use_p);

          useblock = gimple_phi_arg_edge (phi, idx)->src;
        }
      else if (is_gimple_debug (usestmt))
        {
          *debug_stmts = true;
          continue;
        }
      else
        {
          useblock = gimple_bb (usestmt);
        }

      /* Short circuit.  Nothing dominates the entry block.  */
      if (useblock == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        {
          BITMAP_FREE (blocks);
          return NULL;
        }
      bitmap_set_bit (blocks, useblock->index);
    }
  commondom = BASIC_BLOCK_FOR_FN (cfun, bitmap_first_set_bit (blocks));
  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, j, bi)
    commondom = nearest_common_dominator (CDI_DOMINATORS, commondom,
                                          BASIC_BLOCK_FOR_FN (cfun, j));
  BITMAP_FREE (blocks);
  return commondom;
}

/* Given EARLY_BB and LATE_BB, two blocks in a path through the dominator
   tree, return the best basic block between them (inclusive) to place
   statements.

   We want the most control dependent block in the shallowest loop nest.

   If the resulting block is in a shallower loop nest, then use it.  Else
   only use the resulting block if it has significantly lower execution
   frequency than EARLY_BB to avoid gratuitous statement movement.  We
   consider statements with VOPS more desirable to move.

   This pass would obviously benefit from PDO (profile-directed
   optimization) as it utilizes block frequencies.  It would also benefit
   from recomputing frequencies if profile data is not available since
   frequencies often get out of sync with reality.  */

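/* As an illustrative example: if EARLY_BB sits inside a loop and LATE_BB
   lies after the loop, every block on the walk from LATE_BB up to EARLY_BB
   is compared by loop depth and the block in the shallowest nest wins, so
   the statement ends up executing once instead of once per iteration.  */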
static basic_block
select_best_block (basic_block early_bb,
                   basic_block late_bb,
                   gimple *stmt)
{
  basic_block best_bb = late_bb;
  basic_block temp_bb = late_bb;
  int threshold;

  while (temp_bb != early_bb)
    {
      /* If we've moved into a lower loop nest, then that becomes
         our best block.  */
      if (bb_loop_depth (temp_bb) < bb_loop_depth (best_bb))
        best_bb = temp_bb;

      /* Walk up the dominator tree, hopefully we'll find a shallower
         loop nest.  */
      temp_bb = get_immediate_dominator (CDI_DOMINATORS, temp_bb);
    }

  /* If we found a shallower loop nest, then we always consider that
     a win.  This will always give us the most control dependent block
     within that loop nest.  */
  if (bb_loop_depth (best_bb) < bb_loop_depth (early_bb))
    return best_bb;

  /* Get the sinking threshold.  If the statement to be moved has memory
     operands, then increase the threshold by 7% as avoiding the execution
     of such statements is even more profitable, clamping at 100%.  */
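  /* For instance, assuming the default sink-frequency-threshold of 75, a
     statement with virtual operands uses a threshold of 82, i.e. BEST_BB at
     the same loop depth is only used when its frequency is below 82% of
     EARLY_BB's.  */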
  threshold = PARAM_VALUE (PARAM_SINK_FREQUENCY_THRESHOLD);
  if (gimple_vuse (stmt) || gimple_vdef (stmt))
    {
      threshold += 7;
      if (threshold > 100)
        threshold = 100;
    }

  /* If BEST_BB is at the same nesting level, then require it to have
     significantly lower execution frequency to avoid gratuitous movement.  */
  if (bb_loop_depth (best_bb) == bb_loop_depth (early_bb)
      && best_bb->frequency < (early_bb->frequency * threshold / 100.0))
    return best_bb;

  /* No better block found, so return EARLY_BB, which happens to be the
     statement's original block.  */
  return early_bb;
}

/* Given a statement (STMT) and the basic block it is currently in (FROMBB),
   determine the location to sink the statement to, if any.
   Return true if there is such a location; in that case, TOGSI points to
   the statement before which STMT should be moved.  */

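/* Broadly, three cases are handled below: stores (virtual definitions),
   whose one real use must be the VOP-merging PHI node; loads and
   definitions whose uses are spread over several statements, which are
   sunk towards the nearest common dominator of those uses; and definitions
   with a single (possibly PHI) use, which are sunk next to that use.  */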
static bool
statement_sink_location (gimple *stmt, basic_block frombb,
                         gimple_stmt_iterator *togsi)
{
  gimple *use;
  use_operand_p one_use = NULL_USE_OPERAND_P;
  basic_block sinkbb;
  use_operand_p use_p;
  def_operand_p def_p;
  ssa_op_iter iter;
  imm_use_iterator imm_iter;

  /* We can only sink assignments.  */
  if (!is_gimple_assign (stmt))
    return false;

  /* We can only sink stmts with a single definition.  */
  def_p = single_ssa_def_operand (stmt, SSA_OP_ALL_DEFS);
  if (def_p == NULL_DEF_OPERAND_P)
    return false;

  /* Return if there are no immediate uses of this stmt.  */
  if (has_zero_uses (DEF_FROM_PTR (def_p)))
    return false;

  /* There are a few classes of things we can't or don't move, some because
     we don't have code to handle them, some because it's not profitable and
     some because it's not legal.

     We can't sink things that may be global stores, at least not without
     calculating a lot more information, because we may cause it to no longer
     be seen by an external routine that needs it depending on where it gets
     moved to.

     We can't sink statements that end basic blocks without splitting the
     incoming edge for the sink location to place it there.

     We can't sink statements that have volatile operands.

     We don't want to sink dead code, so anything with 0 immediate uses is
     not sunk.

     Don't sink BLKmode assignments if the current function has any local
     explicit register variables, as BLKmode assignments may involve memcpy
     or memset calls or, on some targets, inline expansion thereof that
     sometimes need to use specific hard registers.  */
  if (stmt_ends_bb_p (stmt)
      || gimple_has_side_effects (stmt)
      || gimple_has_volatile_ops (stmt)
      || (cfun->has_local_explicit_reg_vars
          && TYPE_MODE (TREE_TYPE (gimple_assign_lhs (stmt))) == BLKmode))
    return false;

  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (DEF_FROM_PTR (def_p)))
    return false;

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_ALL_USES)
    {
      tree use = USE_FROM_PTR (use_p);
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use))
        return false;
    }

  use = NULL;

  /* If stmt is a store the one and only use needs to be the VOP
     merging PHI node.  */
  if (virtual_operand_p (DEF_FROM_PTR (def_p)))
    {
      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, DEF_FROM_PTR (def_p))
        {
          gimple *use_stmt = USE_STMT (use_p);

          /* A killing definition is not a use.  */
          if ((gimple_has_lhs (use_stmt)
               && operand_equal_p (gimple_assign_lhs (stmt),
                                   gimple_get_lhs (use_stmt), 0))
              || stmt_kills_ref_p (use_stmt, gimple_assign_lhs (stmt)))
            {
              /* If use_stmt is or might be a nop assignment then USE_STMT
                 acts as a use as well as definition.  */
              if (stmt != use_stmt
                  && ref_maybe_used_by_stmt_p (use_stmt,
                                               gimple_assign_lhs (stmt)))
                return false;
              continue;
            }

          if (gimple_code (use_stmt) != GIMPLE_PHI)
            return false;

          if (use
              && use != use_stmt)
            return false;

          use = use_stmt;
        }
      if (!use)
        return false;
    }
  /* If all the immediate uses are not in the same place, find the nearest
     common dominator of all the immediate uses.  For PHI nodes, we have to
     find the nearest common dominator of all of the predecessor blocks,
     since that is where insertion would have to take place.  */
  else if (gimple_vuse (stmt)
           || !all_immediate_uses_same_place (def_p))
    {
      bool debug_stmts = false;
      basic_block commondom = nearest_common_dominator_of_uses (def_p,
                                                                &debug_stmts);

      if (commondom == frombb)
        return false;

      /* If this is a load then do not sink past any stores.
         ??? This is overly simple but cheap.  We basically look
         for an existing load with the same VUSE in the path to one
         of the sink candidate blocks and we adjust commondom to the
         nearest one to commondom.  */
      if (gimple_vuse (stmt))
        {
          /* Do not sink loads from hard registers.  */
          if (gimple_assign_single_p (stmt)
              && TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
              && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt)))
            return false;

          imm_use_iterator imm_iter;
          use_operand_p use_p;
          basic_block found = NULL;
          FOR_EACH_IMM_USE_FAST (use_p, imm_iter, gimple_vuse (stmt))
            {
              gimple *use_stmt = USE_STMT (use_p);
              basic_block bb = gimple_bb (use_stmt);
              /* For PHI nodes the block we know something about
                 is the incoming block with the use.  */
              if (gimple_code (use_stmt) == GIMPLE_PHI)
                bb = EDGE_PRED (bb, PHI_ARG_INDEX_FROM_USE (use_p))->src;
              /* Any dominator of commondom would be ok with
                 adjusting commondom to that block.  */
              bb = nearest_common_dominator (CDI_DOMINATORS, bb, commondom);
              if (!found)
                found = bb;
              else if (dominated_by_p (CDI_DOMINATORS, bb, found))
                found = bb;
              /* If we can't improve, stop.  */
              if (found == commondom)
                break;
            }
          commondom = found;
          if (commondom == frombb)
            return false;
        }

      /* Our common dominator has to be dominated by frombb in order to be a
         trivially safe place to put this statement, since it has multiple
         uses.  */
      if (!dominated_by_p (CDI_DOMINATORS, commondom, frombb))
        return false;

      commondom = select_best_block (frombb, commondom, stmt);

      if (commondom == frombb)
        return false;

      *togsi = gsi_after_labels (commondom);

      return true;
    }
  else
    {
      FOR_EACH_IMM_USE_FAST (one_use, imm_iter, DEF_FROM_PTR (def_p))
        {
          if (is_gimple_debug (USE_STMT (one_use)))
            continue;
          break;
        }
      use = USE_STMT (one_use);

      if (gimple_code (use) != GIMPLE_PHI)
        {
          sinkbb = gimple_bb (use);
          sinkbb = select_best_block (frombb, gimple_bb (use), stmt);

          if (sinkbb == frombb)
            return false;

          *togsi = gsi_for_stmt (use);

          return true;
        }
    }

  sinkbb = find_bb_for_arg (as_a <gphi *> (use), DEF_FROM_PTR (def_p));

  /* This can happen if there are multiple uses in a PHI.  */
  if (!sinkbb)
    return false;

  sinkbb = select_best_block (frombb, sinkbb, stmt);
  if (!sinkbb || sinkbb == frombb)
    return false;

  /* If the latch block is empty, don't make it non-empty by sinking
     something into it.  */
  if (sinkbb == frombb->loop_father->latch
      && empty_block_p (sinkbb))
    return false;

  *togsi = gsi_after_labels (sinkbb);

  return true;
}

/* Perform code sinking on BB.  */

static void
sink_code_in_bb (basic_block bb)
{
  basic_block son;
  gimple_stmt_iterator gsi;
  edge_iterator ei;
  edge e;
  bool last = true;

  /* If this block doesn't dominate anything, there can't be any place to
     sink the statements to.  */
  if (first_dom_son (CDI_DOMINATORS, bb) == NULL)
    goto earlyout;

  /* We can't move things across abnormal edges, so don't try.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->flags & EDGE_ABNORMAL)
      goto earlyout;

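  /* Walk the statements from the last to the first; this way a statement is
     only considered after any user it has later in the same block has
     already had a chance to be sunk, which can in turn make this statement
     sinkable.  */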
  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
    {
      gimple *stmt = gsi_stmt (gsi);
      gimple_stmt_iterator togsi;

      if (!statement_sink_location (stmt, bb, &togsi))
        {
          if (!gsi_end_p (gsi))
            gsi_prev (&gsi);
          last = false;
          continue;
        }
      if (dump_file)
        {
          fprintf (dump_file, "Sinking ");
          print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS);
          fprintf (dump_file, " from bb %d to bb %d\n",
                   bb->index, (gsi_bb (togsi))->index);
        }

      /* Update virtual operands of statements in the path we
         do not sink to.  */
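      /* (Such statements used the VDEF of STMT; once STMT has been moved
         off their path they must refer to STMT's own VUSE instead.)  */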
      if (gimple_vdef (stmt))
        {
          imm_use_iterator iter;
          use_operand_p use_p;
          gimple *vuse_stmt;

          FOR_EACH_IMM_USE_STMT (vuse_stmt, iter, gimple_vdef (stmt))
            if (gimple_code (vuse_stmt) != GIMPLE_PHI)
              FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                SET_USE (use_p, gimple_vuse (stmt));
        }

      /* If this is the end of the basic block, we need to insert at the end
         of the basic block.  */
      if (gsi_end_p (togsi))
        gsi_move_to_bb_end (&gsi, gsi_bb (togsi));
      else
        gsi_move_before (&gsi, &togsi);

      sink_stats.sunk++;

      /* If we've just removed the last statement of the BB, the
         gsi_end_p() test below would fail, but gsi_prev() would have
         succeeded, and we want it to succeed.  So we keep track of
         whether we're at the last statement and pick up the new last
         statement.  */
      if (last)
        {
          gsi = gsi_last_bb (bb);
          continue;
        }

      last = false;
      if (!gsi_end_p (gsi))
        gsi_prev (&gsi);

    }
 earlyout:
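  /* Recurse into the post-dominator children of BB.  Since the walk starts
     at the exit block (see pass_sink_code::execute), a block is processed
     only after the blocks that post-dominate it, so statements feeding code
     that has already been sunk can themselves be sunk further when their
     block is reached.  */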
  for (son = first_dom_son (CDI_POST_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_POST_DOMINATORS, son))
    {
      sink_code_in_bb (son);
    }
}

/* Perform code sinking.
   This moves code down the flowgraph when we know it would be
   profitable to do so, or when it wouldn't increase the number of
   executions of the statement.

   I.e. given

   a_1 = b + c;
   if (<something>)
     {
     }
   else
     {
       foo (&b, &c);
       a_5 = b + c;
     }
   a_6 = PHI (a_5, a_1);
   USE a_6.

   we'll transform this into:

   if (<something>)
     {
       a_1 = b + c;
     }
   else
     {
       foo (&b, &c);
       a_5 = b + c;
     }
   a_6 = PHI (a_5, a_1);
   USE a_6.

   Note that this reduces the number of computations of a = b + c to 1
   when we take the else edge, instead of 2.  */
namespace {

const pass_data pass_data_sink_code =
{
  GIMPLE_PASS, /* type */
  "sink", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SINK, /* tv_id */
  /* PROP_no_crit_edges is ensured by running split_critical_edges in
     pass_sink_code::execute ().  */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_sink_code : public gimple_opt_pass
{
public:
  pass_sink_code (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_sink_code, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_tree_sink != 0; }
  virtual unsigned int execute (function *);

}; // class pass_sink_code

unsigned int
pass_sink_code::execute (function *fun)
{
  loop_optimizer_init (LOOPS_NORMAL);
  split_critical_edges ();
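  /* sink_code_in_bb walks the post-dominator tree starting at the exit
     block, so compute post-dominators after giving infinite loops a fake
     path to the exit; the fake edges are removed again once sinking is
     done.  */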
  connect_infinite_loops_to_exit ();
  memset (&sink_stats, 0, sizeof (sink_stats));
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);
  sink_code_in_bb (EXIT_BLOCK_PTR_FOR_FN (fun));
  statistics_counter_event (fun, "Sunk statements", sink_stats.sunk);
  free_dominance_info (CDI_POST_DOMINATORS);
  remove_fake_exit_edges ();
  loop_optimizer_finalize ();

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_sink_code (gcc::context *ctxt)
{
  return new pass_sink_code (ctxt);
}