]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-split.c
gimple-walk.h: New File.
[thirdparty/gcc.git] / gcc / ipa-split.c
CommitLineData
3e485f62 1/* Function splitting pass
d1e082c2 2 Copyright (C) 2010-2013 Free Software Foundation, Inc.
3e485f62
JH
3 Contributed by Jan Hubicka <jh@suse.cz>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21/* The purpose of this pass is to split function bodies to improve
22 inlining. I.e. for function of the form:
23
24 func (...)
25 {
26 if (cheap_test)
27 something_small
28 else
29 something_big
30 }
31
32 Produce:
33
34 func.part (...)
35 {
36 something_big
37 }
38
39 func (...)
40 {
41 if (cheap_test)
42 something_small
43 else
44 func.part (...);
45 }
46
47 When func becomes inlinable and when cheap_test is often true, inlining func,
ed7656f6 48 but not fund.part leads to performance improvement similar as inlining
3e485f62
JH
49 original func while the code size growth is smaller.
50
51 The pass is organized in three stages:
52 1) Collect local info about basic block into BB_INFO structure and
53 compute function body estimated size and time.
54 2) Via DFS walk find all possible basic blocks where we can split
55 and chose best one.
56 3) If split point is found, split at the specified BB by creating a clone
57 and updating function to call it.
58
59 The decisions what functions to split are in execute_split_functions
60 and consider_split.
61
62 There are several possible future improvements for this pass including:
63
64 1) Splitting to break up large functions
65 2) Splitting to reduce stack frame usage
66 3) Allow split part of function to use values computed in the header part.
67 The values needs to be passed to split function, perhaps via same
68 interface as for nested functions or as argument.
69 4) Support for simple rematerialization. I.e. when split part use
70 value computed in header from function parameter in very cheap way, we
71 can just recompute it.
72 5) Support splitting of nested functions.
73 6) Support non-SSA arguments.
74 7) There is nothing preventing us from producing multiple parts of single function
75 when needed or splitting also the parts. */
76
77#include "config.h"
78#include "system.h"
79#include "coretypes.h"
80#include "tree.h"
45b0be94 81#include "gimplify.h"
5be5c238
AM
82#include "gimple-iterator.h"
83#include "gimple-walk.h"
3e485f62 84#include "target.h"
3e485f62 85#include "ipa-prop.h"
442b4905
AM
86#include "gimple-ssa.h"
87#include "tree-cfg.h"
88#include "tree-phinodes.h"
89#include "ssa-iterators.h"
90#include "tree-ssanames.h"
91#include "tree-into-ssa.h"
92#include "tree-dfa.h"
3e485f62
JH
93#include "tree-pass.h"
94#include "flags.h"
3e485f62
JH
95#include "diagnostic.h"
96#include "tree-dump.h"
97#include "tree-inline.h"
3e485f62
JH
98#include "params.h"
99#include "gimple-pretty-print.h"
e7f23018 100#include "ipa-inline.h"
a9e0d843 101#include "cfgloop.h"
3e485f62
JH
102
/* Per basic block info.  */

typedef struct
{
  /* Estimated size of the block, in instruction-count units.  */
  unsigned int size;
  /* Estimated execution time of the block (frequency-weighted;
     units follow the pass's size/time estimation — see the pass driver).  */
  unsigned int time;
} bb_info;

/* Per-BB info indexed by basic block index; filled during stage 1 of the
   pass (local info collection).  */
static vec<bb_info> bb_info_vec;
3e485f62
JH
112
/* Description of split point.  */

struct split_point
{
  /* Size of the partitions.  HEADER_* describe the part that stays in the
     original function; SPLIT_* describe the part moved to the clone.  */
  unsigned int header_time, header_size, split_time, split_size;

  /* SSA names that need to be passed into split function
     (indexed by SSA_NAME_VERSION).  */
  bitmap ssa_names_to_pass;

  /* Basic block where we split (that will become entry point of new
     function).  */
  basic_block entry_bb;

  /* Basic blocks we are splitting away (indexed by BB index).  */
  bitmap split_bbs;

  /* True when return value is computed on split part and thus it needs
     to be returned.  */
  bool split_part_set_retval;
};
133
/* Best split point found so far; split_bbs is NULL when none has been
   accepted yet (see consider_split).  */

struct split_point best_split_point;

/* Set of basic blocks that are not allowed to dominate a split point.
   Populated by check_forbidden_calls.  */

static bitmap forbidden_dominators;

/* Forward declaration; defined after find_return_bb below.  */
static tree find_retval (basic_block return_bb);
143
1802378d 144/* Callback for walk_stmt_load_store_addr_ops. If T is non-SSA automatic
3e485f62
JH
145 variable, check it if it is present in bitmap passed via DATA. */
146
147static bool
1802378d 148test_nonssa_use (gimple stmt ATTRIBUTE_UNUSED, tree t, void *data)
3e485f62
JH
149{
150 t = get_base_address (t);
151
1802378d
EB
152 if (!t || is_gimple_reg (t))
153 return false;
154
155 if (TREE_CODE (t) == PARM_DECL
156 || (TREE_CODE (t) == VAR_DECL
3e485f62 157 && auto_var_in_fn_p (t, current_function_decl))
1802378d
EB
158 || TREE_CODE (t) == RESULT_DECL
159 || TREE_CODE (t) == LABEL_DECL)
3e485f62 160 return bitmap_bit_p ((bitmap)data, DECL_UID (t));
241a2b9e 161
1802378d
EB
162 /* For DECL_BY_REFERENCE, the return value is actually a pointer. We want
163 to pretend that the value pointed to is actual result decl. */
164 if ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
241a2b9e 165 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
70b5e7dc 166 && SSA_NAME_VAR (TREE_OPERAND (t, 0))
241a2b9e
JH
167 && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (t, 0))) == RESULT_DECL
168 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
1802378d
EB
169 return
170 bitmap_bit_p ((bitmap)data,
171 DECL_UID (DECL_RESULT (current_function_decl)));
172
3e485f62
JH
173 return false;
174}
175
176/* Dump split point CURRENT. */
177
178static void
179dump_split_point (FILE * file, struct split_point *current)
180{
181 fprintf (file,
cfef45c8
RG
182 "Split point at BB %i\n"
183 " header time: %i header size: %i\n"
184 " split time: %i split size: %i\n bbs: ",
3e485f62
JH
185 current->entry_bb->index, current->header_time,
186 current->header_size, current->split_time, current->split_size);
187 dump_bitmap (file, current->split_bbs);
188 fprintf (file, " SSA names to pass: ");
189 dump_bitmap (file, current->ssa_names_to_pass);
190}
191
1802378d
EB
/* Look for all BBs in header that might lead to the split part and verify
   that they are not defining any non-SSA var used by the split part.
   Parameters are the same as for consider_split.
   Returns true when the split is safe with respect to non-SSA uses.  */

static bool
verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
		     basic_block return_bb)
{
  bitmap seen = BITMAP_ALLOC (NULL);
  vec<basic_block> worklist = vNULL;
  edge e;
  edge_iterator ei;
  bool ok = true;

  /* Seed the worklist with header predecessors of the split entry
     (i.e. blocks outside the split region).  */
  FOR_EACH_EDGE (e, ei, current->entry_bb->preds)
    if (e->src != ENTRY_BLOCK_PTR
	&& !bitmap_bit_p (current->split_bbs, e->src->index))
      {
        worklist.safe_push (e->src);
	bitmap_set_bit (seen, e->src->index);
      }

  /* Walk backwards through the header checking every statement and PHI
     for uses of variables recorded in NON_SSA_VARS.  */
  while (!worklist.is_empty ())
    {
      gimple_stmt_iterator bsi;
      basic_block bb = worklist.pop ();

      /* bitmap_set_bit returns true only when the bit was previously
	 clear, so each block is queued at most once.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	if (e->src != ENTRY_BLOCK_PTR
	    && bitmap_set_bit (seen, e->src->index))
	  {
	    gcc_checking_assert (!bitmap_bit_p (current->split_bbs,
						e->src->index));
	    worklist.safe_push (e->src);
	  }
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	{
	  gimple stmt = gsi_stmt (bsi);
	  if (is_gimple_debug (stmt))
	    continue;
	  if (walk_stmt_load_store_addr_ops
	      (stmt, non_ssa_vars, test_nonssa_use, test_nonssa_use,
	       test_nonssa_use))
	    {
	      ok = false;
	      goto done;
	    }
	  /* Labels are not visited by the operand walker; check them
	     explicitly.  */
	  if (gimple_code (stmt) == GIMPLE_LABEL
	      && test_nonssa_use (stmt, gimple_label_label (stmt),
				  non_ssa_vars))
	    {
	      ok = false;
	      goto done;
	    }
	}
      for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	{
	  if (walk_stmt_load_store_addr_ops
	      (gsi_stmt (bsi), non_ssa_vars, test_nonssa_use, test_nonssa_use,
	       test_nonssa_use))
	    {
	      ok = false;
	      goto done;
	    }
	}
      /* The return BB is duplicated into both parts, so PHI arguments
	 flowing into it from this header block must be checked too.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (e->dest != return_bb)
	    continue;
	  for (bsi = gsi_start_phis (return_bb); !gsi_end_p (bsi);
	       gsi_next (&bsi))
	    {
	      gimple stmt = gsi_stmt (bsi);
	      tree op = gimple_phi_arg_def (stmt, e->dest_idx);

	      if (virtual_operand_p (gimple_phi_result (stmt)))
		continue;
	      if (TREE_CODE (op) != SSA_NAME
		  && test_nonssa_use (stmt, op, non_ssa_vars))
		{
		  ok = false;
		  goto done;
		}
	    }
	}
    }
done:
  BITMAP_FREE (seen);
  worklist.release ();
  return ok;
}
283
b2e25729
BS
/* If STMT is a call, check the callee against a list of forbidden
   predicate functions.  If a match is found, look for uses of the
   call result in condition statements that compare against zero.
   For each such use, find the block targeted by the condition
   statement for the nonzero result, and set the bit for this block
   in the forbidden dominators bitmap.  The purpose of this is to avoid
   selecting a split point where we are likely to lose the chance
   to optimize away an unused function call.  */

static void
check_forbidden_calls (gimple stmt)
{
  imm_use_iterator use_iter;
  use_operand_p use_p;
  tree lhs;

  /* At the moment, __builtin_constant_p is the only forbidden
     predicate function call (see PR49642).  */
  if (!gimple_call_builtin_p (stmt, BUILT_IN_CONSTANT_P))
    return;

  lhs = gimple_call_lhs (stmt);

  /* Without an SSA result there are no immediate uses to follow.  */
  if (!lhs || TREE_CODE (lhs) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_FAST (use_p, use_iter, lhs)
    {
      tree op1;
      basic_block use_bb, forbidden_bb;
      enum tree_code code;
      edge true_edge, false_edge;
      gimple use_stmt = USE_STMT (use_p);

      if (gimple_code (use_stmt) != GIMPLE_COND)
	continue;

      /* Assuming canonical form for GIMPLE_COND here, with constant
	 in second position.  */
      op1 = gimple_cond_rhs (use_stmt);
      code = gimple_cond_code (use_stmt);
      use_bb = gimple_bb (use_stmt);

      extract_true_false_edges_from_block (use_bb, &true_edge, &false_edge);

      /* We're only interested in comparisons that distinguish
	 unambiguously from zero.  */
      if (!integer_zerop (op1) || code == LE_EXPR || code == GE_EXPR)
	continue;

      /* The "nonzero result" destination: for EQ the false edge, else
	 (NE/LT/GT) the true edge.  */
      if (code == EQ_EXPR)
	forbidden_bb = false_edge->dest;
      else
	forbidden_bb = true_edge->dest;

      bitmap_set_bit (forbidden_dominators, forbidden_bb->index);
    }
}
342
343/* If BB is dominated by any block in the forbidden dominators set,
344 return TRUE; else FALSE. */
345
346static bool
347dominated_by_forbidden (basic_block bb)
348{
349 unsigned dom_bb;
350 bitmap_iterator bi;
351
352 EXECUTE_IF_SET_IN_BITMAP (forbidden_dominators, 1, dom_bb, bi)
353 {
354 if (dominated_by_p (CDI_DOMINATORS, bb, BASIC_BLOCK (dom_bb)))
355 return true;
356 }
357
358 return false;
359}
360
3e485f62
JH
/* We found an split_point CURRENT.  NON_SSA_VARS is bitmap of all non ssa
   variables used and RETURN_BB is return basic block.
   See if we can split function here.  On success, CURRENT may replace
   best_split_point; on any rejection the function simply returns.  */

static void
consider_split (struct split_point *current, bitmap non_ssa_vars,
		basic_block return_bb)
{
  tree parm;
  unsigned int num_args = 0;
  unsigned int call_overhead;
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator bsi;
  unsigned int i;
  int incoming_freq = 0;
  tree retval;
  bool back_edge = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_split_point (dump_file, current);

  /* Sum frequencies of edges entering the split region from the header
     and note whether any incoming edge is a DFS back edge (loop).  */
  FOR_EACH_EDGE (e, ei, current->entry_bb->preds)
    {
      if (e->flags & EDGE_DFS_BACK)
	back_edge = true;
      if (!bitmap_bit_p (current->split_bbs, e->src->index))
	incoming_freq += EDGE_FREQUENCY (e);
    }

  /* Do not split when we would end up calling function anyway.  */
  if (incoming_freq
      >= (ENTRY_BLOCK_PTR->frequency
	  * PARAM_VALUE (PARAM_PARTIAL_INLINING_ENTRY_PROBABILITY) / 100))
    {
      /* When profile is guessed, we can not expect it to give us
	 realistic estimate on likelyness of function taking the
	 complex path.  As a special case, when tail of the function is
	 a loop, enable splitting since inlining code skipping the loop
	 is likely noticeable win.  */
      if (back_edge
	  && profile_status != PROFILE_READ
	  && incoming_freq < ENTRY_BLOCK_PTR->frequency)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "  Split before loop, accepting despite low frequencies %i %i.\n",
		     incoming_freq,
		     ENTRY_BLOCK_PTR->frequency);
	}
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "  Refused: incoming frequency is too large.\n");
	  return;
	}
    }

  /* Splitting with an empty header would gain nothing.  */
  if (!current->header_size)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "  Refused: header empty\n");
      return;
    }

  /* Verify that PHI args on entry are either virtual or all their operands
     incoming from header are the same.  */
  for (bsi = gsi_start_phis (current->entry_bb); !gsi_end_p (bsi); gsi_next (&bsi))
    {
      gimple stmt = gsi_stmt (bsi);
      tree val = NULL;

      if (virtual_operand_p (gimple_phi_result (stmt)))
	continue;
      for (i = 0; i < gimple_phi_num_args (stmt); i++)
	{
	  edge e = gimple_phi_arg_edge (stmt, i);
	  if (!bitmap_bit_p (current->split_bbs, e->src->index))
	    {
	      tree edge_val = gimple_phi_arg_def (stmt, i);
	      if (val && edge_val != val)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file,
			     "  Refused: entry BB has PHI with multiple variants\n");
		  return;
		}
	      val = edge_val;
	    }
	}
    }


  /* See what argument we will pass to the split function and compute
     call overhead.  */
  call_overhead = eni_size_weights.call_cost;
  for (parm = DECL_ARGUMENTS (current_function_decl); parm;
       parm = DECL_CHAIN (parm))
    {
      if (!is_gimple_reg (parm))
	{
	  /* Non-register (addressable/aggregate) parameters can not be
	     passed to the clone; reject if the split part uses one.  */
	  if (bitmap_bit_p (non_ssa_vars, DECL_UID (parm)))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file,
			 "  Refused: need to pass non-ssa param values\n");
	      return;
	    }
	}
      else
	{
	  /* Count register parameters whose default-def SSA name must be
	     forwarded into the split part, and add their move cost.  */
	  tree ddef = ssa_default_def (cfun, parm);
	  if (ddef
	      && bitmap_bit_p (current->ssa_names_to_pass,
			       SSA_NAME_VERSION (ddef)))
	    {
	      if (!VOID_TYPE_P (TREE_TYPE (parm)))
		call_overhead += estimate_move_cost (TREE_TYPE (parm));
	      num_args++;
	    }
	}
    }
  if (!VOID_TYPE_P (TREE_TYPE (current_function_decl)))
    call_overhead += estimate_move_cost (TREE_TYPE (current_function_decl));

  if (current->split_size <= call_overhead)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "  Refused: split size is smaller than call overhead\n");
      return;
    }
  /* The remaining header must still be small enough to be an inline
     candidate, otherwise splitting defeats its own purpose.  */
  if (current->header_size + call_overhead
      >= (unsigned int)(DECL_DECLARED_INLINE_P (current_function_decl)
			? MAX_INLINE_INSNS_SINGLE
			: MAX_INLINE_INSNS_AUTO))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "  Refused: header size is too large for inline candidate\n");
      return;
    }

  /* FIXME: we currently can pass only SSA function parameters to the split
     arguments.  Once parm_adjustment infrastructure is supported by cloning,
     we can pass more than that.  */
  if (num_args != bitmap_count_bits (current->ssa_names_to_pass))
    {

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "  Refused: need to pass non-param values\n");
      return;
    }

  /* When there are non-ssa vars used in the split region, see if they
     are used in the header region.  If so, reject the split.
     FIXME: we can use nested function support to access both.  */
  if (!bitmap_empty_p (non_ssa_vars)
      && !verify_non_ssa_vars (current, non_ssa_vars, return_bb))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "  Refused: split part has non-ssa uses\n");
      return;
    }

  /* If the split point is dominated by a forbidden block, reject
     the split.  */
  if (!bitmap_empty_p (forbidden_dominators)
      && dominated_by_forbidden (current->entry_bb))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "  Refused: split point dominated by forbidden block\n");
      return;
    }

  /* See if retval used by return bb is computed by header or split part.
     When it is computed by split part, we need to produce return statement
     in the split part and add code to header to pass it around.

     This is bit tricky to test:
       1) When there is no return_bb or no return value, we always pass
	  value around.
       2) Invariants are always computed by caller.
       3) For SSA we need to look if defining statement is in header or split part
       4) For non-SSA we need to look where the var is computed. */
  retval = find_retval (return_bb);
  if (!retval)
    current->split_part_set_retval = true;
  else if (is_gimple_min_invariant (retval))
    current->split_part_set_retval = false;
  /* Special case is value returned by reference we record as if it was non-ssa
     set to result_decl.  */
  else if (TREE_CODE (retval) == SSA_NAME
	   && SSA_NAME_VAR (retval)
	   && TREE_CODE (SSA_NAME_VAR (retval)) == RESULT_DECL
	   && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
    current->split_part_set_retval
       = bitmap_bit_p (non_ssa_vars, DECL_UID (SSA_NAME_VAR (retval)));
  else if (TREE_CODE (retval) == SSA_NAME)
    current->split_part_set_retval
      = (!SSA_NAME_IS_DEFAULT_DEF (retval)
	 && (bitmap_bit_p (current->split_bbs,
			   gimple_bb (SSA_NAME_DEF_STMT (retval))->index)
	     || gimple_bb (SSA_NAME_DEF_STMT (retval)) == return_bb));
  else if (TREE_CODE (retval) == PARM_DECL)
    current->split_part_set_retval = false;
  else if (TREE_CODE (retval) == VAR_DECL
	   || TREE_CODE (retval) == RESULT_DECL)
    current->split_part_set_retval
      = bitmap_bit_p (non_ssa_vars, DECL_UID (retval));
  else
    current->split_part_set_retval = true;

  /* split_function fixes up at most one PHI non-virtual PHI node in return_bb,
     for the return value.  If there are other PHIs, give up.  */
  if (return_bb != EXIT_BLOCK_PTR)
    {
      gimple_stmt_iterator psi;

      for (psi = gsi_start_phis (return_bb); !gsi_end_p (psi); gsi_next (&psi))
	if (!virtual_operand_p (gimple_phi_result (gsi_stmt (psi)))
	    && !(retval
		 && current->split_part_set_retval
		 && TREE_CODE (retval) == SSA_NAME
		 && !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))
		 && SSA_NAME_DEF_STMT (retval) == gsi_stmt (psi)))
	  {
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file,
		       "  Refused: return bb has extra PHIs\n");
	    return;
	  }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "  Accepted!\n");

  /* At the moment chose split point with lowest frequency and that leaves
     out smallest size of header.
     In future we might re-consider this heuristics.  */
  if (!best_split_point.split_bbs
      || best_split_point.entry_bb->frequency > current->entry_bb->frequency
      || (best_split_point.entry_bb->frequency == current->entry_bb->frequency
	  && best_split_point.split_size < current->split_size))

    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "  New best split point!\n");
      if (best_split_point.ssa_names_to_pass)
	{
	  BITMAP_FREE (best_split_point.ssa_names_to_pass);
	  BITMAP_FREE (best_split_point.split_bbs);
	}
      /* Deep-copy the bitmaps: CURRENT's bitmaps are owned by the caller
	 and may be freed or reused after we return.  */
      best_split_point = *current;
      best_split_point.ssa_names_to_pass = BITMAP_ALLOC (NULL);
      bitmap_copy (best_split_point.ssa_names_to_pass,
		   current->ssa_names_to_pass);
      best_split_point.split_bbs = BITMAP_ALLOC (NULL);
      bitmap_copy (best_split_point.split_bbs, current->split_bbs);
    }
}
626
2094f1fc
JH
/* Return basic block containing RETURN statement.  We allow basic blocks
   of the form:
     <retval> = tmp_var;
     return <retval>
   but return_bb can not be more complex than this.
   If nothing is found, return EXIT_BLOCK_PTR.

   When there are multiple RETURN statement, chose one with return value,
   since that one is more likely shared by multiple code paths.

   Return BB is special, because for function splitting it is the only
   basic block that is duplicated in between header and split part of the
   function.

   TODO: We might support multiple return blocks.  */

static basic_block
find_return_bb (void)
{
  edge e;
  basic_block return_bb = EXIT_BLOCK_PTR;
  gimple_stmt_iterator bsi;
  bool found_return = false;
  tree retval = NULL_TREE;

  /* Only a single predecessor of EXIT can qualify as the return BB.  */
  if (!single_pred_p (EXIT_BLOCK_PTR))
    return return_bb;

  /* Walk the candidate block backwards; accept only labels, debug stmts,
     clobbers, the RETURN itself, and the single assignment feeding it.
     Any other statement disqualifies the block.  */
  e = single_pred_edge (EXIT_BLOCK_PTR);
  for (bsi = gsi_last_bb (e->src); !gsi_end_p (bsi); gsi_prev (&bsi))
    {
      gimple stmt = gsi_stmt (bsi);
      if (gimple_code (stmt) == GIMPLE_LABEL
	  || is_gimple_debug (stmt)
	  || gimple_clobber_p (stmt))
	;   /* Harmless; keep scanning.  */
      else if (gimple_code (stmt) == GIMPLE_ASSIGN
	       && found_return
	       && gimple_assign_single_p (stmt)
	       && (auto_var_in_fn_p (gimple_assign_rhs1 (stmt),
				     current_function_decl)
		   || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
	       && retval == gimple_assign_lhs (stmt))
	;   /* The "<retval> = tmp_var" copy seen after the RETURN in this
	       backward walk; allowed.  */
      else if (gimple_code (stmt) == GIMPLE_RETURN)
	{
	  found_return = true;
	  retval = gimple_return_retval (stmt);
	}
      else
	break;
    }
  /* The block qualifies only if we consumed it entirely (no early break)
     and actually saw a RETURN.  */
  if (gsi_end_p (bsi) && found_return)
    return_bb = e->src;

  return return_bb;
}
684
ed7656f6 685/* Given return basic block RETURN_BB, see where return value is really
2094f1fc
JH
686 stored. */
687static tree
688find_retval (basic_block return_bb)
689{
690 gimple_stmt_iterator bsi;
691 for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi); gsi_next (&bsi))
692 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_RETURN)
693 return gimple_return_retval (gsi_stmt (bsi));
a348dc7f
JJ
694 else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
695 && !gimple_clobber_p (gsi_stmt (bsi)))
2094f1fc
JH
696 return gimple_assign_rhs1 (gsi_stmt (bsi));
697 return NULL;
698}
699
1802378d
EB
/* Callback for walk_stmt_load_store_addr_ops.  If T is non-SSA automatic
   variable, mark it as used in bitmap passed via DATA.
   Return true when access to T prevents splitting the function.  */

static bool
mark_nonssa_use (gimple stmt ATTRIBUTE_UNUSED, tree t, void *data)
{
  t = get_base_address (t);

  /* SSA registers are handled separately via the SSA operand walk.  */
  if (!t || is_gimple_reg (t))
    return false;

  /* At present we can't pass non-SSA arguments to split function.
     FIXME: this can be relaxed by passing references to arguments.  */
  if (TREE_CODE (t) == PARM_DECL)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "Cannot split: use of non-ssa function parameter.\n");
      return true;
    }

  /* Record local automatics, the result decl and labels so later checks
     (test_nonssa_use) can see which non-SSA entities this region touches.  */
  if ((TREE_CODE (t) == VAR_DECL
       && auto_var_in_fn_p (t, current_function_decl))
      || TREE_CODE (t) == RESULT_DECL
      || TREE_CODE (t) == LABEL_DECL)
    bitmap_set_bit ((bitmap)data, DECL_UID (t));

  /* For DECL_BY_REFERENCE, the return value is actually a pointer.  We want
     to pretend that the value pointed to is actual result decl.  */
  if ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
      && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
      && SSA_NAME_VAR (TREE_OPERAND (t, 0))
      && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (t, 0))) == RESULT_DECL
      && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
    return
      bitmap_bit_p ((bitmap)data,
		    DECL_UID (DECL_RESULT (current_function_decl)));

  return false;
}
741
/* Compute local properties of basic block BB we collect when looking for
   split points.  We look for ssa defs and store them in SET_SSA_NAMES,
   for ssa uses and store them in USED_SSA_NAMES and for any non-SSA automatic
   vars stored in NON_SSA_VARS.

   When BB has edge to RETURN_BB, collect uses in RETURN_BB too.

   Return false when BB contains something that prevents it from being put into
   split function.  */

static bool
visit_bb (basic_block bb, basic_block return_bb,
	  bitmap set_ssa_names, bitmap used_ssa_names,
	  bitmap non_ssa_vars)
{
  gimple_stmt_iterator bsi;
  edge e;
  edge_iterator ei;
  bool can_split = true;

  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
    {
      gimple stmt = gsi_stmt (bsi);
      tree op;
      ssa_op_iter iter;
      tree decl;

      /* Debug statements and clobbers never constrain splitting.  */
      if (is_gimple_debug (stmt))
	continue;

      if (gimple_clobber_p (stmt))
	continue;

      /* FIXME: We can split regions containing EH.  We can not however
	 split RESX, EH_DISPATCH and EH_POINTER referring to same region
	 into different partitions.  This would require tracking of
	 EH regions and checking in consider_split_point if they
	 are not used elsewhere.  */
      if (gimple_code (stmt) == GIMPLE_RESX)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Cannot split: resx.\n");
	  can_split = false;
	}
      if (gimple_code (stmt) == GIMPLE_EH_DISPATCH)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Cannot split: eh dispatch.\n");
	  can_split = false;
	}

      /* Check builtins that prevent splitting.  */
      if (gimple_code (stmt) == GIMPLE_CALL
	  && (decl = gimple_call_fndecl (stmt)) != NULL_TREE
	  && DECL_BUILT_IN (decl)
	  && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (decl))
	  {
	  /* FIXME: once we will allow passing non-parm values to split part,
	     we need to be sure to handle correct builtin_stack_save and
	     builtin_stack_restore.  At the moment we are safe; there is no
	     way to store builtin_stack_save result in non-SSA variable
	     since all calls to those are compiler generated.  */
	  case BUILT_IN_APPLY:
	  case BUILT_IN_APPLY_ARGS:
	  case BUILT_IN_VA_START:
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file,
		       "Cannot split: builtin_apply and va_start.\n");
	    can_split = false;
	    break;
	  case BUILT_IN_EH_POINTER:
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file, "Cannot split: builtin_eh_pointer.\n");
	    can_split = false;
	    break;
	  default:
	    break;
	  }

      /* Record SSA defs/uses; note the walk continues even after can_split
	 went false so the bitmaps stay complete.  */
      FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
	bitmap_set_bit (set_ssa_names, SSA_NAME_VERSION (op));
      FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
	bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
      can_split &= !walk_stmt_load_store_addr_ops (stmt, non_ssa_vars,
						   mark_nonssa_use,
						   mark_nonssa_use,
						   mark_nonssa_use);
    }
  /* PHIs: result is a def, arguments are uses.  */
  for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
    {
      gimple stmt = gsi_stmt (bsi);
      unsigned int i;

      if (virtual_operand_p (gimple_phi_result (stmt)))
	continue;
      bitmap_set_bit (set_ssa_names,
		      SSA_NAME_VERSION (gimple_phi_result (stmt)));
      for (i = 0; i < gimple_phi_num_args (stmt); i++)
	{
	  tree op = gimple_phi_arg_def (stmt, i);
	  if (TREE_CODE (op) == SSA_NAME)
	    bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
	}
      can_split &= !walk_stmt_load_store_addr_ops (stmt, non_ssa_vars,
						   mark_nonssa_use,
						   mark_nonssa_use,
						   mark_nonssa_use);
    }
  /* Record also uses coming from PHI operand in return BB.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->dest == return_bb)
      {
	for (bsi = gsi_start_phis (return_bb); !gsi_end_p (bsi); gsi_next (&bsi))
	  {
	    gimple stmt = gsi_stmt (bsi);
	    tree op = gimple_phi_arg_def (stmt, e->dest_idx);

	    if (virtual_operand_p (gimple_phi_result (stmt)))
	      continue;
	    if (TREE_CODE (op) == SSA_NAME)
	      bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
	    else
	      can_split &= !mark_nonssa_use (stmt, op, non_ssa_vars);
	  }
      }
  return can_split;
}
870
/* Stack entry for recursive DFS walk in find_split_point.  */

typedef struct
{
  /* Basic block we are examining.  */
  basic_block bb;

  /* SSA names set and used by the BB and all BBs reachable
     from it via DFS walk.  */
  bitmap set_ssa_names, used_ssa_names;
  /* Non-SSA automatic vars used likewise (indexed by DECL_UID).  */
  bitmap non_ssa_vars;

  /* All BBS visited from this BB via DFS walk.  */
  bitmap bbs_visited;

  /* Last examined edge in DFS walk.  Since we walk unoriented graph,
     the value is up to sum of incoming and outgoing edges of BB.  */
  unsigned int edge_num;

  /* Stack entry index of earliest BB reachable from current BB
     or any BB visited later in DFS walk.  */
  int earliest;

  /* Overall time and size of all BBs reached from this BB in DFS walk.  */
  int overall_time, overall_size;

  /* When false we can not split on this BB.  */
  bool can_split;
} stack_entry;
3e485f62
JH
900
901
902/* Find all articulations and call consider_split on them.
903 OVERALL_TIME and OVERALL_SIZE is time and size of the function.
904
905 We perform basic algorithm for finding an articulation in a graph
906 created from CFG by considering it to be an unoriented graph.
907
908 The articulation is discovered via DFS walk. We collect earliest
909 basic block on stack that is reachable via backward edge. Articulation
910 is any basic block such that there is no backward edge bypassing it.
911 To reduce stack usage we maintain heap allocated stack in STACK vector.
912 AUX pointer of BB is set to index it appears in the stack or -1 once
913 it is visited and popped off the stack.
914
915 The algorithm finds articulation after visiting the whole component
916 reachable by it. This makes it convenient to collect information about
917 the component used by consider_split. */
918
919static void
920find_split_points (int overall_time, int overall_size)
921{
922 stack_entry first;
6e1aa848 923 vec<stack_entry> stack = vNULL;
3e485f62
JH
924 basic_block bb;
925 basic_block return_bb = find_return_bb ();
926 struct split_point current;
927
928 current.header_time = overall_time;
929 current.header_size = overall_size;
930 current.split_time = 0;
931 current.split_size = 0;
932 current.ssa_names_to_pass = BITMAP_ALLOC (NULL);
933
934 first.bb = ENTRY_BLOCK_PTR;
935 first.edge_num = 0;
936 first.overall_time = 0;
937 first.overall_size = 0;
938 first.earliest = INT_MAX;
939 first.set_ssa_names = 0;
940 first.used_ssa_names = 0;
941 first.bbs_visited = 0;
9771b263 942 stack.safe_push (first);
3e485f62
JH
943 ENTRY_BLOCK_PTR->aux = (void *)(intptr_t)-1;
944
9771b263 945 while (!stack.is_empty ())
3e485f62 946 {
9771b263 947 stack_entry *entry = &stack.last ();
3e485f62
JH
948
949 /* We are walking an acyclic graph, so edge_num counts
950 succ and pred edges together. However when considering
951 articulation, we want to have processed everything reachable
952 from articulation but nothing that reaches into it. */
953 if (entry->edge_num == EDGE_COUNT (entry->bb->succs)
954 && entry->bb != ENTRY_BLOCK_PTR)
955 {
9771b263 956 int pos = stack.length ();
3e485f62
JH
957 entry->can_split &= visit_bb (entry->bb, return_bb,
958 entry->set_ssa_names,
959 entry->used_ssa_names,
960 entry->non_ssa_vars);
961 if (pos <= entry->earliest && !entry->can_split
962 && dump_file && (dump_flags & TDF_DETAILS))
963 fprintf (dump_file,
964 "found articulation at bb %i but can not split\n",
965 entry->bb->index);
966 if (pos <= entry->earliest && entry->can_split)
967 {
968 if (dump_file && (dump_flags & TDF_DETAILS))
969 fprintf (dump_file, "found articulation at bb %i\n",
970 entry->bb->index);
971 current.entry_bb = entry->bb;
972 current.ssa_names_to_pass = BITMAP_ALLOC (NULL);
973 bitmap_and_compl (current.ssa_names_to_pass,
974 entry->used_ssa_names, entry->set_ssa_names);
975 current.header_time = overall_time - entry->overall_time;
976 current.header_size = overall_size - entry->overall_size;
977 current.split_time = entry->overall_time;
978 current.split_size = entry->overall_size;
979 current.split_bbs = entry->bbs_visited;
980 consider_split (&current, entry->non_ssa_vars, return_bb);
981 BITMAP_FREE (current.ssa_names_to_pass);
982 }
983 }
984 /* Do actual DFS walk. */
985 if (entry->edge_num
986 < (EDGE_COUNT (entry->bb->succs)
987 + EDGE_COUNT (entry->bb->preds)))
988 {
989 edge e;
990 basic_block dest;
991 if (entry->edge_num < EDGE_COUNT (entry->bb->succs))
992 {
993 e = EDGE_SUCC (entry->bb, entry->edge_num);
994 dest = e->dest;
995 }
996 else
997 {
998 e = EDGE_PRED (entry->bb, entry->edge_num
999 - EDGE_COUNT (entry->bb->succs));
1000 dest = e->src;
1001 }
1002
1003 entry->edge_num++;
1004
1005 /* New BB to visit, push it to the stack. */
1006 if (dest != return_bb && dest != EXIT_BLOCK_PTR
1007 && !dest->aux)
1008 {
1009 stack_entry new_entry;
1010
1011 new_entry.bb = dest;
1012 new_entry.edge_num = 0;
1013 new_entry.overall_time
9771b263 1014 = bb_info_vec[dest->index].time;
3e485f62 1015 new_entry.overall_size
9771b263 1016 = bb_info_vec[dest->index].size;
3e485f62
JH
1017 new_entry.earliest = INT_MAX;
1018 new_entry.set_ssa_names = BITMAP_ALLOC (NULL);
1019 new_entry.used_ssa_names = BITMAP_ALLOC (NULL);
1020 new_entry.bbs_visited = BITMAP_ALLOC (NULL);
1021 new_entry.non_ssa_vars = BITMAP_ALLOC (NULL);
1022 new_entry.can_split = true;
1023 bitmap_set_bit (new_entry.bbs_visited, dest->index);
9771b263
DN
1024 stack.safe_push (new_entry);
1025 dest->aux = (void *)(intptr_t)stack.length ();
3e485f62
JH
1026 }
1027 /* Back edge found, record the earliest point. */
1028 else if ((intptr_t)dest->aux > 0
1029 && (intptr_t)dest->aux < entry->earliest)
1030 entry->earliest = (intptr_t)dest->aux;
1031 }
ed7656f6
JJ
1032 /* We are done with examining the edges. Pop off the value from stack
1033 and merge stuff we accumulate during the walk. */
3e485f62
JH
1034 else if (entry->bb != ENTRY_BLOCK_PTR)
1035 {
9771b263 1036 stack_entry *prev = &stack[stack.length () - 2];
3e485f62
JH
1037
1038 entry->bb->aux = (void *)(intptr_t)-1;
1039 prev->can_split &= entry->can_split;
1040 if (prev->set_ssa_names)
1041 {
1042 bitmap_ior_into (prev->set_ssa_names, entry->set_ssa_names);
1043 bitmap_ior_into (prev->used_ssa_names, entry->used_ssa_names);
1044 bitmap_ior_into (prev->bbs_visited, entry->bbs_visited);
1045 bitmap_ior_into (prev->non_ssa_vars, entry->non_ssa_vars);
1046 }
1047 if (prev->earliest > entry->earliest)
1048 prev->earliest = entry->earliest;
1049 prev->overall_time += entry->overall_time;
1050 prev->overall_size += entry->overall_size;
1051 BITMAP_FREE (entry->set_ssa_names);
1052 BITMAP_FREE (entry->used_ssa_names);
1053 BITMAP_FREE (entry->bbs_visited);
1054 BITMAP_FREE (entry->non_ssa_vars);
9771b263 1055 stack.pop ();
3e485f62
JH
1056 }
1057 else
9771b263 1058 stack.pop ();
3e485f62
JH
1059 }
1060 ENTRY_BLOCK_PTR->aux = NULL;
1061 FOR_EACH_BB (bb)
1062 bb->aux = NULL;
9771b263 1063 stack.release ();
3e485f62
JH
1064 BITMAP_FREE (current.ssa_names_to_pass);
1065}
1066
/* Split function at SPLIT_POINT.

   This performs the actual transformation decided by consider_split:
   a clone containing SPLIT_POINT->split_bbs is produced via
   cgraph_function_versioning, the cloned region is replaced in the
   original function by a call to the new "part" function, and the
   return value (if the split part sets one) and debug info for
   optimized-away parameters are wired up.  Dominance info is freed and
   inline parameters of the clone are recomputed at the end.  */

static void
split_function (struct split_point *split_point)
{
  vec<tree> args_to_pass = vNULL;
  bitmap args_to_skip;
  tree parm;
  int num = 0;
  struct cgraph_node *node, *cur_node = cgraph_get_node (current_function_decl);
  basic_block return_bb = find_return_bb ();
  basic_block call_bb;
  gimple_stmt_iterator gsi;
  gimple call;
  edge e;
  edge_iterator ei;
  tree retval = NULL, real_retval = NULL;
  bool split_part_return_p = false;
  gimple last_stmt = NULL;
  unsigned int i;
  tree arg, ddef;
  vec<tree, va_gc> **debug_args = NULL;

  if (dump_file)
    {
      fprintf (dump_file, "\n\nSplitting function at:\n");
      dump_split_point (dump_file, split_point);
    }

  /* When the signature can not change, all parameters must be passed
     through; args_to_skip == NULL encodes that below.  */
  if (cur_node->local.can_change_signature)
    args_to_skip = BITMAP_ALLOC (NULL);
  else
    args_to_skip = NULL;

  /* Collect the parameters of new function and args_to_skip bitmap.  */
  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm; parm = DECL_CHAIN (parm), num++)
    if (args_to_skip
	&& (!is_gimple_reg (parm)
	    /* A parameter is skipped when its default-def SSA name is not
	       among the names the split part needs passed in.  */
	    || (ddef = ssa_default_def (cfun, parm)) == NULL_TREE
	    || !bitmap_bit_p (split_point->ssa_names_to_pass,
			      SSA_NAME_VERSION (ddef))))
      bitmap_set_bit (args_to_skip, num);
    else
      {
	/* This parm might not have been used up to now, but is going to be
	   used, hence register it.  */
	if (is_gimple_reg (parm))
	  arg = get_or_create_ssa_default_def (cfun, parm);
	else
	  arg = parm;

	if (!useless_type_conversion_p (DECL_ARG_TYPE (parm), TREE_TYPE (arg)))
	  arg = fold_convert (DECL_ARG_TYPE (parm), arg);
	args_to_pass.safe_push (arg);
      }

  /* See if the split function will return.  It does when some predecessor
     of RETURN_BB belongs to the split region.  */
  FOR_EACH_EDGE (e, ei, return_bb->preds)
    if (bitmap_bit_p (split_point->split_bbs, e->src->index))
      break;
  if (e)
    split_part_return_p = true;

  /* Add return block to what will become the split function.
     We do not return; no return block is needed.  */
  if (!split_part_return_p)
    ;
  /* We have no return block, so nothing is needed.  */
  else if (return_bb == EXIT_BLOCK_PTR)
    ;
  /* When we do not want to return value, we need to construct
     new return block with empty return statement.
     FIXME: Once we are able to change return type, we should change function
     to return void instead of just outputting function with undefined return
     value.  For structures this affects quality of codegen.  */
  else if (!split_point->split_part_set_retval
           && find_retval (return_bb))
    {
      bool redirected = true;
      basic_block new_return_bb = create_basic_block (NULL, 0, return_bb);
      gimple_stmt_iterator gsi = gsi_start_bb (new_return_bb);
      gsi_insert_after (&gsi, gimple_build_return (NULL), GSI_NEW_STMT);
      /* Redirect all split-region predecessors of RETURN_BB to the new
	 empty return block.  Restart iteration after each redirect since
	 redirect_edge_and_branch invalidates the edge iterator.  */
      while (redirected)
	{
	  redirected = false;
	  FOR_EACH_EDGE (e, ei, return_bb->preds)
	    if (bitmap_bit_p (split_point->split_bbs, e->src->index))
	      {
		new_return_bb->count += e->count;
		new_return_bb->frequency += EDGE_FREQUENCY (e);
		redirect_edge_and_branch (e, new_return_bb);
		redirected = true;
		break;
	      }
	}
      e = make_edge (new_return_bb, EXIT_BLOCK_PTR, 0);
      e->probability = REG_BR_PROB_BASE;
      e->count = new_return_bb->count;
      if (current_loops)
	add_bb_to_loop (new_return_bb, current_loops->tree_root);
      bitmap_set_bit (split_point->split_bbs, new_return_bb->index);
    }
  /* When we pass around the value, use existing return block.  */
  else
    bitmap_set_bit (split_point->split_bbs, return_bb->index);

  /* If RETURN_BB has virtual operand PHIs, they must be removed and the
     virtual operand marked for renaming as we change the CFG in a way that
     tree-inline is not able to compensate for.

     Note this can happen whether or not we have a return value.  If we have
     a return value, then RETURN_BB may have PHIs for real operands too.  */
  if (return_bb != EXIT_BLOCK_PTR)
    {
      bool phi_p = false;
      for (gsi = gsi_start_phis (return_bb); !gsi_end_p (gsi);)
	{
	  gimple stmt = gsi_stmt (gsi);
	  if (!virtual_operand_p (gimple_phi_result (stmt)))
	    {
	      gsi_next (&gsi);
	      continue;
	    }
	  mark_virtual_phi_result_for_renaming (stmt);
	  remove_phi_node (&gsi, true);
	  phi_p = true;
	}
      /* In reality we have to rename the reaching definition of the
	 virtual operand at return_bb as we will eventually release it
	 when we remove the code region we outlined.
	 So we have to rename all immediate virtual uses of that region
	 if we didn't see a PHI definition yet.  */
      /* ??? In real reality we want to set the reaching vdef of the
	 entry of the SESE region as the vuse of the call and the reaching
	 vdef of the exit of the SESE region as the vdef of the call.  */
      if (!phi_p)
	for (gsi = gsi_start_bb (return_bb); !gsi_end_p (gsi); gsi_next (&gsi))
	  {
	    gimple stmt = gsi_stmt (gsi);
	    if (gimple_vuse (stmt))
	      {
		gimple_set_vuse (stmt, NULL_TREE);
		update_stmt (stmt);
	      }
	    if (gimple_vdef (stmt))
	      break;
	  }
    }

  /* Now create the actual clone.  */
  rebuild_cgraph_edges ();
  node = cgraph_function_versioning (cur_node, vNULL,
				     NULL,
				     args_to_skip,
				     !split_part_return_p,
				     split_point->split_bbs,
				     split_point->entry_bb, "part");
  /* For usual cloning it is enough to clear builtin only when signature
     changes.  For partial inlining we however can not expect the part
     of builtin implementation to have same semantic as the whole.  */
  if (DECL_BUILT_IN (node->decl))
    {
      DECL_BUILT_IN_CLASS (node->decl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (node->decl) = (enum built_in_function) 0;
    }
  /* If the original function is declared inline, there is no point in issuing
     a warning for the non-inlinable part.  */
  DECL_NO_INLINE_WARNING_P (node->decl) = 1;
  /* The original body was rewritten; callee edges and references of the
     header will be rebuilt by the pass manager.  */
  cgraph_node_remove_callees (cur_node);
  ipa_remove_all_references (&cur_node->ref_list);
  /* A split part that never returns is effectively noreturn; mark it so
     via TREE_THIS_VOLATILE.  */
  if (!split_part_return_p)
    TREE_THIS_VOLATILE (node->decl) = 1;
  if (dump_file)
    dump_function_to_file (node->decl, dump_file, dump_flags);

  /* Create the basic block we place call into.  It is the entry basic block
     split after last label.  */
  call_bb = split_point->entry_bb;
  for (gsi = gsi_start_bb (call_bb); !gsi_end_p (gsi);)
    if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
      {
	last_stmt = gsi_stmt (gsi);
	gsi_next (&gsi);
      }
    else
      break;
  e = split_block (split_point->entry_bb, last_stmt);
  remove_edge (e);

  /* Produce the call statement.  */
  gsi = gsi_last_bb (call_bb);
  FOR_EACH_VEC_ELT (args_to_pass, i, arg)
    /* Arguments produced by fold_convert above may not be valid gimple
       operands; gimplify them in place.  */
    if (!is_gimple_val (arg))
      {
	arg = force_gimple_operand_gsi (&gsi, arg, true, NULL_TREE,
					false, GSI_CONTINUE_LINKING);
	args_to_pass[i] = arg;
      }
  call = gimple_build_call_vec (node->decl, args_to_pass);
  gimple_set_block (call, DECL_INITIAL (current_function_decl));
  args_to_pass.release ();

  /* For optimized away parameters, add on the caller side
     before the call
     DEBUG D#X => parm_Y(D)
     stmts and associate D#X with parm in decl_debug_args_lookup
     vector to say for debug info that if parameter parm had been passed,
     it would have value parm_Y(D).  */
  if (args_to_skip)
    for (parm = DECL_ARGUMENTS (current_function_decl), num = 0;
	 parm; parm = DECL_CHAIN (parm), num++)
      if (bitmap_bit_p (args_to_skip, num)
	  && is_gimple_reg (parm))
	{
	  tree ddecl;
	  gimple def_temp;

	  /* This needs to be done even without MAY_HAVE_DEBUG_STMTS,
	     otherwise if it didn't exist before, we'd end up with
	     different SSA_NAME_VERSIONs between -g and -g0.  */
	  arg = get_or_create_ssa_default_def (cfun, parm);
	  if (!MAY_HAVE_DEBUG_STMTS)
	    continue;

	  if (debug_args == NULL)
	    debug_args = decl_debug_args_insert (node->decl);
	  ddecl = make_node (DEBUG_EXPR_DECL);
	  DECL_ARTIFICIAL (ddecl) = 1;
	  TREE_TYPE (ddecl) = TREE_TYPE (parm);
	  DECL_MODE (ddecl) = DECL_MODE (parm);
	  /* debug_args holds (origin parm, debug decl) pairs.  */
	  vec_safe_push (*debug_args, DECL_ORIGIN (parm));
	  vec_safe_push (*debug_args, ddecl);
	  def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg),
					      call);
	  gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
	}
  /* And on the callee side, add
     DEBUG D#Y s=> parm
     DEBUG var => D#Y
     stmts to the first bb where var is a VAR_DECL created for the
     optimized away parameter in DECL_INITIAL block.  This hints
     in the debug info that var (whole DECL_ORIGIN is the parm PARM_DECL)
     is optimized away, but could be looked up at the call site
     as value of D#X there.  */
  if (debug_args != NULL)
    {
      unsigned int i;
      tree var, vexpr;
      gimple_stmt_iterator cgsi;
      gimple def_temp;

      push_cfun (DECL_STRUCT_FUNCTION (node->decl));
      var = BLOCK_VARS (DECL_INITIAL (node->decl));
      i = vec_safe_length (*debug_args);
      cgsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
      /* Walk the pairs in debug_args backwards; BLOCK_VARS of the clone
	 is assumed to list the replacement VAR_DECLs in matching order,
	 so VAR advances monotonically.  */
      do
	{
	  i -= 2;
	  while (var != NULL_TREE
		 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
	    var = TREE_CHAIN (var);
	  if (var == NULL_TREE)
	    break;
	  vexpr = make_node (DEBUG_EXPR_DECL);
	  parm = (**debug_args)[i];
	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (parm);
	  DECL_MODE (vexpr) = DECL_MODE (parm);
	  def_temp = gimple_build_debug_source_bind (vexpr, parm,
						     NULL);
	  gsi_insert_before (&cgsi, def_temp, GSI_SAME_STMT);
	  def_temp = gimple_build_debug_bind (var, vexpr, NULL);
	  gsi_insert_before (&cgsi, def_temp, GSI_SAME_STMT);
	}
      while (i);
      pop_cfun ();
    }

  /* We avoid address being taken on any variable used by split part,
     so return slot optimization is always possible.  Moreover this is
     required to make DECL_BY_REFERENCE work.  */
  if (aggregate_value_p (DECL_RESULT (current_function_decl),
			 TREE_TYPE (current_function_decl))
      && (!is_gimple_reg_type (TREE_TYPE (DECL_RESULT (current_function_decl)))
	  || DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))))
    gimple_call_set_return_slot_opt (call, true);

  /* Update return value.  This is bit tricky.  When we do not return,
     do nothing.  When we return we might need to update return_bb
     or produce a new return statement.  */
  if (!split_part_return_p)
    gsi_insert_after (&gsi, call, GSI_NEW_STMT);
  else
    {
      e = make_edge (call_bb, return_bb,
		     return_bb == EXIT_BLOCK_PTR ? 0 : EDGE_FALLTHRU);
      e->count = call_bb->count;
      e->probability = REG_BR_PROB_BASE;

      /* If there is return basic block, see what value we need to store
         return value into and put call just before it.  */
      if (return_bb != EXIT_BLOCK_PTR)
	{
	  real_retval = retval = find_retval (return_bb);

	  if (real_retval && split_point->split_part_set_retval)
	    {
	      gimple_stmt_iterator psi;

	      /* See if we need new SSA_NAME for the result.
		 When DECL_BY_REFERENCE is true, retval is actually pointer to
		 return value and it is constant in whole function.  */
	      if (TREE_CODE (retval) == SSA_NAME
		  && !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
		{
		  retval = copy_ssa_name (retval, call);

		  /* See if there is PHI defining return value.  */
		  for (psi = gsi_start_phis (return_bb);
		       !gsi_end_p (psi); gsi_next (&psi))
		    if (!virtual_operand_p (gimple_phi_result (gsi_stmt (psi))))
		      break;

		  /* When there is PHI, just update its value.  */
		  if (TREE_CODE (retval) == SSA_NAME
		      && !gsi_end_p (psi))
		    add_phi_arg (gsi_stmt (psi), retval, e, UNKNOWN_LOCATION);
		  /* Otherwise update the return BB itself.
		     find_return_bb allows at most one assignment to return value,
		     so update first statement.  */
		  else
		    {
		      gimple_stmt_iterator bsi;
		      for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi);
			   gsi_next (&bsi))
			if (gimple_code (gsi_stmt (bsi)) == GIMPLE_RETURN)
			  {
			    gimple_return_set_retval (gsi_stmt (bsi), retval);
			    break;
			  }
			else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
				 && !gimple_clobber_p (gsi_stmt (bsi)))
			  {
			    gimple_assign_set_rhs1 (gsi_stmt (bsi), retval);
			    break;
			  }
		      update_stmt (gsi_stmt (bsi));
		    }
		}
	      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
		{
		  gimple_call_set_lhs (call, build_simple_mem_ref (retval));
		  gsi_insert_after (&gsi, call, GSI_NEW_STMT);
		}
	      else
		{
		  tree restype;
		  restype = TREE_TYPE (DECL_RESULT (current_function_decl));
		  gsi_insert_after (&gsi, call, GSI_NEW_STMT);
		  /* When the call result type differs from the declared
		     return type, store the call into a fresh temporary and
		     convert it into the original RETVAL afterwards.  */
		  if (!useless_type_conversion_p (TREE_TYPE (retval), restype))
		    {
		      gimple cpy;
		      tree tem = create_tmp_reg (restype, NULL);
		      tem = make_ssa_name (tem, call);
		      cpy = gimple_build_assign_with_ops (NOP_EXPR, retval,
							  tem, NULL_TREE);
		      gsi_insert_after (&gsi, cpy, GSI_NEW_STMT);
		      retval = tem;
		    }
		  gimple_call_set_lhs (call, retval);
		  update_stmt (call);
		}
	    }
	  else
	    gsi_insert_after (&gsi, call, GSI_NEW_STMT);
	}
      /* We don't use return block (there is either no return in function or
	 multiple of them).  So create new basic block with return statement.
	 */
      else
	{
	  gimple ret;
	  if (split_point->split_part_set_retval
	      && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
	    {
	      retval = DECL_RESULT (current_function_decl);

	      /* We use temporary register to hold value when aggregate_value_p
		 is false.  Similarly for DECL_BY_REFERENCE we must avoid extra
		 copy.  */
	      if (!aggregate_value_p (retval, TREE_TYPE (current_function_decl))
		  && !DECL_BY_REFERENCE (retval))
		retval = create_tmp_reg (TREE_TYPE (retval), NULL);
	      if (is_gimple_reg (retval))
		{
		  /* When returning by reference, there is only one SSA name
		     assigned to RESULT_DECL (that is pointer to return value).
		     Look it up or create new one if it is missing.  */
		  if (DECL_BY_REFERENCE (retval))
		    retval = get_or_create_ssa_default_def (cfun, retval);
		  /* Otherwise produce new SSA name for return value.  */
		  else
		    retval = make_ssa_name (retval, call);
		}
	      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
		gimple_call_set_lhs (call, build_simple_mem_ref (retval));
	      else
		gimple_call_set_lhs (call, retval);
	    }
	  gsi_insert_after (&gsi, call, GSI_NEW_STMT);
	  ret = gimple_build_return (retval);
	  gsi_insert_after (&gsi, ret, GSI_NEW_STMT);
	}
    }
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  compute_inline_parameters (node, true);
}
1486
1487/* Execute function splitting pass. */
1488
1489static unsigned int
1490execute_split_functions (void)
1491{
1492 gimple_stmt_iterator bsi;
1493 basic_block bb;
1494 int overall_time = 0, overall_size = 0;
1495 int todo = 0;
581985d7 1496 struct cgraph_node *node = cgraph_get_node (current_function_decl);
3e485f62 1497
b2d2adc6
RG
1498 if (flags_from_decl_or_type (current_function_decl)
1499 & (ECF_NORETURN|ECF_MALLOC))
3e485f62
JH
1500 {
1501 if (dump_file)
b2d2adc6 1502 fprintf (dump_file, "Not splitting: noreturn/malloc function.\n");
3e485f62
JH
1503 return 0;
1504 }
1505 if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
1506 {
1507 if (dump_file)
1508 fprintf (dump_file, "Not splitting: main function.\n");
1509 return 0;
1510 }
1511 /* This can be relaxed; function might become inlinable after splitting
1512 away the uninlinable part. */
9771b263
DN
1513 if (inline_edge_summary_vec.exists ()
1514 && !inline_summary (node)->inlinable)
3e485f62
JH
1515 {
1516 if (dump_file)
1517 fprintf (dump_file, "Not splitting: not inlinable.\n");
1518 return 0;
1519 }
67348ccc 1520 if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
3e485f62
JH
1521 {
1522 if (dump_file)
ed7656f6 1523 fprintf (dump_file, "Not splitting: disregarding inline limits.\n");
3e485f62
JH
1524 return 0;
1525 }
1526 /* This can be relaxed; most of versioning tests actually prevents
1527 a duplication. */
1528 if (!tree_versionable_function_p (current_function_decl))
1529 {
1530 if (dump_file)
1531 fprintf (dump_file, "Not splitting: not versionable.\n");
1532 return 0;
1533 }
1534 /* FIXME: we could support this. */
1535 if (DECL_STRUCT_FUNCTION (current_function_decl)->static_chain_decl)
1536 {
1537 if (dump_file)
1538 fprintf (dump_file, "Not splitting: nested function.\n");
1539 return 0;
1540 }
3e485f62
JH
1541
1542 /* See if it makes sense to try to split.
1543 It makes sense to split if we inline, that is if we have direct calls to
1544 handle or direct calls are possibly going to appear as result of indirect
cf9712cc
JH
1545 inlining or LTO. Also handle -fprofile-generate as LTO to allow non-LTO
1546 training for LTO -fprofile-use build.
1547
3e485f62
JH
1548 Note that we are not completely conservative about disqualifying functions
1549 called once. It is possible that the caller is called more then once and
1550 then inlining would still benefit. */
c91061e6
JH
1551 if ((!node->callers
1552 /* Local functions called once will be completely inlined most of time. */
1553 || (!node->callers->next_caller && node->local.local))
67348ccc
DM
1554 && !node->address_taken
1555 && (!flag_lto || !node->externally_visible))
3e485f62
JH
1556 {
1557 if (dump_file)
1558 fprintf (dump_file, "Not splitting: not called directly "
1559 "or called once.\n");
1560 return 0;
1561 }
1562
1563 /* FIXME: We can actually split if splitting reduces call overhead. */
1564 if (!flag_inline_small_functions
1565 && !DECL_DECLARED_INLINE_P (current_function_decl))
1566 {
1567 if (dump_file)
1568 fprintf (dump_file, "Not splitting: not autoinlining and function"
1569 " is not inline.\n");
1570 return 0;
1571 }
1572
e70670cf
JH
1573 /* We enforce splitting after loop headers when profile info is not
1574 available. */
1575 if (profile_status != PROFILE_READ)
1576 mark_dfs_back_edges ();
1577
b2e25729
BS
1578 /* Initialize bitmap to track forbidden calls. */
1579 forbidden_dominators = BITMAP_ALLOC (NULL);
1580 calculate_dominance_info (CDI_DOMINATORS);
1581
3e485f62 1582 /* Compute local info about basic blocks and determine function size/time. */
9771b263 1583 bb_info_vec.safe_grow_cleared (last_basic_block + 1);
3e485f62
JH
1584 memset (&best_split_point, 0, sizeof (best_split_point));
1585 FOR_EACH_BB (bb)
1586 {
1587 int time = 0;
1588 int size = 0;
1589 int freq = compute_call_stmt_bb_frequency (current_function_decl, bb);
1590
1591 if (dump_file && (dump_flags & TDF_DETAILS))
1592 fprintf (dump_file, "Basic block %i\n", bb->index);
1593
1594 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
1595 {
1596 int this_time, this_size;
1597 gimple stmt = gsi_stmt (bsi);
1598
1599 this_size = estimate_num_insns (stmt, &eni_size_weights);
1600 this_time = estimate_num_insns (stmt, &eni_time_weights) * freq;
1601 size += this_size;
1602 time += this_time;
b2e25729 1603 check_forbidden_calls (stmt);
3e485f62
JH
1604
1605 if (dump_file && (dump_flags & TDF_DETAILS))
1606 {
1607 fprintf (dump_file, " freq:%6i size:%3i time:%3i ",
1608 freq, this_size, this_time);
1609 print_gimple_stmt (dump_file, stmt, 0, 0);
1610 }
1611 }
1612 overall_time += time;
1613 overall_size += size;
9771b263
DN
1614 bb_info_vec[bb->index].time = time;
1615 bb_info_vec[bb->index].size = size;
3e485f62
JH
1616 }
1617 find_split_points (overall_time, overall_size);
1618 if (best_split_point.split_bbs)
1619 {
1620 split_function (&best_split_point);
1621 BITMAP_FREE (best_split_point.ssa_names_to_pass);
1622 BITMAP_FREE (best_split_point.split_bbs);
1623 todo = TODO_update_ssa | TODO_cleanup_cfg;
1624 }
b2e25729 1625 BITMAP_FREE (forbidden_dominators);
9771b263 1626 bb_info_vec.release ();
3e485f62
JH
1627 return todo;
1628}
1629
cf9712cc
JH
1630/* Gate function splitting pass. When doing profile feedback, we want
1631 to execute the pass after profiling is read. So disable one in
1632 early optimization. */
1633
3e485f62
JH
1634static bool
1635gate_split_functions (void)
1636{
cf9712cc
JH
1637 return (flag_partial_inlining
1638 && !profile_arc_flag && !flag_branch_probabilities);
3e485f62
JH
1639}
1640
27a4cd48
DM
namespace {

/* Pass descriptor for the regular (non-feedback) function splitting
   pass "fnsplit".  Enabled by gate_split_functions when no profile
   feedback is involved.  */
const pass_data pass_data_split_functions =
{
  GIMPLE_PASS, /* type */
  "fnsplit", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_IPA_FNSPLIT, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_all, /* todo_flags_finish */
};

/* Thin opt_pass wrapper delegating to the static gate_split_functions
   and execute_split_functions above.  */
class pass_split_functions : public gimple_opt_pass
{
public:
  pass_split_functions (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_functions, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_split_functions (); }
  unsigned int execute () { return execute_split_functions (); }

}; // class pass_split_functions

} // anon namespace
1672
1673gimple_opt_pass *
1674make_pass_split_functions (gcc::context *ctxt)
1675{
1676 return new pass_split_functions (ctxt);
1677}
1678
cf9712cc
JH
1679/* Gate feedback driven function splitting pass.
1680 We don't need to split when profiling at all, we are producing
1681 lousy code anyway. */
1682
1683static bool
1684gate_feedback_split_functions (void)
1685{
1686 return (flag_partial_inlining
1687 && flag_branch_probabilities);
1688}
1689
1690/* Execute function splitting pass. */
1691
1692static unsigned int
1693execute_feedback_split_functions (void)
1694{
1695 unsigned int retval = execute_split_functions ();
1696 if (retval)
1697 retval |= TODO_rebuild_cgraph_edges;
1698 return retval;
1699}
1700
27a4cd48
DM
namespace {

/* Pass descriptor for the feedback-driven variant of the function
   splitting pass, "feedback_fnsplit".  Enabled by
   gate_feedback_split_functions once profile feedback was read.  */
const pass_data pass_data_feedback_split_functions =
{
  GIMPLE_PASS, /* type */
  "feedback_fnsplit", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_IPA_FNSPLIT, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_all, /* todo_flags_finish */
};

/* Thin opt_pass wrapper delegating to gate_feedback_split_functions
   and execute_feedback_split_functions above.  */
class pass_feedback_split_functions : public gimple_opt_pass
{
public:
  pass_feedback_split_functions (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_feedback_split_functions, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_feedback_split_functions (); }
  unsigned int execute () { return execute_feedback_split_functions (); }

}; // class pass_feedback_split_functions

} // anon namespace
1732
1733gimple_opt_pass *
1734make_pass_feedback_split_functions (gcc::context *ctxt)
1735{
1736 return new pass_feedback_split_functions (ctxt);
1737}