/* Function splitting pass
   Copyright (C) 2010, 2011, 2012
   Free Software Foundation, Inc.
   Contributed by Jan Hubicka <jh@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* The purpose of this pass is to split function bodies to improve
   inlining.  I.e. for a function of the form:

   func (...)
     {
       if (cheap_test)
         something_small
       else
         something_big
     }

   Produce:

   func.part (...)
     {
       something_big
     }

   func (...)
     {
       if (cheap_test)
         something_small
       else
         func.part (...);
     }

   When func becomes inlinable and cheap_test is often true, inlining func,
   but not func.part, leads to a performance improvement similar to that of
   inlining the original func, while the code size growth is smaller.

   The pass is organized in three stages:
   1) Collect local info about basic blocks into the BB_INFO structure and
      compute the function body's estimated size and time.
   2) Via DFS walk find all possible basic blocks where we can split
      and choose the best one.
   3) If a split point is found, split at the specified BB by creating a clone
      and updating the function to call it.

   The decisions about what functions to split are made in
   execute_split_functions and consider_split.

   There are several possible future improvements for this pass, including:

   1) Splitting to break up large functions
   2) Splitting to reduce stack frame usage
   3) Allow the split part of the function to use values computed in the
      header part.  The values need to be passed to the split function,
      perhaps via the same interface as for nested functions or as arguments.
   4) Support for simple rematerialization.  I.e. when the split part uses
      a value computed in the header from a function parameter in a very
      cheap way, we can just recompute it.
   5) Support splitting of nested functions.
   6) Support non-SSA arguments.
   7) There is nothing preventing us from producing multiple parts of a single
      function when needed or splitting also the parts.  */
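
/* For illustration only (a hypothetical user function, not part of this
   file): a typical shape the pass targets is

     char *
     get_buffer (struct state *s)
     {
       if (s->buf)                   <- cheap_test
         return s->buf;              <- something_small
       return alloc_and_fill (s);    <- something_big, becomes get_buffer.part
     }

   After splitting, the cheap fast path can be inlined at every call site
   while the allocation slow path stays out of line in get_buffer.part.  */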

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "target.h"
#include "cgraph.h"
#include "ipa-prop.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "flags.h"
#include "diagnostic.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "params.h"
#include "gimple-pretty-print.h"
#include "ipa-inline.h"

/* Per basic block info.  */

typedef struct
{
  unsigned int size;
  unsigned int time;
} bb_info;
DEF_VEC_O(bb_info);
DEF_VEC_ALLOC_O(bb_info,heap);

static VEC(bb_info, heap) *bb_info_vec;

/* Description of split point.  */

struct split_point
{
  /* Size of the partitions.  */
  unsigned int header_time, header_size, split_time, split_size;

  /* SSA names that need to be passed into the split function.  */
  bitmap ssa_names_to_pass;

  /* Basic block where we split (that will become the entry point of the
     new function).  */
  basic_block entry_bb;

  /* Basic blocks we are splitting away.  */
  bitmap split_bbs;

  /* True when the return value is computed in the split part and thus it
     needs to be returned.  */
  bool split_part_set_retval;
};

/* Best split point found.  */

struct split_point best_split_point;

/* Set of basic blocks that are not allowed to dominate a split point.  */

static bitmap forbidden_dominators;

static tree find_retval (basic_block return_bb);

/* Callback for walk_stmt_load_store_addr_ops.  If T is a non-SSA automatic
   variable, check whether it is present in the bitmap passed via DATA.  */

static bool
test_nonssa_use (gimple stmt ATTRIBUTE_UNUSED, tree t, void *data)
{
  t = get_base_address (t);

  if (!t || is_gimple_reg (t))
    return false;

  if (TREE_CODE (t) == PARM_DECL
      || (TREE_CODE (t) == VAR_DECL
          && auto_var_in_fn_p (t, current_function_decl))
      || TREE_CODE (t) == RESULT_DECL
      || TREE_CODE (t) == LABEL_DECL)
    return bitmap_bit_p ((bitmap)data, DECL_UID (t));

  /* For DECL_BY_REFERENCE, the return value is actually a pointer.  We want
     to pretend that the value pointed to is the actual result decl.  */
  if ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
      && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
      && SSA_NAME_VAR (TREE_OPERAND (t, 0))
      && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (t, 0))) == RESULT_DECL
      && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
    return
      bitmap_bit_p ((bitmap)data,
                    DECL_UID (DECL_RESULT (current_function_decl)));

  return false;
}

/* Dump split point CURRENT.  */

static void
dump_split_point (FILE * file, struct split_point *current)
{
  fprintf (file,
           "Split point at BB %i\n"
           "  header time: %i header size: %i\n"
           "  split time: %i split size: %i\n  bbs: ",
           current->entry_bb->index, current->header_time,
           current->header_size, current->split_time, current->split_size);
  dump_bitmap (file, current->split_bbs);
  fprintf (file, "  SSA names to pass: ");
  dump_bitmap (file, current->ssa_names_to_pass);
}

/* Look for all BBs in header that might lead to the split part and verify
   that they are not defining any non-SSA var used by the split part.
   Parameters are the same as for consider_split.  */

static bool
verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
                     basic_block return_bb)
{
  bitmap seen = BITMAP_ALLOC (NULL);
  VEC (basic_block,heap) *worklist = NULL;
  edge e;
  edge_iterator ei;
  bool ok = true;

  FOR_EACH_EDGE (e, ei, current->entry_bb->preds)
    if (e->src != ENTRY_BLOCK_PTR
        && !bitmap_bit_p (current->split_bbs, e->src->index))
      {
        VEC_safe_push (basic_block, heap, worklist, e->src);
        bitmap_set_bit (seen, e->src->index);
      }

  while (!VEC_empty (basic_block, worklist))
    {
      gimple_stmt_iterator bsi;
      basic_block bb = VEC_pop (basic_block, worklist);

      FOR_EACH_EDGE (e, ei, bb->preds)
        if (e->src != ENTRY_BLOCK_PTR
            && bitmap_set_bit (seen, e->src->index))
          {
            gcc_checking_assert (!bitmap_bit_p (current->split_bbs,
                                                e->src->index));
            VEC_safe_push (basic_block, heap, worklist, e->src);
          }
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        {
          gimple stmt = gsi_stmt (bsi);
          if (is_gimple_debug (stmt))
            continue;
          if (walk_stmt_load_store_addr_ops
              (stmt, non_ssa_vars, test_nonssa_use, test_nonssa_use,
               test_nonssa_use))
            {
              ok = false;
              goto done;
            }
          if (gimple_code (stmt) == GIMPLE_LABEL
              && test_nonssa_use (stmt, gimple_label_label (stmt),
                                  non_ssa_vars))
            {
              ok = false;
              goto done;
            }
        }
      for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        {
          if (walk_stmt_load_store_addr_ops
              (gsi_stmt (bsi), non_ssa_vars, test_nonssa_use, test_nonssa_use,
               test_nonssa_use))
            {
              ok = false;
              goto done;
            }
        }
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          if (e->dest != return_bb)
            continue;
          for (bsi = gsi_start_phis (return_bb); !gsi_end_p (bsi);
               gsi_next (&bsi))
            {
              gimple stmt = gsi_stmt (bsi);
              tree op = gimple_phi_arg_def (stmt, e->dest_idx);

              if (virtual_operand_p (gimple_phi_result (stmt)))
                continue;
              if (TREE_CODE (op) != SSA_NAME
                  && test_nonssa_use (stmt, op, non_ssa_vars))
                {
                  ok = false;
                  goto done;
                }
            }
        }
    }
done:
  BITMAP_FREE (seen);
  VEC_free (basic_block, heap, worklist);
  return ok;
}

/* If STMT is a call, check the callee against a list of forbidden
   predicate functions.  If a match is found, look for uses of the
   call result in condition statements that compare against zero.
   For each such use, find the block targeted by the condition
   statement for the nonzero result, and set the bit for this block
   in the forbidden dominators bitmap.  The purpose of this is to avoid
   selecting a split point where we are likely to lose the chance
   to optimize away an unused function call.  */
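
/* For example (hypothetical source, for illustration only): given

     if (__builtin_constant_p (n))
       constant_case (n);
     else
       generic_case (n);

   the block holding constant_case is the target of the nonzero result and
   is marked forbidden, so no split point dominated by it is selected and
   the chance to fold the predicate and drop the dead branch after inlining
   is preserved.  */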

static void
check_forbidden_calls (gimple stmt)
{
  imm_use_iterator use_iter;
  use_operand_p use_p;
  tree lhs;

  /* At the moment, __builtin_constant_p is the only forbidden
     predicate function call (see PR49642).  */
  if (!gimple_call_builtin_p (stmt, BUILT_IN_CONSTANT_P))
    return;

  lhs = gimple_call_lhs (stmt);

  if (!lhs || TREE_CODE (lhs) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_FAST (use_p, use_iter, lhs)
    {
      tree op1;
      basic_block use_bb, forbidden_bb;
      enum tree_code code;
      edge true_edge, false_edge;
      gimple use_stmt = USE_STMT (use_p);

      if (gimple_code (use_stmt) != GIMPLE_COND)
        continue;

      /* Assuming canonical form for GIMPLE_COND here, with the constant
         in second position.  */
      op1 = gimple_cond_rhs (use_stmt);
      code = gimple_cond_code (use_stmt);
      use_bb = gimple_bb (use_stmt);

      extract_true_false_edges_from_block (use_bb, &true_edge, &false_edge);

      /* We're only interested in comparisons that distinguish
         unambiguously from zero.  */
      if (!integer_zerop (op1) || code == LE_EXPR || code == GE_EXPR)
        continue;

      if (code == EQ_EXPR)
        forbidden_bb = false_edge->dest;
      else
        forbidden_bb = true_edge->dest;

      bitmap_set_bit (forbidden_dominators, forbidden_bb->index);
    }
}

/* If BB is dominated by any block in the forbidden dominators set,
   return TRUE; else FALSE.  */

static bool
dominated_by_forbidden (basic_block bb)
{
  unsigned dom_bb;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (forbidden_dominators, 1, dom_bb, bi)
    {
      if (dominated_by_p (CDI_DOMINATORS, bb, BASIC_BLOCK (dom_bb)))
        return true;
    }

  return false;
}

/* We found a split_point CURRENT.  NON_SSA_VARS is a bitmap of all non-SSA
   variables used and RETURN_BB is the return basic block.
   See if we can split the function here.  */

static void
consider_split (struct split_point *current, bitmap non_ssa_vars,
                basic_block return_bb)
{
  tree parm;
  unsigned int num_args = 0;
  unsigned int call_overhead;
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator bsi;
  unsigned int i;
  int incoming_freq = 0;
  tree retval;

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_split_point (dump_file, current);

  FOR_EACH_EDGE (e, ei, current->entry_bb->preds)
    if (!bitmap_bit_p (current->split_bbs, e->src->index))
      incoming_freq += EDGE_FREQUENCY (e);

  /* Do not split when we would end up calling the function anyway.  */
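  /* For instance (hypothetical numbers): if the entry block frequency is
     1000 and PARAM_PARTIAL_INLINING_ENTRY_PROBABILITY is 40, any candidate
     whose header-side incoming frequency reaches 400 is rejected below,
     because the split part would be entered at least 40% of the time
     anyway.  */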
  if (incoming_freq
      >= (ENTRY_BLOCK_PTR->frequency
          * PARAM_VALUE (PARAM_PARTIAL_INLINING_ENTRY_PROBABILITY) / 100))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "  Refused: incoming frequency is too large.\n");
      return;
    }

  if (!current->header_size)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "  Refused: header empty\n");
      return;
    }

  /* Verify that PHI args on entry are either virtual or all their operands
     incoming from the header are the same.  */
  for (bsi = gsi_start_phis (current->entry_bb); !gsi_end_p (bsi); gsi_next (&bsi))
    {
      gimple stmt = gsi_stmt (bsi);
      tree val = NULL;

      if (virtual_operand_p (gimple_phi_result (stmt)))
        continue;
      for (i = 0; i < gimple_phi_num_args (stmt); i++)
        {
          edge e = gimple_phi_arg_edge (stmt, i);
          if (!bitmap_bit_p (current->split_bbs, e->src->index))
            {
              tree edge_val = gimple_phi_arg_def (stmt, i);
              if (val && edge_val != val)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file,
                             "  Refused: entry BB has PHI with multiple variants\n");
                  return;
                }
              val = edge_val;
            }
        }
    }

  /* See what arguments we will pass to the split function and compute
     call overhead.  */
  call_overhead = eni_size_weights.call_cost;
  for (parm = DECL_ARGUMENTS (current_function_decl); parm;
       parm = DECL_CHAIN (parm))
    {
      if (!is_gimple_reg (parm))
        {
          if (bitmap_bit_p (non_ssa_vars, DECL_UID (parm)))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file,
                         "  Refused: need to pass non-ssa param values\n");
              return;
            }
        }
      else
        {
          tree ddef = ssa_default_def (cfun, parm);
          if (ddef
              && bitmap_bit_p (current->ssa_names_to_pass,
                               SSA_NAME_VERSION (ddef)))
            {
              if (!VOID_TYPE_P (TREE_TYPE (parm)))
                call_overhead += estimate_move_cost (TREE_TYPE (parm));
              num_args++;
            }
        }
    }
  if (!VOID_TYPE_P (TREE_TYPE (current_function_decl)))
    call_overhead += estimate_move_cost (TREE_TYPE (current_function_decl));

  if (current->split_size <= call_overhead)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "  Refused: split size is smaller than call overhead\n");
      return;
    }
  if (current->header_size + call_overhead
      >= (unsigned int)(DECL_DECLARED_INLINE_P (current_function_decl)
                        ? MAX_INLINE_INSNS_SINGLE
                        : MAX_INLINE_INSNS_AUTO))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "  Refused: header size is too large for inline candidate\n");
      return;
    }

  /* FIXME: we can currently pass only SSA function parameters to the split
     arguments.  Once the parm_adjustment infrastructure is supported by
     cloning, we can pass more than that.  */
  if (num_args != bitmap_count_bits (current->ssa_names_to_pass))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "  Refused: need to pass non-param values\n");
      return;
    }

  /* When there are non-ssa vars used in the split region, see if they
     are used in the header region.  If so, reject the split.
     FIXME: we can use nested function support to access both.  */
  if (!bitmap_empty_p (non_ssa_vars)
      && !verify_non_ssa_vars (current, non_ssa_vars, return_bb))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "  Refused: split part has non-ssa uses\n");
      return;
    }

  /* If the split point is dominated by a forbidden block, reject
     the split.  */
  if (!bitmap_empty_p (forbidden_dominators)
      && dominated_by_forbidden (current->entry_bb))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "  Refused: split point dominated by forbidden block\n");
      return;
    }

  /* See if the retval used by the return bb is computed by the header or by
     the split part.  When it is computed by the split part, we need to
     produce a return statement in the split part and add code to the header
     to pass the value around.

     This is a bit tricky to test:
     1) When there is no return_bb or no return value, we always pass the
        value around.
     2) Invariants are always computed by the caller.
     3) For SSA we need to look if the defining statement is in the header
        or in the split part.
     4) For non-SSA we need to look where the var is computed.  */
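  /* For example (hypothetical source, illustrating case 3): in

       int f (int x)
       {
         int r;
         if (x)
           r = 1;
         else
           r = big (x);
         return r;
       }

     with the else branch split away, the SSA name feeding the return is
     defined by a PHI in return_bb itself, so the split part is considered
     to set the return value and must pass it back.  */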
  retval = find_retval (return_bb);
  if (!retval)
    current->split_part_set_retval = true;
  else if (is_gimple_min_invariant (retval))
    current->split_part_set_retval = false;
  /* A special case is a value returned by reference, which we record as if
     it were a non-SSA set to the result_decl.  */
  else if (TREE_CODE (retval) == SSA_NAME
           && SSA_NAME_VAR (retval)
           && TREE_CODE (SSA_NAME_VAR (retval)) == RESULT_DECL
           && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
    current->split_part_set_retval
      = bitmap_bit_p (non_ssa_vars, DECL_UID (SSA_NAME_VAR (retval)));
  else if (TREE_CODE (retval) == SSA_NAME)
    current->split_part_set_retval
      = (!SSA_NAME_IS_DEFAULT_DEF (retval)
         && (bitmap_bit_p (current->split_bbs,
                           gimple_bb (SSA_NAME_DEF_STMT (retval))->index)
             || gimple_bb (SSA_NAME_DEF_STMT (retval)) == return_bb));
  else if (TREE_CODE (retval) == PARM_DECL)
    current->split_part_set_retval = false;
  else if (TREE_CODE (retval) == VAR_DECL
           || TREE_CODE (retval) == RESULT_DECL)
    current->split_part_set_retval
      = bitmap_bit_p (non_ssa_vars, DECL_UID (retval));
  else
    current->split_part_set_retval = true;

  /* split_function fixes up at most one non-virtual PHI node in return_bb,
     for the return value.  If there are other PHIs, give up.  */
  if (return_bb != EXIT_BLOCK_PTR)
    {
      gimple_stmt_iterator psi;

      for (psi = gsi_start_phis (return_bb); !gsi_end_p (psi); gsi_next (&psi))
        if (!virtual_operand_p (gimple_phi_result (gsi_stmt (psi)))
            && !(retval
                 && current->split_part_set_retval
                 && TREE_CODE (retval) == SSA_NAME
                 && !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))
                 && SSA_NAME_DEF_STMT (retval) == gsi_stmt (psi)))
          {
            if (dump_file && (dump_flags & TDF_DETAILS))
              fprintf (dump_file,
                       "  Refused: return bb has extra PHIs\n");
            return;
          }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "  Accepted!\n");

  /* At the moment, choose the split point with the lowest frequency that
     leaves the smallest header.
     In the future we might reconsider this heuristic.  */
  if (!best_split_point.split_bbs
      || best_split_point.entry_bb->frequency > current->entry_bb->frequency
      || (best_split_point.entry_bb->frequency == current->entry_bb->frequency
          && best_split_point.split_size < current->split_size))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "  New best split point!\n");
      if (best_split_point.ssa_names_to_pass)
        {
          BITMAP_FREE (best_split_point.ssa_names_to_pass);
          BITMAP_FREE (best_split_point.split_bbs);
        }
      best_split_point = *current;
      best_split_point.ssa_names_to_pass = BITMAP_ALLOC (NULL);
      bitmap_copy (best_split_point.ssa_names_to_pass,
                   current->ssa_names_to_pass);
      best_split_point.split_bbs = BITMAP_ALLOC (NULL);
      bitmap_copy (best_split_point.split_bbs, current->split_bbs);
    }
}

/* Return the basic block containing the RETURN statement.  We allow basic
   blocks of the form:
     <retval> = tmp_var;
     return <retval>
   but return_bb cannot be more complex than this.
   If nothing is found, return EXIT_BLOCK_PTR.

   When there are multiple RETURN statements, choose the one with a return
   value, since that one is more likely shared by multiple code paths.

   The return BB is special, because for function splitting it is the only
   basic block that is duplicated in between the header and the split part
   of the function.

   TODO: We might support multiple return blocks.  */
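
/* For example (hypothetical): a candidate block ending in

     <retval> = foo ();
     return <retval>;

   is rejected, because the value stored into <retval> comes from a call
   rather than from an automatic variable or an invariant; the backward scan
   below stops early, the whole block is not consumed, and return_bb stays
   EXIT_BLOCK_PTR.  */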

static basic_block
find_return_bb (void)
{
  edge e;
  basic_block return_bb = EXIT_BLOCK_PTR;
  gimple_stmt_iterator bsi;
  bool found_return = false;
  tree retval = NULL_TREE;

  if (!single_pred_p (EXIT_BLOCK_PTR))
    return return_bb;

  e = single_pred_edge (EXIT_BLOCK_PTR);
  for (bsi = gsi_last_bb (e->src); !gsi_end_p (bsi); gsi_prev (&bsi))
    {
      gimple stmt = gsi_stmt (bsi);
      if (gimple_code (stmt) == GIMPLE_LABEL
          || is_gimple_debug (stmt)
          || gimple_clobber_p (stmt))
        ;
      else if (gimple_code (stmt) == GIMPLE_ASSIGN
               && found_return
               && gimple_assign_single_p (stmt)
               && (auto_var_in_fn_p (gimple_assign_rhs1 (stmt),
                                     current_function_decl)
                   || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
               && retval == gimple_assign_lhs (stmt))
        ;
      else if (gimple_code (stmt) == GIMPLE_RETURN)
        {
          found_return = true;
          retval = gimple_return_retval (stmt);
        }
      else
        break;
    }
  if (gsi_end_p (bsi) && found_return)
    return_bb = e->src;

  return return_bb;
}

/* Given return basic block RETURN_BB, see where the return value is really
   stored.  */
static tree
find_retval (basic_block return_bb)
{
  gimple_stmt_iterator bsi;
  for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi); gsi_next (&bsi))
    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_RETURN)
      return gimple_return_retval (gsi_stmt (bsi));
    else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
             && !gimple_clobber_p (gsi_stmt (bsi)))
      return gimple_assign_rhs1 (gsi_stmt (bsi));
  return NULL;
}

/* Callback for walk_stmt_load_store_addr_ops.  If T is a non-SSA automatic
   variable, mark it as used in the bitmap passed via DATA.
   Return true when access to T prevents splitting the function.  */

static bool
mark_nonssa_use (gimple stmt ATTRIBUTE_UNUSED, tree t, void *data)
{
  t = get_base_address (t);

  if (!t || is_gimple_reg (t))
    return false;

  /* At present we can't pass non-SSA arguments to the split function.
     FIXME: this can be relaxed by passing references to arguments.  */
  if (TREE_CODE (t) == PARM_DECL)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "Cannot split: use of non-ssa function parameter.\n");
      return true;
    }

  if ((TREE_CODE (t) == VAR_DECL
       && auto_var_in_fn_p (t, current_function_decl))
      || TREE_CODE (t) == RESULT_DECL
      || TREE_CODE (t) == LABEL_DECL)
    bitmap_set_bit ((bitmap)data, DECL_UID (t));

  /* For DECL_BY_REFERENCE, the return value is actually a pointer.  We want
     to pretend that the value pointed to is the actual result decl.  */
  if ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
      && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
      && SSA_NAME_VAR (TREE_OPERAND (t, 0))
      && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (t, 0))) == RESULT_DECL
      && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
    return
      bitmap_bit_p ((bitmap)data,
                    DECL_UID (DECL_RESULT (current_function_decl)));

  return false;
}

/* Compute the local properties of basic block BB that we collect when
   looking for split points.  We look for SSA defs and store them in
   SET_SSA_NAMES, for SSA uses and store them in USED_SSA_NAMES, and for
   any non-SSA automatic vars, which we store in NON_SSA_VARS.

   When BB has an edge to RETURN_BB, collect uses in RETURN_BB too.

   Return false when BB contains something that prevents it from being put
   into the split function.  */

static bool
visit_bb (basic_block bb, basic_block return_bb,
          bitmap set_ssa_names, bitmap used_ssa_names,
          bitmap non_ssa_vars)
{
  gimple_stmt_iterator bsi;
  edge e;
  edge_iterator ei;
  bool can_split = true;

  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
    {
      gimple stmt = gsi_stmt (bsi);
      tree op;
      ssa_op_iter iter;
      tree decl;

      if (is_gimple_debug (stmt))
        continue;

      if (gimple_clobber_p (stmt))
        continue;

      /* FIXME: We can split regions containing EH.  We cannot however
         split RESX, EH_DISPATCH and EH_POINTER referring to the same
         region into different partitions.  This would require tracking
         of EH regions and checking in consider_split_point if they
         are not used elsewhere.  */
      if (gimple_code (stmt) == GIMPLE_RESX)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "Cannot split: resx.\n");
          can_split = false;
        }
      if (gimple_code (stmt) == GIMPLE_EH_DISPATCH)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "Cannot split: eh dispatch.\n");
          can_split = false;
        }

      /* Check builtins that prevent splitting.  */
      if (gimple_code (stmt) == GIMPLE_CALL
          && (decl = gimple_call_fndecl (stmt)) != NULL_TREE
          && DECL_BUILT_IN (decl)
          && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (decl))
          {
          /* FIXME: once we allow passing non-parm values to the split part,
             we need to be sure to handle builtin_stack_save and
             builtin_stack_restore correctly.  At the moment we are safe;
             there is no way to store the builtin_stack_save result in a
             non-SSA variable since all calls to those are compiler
             generated.  */
          case BUILT_IN_APPLY:
          case BUILT_IN_APPLY_ARGS:
          case BUILT_IN_VA_START:
            if (dump_file && (dump_flags & TDF_DETAILS))
              fprintf (dump_file,
                       "Cannot split: builtin_apply and va_start.\n");
            can_split = false;
            break;
          case BUILT_IN_EH_POINTER:
            if (dump_file && (dump_flags & TDF_DETAILS))
              fprintf (dump_file, "Cannot split: builtin_eh_pointer.\n");
            can_split = false;
            break;
          default:
            break;
          }

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
        bitmap_set_bit (set_ssa_names, SSA_NAME_VERSION (op));
      FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
        bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
      can_split &= !walk_stmt_load_store_addr_ops (stmt, non_ssa_vars,
                                                   mark_nonssa_use,
                                                   mark_nonssa_use,
                                                   mark_nonssa_use);
    }
  for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
    {
      gimple stmt = gsi_stmt (bsi);
      unsigned int i;

      if (virtual_operand_p (gimple_phi_result (stmt)))
        continue;
      bitmap_set_bit (set_ssa_names,
                      SSA_NAME_VERSION (gimple_phi_result (stmt)));
      for (i = 0; i < gimple_phi_num_args (stmt); i++)
        {
          tree op = gimple_phi_arg_def (stmt, i);
          if (TREE_CODE (op) == SSA_NAME)
            bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
        }
      can_split &= !walk_stmt_load_store_addr_ops (stmt, non_ssa_vars,
                                                   mark_nonssa_use,
                                                   mark_nonssa_use,
                                                   mark_nonssa_use);
    }
  /* Record also uses coming from PHI operands in the return BB.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->dest == return_bb)
      {
        for (bsi = gsi_start_phis (return_bb); !gsi_end_p (bsi); gsi_next (&bsi))
          {
            gimple stmt = gsi_stmt (bsi);
            tree op = gimple_phi_arg_def (stmt, e->dest_idx);

            if (virtual_operand_p (gimple_phi_result (stmt)))
              continue;
            if (TREE_CODE (op) == SSA_NAME)
              bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
            else
              can_split &= !mark_nonssa_use (stmt, op, non_ssa_vars);
          }
      }
  return can_split;
}

/* Stack entry for recursive DFS walk in find_split_point.  */

typedef struct
{
  /* Basic block we are examining.  */
  basic_block bb;

  /* SSA names set and used by the BB and all BBs reachable
     from it via DFS walk.  */
  bitmap set_ssa_names, used_ssa_names;
  bitmap non_ssa_vars;

  /* All BBs visited from this BB via DFS walk.  */
  bitmap bbs_visited;

  /* Last examined edge in DFS walk.  Since we walk an unoriented graph,
     the value may be up to the sum of the incoming and outgoing edges
     of BB.  */
  unsigned int edge_num;

  /* Stack entry index of the earliest BB reachable from the current BB
     or any BB visited later in the DFS walk.  */
  int earliest;

  /* Overall time and size of all BBs reached from this BB in DFS walk.  */
  int overall_time, overall_size;

  /* When false we cannot split on this BB.  */
  bool can_split;
} stack_entry;
DEF_VEC_O(stack_entry);
DEF_VEC_ALLOC_O(stack_entry,heap);


/* Find all articulations and call consider_split on them.
   OVERALL_TIME and OVERALL_SIZE are the time and size of the function.

   We perform the basic algorithm for finding articulations in a graph
   created from the CFG by considering it to be an unoriented graph.

   The articulations are discovered via DFS walk.  We collect the earliest
   basic block on the stack that is reachable via a backward edge.  An
   articulation is any basic block such that there is no backward edge
   bypassing it.  To reduce stack usage we maintain a heap allocated stack
   in the STACK vector.  The AUX pointer of a BB is set to the index at
   which it appears in the stack, or to -1 once it is visited and popped
   off the stack.

   The algorithm finds an articulation after visiting the whole component
   reachable from it.  This makes it convenient to collect information
   about the component used by consider_split.  */
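
/* A minimal sketch (hypothetical CFG, for illustration only):

     ENTRY -> A,  A -> B,  A -> C,  B -> D,  C -> D,  D -> EXIT

   Viewed as an unoriented graph, A and D are articulations: every path
   from entry to exit passes through them, so both are candidate split
   points.  B and C are not, since each is bypassed by the path through
   the other.  */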

static void
find_split_points (int overall_time, int overall_size)
{
  stack_entry first;
  VEC(stack_entry, heap) *stack = NULL;
  basic_block bb;
  basic_block return_bb = find_return_bb ();
  struct split_point current;

  current.header_time = overall_time;
  current.header_size = overall_size;
  current.split_time = 0;
  current.split_size = 0;
  current.ssa_names_to_pass = BITMAP_ALLOC (NULL);

  first.bb = ENTRY_BLOCK_PTR;
  first.edge_num = 0;
  first.overall_time = 0;
  first.overall_size = 0;
  first.earliest = INT_MAX;
  first.set_ssa_names = 0;
  first.used_ssa_names = 0;
  first.bbs_visited = 0;
  VEC_safe_push (stack_entry, heap, stack, &first);
  ENTRY_BLOCK_PTR->aux = (void *)(intptr_t)-1;

  while (!VEC_empty (stack_entry, stack))
    {
      stack_entry *entry = &VEC_last (stack_entry, stack);

      /* We are walking an acyclic graph, so edge_num counts
         succ and pred edges together.  However when considering
         an articulation, we want to have processed everything reachable
         from the articulation but nothing that reaches into it.  */
      if (entry->edge_num == EDGE_COUNT (entry->bb->succs)
          && entry->bb != ENTRY_BLOCK_PTR)
        {
          int pos = VEC_length (stack_entry, stack);
          entry->can_split &= visit_bb (entry->bb, return_bb,
                                        entry->set_ssa_names,
                                        entry->used_ssa_names,
                                        entry->non_ssa_vars);
          if (pos <= entry->earliest && !entry->can_split
              && dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file,
                     "found articulation at bb %i but can not split\n",
                     entry->bb->index);
          if (pos <= entry->earliest && entry->can_split)
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "found articulation at bb %i\n",
                         entry->bb->index);
              current.entry_bb = entry->bb;
              current.ssa_names_to_pass = BITMAP_ALLOC (NULL);
              bitmap_and_compl (current.ssa_names_to_pass,
                                entry->used_ssa_names, entry->set_ssa_names);
              current.header_time = overall_time - entry->overall_time;
              current.header_size = overall_size - entry->overall_size;
              current.split_time = entry->overall_time;
              current.split_size = entry->overall_size;
              current.split_bbs = entry->bbs_visited;
              consider_split (&current, entry->non_ssa_vars, return_bb);
              BITMAP_FREE (current.ssa_names_to_pass);
            }
        }
      /* Do the actual DFS walk.  */
      if (entry->edge_num
          < (EDGE_COUNT (entry->bb->succs)
             + EDGE_COUNT (entry->bb->preds)))
        {
          edge e;
          basic_block dest;
          if (entry->edge_num < EDGE_COUNT (entry->bb->succs))
            {
              e = EDGE_SUCC (entry->bb, entry->edge_num);
              dest = e->dest;
            }
          else
            {
              e = EDGE_PRED (entry->bb, entry->edge_num
                             - EDGE_COUNT (entry->bb->succs));
              dest = e->src;
            }

          entry->edge_num++;

          /* New BB to visit, push it to the stack.  */
          if (dest != return_bb && dest != EXIT_BLOCK_PTR
              && !dest->aux)
            {
              stack_entry new_entry;

              new_entry.bb = dest;
              new_entry.edge_num = 0;
              new_entry.overall_time
                = VEC_index (bb_info, bb_info_vec, dest->index).time;
              new_entry.overall_size
                = VEC_index (bb_info, bb_info_vec, dest->index).size;
              new_entry.earliest = INT_MAX;
              new_entry.set_ssa_names = BITMAP_ALLOC (NULL);
              new_entry.used_ssa_names = BITMAP_ALLOC (NULL);
              new_entry.bbs_visited = BITMAP_ALLOC (NULL);
              new_entry.non_ssa_vars = BITMAP_ALLOC (NULL);
              new_entry.can_split = true;
              bitmap_set_bit (new_entry.bbs_visited, dest->index);
              VEC_safe_push (stack_entry, heap, stack, &new_entry);
              dest->aux = (void *)(intptr_t)VEC_length (stack_entry, stack);
            }
          /* Back edge found, record the earliest point.  */
          else if ((intptr_t)dest->aux > 0
                   && (intptr_t)dest->aux < entry->earliest)
            entry->earliest = (intptr_t)dest->aux;
        }
      /* We are done with examining the edges.  Pop the value off the stack
         and merge the stuff we accumulated during the walk.  */
      else if (entry->bb != ENTRY_BLOCK_PTR)
        {
          stack_entry *prev = &VEC_index (stack_entry, stack,
                                          VEC_length (stack_entry, stack) - 2);

          entry->bb->aux = (void *)(intptr_t)-1;
          prev->can_split &= entry->can_split;
          if (prev->set_ssa_names)
            {
              bitmap_ior_into (prev->set_ssa_names, entry->set_ssa_names);
              bitmap_ior_into (prev->used_ssa_names, entry->used_ssa_names);
              bitmap_ior_into (prev->bbs_visited, entry->bbs_visited);
              bitmap_ior_into (prev->non_ssa_vars, entry->non_ssa_vars);
            }
          if (prev->earliest > entry->earliest)
            prev->earliest = entry->earliest;
          prev->overall_time += entry->overall_time;
          prev->overall_size += entry->overall_size;
          BITMAP_FREE (entry->set_ssa_names);
          BITMAP_FREE (entry->used_ssa_names);
          BITMAP_FREE (entry->bbs_visited);
          BITMAP_FREE (entry->non_ssa_vars);
          VEC_pop (stack_entry, stack);
        }
      else
        VEC_pop (stack_entry, stack);
    }
  ENTRY_BLOCK_PTR->aux = NULL;
  FOR_EACH_BB (bb)
    bb->aux = NULL;
  VEC_free (stack_entry, heap, stack);
  BITMAP_FREE (current.ssa_names_to_pass);
}

/* Split function at SPLIT_POINT.  */
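
/* For illustration (hypothetical GIMPLE, not produced verbatim): after
   splitting, the header of func ends in something like

     retval_5 = func.part.0 (a_2, b_3);
     goto <return_bb>;

   where a_2 and b_3 are the SSA names recorded in
   SPLIT_POINT->ssa_names_to_pass and func.part.0 is the clone created
   below via cgraph_function_versioning.  */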

static void
split_function (struct split_point *split_point)
{
  VEC (tree, heap) *args_to_pass = NULL;
  bitmap args_to_skip;
  tree parm;
  int num = 0;
  struct cgraph_node *node, *cur_node = cgraph_get_node (current_function_decl);
  basic_block return_bb = find_return_bb ();
  basic_block call_bb;
  gimple_stmt_iterator gsi;
  gimple call;
  edge e;
  edge_iterator ei;
  tree retval = NULL, real_retval = NULL;
  bool split_part_return_p = false;
  gimple last_stmt = NULL;
  unsigned int i;
  tree arg, ddef;

  if (dump_file)
    {
      fprintf (dump_file, "\n\nSplitting function at:\n");
      dump_split_point (dump_file, split_point);
    }

  if (cur_node->local.can_change_signature)
    args_to_skip = BITMAP_ALLOC (NULL);
  else
    args_to_skip = NULL;

  /* Collect the parameters of the new function and the args_to_skip
     bitmap.  */
  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm; parm = DECL_CHAIN (parm), num++)
    if (args_to_skip
        && (!is_gimple_reg (parm)
            || (ddef = ssa_default_def (cfun, parm)) == NULL_TREE
            || !bitmap_bit_p (split_point->ssa_names_to_pass,
                              SSA_NAME_VERSION (ddef))))
      bitmap_set_bit (args_to_skip, num);
    else
      {
        /* This parm might not have been used up to now, but is going to be
           used, hence register it.  */
        if (is_gimple_reg (parm))
          arg = get_or_create_ssa_default_def (cfun, parm);
        else
          arg = parm;

        if (!useless_type_conversion_p (DECL_ARG_TYPE (parm), TREE_TYPE (arg)))
          arg = fold_convert (DECL_ARG_TYPE (parm), arg);
        VEC_safe_push (tree, heap, args_to_pass, arg);
      }

  /* See if the split function will return.  */
  FOR_EACH_EDGE (e, ei, return_bb->preds)
    if (bitmap_bit_p (split_point->split_bbs, e->src->index))
      break;
  if (e)
    split_part_return_p = true;

  /* Add the return block to what will become the split function.
     We do not return; no return block is needed.  */
  if (!split_part_return_p)
    ;
  /* We have no return block, so nothing is needed.  */
  else if (return_bb == EXIT_BLOCK_PTR)
    ;
  /* When we do not want to return a value, we need to construct a new
     return block with an empty return statement.
     FIXME: Once we are able to change the return type, we should change the
     function to return void instead of just outputting a function with an
     undefined return value.  For structures this affects quality of
     codegen.  */
  else if (!split_point->split_part_set_retval
           && find_retval (return_bb))
    {
      bool redirected = true;
      basic_block new_return_bb = create_basic_block (NULL, 0, return_bb);
      gimple_stmt_iterator gsi = gsi_start_bb (new_return_bb);
      gsi_insert_after (&gsi, gimple_build_return (NULL), GSI_NEW_STMT);
      while (redirected)
        {
          redirected = false;
          FOR_EACH_EDGE (e, ei, return_bb->preds)
            if (bitmap_bit_p (split_point->split_bbs, e->src->index))
              {
                new_return_bb->count += e->count;
                new_return_bb->frequency += EDGE_FREQUENCY (e);
                redirect_edge_and_branch (e, new_return_bb);
                redirected = true;
                break;
              }
        }
      e = make_edge (new_return_bb, EXIT_BLOCK_PTR, 0);
      e->probability = REG_BR_PROB_BASE;
      e->count = new_return_bb->count;
      bitmap_set_bit (split_point->split_bbs, new_return_bb->index);
    }
  /* When we pass the value around, use the existing return block.  */
  else
    bitmap_set_bit (split_point->split_bbs, return_bb->index);

  /* If RETURN_BB has virtual operand PHIs, they must be removed and the
     virtual operand marked for renaming as we change the CFG in a way that
     tree-inline is not able to compensate for.

     Note this can happen whether or not we have a return value.  If we have
     a return value, then RETURN_BB may have PHIs for real operands too.  */
  if (return_bb != EXIT_BLOCK_PTR)
    {
      bool phi_p = false;
      for (gsi = gsi_start_phis (return_bb); !gsi_end_p (gsi);)
        {
          gimple stmt = gsi_stmt (gsi);
          if (!virtual_operand_p (gimple_phi_result (stmt)))
            {
              gsi_next (&gsi);
              continue;
            }
          mark_virtual_phi_result_for_renaming (stmt);
          remove_phi_node (&gsi, true);
          phi_p = true;
        }
      /* In reality we have to rename the reaching definition of the
         virtual operand at return_bb as we will eventually release it
         when we remove the code region we outlined.
         So we have to rename all immediate virtual uses of that region
         if we didn't see a PHI definition yet.  */
      /* ??? In real reality we want to set the reaching vdef of the
         entry of the SESE region as the vuse of the call and the reaching
         vdef of the exit of the SESE region as the vdef of the call.  */
      if (!phi_p)
        for (gsi = gsi_start_bb (return_bb); !gsi_end_p (gsi); gsi_next (&gsi))
          {
            gimple stmt = gsi_stmt (gsi);
            if (gimple_vuse (stmt))
              {
                gimple_set_vuse (stmt, NULL_TREE);
                update_stmt (stmt);
              }
            if (gimple_vdef (stmt))
              break;
          }
    }

  /* Now create the actual clone.  */
  rebuild_cgraph_edges ();
  node = cgraph_function_versioning (cur_node, NULL, NULL, args_to_skip,
                                     !split_part_return_p,
                                     split_point->split_bbs,
                                     split_point->entry_bb, "part");
  /* For usual cloning it is enough to clear the builtin flag only when the
     signature changes.  For partial inlining, however, we cannot expect a
     part of the builtin implementation to have the same semantics as the
     whole.  */
  if (DECL_BUILT_IN (node->symbol.decl))
    {
      DECL_BUILT_IN_CLASS (node->symbol.decl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (node->symbol.decl) = (enum built_in_function) 0;
    }
  cgraph_node_remove_callees (cur_node);
  if (!split_part_return_p)
    TREE_THIS_VOLATILE (node->symbol.decl) = 1;
  if (dump_file)
    dump_function_to_file (node->symbol.decl, dump_file, dump_flags);

  /* Create the basic block we place the call into.  It is the entry basic
     block split after the last label.  */
  call_bb = split_point->entry_bb;
  for (gsi = gsi_start_bb (call_bb); !gsi_end_p (gsi);)
    if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
      {
        last_stmt = gsi_stmt (gsi);
        gsi_next (&gsi);
      }
    else
      break;
  e = split_block (split_point->entry_bb, last_stmt);
  remove_edge (e);

  /* Produce the call statement.  */
  gsi = gsi_last_bb (call_bb);
  FOR_EACH_VEC_ELT (tree, args_to_pass, i, arg)
    if (!is_gimple_val (arg))
      {
        arg = force_gimple_operand_gsi (&gsi, arg, true, NULL_TREE,
                                        false, GSI_CONTINUE_LINKING);
        VEC_replace (tree, args_to_pass, i, arg);
      }
  call = gimple_build_call_vec (node->symbol.decl, args_to_pass);
  gimple_set_block (call, DECL_INITIAL (current_function_decl));

  /* We avoid the address being taken on any variable used by the split part,
     so return slot optimization is always possible.  Moreover this is
     required to make DECL_BY_REFERENCE work.  */
  if (aggregate_value_p (DECL_RESULT (current_function_decl),
                         TREE_TYPE (current_function_decl)))
    gimple_call_set_return_slot_opt (call, true);

  /* Update the return value.  This is a bit tricky.  When we do not return,
     do nothing.  When we return, we might need to update return_bb
     or produce a new return statement.  */
  if (!split_part_return_p)
    gsi_insert_after (&gsi, call, GSI_NEW_STMT);
  else
    {
      e = make_edge (call_bb, return_bb,
                     return_bb == EXIT_BLOCK_PTR ? 0 : EDGE_FALLTHRU);
      e->count = call_bb->count;
      e->probability = REG_BR_PROB_BASE;

      /* If there is a return basic block, see what value we need to store
         the return value into and put the call just before it.  */
      if (return_bb != EXIT_BLOCK_PTR)
        {
          real_retval = retval = find_retval (return_bb);

          if (real_retval && split_point->split_part_set_retval)
            {
              gimple_stmt_iterator psi;

              /* See if we need a new SSA_NAME for the result.
                 When DECL_BY_REFERENCE is true, retval is actually a pointer
                 to the return value and it is constant in the whole
                 function.  */
              if (TREE_CODE (retval) == SSA_NAME
                  && !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
                {
                  retval = copy_ssa_name (retval, call);

                  /* See if there is a PHI defining the return value.  */
                  for (psi = gsi_start_phis (return_bb);
                       !gsi_end_p (psi); gsi_next (&psi))
                    if (!virtual_operand_p (gimple_phi_result (gsi_stmt (psi))))
                      break;

                  /* When there is a PHI, just update its value.  */
                  if (TREE_CODE (retval) == SSA_NAME
                      && !gsi_end_p (psi))
                    add_phi_arg (gsi_stmt (psi), retval, e, UNKNOWN_LOCATION);
                  /* Otherwise update the return BB itself.
                     find_return_bb allows at most one assignment to the
                     return value, so update the first statement.  */
                  else
                    {
                      gimple_stmt_iterator bsi;
                      for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi);
                           gsi_next (&bsi))
                        if (gimple_code (gsi_stmt (bsi)) == GIMPLE_RETURN)
                          {
                            gimple_return_set_retval (gsi_stmt (bsi), retval);
                            break;
                          }
                        else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
                                 && !gimple_clobber_p (gsi_stmt (bsi)))
                          {
                            gimple_assign_set_rhs1 (gsi_stmt (bsi), retval);
                            break;
                          }
                      update_stmt (gsi_stmt (bsi));
                    }
                }
              if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
                {
                  gimple_call_set_lhs (call, build_simple_mem_ref (retval));
                  gsi_insert_after (&gsi, call, GSI_NEW_STMT);
                }
              else
                {
                  tree restype;
                  restype = TREE_TYPE (DECL_RESULT (current_function_decl));
                  gsi_insert_after (&gsi, call, GSI_NEW_STMT);
                  if (!useless_type_conversion_p (TREE_TYPE (retval), restype))
                    {
                      gimple cpy;
                      tree tem = create_tmp_reg (restype, NULL);
                      tem = make_ssa_name (tem, call);
                      cpy = gimple_build_assign_with_ops (NOP_EXPR, retval,
                                                          tem, NULL_TREE);
                      gsi_insert_after (&gsi, cpy, GSI_NEW_STMT);
                      retval = tem;
                    }
                  gimple_call_set_lhs (call, retval);
                  update_stmt (call);
                }
            }
          else
            gsi_insert_after (&gsi, call, GSI_NEW_STMT);
        }
      /* We don't use the return block (there is either no return in the
         function or multiple of them).  So create a new basic block with
         a return statement.  */
      else
        {
          gimple ret;
          if (split_point->split_part_set_retval
              && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
            {
              retval = DECL_RESULT (current_function_decl);

              /* We use a temporary register to hold the value when
                 aggregate_value_p is false.  Similarly for DECL_BY_REFERENCE
                 we must avoid an extra copy.  */
              if (!aggregate_value_p (retval, TREE_TYPE (current_function_decl))
                  && !DECL_BY_REFERENCE (retval))
                retval = create_tmp_reg (TREE_TYPE (retval), NULL);
              if (is_gimple_reg (retval))
                {
                  /* When returning by reference, there is only one SSA name
                     assigned to RESULT_DECL (that is the pointer to the
                     return value).  Look it up or create a new one if it is
                     missing.  */
                  if (DECL_BY_REFERENCE (retval))
                    retval = get_or_create_ssa_default_def (cfun, retval);
                  /* Otherwise produce a new SSA name for the return
                     value.  */
                  else
                    retval = make_ssa_name (retval, call);
                }
              if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
                gimple_call_set_lhs (call, build_simple_mem_ref (retval));
              else
                gimple_call_set_lhs (call, retval);
            }
          gsi_insert_after (&gsi, call, GSI_NEW_STMT);
          ret = gimple_build_return (retval);
          gsi_insert_after (&gsi, ret, GSI_NEW_STMT);
        }
    }
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  compute_inline_parameters (node, true);
}

/* Execute function splitting pass.  */

static unsigned int
execute_split_functions (void)
{
  gimple_stmt_iterator bsi;
  basic_block bb;
  int overall_time = 0, overall_size = 0;
  int todo = 0;
  struct cgraph_node *node = cgraph_get_node (current_function_decl);

  if (flags_from_decl_or_type (current_function_decl)
      & (ECF_NORETURN|ECF_MALLOC))
    {
      if (dump_file)
        fprintf (dump_file, "Not splitting: noreturn/malloc function.\n");
      return 0;
    }
  if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
    {
      if (dump_file)
        fprintf (dump_file, "Not splitting: main function.\n");
      return 0;
    }
  /* This can be relaxed; the function might become inlinable after splitting
     away the uninlinable part.  */
  if (inline_edge_summary_vec && !inline_summary (node)->inlinable)
    {
      if (dump_file)
        fprintf (dump_file, "Not splitting: not inlinable.\n");
      return 0;
    }
  if (DECL_DISREGARD_INLINE_LIMITS (node->symbol.decl))
    {
      if (dump_file)
        fprintf (dump_file, "Not splitting: disregarding inline limits.\n");
      return 0;
    }
  /* This can be relaxed; most of the versioning tests actually prevent
     a duplication.  */
  if (!tree_versionable_function_p (current_function_decl))
    {
      if (dump_file)
        fprintf (dump_file, "Not splitting: not versionable.\n");
      return 0;
    }
  /* FIXME: we could support this.  */
  if (DECL_STRUCT_FUNCTION (current_function_decl)->static_chain_decl)
    {
      if (dump_file)
        fprintf (dump_file, "Not splitting: nested function.\n");
      return 0;
    }

  /* See if it makes sense to try to split.
     It makes sense to split if we inline, that is if we have direct calls to
     handle or direct calls are possibly going to appear as a result of
     indirect inlining or LTO.  Also handle -fprofile-generate as LTO to
     allow non-LTO training for an LTO -fprofile-use build.

     Note that we are not completely conservative about disqualifying
     functions called once.  It is possible that the caller is called more
     than once and then inlining would still benefit.  */
  if ((!node->callers || !node->callers->next_caller)
      && !node->symbol.address_taken
      && (!flag_lto || !node->symbol.externally_visible))
    {
      if (dump_file)
        fprintf (dump_file, "Not splitting: not called directly "
                 "or called once.\n");
      return 0;
    }

  /* FIXME: We can actually split if splitting reduces call overhead.  */
  if (!flag_inline_small_functions
      && !DECL_DECLARED_INLINE_P (current_function_decl))
    {
      if (dump_file)
        fprintf (dump_file, "Not splitting: not autoinlining and function"
                 " is not inline.\n");
      return 0;
    }

  /* Initialize the bitmap used to track forbidden calls.  */
  forbidden_dominators = BITMAP_ALLOC (NULL);
  calculate_dominance_info (CDI_DOMINATORS);

  /* Compute local info about basic blocks and determine function
     size/time.  */
  VEC_safe_grow_cleared (bb_info, heap, bb_info_vec, last_basic_block + 1);
  memset (&best_split_point, 0, sizeof (best_split_point));
  FOR_EACH_BB (bb)
    {
      int time = 0;
      int size = 0;
      int freq = compute_call_stmt_bb_frequency (current_function_decl, bb);

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Basic block %i\n", bb->index);

      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        {
          int this_time, this_size;
          gimple stmt = gsi_stmt (bsi);

          this_size = estimate_num_insns (stmt, &eni_size_weights);
          this_time = estimate_num_insns (stmt, &eni_time_weights) * freq;
          size += this_size;
          time += this_time;
          check_forbidden_calls (stmt);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "  freq:%6i size:%3i time:%3i ",
                       freq, this_size, this_time);
              print_gimple_stmt (dump_file, stmt, 0, 0);
            }
        }
      overall_time += time;
      overall_size += size;
      VEC_index (bb_info, bb_info_vec, bb->index).time = time;
      VEC_index (bb_info, bb_info_vec, bb->index).size = size;
    }
  find_split_points (overall_time, overall_size);
  if (best_split_point.split_bbs)
    {
      split_function (&best_split_point);
      BITMAP_FREE (best_split_point.ssa_names_to_pass);
      BITMAP_FREE (best_split_point.split_bbs);
      todo = TODO_update_ssa | TODO_cleanup_cfg;
    }
  BITMAP_FREE (forbidden_dominators);
  VEC_free (bb_info, heap, bb_info_vec);
  bb_info_vec = NULL;
  return todo;
}

/* Gate function splitting pass.  When doing profile feedback, we want
   to execute the pass after profiling is read.  So disable it during
   early optimization.  */

static bool
gate_split_functions (void)
{
  return (flag_partial_inlining
          && !profile_arc_flag && !flag_branch_probabilities);
}

struct gimple_opt_pass pass_split_functions =
{
 {
  GIMPLE_PASS,
  "fnsplit",                            /* name */
  gate_split_functions,                 /* gate */
  execute_split_functions,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IPA_FNSPLIT,                       /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_all                       /* todo_flags_finish */
 }
};

/* Gate feedback-driven function splitting pass.
   We don't need to split when profiling at all; we are producing
   lousy code anyway.  */

static bool
gate_feedback_split_functions (void)
{
  return (flag_partial_inlining
          && flag_branch_probabilities);
}

/* Execute feedback-driven function splitting pass.  */

static unsigned int
execute_feedback_split_functions (void)
{
  unsigned int retval = execute_split_functions ();
  if (retval)
    retval |= TODO_rebuild_cgraph_edges;
  return retval;
}

struct gimple_opt_pass pass_feedback_split_functions =
{
 {
  GIMPLE_PASS,
  "feedback_fnsplit",                   /* name */
  gate_feedback_split_functions,        /* gate */
  execute_feedback_split_functions,     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IPA_FNSPLIT,                       /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_all                       /* todo_flags_finish */
 }
};