/* Tail call optimization on trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "gimple-pretty-print.h"
#include "except.h"
#include "tree-pass.h"
#include "flags.h"
#include "langhooks.h"
#include "dbgcnt.h"
#include "target.h"
#include "common/common-target.h"

/* This file implements tail recursion elimination.  It is also used to
   analyze tail calls in general, passing the results to the rtl level
   where they are used for sibcall optimization.

   In addition to the standard tail recursion elimination, we handle the most
   trivial cases of making the call tail recursive by creating accumulators.
   For example the following function

   int sum (int n)
   {
     if (n > 0)
       return n + sum (n - 1);
     else
       return 0;
   }

   is transformed into

   int sum (int n)
   {
     int acc = 0;

     while (n > 0)
       acc += n--;

     return acc;
   }

   To do this, we maintain two accumulators (a_acc and m_acc), so that when
   we reach a return x statement, we return a_acc + x * m_acc instead.  The
   accumulators are initialized to 0 and 1, respectively, so the semantics
   of the function are obviously preserved.  If we are guaranteed that the
   value of an accumulator never changes, we omit that accumulator.

   There are three ways the function may exit.  The first one is
   handled in adjust_return_value, the other two in adjust_accumulator_values
   (the second case is actually a special case of the third one and we
   present it separately just for clarity):

   1) Just return x, where x is not in any of the remaining special shapes.
      We rewrite this to a gimple equivalent of return m_acc * x + a_acc.

   2) return f (...), where f is the current function, is rewritten in a
      classical tail-recursion elimination way, into assignment of arguments
      and jump to the start of the function.  Values of the accumulators
      are unchanged.

   3) return a + m * f (...), where a and m do not depend on the call to f.
      To preserve the semantics described above, we want this to be rewritten
      in such a way that we finally return

      a_acc + (a + m * f (...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f (...).

      I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
      eliminate the tail call to f.  Special cases when the value is just
      added or just multiplied are obtained by setting a = 0 or m = 1.

   TODO -- it is possible to do similar tricks for other operations.  */

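/* As a hand-worked illustration (an editorial sketch, not part of the pass
   itself), sum (3) evaluated through the rewritten loop above updates the
   additive accumulator as follows:

     n = 3:  acc = 0 + 3 = 3
     n = 2:  acc = 3 + 2 = 5
     n = 1:  acc = 5 + 1 = 6
     n = 0:  loop exits, return acc = 6

   which matches the recursive evaluation 3 + (2 + (1 + 0)) = 6.  */
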
/* A structure that describes the tailcall.  */

struct tailcall
{
  /* The iterator pointing to the call statement.  */
  gimple_stmt_iterator call_gsi;

  /* True if it is a call to the current function.  */
  bool tail_recursion;

  /* The return value of the caller is mult * f + add, where f is the return
     value of the call.  */
  tree mult, add;

  /* Next tailcall in the chain.  */
  struct tailcall *next;
};

/* The variables holding the values of the multiplicative and additive
   accumulators.  */
static tree m_acc, a_acc;

static bool suitable_for_tail_opt_p (void);
static bool optimize_tail_call (struct tailcall *, bool);
static void eliminate_tail_call (struct tailcall *);
static void find_tail_calls (basic_block, struct tailcall **);

/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).  */

static bool
suitable_for_tail_opt_p (void)
{
  if (cfun->stdarg)
    return false;

  return true;
}

/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).
   This test must pass in addition to suitable_for_tail_opt_p in order to make
   tail call discovery happen.  */

static bool
suitable_for_tail_call_opt_p (void)
{
  tree param;

  /* alloca (until we have stack slot life analysis) inhibits
     sibling call optimizations, but not tail recursion.  */
  if (cfun->calls_alloca)
    return false;

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at exit of the function.  Which means
     that we cannot do any sibcall transformations.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      && current_function_has_exception_handlers ())
    return false;

  /* Any function that calls setjmp might have longjmp called from
     any called function.  ??? We really should represent this
     properly in the CFG so that this needn't be special cased.  */
  if (cfun->calls_setjmp)
    return false;

  /* ??? It is OK if the address of an argument is taken in some cases,
     but not in all cases.  See PR15387 and PR19616.  Revisit for 4.1.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    if (TREE_ADDRESSABLE (param))
      return false;

  return true;
}

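/* For instance (an editorial sketch; "use" is a hypothetical external
   function), the loop above makes this predicate return false for

     int f (int n)
     {
       int *p = &n;   -- N becomes TREE_ADDRESSABLE
       use (p);
       return f (n - 1);
     }

   because a callee could observe or modify N through the escaped pointer
   across the call.  This only disables marking calls as tail calls for the
   rtl sibcall optimization; tail recursion elimination itself is gated
   separately, by suitable_for_tail_opt_p.  */
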
/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by GSI (in the sense that we already know EXPR's
   value at GSI).  We use the fact that we are only called from a chain of
   basic blocks that each have only a single successor.  Returns the
   expression containing the value of EXPR at GSI.  */

static tree
independent_of_stmt_p (tree expr, gimple at, gimple_stmt_iterator gsi)
{
  basic_block bb, call_bb, at_bb;
  edge e;
  edge_iterator ei;

  if (is_gimple_min_invariant (expr))
    return expr;

  if (TREE_CODE (expr) != SSA_NAME)
    return NULL_TREE;

  /* Mark the blocks in the chain leading to the end.  */
  at_bb = gimple_bb (at);
  call_bb = gimple_bb (gsi_stmt (gsi));
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = &bb->aux;
  bb->aux = &bb->aux;

  while (1)
    {
      at = SSA_NAME_DEF_STMT (expr);
      bb = gimple_bb (at);

      /* The default definition or defined before the chain.  */
      if (!bb || !bb->aux)
        break;

      if (bb == call_bb)
        {
          for (; !gsi_end_p (gsi); gsi_next (&gsi))
            if (gsi_stmt (gsi) == at)
              break;

          if (!gsi_end_p (gsi))
            expr = NULL_TREE;
          break;
        }

      if (gimple_code (at) != GIMPLE_PHI)
        {
          expr = NULL_TREE;
          break;
        }

      FOR_EACH_EDGE (e, ei, bb->preds)
        if (e->src->aux)
          break;
      gcc_assert (e);

      expr = PHI_ARG_DEF_FROM_EDGE (at, e);
      if (TREE_CODE (expr) != SSA_NAME)
        {
          /* The value is a constant.  */
          break;
        }
    }

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = NULL;
  bb->aux = NULL;

  return expr;
}

/* Simulates the effect of an assignment STMT on the return value of the tail
   recursive CALL passed in ASS_VAR.  M and A are the multiplicative and the
   additive factor for the real return value.  */

static bool
process_assignment (gimple stmt, gimple_stmt_iterator call, tree *m,
                    tree *a, tree *ass_var)
{
  tree op0, op1 = NULL_TREE, non_ass_var = NULL_TREE;
  tree dest = gimple_assign_lhs (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
  tree src_var = gimple_assign_rhs1 (stmt);

  /* See if this is a simple copy operation of an SSA name to the function
     result.  In that case we may have a simple tail call.  Ignore type
     conversions that can never produce extra code between the function
     call and the function return.  */
  if ((rhs_class == GIMPLE_SINGLE_RHS || gimple_assign_cast_p (stmt))
      && (TREE_CODE (src_var) == SSA_NAME))
    {
      /* Reject a tailcall if the type conversion might need
         additional code.  */
      if (gimple_assign_cast_p (stmt)
          && TYPE_MODE (TREE_TYPE (dest)) != TYPE_MODE (TREE_TYPE (src_var)))
        return false;

      if (src_var != *ass_var)
        return false;

      *ass_var = dest;
      return true;
    }

  switch (rhs_class)
    {
    case GIMPLE_BINARY_RHS:
      op1 = gimple_assign_rhs2 (stmt);

      /* Fall through.  */

    case GIMPLE_UNARY_RHS:
      op0 = gimple_assign_rhs1 (stmt);
      break;

    default:
      return false;
    }

  /* Accumulator optimizations will reverse the order of operations.
     We can only do that for floating-point types if we're assuming
     that addition and multiplication are associative.  */
  if (!flag_associative_math)
    if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
      return false;

  if (rhs_class == GIMPLE_UNARY_RHS)
    ;
  else if (op0 == *ass_var
           && (non_ass_var = independent_of_stmt_p (op1, stmt, call)))
    ;
  else if (op1 == *ass_var
           && (non_ass_var = independent_of_stmt_p (op0, stmt, call)))
    ;
  else
    return false;

  switch (code)
    {
    case PLUS_EXPR:
      *a = non_ass_var;
      *ass_var = dest;
      return true;

    case MULT_EXPR:
      *m = non_ass_var;
      *ass_var = dest;
      return true;

    case NEGATE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        *m = build_real (TREE_TYPE (op0), dconstm1);
      else
        *m = build_int_cst (TREE_TYPE (op0), -1);

      *ass_var = dest;
      return true;

    case MINUS_EXPR:
      if (*ass_var == op0)
        *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
      else
        {
          if (FLOAT_TYPE_P (TREE_TYPE (non_ass_var)))
            *m = build_real (TREE_TYPE (non_ass_var), dconstm1);
          else
            *m = build_int_cst (TREE_TYPE (non_ass_var), -1);

          *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
        }

      *ass_var = dest;
      return true;

      /* TODO -- Handle POINTER_PLUS_EXPR.  */

    default:
      return false;
    }
}

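/* For example (an editorial sketch of the IL; the SSA names are made up),
   for a tail-recursive "return n + 2 * sum (n - 1)" the statements after
   the call are processed one by one:

     t1_ = sum (n - 1);   -- the call itself; ass_var = t1_
     t2_ = t1_ * 2;       -- MULT_EXPR: *m = 2, ass_var becomes t2_
     t3_ = n + t2_;       -- PLUS_EXPR: *a = n, ass_var becomes t3_
     return t3_;

   so the caller (find_tail_calls) learns that the function returns
   m * f (...) + a = 2 * sum (n - 1) + n.  */
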
/* Propagate VAR through phis on edge E.  */

static tree
propagate_through_phis (tree var, edge e)
{
  basic_block dest = e->dest;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
        return PHI_RESULT (phi);
    }
  return var;
}

/* Finds tailcalls falling into basic block BB.  The list of found tailcalls
   is added to the start of RET.  */

static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple stmt, call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;
  tree var;
  referenced_var_iterator rvi;

  if (!single_succ_p (bb))
    return;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels, returns and debug stmts.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
          || gimple_code (stmt) == GIMPLE_RETURN
          || is_gimple_debug (stmt))
        continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
        {
          call = stmt;
          ass_var = gimple_call_lhs (stmt);
          break;
        }

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
          || gimple_has_volatile_ops (stmt))
        return;
    }

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        find_tail_calls (e->src, ret);

      return;
    }

  /* If the LHS of our call is not just a simple register, we can't
     transform this into a tail or sibling call.  This situation happens in,
     e.g., "*p = foo ()" where foo returns a struct.  In this case
     we won't have a temporary here, but we need to carry out the side
     effect anyway, so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call () will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var && !is_gimple_reg (ass_var))
    return;

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func == current_function_decl)
    {
      tree arg;

      for (param = DECL_ARGUMENTS (func), idx = 0;
           param && idx < gimple_call_num_args (call);
           param = DECL_CHAIN (param), idx++)
        {
          arg = gimple_call_arg (call, idx);
          if (param != arg)
            {
              /* Make sure there are no problems with copying.  The parameter
                 must have a copyable type and the two arguments must have
                 reasonably equivalent types.  The latter requirement could be
                 relaxed if we emitted a suitable type conversion
                 statement.  */
              if (!is_gimple_reg_type (TREE_TYPE (param))
                  || !useless_type_conversion_p (TREE_TYPE (param),
                                                 TREE_TYPE (arg)))
                break;

              /* The parameter should be a real operand, so that the phi node
                 created for it at the start of the function has the meaning
                 of copying the value.  This test implies is_gimple_reg_type
                 from the previous condition, however this one could be
                 relaxed by being more careful with copying the new value
                 of the parameter (emitting appropriate GIMPLE_ASSIGN and
                 updating the virtual operands).  */
              if (!is_gimple_reg (param))
                break;
            }
        }
      if (idx == gimple_call_num_args (call) && !param)
        tail_recursion = true;
    }

  /* Make sure the tail invocation of this function does not refer
     to local variables.  */
  FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
    {
      if (TREE_CODE (var) != PARM_DECL
          && auto_var_in_fn_p (var, cfun->decl)
          && (ref_maybe_used_by_stmt_p (call, var)
              || call_may_clobber_ref_p (call, var)))
        return;
    }

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after dce.  */
  m = NULL_TREE;
  a = NULL_TREE;

  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);

      while (gsi_end_p (agsi))
        {
          ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
          abb = single_succ (abb);
          agsi = gsi_start_bb (abb);
        }

      stmt = gsi_stmt (agsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
        continue;

      if (gimple_code (stmt) == GIMPLE_RETURN)
        break;

      if (is_gimple_debug (stmt))
        continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
        return;

      /* This is a gimple assign.  */
      if (! process_assignment (stmt, gsi, &tmp_m, &tmp_a, &ass_var))
        return;

      if (tmp_a)
        {
          tree type = TREE_TYPE (tmp_a);
          if (a)
            a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);
          else
            a = tmp_a;
        }
      if (tmp_m)
        {
          tree type = TREE_TYPE (tmp_m);
          if (m)
            m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);
          else
            m = tmp_m;

          if (a)
            a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
        }
    }

  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (stmt);

  /* We may proceed if either there is no return value, or the return value
     is identical to the call's return.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}

/* Helper to insert PHI_ARG to the phi of VAR in the destination of edge E.  */

static void
add_successor_phi_arg (edge e, tree var, tree phi_arg)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    if (PHI_RESULT (gsi_stmt (gsi)) == var)
      break;

  gcc_assert (!gsi_end_p (gsi));
  add_phi_arg (gsi_stmt (gsi), phi_arg, e, UNKNOWN_LOCATION);
}

/* Creates a GIMPLE statement which computes the operation specified by
   CODE, ACC and OP1 to a new variable with name LABEL and inserts the
   statement before the position specified by GSI.  Returns the tree node
   of the statement's result.  */

static tree
adjust_return_value_with_ops (enum tree_code code, const char *label,
                              tree acc, tree op1, gimple_stmt_iterator gsi)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree tmp = create_tmp_reg (ret_type, label);
  gimple stmt;
  tree result;

  add_referenced_var (tmp);

  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign_with_ops (code, tmp, acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
                               fold_build2 (code,
                                            TREE_TYPE (op1),
                                            fold_convert (TREE_TYPE (op1), acc),
                                            op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
                                      false, NULL, true, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (NULL_TREE, rhs);
    }

  result = make_ssa_name (tmp, stmt);
  gimple_assign_set_lhs (stmt, result);
  update_stmt (stmt);
  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
  return result;
}

/* Creates a new GIMPLE statement that adjusts the value of accumulator ACC
   by the computation specified by CODE and OP1, and inserts the statement
   after the position specified by GSI.  Returns the new SSA name of the
   updated accumulator.  */

static tree
update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
                             gimple_stmt_iterator gsi)
{
  gimple stmt;
  tree var;
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign_with_ops (code, SSA_NAME_VAR (acc), acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
                               fold_build2 (code,
                                            TREE_TYPE (op1),
                                            fold_convert (TREE_TYPE (op1), acc),
                                            op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
                                      false, NULL, false, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (NULL_TREE, rhs);
    }
  var = make_ssa_name (SSA_NAME_VAR (acc), stmt);
  gimple_assign_set_lhs (stmt, var);
  update_stmt (stmt);
  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
  return var;
}

/* Adjust the accumulator values according to A and M after GSI, and update
   the phi nodes on edge BACK.  */

static void
adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back)
{
  tree var, a_acc_arg, m_acc_arg;

  if (m)
    m = force_gimple_operand_gsi (&gsi, m, true, NULL, true, GSI_SAME_STMT);
  if (a)
    a = force_gimple_operand_gsi (&gsi, a, true, NULL, true, GSI_SAME_STMT);

  a_acc_arg = a_acc;
  m_acc_arg = m_acc;
  if (a)
    {
      if (m_acc)
        {
          if (integer_onep (a))
            var = m_acc;
          else
            var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc,
                                                a, gsi);
        }
      else
        var = a;

      a_acc_arg = update_accumulator_with_ops (PLUS_EXPR, a_acc, var, gsi);
    }

  if (m)
    m_acc_arg = update_accumulator_with_ops (MULT_EXPR, m_acc, m, gsi);

  if (a_acc)
    add_successor_phi_arg (back, a_acc, a_acc_arg);

  if (m_acc)
    add_successor_phi_arg (back, m_acc, m_acc_arg);
}

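/* Numerically (an editorial sketch): when eliminating a tail recursion of
   the shape return 2 + 3 * f (...), the updates above compute

     a_acc' = a_acc + 2 * m_acc
     m_acc' = 3 * m_acc

   so that the eventual return m_acc' * x + a_acc' equals
   m_acc * (2 + 3 * x) + a_acc, matching case 3 of the comment at the top
   of this file.  */
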
/* Adjust value of the return at the end of BB according to M and A
   accumulators.  */

static void
adjust_return_value (basic_block bb, tree m, tree a)
{
  tree retval;
  gimple ret_stmt = gimple_seq_last_stmt (bb_seq (bb));
  gimple_stmt_iterator gsi = gsi_last_bb (bb);

  gcc_assert (gimple_code (ret_stmt) == GIMPLE_RETURN);

  retval = gimple_return_retval (ret_stmt);
  if (!retval || retval == error_mark_node)
    return;

  if (m)
    retval = adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval,
                                           gsi);
  if (a)
    retval = adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval,
                                           gsi);
  gimple_return_set_retval (ret_stmt, retval);
  update_stmt (ret_stmt);
}

/* Subtract COUNT and FREQUENCY from the basic block and its
   outgoing edge.  */
static void
decrease_profile (basic_block bb, gcov_type count, int frequency)
{
  edge e;
  bb->count -= count;
  if (bb->count < 0)
    bb->count = 0;
  bb->frequency -= frequency;
  if (bb->frequency < 0)
    bb->frequency = 0;
  if (!single_succ_p (bb))
    {
      gcc_assert (!EDGE_COUNT (bb->succs));
      return;
    }
  e = single_succ_edge (bb);
  e->count -= count;
  if (e->count < 0)
    e->count = 0;
}

/* Returns true if argument PARAM of the tail recursive call needs to be copied
   when the call is eliminated.  */

static bool
arg_needs_copy_p (tree param)
{
  tree def;

  if (!is_gimple_reg (param) || !var_ann (param))
    return false;

  /* Parameters that are only defined but never used need not be copied.  */
  def = gimple_default_def (cfun, param);
  if (!def)
    return false;

  return true;
}

/* Eliminates the tail call described by T.  */

static void
eliminate_tail_call (struct tailcall *t)
{
  tree param, rslt;
  gimple stmt, call;
  tree arg;
  size_t idx;
  basic_block bb, first;
  edge e;
  gimple phi;
  gimple_stmt_iterator gsi;
  gimple orig_stmt;

  stmt = orig_stmt = gsi_stmt (t->call_gsi);
  bb = gsi_bb (t->call_gsi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
               bb->index);
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gcc_assert (is_gimple_call (stmt));

  first = single_succ (ENTRY_BLOCK_PTR);

  /* Remove the code after call_gsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  gsi = t->call_gsi;
  gsi_next (&gsi);
  while (!gsi_end_p (gsi))
    {
      gimple t = gsi_stmt (gsi);
      /* Do not remove the return statement, so that redirect_edge_and_branch
         sees how the block ends.  */
      if (gimple_code (t) == GIMPLE_RETURN)
        break;

      gsi_remove (&gsi, true);
      release_defs (t);
    }

  /* The number of executions of the function is reduced by the tail call.  */
  e = single_succ_edge (gsi_bb (t->call_gsi));
  decrease_profile (EXIT_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
  decrease_profile (ENTRY_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
  if (e->dest != EXIT_BLOCK_PTR)
    decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e));

  /* Replace the call by a jump to the start of the function.  */
  e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
                                first);
  gcc_assert (e);
  PENDING_STMT (e) = NULL;

  /* Add phi node entries for arguments.  The ordering of the phi nodes should
     be the same as the ordering of the arguments.  */
  for (param = DECL_ARGUMENTS (current_function_decl),
         idx = 0, gsi = gsi_start_phis (first);
       param;
       param = DECL_CHAIN (param), idx++)
    {
      if (!arg_needs_copy_p (param))
        continue;

      arg = gimple_call_arg (stmt, idx);
      phi = gsi_stmt (gsi);
      gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));

      add_phi_arg (phi, arg, e, gimple_location (stmt));
      gsi_next (&gsi);
    }

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);

  call = gsi_stmt (t->call_gsi);
  rslt = gimple_call_lhs (call);
  if (rslt != NULL_TREE)
    {
      /* Result of the call will no longer be defined.  So adjust the
         SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = gimple_build_nop ();
    }

  gsi_remove (&t->call_gsi, true);
  release_defs (call);
}

/* Add phi nodes for the virtual operands defined in the function to the
   header of the loop created by tail recursion elimination.

   Originally, we used to add phi nodes only for call clobbered variables,
   as the value of the non-call clobbered ones obviously cannot be used
   or changed within the recursive call.  However, the local variables
   from multiple calls now share the same location, so the virtual ssa form
   requires us to say that the location dies on further iterations of the
   loop, which requires adding phi nodes.  */
static void
add_virtual_phis (void)
{
  referenced_var_iterator rvi;
  tree var;

  /* The problematic part is that there is no way to know what to put into
     the phi nodes (in fact, a suitable ssa name does not even have to be
     available).  A solution would be to have an artificial use/kill for all
     virtual operands in the EXIT node.  Unless we have this, we cannot do
     much better than to rebuild the ssa form for possibly affected virtual
     ssa names from scratch.  */

  FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
    {
      if (!is_gimple_reg (var) && gimple_default_def (cfun, var) != NULL_TREE)
        mark_sym_for_renaming (var);
    }
}

/* Optimizes the tailcall described by T.  If OPT_TAILCALLS is true, also
   mark the tailcalls for the sibcall optimization.  */

static bool
optimize_tail_call (struct tailcall *t, bool opt_tailcalls)
{
  if (t->tail_recursion)
    {
      eliminate_tail_call (t);
      return true;
    }

  if (opt_tailcalls)
    {
      gimple stmt = gsi_stmt (t->call_gsi);

      gimple_call_set_tail (stmt, true);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Found tail call ");
          print_gimple_stmt (dump_file, stmt, 0, dump_flags);
          fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index);
        }
    }

  return false;
}

/* Creates a tail-call accumulator of the same type as the return type of the
   current function.  LABEL is the name used to create the temporary
   variable for the accumulator.  The accumulator will be inserted in the
   phi nodes of basic block BB (which must have a single predecessor), with
   an initial value INIT converted to the current function return type.  */

static tree
create_tailcall_accumulator (const char *label, basic_block bb, tree init)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree tmp = create_tmp_reg (ret_type, label);
  gimple phi;

  add_referenced_var (tmp);
  phi = create_phi_node (tmp, bb);
  /* RET_TYPE can be a float when -ffast-math is enabled.  */
  add_phi_arg (phi, fold_convert (ret_type, init), single_pred_edge (bb),
               UNKNOWN_LOCATION);
  return PHI_RESULT (phi);
}

/* Optimizes tail calls in the function, turning the tail recursion
   into iteration.  */

static unsigned int
tree_optimize_tail_calls_1 (bool opt_tailcalls)
{
  edge e;
  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = single_succ (ENTRY_BLOCK_PTR);
  tree param;
  gimple stmt;
  edge_iterator ei;

  if (!suitable_for_tail_opt_p ())
    return 0;
  if (opt_tailcalls)
    opt_tailcalls = suitable_for_tail_call_opt_p ();

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      /* Only traverse the normal exits, i.e. those that end with a return
         statement.  */
      stmt = last_stmt (e->src);

      if (stmt
          && gimple_code (stmt) == GIMPLE_RETURN)
        find_tail_calls (e->src, &tailcalls);
    }

  /* Construct the phi nodes and accumulators if necessary.  */
  a_acc = m_acc = NULL_TREE;
  for (act = tailcalls; act; act = act->next)
    {
      if (!act->tail_recursion)
        continue;

      if (!phis_constructed)
        {
          /* Ensure that the block has a single predecessor and no existing
             (possibly degenerate) PHI nodes; otherwise split the edge from
             the entry block.  */
          if (!single_pred_p (first)
              || !gimple_seq_empty_p (phi_nodes (first)))
            first = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));

          /* Copy the args if needed.  */
          for (param = DECL_ARGUMENTS (current_function_decl);
               param;
               param = DECL_CHAIN (param))
            if (arg_needs_copy_p (param))
              {
                tree name = gimple_default_def (cfun, param);
                tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
                gimple phi;

                set_default_def (param, new_name);
                phi = create_phi_node (name, first);
                SSA_NAME_DEF_STMT (name) = phi;
                add_phi_arg (phi, new_name, single_pred_edge (first),
                             EXPR_LOCATION (param));
              }
          phis_constructed = true;
        }

      if (act->add && !a_acc)
        a_acc = create_tailcall_accumulator ("add_acc", first,
                                             integer_zero_node);

      if (act->mult && !m_acc)
        m_acc = create_tailcall_accumulator ("mult_acc", first,
                                             integer_one_node);
    }

  if (a_acc || m_acc)
    {
      /* When the tail call elimination using accumulators is performed,
         statements adding the accumulated value are inserted at all exits.
         This turns all other tail calls into non-tail ones.  */
      opt_tailcalls = false;
    }

  for (; tailcalls; tailcalls = next)
    {
      next = tailcalls->next;
      changed |= optimize_tail_call (tailcalls, opt_tailcalls);
      free (tailcalls);
    }

  if (a_acc || m_acc)
    {
      /* Modify the remaining return statements.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          stmt = last_stmt (e->src);

          if (stmt
              && gimple_code (stmt) == GIMPLE_RETURN)
            adjust_return_value (e->src, m_acc, a_acc);
        }
    }

  if (changed)
    free_dominance_info (CDI_DOMINATORS);

  if (phis_constructed)
    add_virtual_phis ();
  if (changed)
    return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
  return 0;
}

static unsigned int
execute_tail_recursion (void)
{
  return tree_optimize_tail_calls_1 (false);
}

static bool
gate_tail_calls (void)
{
  return flag_optimize_sibling_calls != 0 && dbg_cnt (tail_call);
}

static unsigned int
execute_tail_calls (void)
{
  return tree_optimize_tail_calls_1 (true);
}

struct gimple_opt_pass pass_tail_recursion =
{
 {
  GIMPLE_PASS,
  "tailr",                              /* name */
  gate_tail_calls,                      /* gate */
  execute_tail_recursion,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_ssa                       /* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_tail_calls =
{
 {
  GIMPLE_PASS,
  "tailc",                              /* name */
  gate_tail_calls,                      /* gate */
  execute_tail_calls,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_ssa                       /* todo_flags_finish */
 }
};