gcc/tree-tailcall.c
/* Tail call optimization on trees.
   Copyright (C) 2003-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "except.h"
#include "tree-eh.h"
#include "dbgcnt.h"
#include "cfgloop.h"
#include "common/common-target.h"
#include "ipa-utils.h"
#include "tree-ssa-live.h"

/* This file implements tail recursion elimination.  It is also used to
   analyze tail calls in general, passing the results to the RTL level
   where they are used for sibcall optimization.

   In addition to the standard tail recursion elimination, we handle the most
   trivial cases of making the call tail recursive by creating accumulators.
   For example the following function

   int sum (int n)
   {
     if (n > 0)
       return n + sum (n - 1);
     else
       return 0;
   }

   is transformed into

   int sum (int n)
   {
     int acc = 0;

     while (n > 0)
       acc += n--;

     return acc;
   }

   To do this, we maintain two accumulators (a_acc and m_acc) so that when
   we reach a return x statement, we return a_acc + x * m_acc instead.
   They are initialized to 0 and 1, respectively, so the semantics of the
   function is obviously preserved.  If we are guaranteed that the value of
   an accumulator never changes, we omit that accumulator.

   There are three ways the function may exit.  The first one is
   handled in adjust_return_value, the other two in adjust_accumulator_values
   (the second case is actually a special case of the third one and we
   present it separately just for clarity):

   1) Just return x, where x is not in any of the remaining special shapes.
      We rewrite this to a gimple equivalent of return m_acc * x + a_acc.

   2) return f (...), where f is the current function, is rewritten in a
      classical tail-recursion elimination way, into assignment of arguments
      and jump to the start of the function.  Values of the accumulators
      are unchanged.

   3) return a + m * f (...), where a and m do not depend on the call to f.
      To preserve the semantics described before, we want this to be rewritten
      in such a way that we finally return

	a_acc + (a + m * f (...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f (...).

      I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
      eliminate the tail call to f.  Special cases when the value is just
      added or just multiplied are obtained by setting a = 0 or m = 1.

   TODO -- it is possible to do similar tricks for other operations.  */
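
/* As a concrete sketch of case 3 (an illustration only: the function
   "fact" below is made up, and since the pass works on GIMPLE the
   statements it actually emits differ in detail), in

   int fact (int n)
   {
     if (n < 2)
       return 1;
     return n * fact (n - 1);
   }

   the recursive return matches case 3 with a = 0 and m = n, so the
   function behaves as if rewritten into

   int fact (int n)
   {
     int m_acc = 1;

     while (n >= 2)
       {
	 m_acc *= n;
	 n = n - 1;
       }

     return m_acc * 1;
   }

   where m_acc is multiplied by m at each eliminated call, and the
   additive accumulator is omitted because it would never change from
   its initial value 0.  */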

/* A structure that describes the tailcall.  */

struct tailcall
{
  /* The iterator pointing to the call statement.  */
  gimple_stmt_iterator call_gsi;

  /* True if it is a call to the current function.  */
  bool tail_recursion;

  /* The return value of the caller is mult * f + add, where f is the return
     value of the call.  */
  tree mult, add;

  /* Next tailcall in the chain.  */
  struct tailcall *next;
};

/* The variables holding the values of the multiplicative and additive
   accumulators.  */
static tree m_acc, a_acc;

static bool optimize_tail_call (struct tailcall *, bool);
static void eliminate_tail_call (struct tailcall *);

/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).  */

static bool
suitable_for_tail_opt_p (void)
{
  if (cfun->stdarg)
    return false;

  return true;
}

/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).
   This test must pass in addition to suitable_for_tail_opt_p in order to make
   tail call discovery happen.  */

static bool
suitable_for_tail_call_opt_p (void)
{
  tree param;

  /* alloca (until we have stack slot life analysis) inhibits
     sibling call optimizations, but not tail recursion.  */
  if (cfun->calls_alloca)
    return false;

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at exit of the function.  Which means
     that we cannot do any sibcall transformations.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      && current_function_has_exception_handlers ())
    return false;

  /* Any function that calls setjmp might have longjmp called from
     any called function.  ??? We really should represent this
     properly in the CFG so that this needn't be special cased.  */
  if (cfun->calls_setjmp)
    return false;

  /* Various targets don't handle tail calls correctly in functions
     that call __builtin_eh_return.  */
  if (cfun->calls_eh_return)
    return false;

  /* ??? It is OK if the address of a function argument is taken in some
     cases, but not in all cases.  See PR15387 and PR19616.  Revisit for
     4.1.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    if (TREE_ADDRESSABLE (param))
      return false;

  return true;
}

/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by GSI (in the sense that we already know EXPR's
   value at GSI).  We use the fact that we are only called from the chain of
   basic blocks that have only a single successor.  Returns the expression
   containing the value of EXPR at GSI.  */

static tree
independent_of_stmt_p (tree expr, gimple *at, gimple_stmt_iterator gsi,
		       bitmap to_move)
{
  basic_block bb, call_bb, at_bb;
  edge e;
  edge_iterator ei;

  if (is_gimple_min_invariant (expr))
    return expr;

  if (TREE_CODE (expr) != SSA_NAME)
    return NULL_TREE;

  if (bitmap_bit_p (to_move, SSA_NAME_VERSION (expr)))
    return expr;

  /* Mark the blocks in the chain leading to the end.  */
  at_bb = gimple_bb (at);
  call_bb = gimple_bb (gsi_stmt (gsi));
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = &bb->aux;
  bb->aux = &bb->aux;

  while (1)
    {
      at = SSA_NAME_DEF_STMT (expr);
      bb = gimple_bb (at);

      /* The default definition or defined before the chain.  */
      if (!bb || !bb->aux)
	break;

      if (bb == call_bb)
	{
	  for (; !gsi_end_p (gsi); gsi_next (&gsi))
	    if (gsi_stmt (gsi) == at)
	      break;

	  if (!gsi_end_p (gsi))
	    expr = NULL_TREE;
	  break;
	}

      if (gimple_code (at) != GIMPLE_PHI)
	{
	  expr = NULL_TREE;
	  break;
	}

      FOR_EACH_EDGE (e, ei, bb->preds)
	if (e->src->aux)
	  break;
      gcc_assert (e);

      expr = PHI_ARG_DEF_FROM_EDGE (at, e);
      if (TREE_CODE (expr) != SSA_NAME)
	{
	  /* The value is a constant.  */
	  break;
	}
    }

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = NULL;
  bb->aux = NULL;

  return expr;
}
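
/* A sketch of the walk above (the block numbers are made up): if the chain
   is bb3 (the call block) -> bb4 -> bb5 (the return block), and EXPR is
   defined by a PHI node in bb4, the loop follows the PHI argument coming
   from the marked predecessor and continues from its definition.  The walk
   stops successfully at a constant or at a definition outside the chain;
   it returns NULL_TREE if it reaches a non-PHI definition inside the chain
   or a definition after the call in the call block.  */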

/* The result of process_assignment: FAIL if the statement prevents tail
   call optimization, OK if it was accounted for, and TRY_MOVE if it may
   be possible to move the statement out of the way instead.  */
enum par { FAIL, OK, TRY_MOVE };

/* Simulates the effect of an assignment STMT on the return value of the tail
   recursive CALL, whose current value is carried by *ASS_VAR.  M and A are
   the multiplicative and the additive factors for the real return value.  */

static par
process_assignment (gassign *stmt,
		    gimple_stmt_iterator call, tree *m,
		    tree *a, tree *ass_var, bitmap to_move)
{
  tree op0, op1 = NULL_TREE, non_ass_var = NULL_TREE;
  tree dest = gimple_assign_lhs (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
  tree src_var = gimple_assign_rhs1 (stmt);

  /* See if this is a simple copy operation of an SSA name to the function
     result.  In that case we may have a simple tail call.  Ignore type
     conversions that can never produce extra code between the function
     call and the function return.  */
  if ((rhs_class == GIMPLE_SINGLE_RHS || gimple_assign_cast_p (stmt))
      && src_var == *ass_var)
    {
      /* Reject a tailcall if the type conversion might need
	 additional code.  */
      if (gimple_assign_cast_p (stmt))
	{
	  if (TYPE_MODE (TREE_TYPE (dest)) != TYPE_MODE (TREE_TYPE (src_var)))
	    return FAIL;

	  /* Even if the type modes are the same, if the precision of the
	     type is smaller than mode's precision,
	     reduce_to_bit_field_precision would generate additional code.  */
	  if (INTEGRAL_TYPE_P (TREE_TYPE (dest))
	      && !type_has_mode_precision_p (TREE_TYPE (dest)))
	    return FAIL;
	}

      *ass_var = dest;
      return OK;
    }

  switch (rhs_class)
    {
    case GIMPLE_BINARY_RHS:
      op1 = gimple_assign_rhs2 (stmt);

      /* Fall through.  */

    case GIMPLE_UNARY_RHS:
      op0 = gimple_assign_rhs1 (stmt);
      break;

    default:
      return FAIL;
    }

  /* Accumulator optimizations will reverse the order of operations.
     We can only do that for floating-point types if we're assuming
     that addition and multiplication are associative.  */
  if (!flag_associative_math)
    if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
      return FAIL;

  if (rhs_class == GIMPLE_UNARY_RHS
      && op0 == *ass_var)
    ;
  else if (op0 == *ass_var
	   && (non_ass_var = independent_of_stmt_p (op1, stmt, call,
						    to_move)))
    ;
  else if (op1 == *ass_var
	   && (non_ass_var = independent_of_stmt_p (op0, stmt, call,
						    to_move)))
    ;
  else
    return TRY_MOVE;

  switch (code)
    {
    case PLUS_EXPR:
      *a = non_ass_var;
      *ass_var = dest;
      return OK;

    case POINTER_PLUS_EXPR:
      if (op0 != *ass_var)
	return FAIL;
      *a = non_ass_var;
      *ass_var = dest;
      return OK;

    case MULT_EXPR:
      *m = non_ass_var;
      *ass_var = dest;
      return OK;

    case NEGATE_EXPR:
      *m = build_minus_one_cst (TREE_TYPE (op0));
      *ass_var = dest;
      return OK;

    case MINUS_EXPR:
      if (*ass_var == op0)
	*a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
      else
	{
	  *m = build_minus_one_cst (TREE_TYPE (non_ass_var));
	  *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
	}

      *ass_var = dest;
      return OK;

    default:
      return FAIL;
    }
}
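
/* A worked instance of the contract above (the SSA names are made up):
   if the current value of the call is carried by x_1 and the statement is

     x_2 = c_3 - x_1;

   then op1 == *ass_var, non_ass_var is c_3, and the MINUS_EXPR case
   yields m = -1 and a = -c_3.  The negated a compensates for the way
   find_tail_calls combines the factors, effectively (a + tmp_a) * tmp_m:
   starting from a = 0 this gives (0 + -c_3) * -1 = c_3, matching
   x_2 = (-1) * x_1 + c_3.  */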

/* Propagate VAR through phis on edge E.  */

static tree
propagate_through_phis (tree var, edge e)
{
  basic_block dest = e->dest;
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
	return PHI_RESULT (phi);
    }
  return var;
}

/* Argument for compute_live_vars/live_vars_at_stmt and what compute_live_vars
   returns.  Computed lazily, but just once for the function.  */
static live_vars_map *live_vars;
static vec<bitmap_head> live_vars_vec;

/* Finds tailcalls falling into basic block BB.  The list of found tailcalls
   is prepended to *RET.  */

static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple *stmt;
  gcall *call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;
  tree var;

  if (!single_succ_p (bb))
    return;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels, returns, nops, clobbers and debug stmts.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
	  || gimple_code (stmt) == GIMPLE_RETURN
	  || gimple_code (stmt) == GIMPLE_NOP
	  || gimple_code (stmt) == GIMPLE_PREDICT
	  || gimple_clobber_p (stmt)
	  || is_gimple_debug (stmt))
	continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
	{
	  call = as_a <gcall *> (stmt);
	  ass_var = gimple_call_lhs (call);
	  break;
	}

      /* Allow simple copies between local variables, even if they're
	 aggregates.  */
      if (is_gimple_assign (stmt)
	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), cfun->decl)
	  && auto_var_in_fn_p (gimple_assign_rhs1 (stmt), cfun->decl))
	continue;

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
	  || gimple_has_volatile_ops (stmt))
	return;
    }

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	find_tail_calls (e->src, ret);

      return;
    }

  /* If the LHS of our call is not just a simple register or local
     variable, we can't transform this into a tail or sibling call.
     This happens, e.g., in "*p = foo ()" where foo returns a
     struct.  In this case we won't have a temporary here, but we need
     to carry out the side effect anyway, so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call () will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var
      && !is_gimple_reg (ass_var)
      && !auto_var_in_fn_p (ass_var, cfun->decl))
    return;

  /* If the call might throw an exception that wouldn't propagate out of
     cfun, we can't transform to a tail or sibling call (PR 82081).  */
  if (stmt_could_throw_p (cfun, stmt)
      && !stmt_can_throw_external (cfun, stmt))
    return;

  /* If the function returns a value, then at present, the tail call
     must return the same type of value.  There is conceptually a copy
     between the object returned by the tail call candidate and the
     object returned by CFUN itself.

     This means that if we have:

	 lhs = f (&<retval>);    // f reads from <retval>
				 // (lhs is usually also <retval>)

     there is a copy between the temporary object returned by f and lhs,
     meaning that any use of <retval> in f occurs before the assignment
     to lhs begins.  Thus the <retval> that is live on entry to the call
     to f is really an independent local variable V that happens to be
     stored in the RESULT_DECL rather than a local VAR_DECL.

     Turning this into a tail call would remove the copy and make the
     lifetimes of the return value and V overlap.  The same applies to
     tail recursion, since if f can read from <retval>, we have to assume
     that CFUN might already have written to <retval> before the call.

     The problem doesn't apply when <retval> is passed by value, but that
     isn't a case we handle anyway.  */
  tree result_decl = DECL_RESULT (cfun->decl);
  if (result_decl
      && may_be_aliased (result_decl)
      && ref_maybe_used_by_stmt_p (call, result_decl))
    return;

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func
      && !fndecl_built_in_p (func)
      && recursive_call_p (current_function_decl, func))
    {
      tree arg;

      for (param = DECL_ARGUMENTS (current_function_decl), idx = 0;
	   param && idx < gimple_call_num_args (call);
	   param = DECL_CHAIN (param), idx++)
	{
	  arg = gimple_call_arg (call, idx);
	  if (param != arg)
	    {
	      /* Make sure there are no problems with copying.  The parameter
		 must have a copyable type and the two arguments must have
		 reasonably equivalent types.  The latter requirement could
		 be relaxed if we emitted a suitable type conversion
		 statement.  */
	      if (!is_gimple_reg_type (TREE_TYPE (param))
		  || !useless_type_conversion_p (TREE_TYPE (param),
						 TREE_TYPE (arg)))
		break;

	      /* The parameter should be a real operand, so that the phi node
		 created for it at the start of the function has the meaning
		 of copying the value.  This test implies is_gimple_reg_type
		 from the previous condition, however this one could be
		 relaxed by being more careful with copying the new value
		 of the parameter (emitting the appropriate GIMPLE_ASSIGN and
		 updating the virtual operands).  */
	      if (!is_gimple_reg (param))
		break;
	    }
	}
      if (idx == gimple_call_num_args (call) && !param)
	tail_recursion = true;
    }

  /* Compute live vars if not computed yet.  */
  if (live_vars == NULL)
    {
      unsigned int cnt = 0;
      FOR_EACH_LOCAL_DECL (cfun, idx, var)
	if (VAR_P (var)
	    && auto_var_in_fn_p (var, cfun->decl)
	    && may_be_aliased (var))
	  {
	    if (live_vars == NULL)
	      live_vars = new live_vars_map;
	    live_vars->put (DECL_UID (var), cnt++);
	  }
      if (live_vars)
	live_vars_vec = compute_live_vars (cfun, live_vars);
    }

  /* Determine a bitmap of variables which are still in scope after the
     call.  */
  bitmap local_live_vars = NULL;
  if (live_vars)
    local_live_vars = live_vars_at_stmt (live_vars_vec, live_vars, call);

  /* Make sure the tail invocation of this function does not indirectly
     refer to local variables.  (Passing variables directly by value
     is OK.)  */
  FOR_EACH_LOCAL_DECL (cfun, idx, var)
    {
      if (TREE_CODE (var) != PARM_DECL
	  && auto_var_in_fn_p (var, cfun->decl)
	  && may_be_aliased (var)
	  && (ref_maybe_used_by_stmt_p (call, var)
	      || call_may_clobber_ref_p (call, var)))
	{
	  if (!VAR_P (var))
	    {
	      if (local_live_vars)
		BITMAP_FREE (local_live_vars);
	      return;
	    }
	  else
	    {
	      unsigned int *v = live_vars->get (DECL_UID (var));
	      if (bitmap_bit_p (local_live_vars, *v))
		{
		  BITMAP_FREE (local_live_vars);
		  return;
		}
	    }
	}
    }

  if (local_live_vars)
    BITMAP_FREE (local_live_vars);

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after DCE.  */
  m = NULL_TREE;
  a = NULL_TREE;
  auto_bitmap to_move_defs;
  auto_vec<gimple *> to_move_stmts;

  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);

      while (gsi_end_p (agsi))
	{
	  ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
	  abb = single_succ (abb);
	  agsi = gsi_start_bb (abb);
	}

      stmt = gsi_stmt (agsi);
      if (gimple_code (stmt) == GIMPLE_RETURN)
	break;

      if (gimple_code (stmt) == GIMPLE_LABEL
	  || gimple_code (stmt) == GIMPLE_NOP
	  || gimple_code (stmt) == GIMPLE_PREDICT
	  || gimple_clobber_p (stmt)
	  || is_gimple_debug (stmt))
	continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
	return;

      /* This is a gimple assign.  */
      par ret = process_assignment (as_a <gassign *> (stmt), gsi,
				    &tmp_m, &tmp_a, &ass_var, to_move_defs);
      if (ret == FAIL)
	return;
      else if (ret == TRY_MOVE)
	{
	  if (! tail_recursion)
	    return;
	  /* Do not deal with checking dominance, the real fix is to
	     do path isolation for the transform phase anyway, removing
	     the need to compute the accumulators with new stmts.  */
	  if (abb != bb)
	    return;
	  for (unsigned opno = 1; opno < gimple_num_ops (stmt); ++opno)
	    {
	      tree op = gimple_op (stmt, opno);
	      if (independent_of_stmt_p (op, stmt, gsi, to_move_defs) != op)
		return;
	    }
	  bitmap_set_bit (to_move_defs,
			  SSA_NAME_VERSION (gimple_assign_lhs (stmt)));
	  to_move_stmts.safe_push (stmt);
	  continue;
	}

      /* Accumulate the new factors: the value carried so far, say v,
	 becomes (v + tmp_a) * tmp_m, with an absent tmp_a counting as 0
	 and an absent tmp_m as 1.  */
      if (tmp_a)
	{
	  tree type = TREE_TYPE (tmp_a);
	  if (a)
	    a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);
	  else
	    a = tmp_a;
	}
      if (tmp_m)
	{
	  tree type = TREE_TYPE (tmp_m);
	  if (m)
	    m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);
	  else
	    m = tmp_m;

	  if (a)
	    a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
	}
    }

  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (as_a <greturn *> (stmt));

  /* We may proceed if there either is no return value, or the return value
     is identical to the call's return.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  /* For pointers only allow additions.  */
  if (m && POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
    return;

  /* Move queued defs.  */
  if (tail_recursion)
    {
      unsigned i;
      FOR_EACH_VEC_ELT (to_move_stmts, i, stmt)
	{
	  gimple_stmt_iterator mgsi = gsi_for_stmt (stmt);
	  gsi_move_before (&mgsi, &gsi);
	}
    }

  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}
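
/* To illustrate the accumulator bookkeeping above (the SSA names are made
   up; the pass of course sees GIMPLE produced from real sources), for a
   tail-recursive call whose value is carried by t1_4 and is followed by

     t2_5 = t1_4 * 3;
     t3_6 = t2_5 + 5;
     return t3_6;

   the loop accumulates m = 3 from the first statement and a = 5 from the
   second, so the recorded tailcall has mult = 3 and add = 5, matching a
   final return value of 3 * f (...) + 5.  */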

/* Helper to insert PHI_ARG as an argument to the phi of VAR in the
   destination of edge E.  */

static void
add_successor_phi_arg (edge e, tree var, tree phi_arg)
{
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    if (PHI_RESULT (gsi.phi ()) == var)
      break;

  gcc_assert (!gsi_end_p (gsi));
  add_phi_arg (gsi.phi (), phi_arg, e, UNKNOWN_LOCATION);
}

/* Creates a GIMPLE statement which applies the operation specified by
   CODE to ACC and OP1, storing the result in a new variable named LABEL,
   and inserts the statement at the position specified by GSI.  Returns the
   tree node of the statement's result.  */

static tree
adjust_return_value_with_ops (enum tree_code code, const char *label,
			      tree acc, tree op1, gimple_stmt_iterator gsi)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree result = make_temp_ssa_name (ret_type, NULL, label);
  gassign *stmt;

  if (POINTER_TYPE_P (ret_type))
    {
      gcc_assert (code == PLUS_EXPR && TREE_TYPE (acc) == sizetype);
      code = POINTER_PLUS_EXPR;
    }
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1))
      && code != POINTER_PLUS_EXPR)
    stmt = gimple_build_assign (result, code, acc, op1);
  else
    {
      tree tem;
      if (code == POINTER_PLUS_EXPR)
	tem = fold_build2 (code, TREE_TYPE (op1), op1, acc);
      else
	tem = fold_build2 (code, TREE_TYPE (op1),
			   fold_convert (TREE_TYPE (op1), acc), op1);
      tree rhs = fold_convert (ret_type, tem);
      rhs = force_gimple_operand_gsi (&gsi, rhs,
				      false, NULL, true, GSI_SAME_STMT);
      stmt = gimple_build_assign (result, rhs);
    }

  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
  return result;
}

/* Creates a new GIMPLE statement that adjusts the value of accumulator ACC
   by the computation specified by CODE and OP1, and inserts it after the
   position specified by GSI.  Returns the new SSA name of the updated
   accumulator.  */

static tree
update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
			     gimple_stmt_iterator gsi)
{
  gassign *stmt;
  tree var = copy_ssa_name (acc);
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign (var, code, acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
			       fold_build2 (code,
					    TREE_TYPE (op1),
					    fold_convert (TREE_TYPE (op1), acc),
					    op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
				      false, NULL, false, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (var, rhs);
    }
  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
  return var;
}

/* Adjust the accumulator values according to A and M after GSI, and update
   the phi nodes on edge BACK.  */

static void
adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back)
{
  tree var, a_acc_arg, m_acc_arg;

  if (m)
    m = force_gimple_operand_gsi (&gsi, m, true, NULL, true, GSI_SAME_STMT);
  if (a)
    a = force_gimple_operand_gsi (&gsi, a, true, NULL, true, GSI_SAME_STMT);

  a_acc_arg = a_acc;
  m_acc_arg = m_acc;
  if (a)
    {
      if (m_acc)
	{
	  if (integer_onep (a))
	    var = m_acc;
	  else
	    var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc,
						a, gsi);
	}
      else
	var = a;

      a_acc_arg = update_accumulator_with_ops (PLUS_EXPR, a_acc, var, gsi);
    }

  if (m)
    m_acc_arg = update_accumulator_with_ops (MULT_EXPR, m_acc, m, gsi);

  if (a_acc)
    add_successor_phi_arg (back, a_acc, a_acc_arg);

  if (m_acc)
    add_successor_phi_arg (back, m_acc, m_acc_arg);
}

/* Adjust the value returned at the end of BB according to the M and A
   accumulators.  */

static void
adjust_return_value (basic_block bb, tree m, tree a)
{
  tree retval;
  greturn *ret_stmt = as_a <greturn *> (gimple_seq_last_stmt (bb_seq (bb)));
  gimple_stmt_iterator gsi = gsi_last_bb (bb);

  gcc_assert (gimple_code (ret_stmt) == GIMPLE_RETURN);

  retval = gimple_return_retval (ret_stmt);
  if (!retval || retval == error_mark_node)
    return;

  if (m)
    retval = adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval,
					   gsi);
  if (a)
    retval = adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval,
					   gsi);
  gimple_return_set_retval (ret_stmt, retval);
  update_stmt (ret_stmt);
}

/* Subtract COUNT from the profile count of basic block BB; the counts of
   its outgoing edges follow implicitly, since edge counts are derived from
   block counts.  */

static void
decrease_profile (basic_block bb, profile_count count)
{
  bb->count = bb->count - count;
  if (!single_succ_p (bb))
    {
      gcc_assert (!EDGE_COUNT (bb->succs));
      return;
    }
}

/* Returns true if argument PARAM of the tail recursive call needs to be
   copied when the call is eliminated.  */

static bool
arg_needs_copy_p (tree param)
{
  tree def;

  if (!is_gimple_reg (param))
    return false;

  /* Parameters that are only defined but never used need not be copied.  */
  def = ssa_default_def (cfun, param);
  if (!def)
    return false;

  return true;
}

/* Eliminates the tail call described by T.  */

static void
eliminate_tail_call (struct tailcall *t)
{
  tree param, rslt;
  gimple *stmt, *call;
  tree arg;
  size_t idx;
  basic_block bb, first;
  edge e;
  gphi *phi;
  gphi_iterator gpi;
  gimple_stmt_iterator gsi;
  gimple *orig_stmt;

  stmt = orig_stmt = gsi_stmt (t->call_gsi);
  bb = gsi_bb (t->call_gsi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
	       bb->index);
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gcc_assert (is_gimple_call (stmt));

  first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Remove the code after call_gsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  gsi = t->call_gsi;
  gimple_stmt_iterator gsi2 = gsi_last_bb (gimple_bb (gsi_stmt (gsi)));
  while (gsi_stmt (gsi2) != gsi_stmt (gsi))
    {
      gimple *t = gsi_stmt (gsi2);
      /* Do not remove the return statement, so that redirect_edge_and_branch
	 sees how the block ends.  */
      if (gimple_code (t) != GIMPLE_RETURN)
	{
	  gimple_stmt_iterator gsi3 = gsi2;
	  gsi_prev (&gsi2);
	  gsi_remove (&gsi3, true);
	  release_defs (t);
	}
      else
	gsi_prev (&gsi2);
    }

  /* The number of executions of the function has decreased by the
     tailcall.  */
  e = single_succ_edge (gsi_bb (t->call_gsi));

  profile_count count = e->count ();

  /* When the profile is inconsistent and the recursion edge is more frequent
     than the number of executions of the function, scale it down, so we do
     not end up with 0 executions of the entry block.  */
  if (count >= ENTRY_BLOCK_PTR_FOR_FN (cfun)->count)
    count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (7, 8);
  decrease_profile (EXIT_BLOCK_PTR_FOR_FN (cfun), count);
  decrease_profile (ENTRY_BLOCK_PTR_FOR_FN (cfun), count);
  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    decrease_profile (e->dest, count);

  /* Replace the call by a jump to the start of function.  */
  e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
				first);
  gcc_assert (e);
  PENDING_STMT (e) = NULL;

  /* Add phi node entries for arguments.  The ordering of the phi nodes should
     be the same as the ordering of the arguments.  */
  for (param = DECL_ARGUMENTS (current_function_decl),
	 idx = 0, gpi = gsi_start_phis (first);
       param;
       param = DECL_CHAIN (param), idx++)
    {
      if (!arg_needs_copy_p (param))
	continue;

      arg = gimple_call_arg (stmt, idx);
      phi = gpi.phi ();
      gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));

      add_phi_arg (phi, arg, e, gimple_location (stmt));
      gsi_next (&gpi);
    }

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);

  call = gsi_stmt (t->call_gsi);
  rslt = gimple_call_lhs (call);
  if (rslt != NULL_TREE && TREE_CODE (rslt) == SSA_NAME)
    {
      /* The result of the call will no longer be defined, so adjust the
	 SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = gimple_build_nop ();
    }

  gsi_remove (&t->call_gsi, true);
  release_defs (call);
}
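
/* Schematically (an illustration; the block layout is arbitrary),
   eliminate_tail_call turns

     ENTRY -> first -> ... -> bb: [x = f (args); return x] -> EXIT

   into

     ENTRY -> first -> ... -> bb: [] -> first

   i.e. the call and the statements after it are removed, bb's successor
   edge is redirected to FIRST (creating a back edge), and the PHI nodes
   in FIRST receive the call's arguments on that edge.  */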

/* Optimizes the tailcall described by T.  If OPT_TAILCALLS is true, also
   mark the tailcalls for the sibcall optimization.  */

static bool
optimize_tail_call (struct tailcall *t, bool opt_tailcalls)
{
  if (t->tail_recursion)
    {
      eliminate_tail_call (t);
      return true;
    }

  if (opt_tailcalls)
    {
      gcall *stmt = as_a <gcall *> (gsi_stmt (t->call_gsi));

      gimple_call_set_tail (stmt, true);
      cfun->tail_call_marked = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Found tail call ");
	  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	  fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index);
	}
    }

  return false;
}

/* Creates a tail-call accumulator of the same type as the return type of the
   current function.  LABEL is the name used when creating the temporary
   variable for the accumulator.  The accumulator is created as a PHI node in
   basic block BB, which must have a single predecessor, with the initial
   value INIT converted to the accumulator's type.  */

static tree
create_tailcall_accumulator (const char *label, basic_block bb, tree init)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  if (POINTER_TYPE_P (ret_type))
    ret_type = sizetype;

  tree tmp = make_temp_ssa_name (ret_type, NULL, label);
  gphi *phi;

  phi = create_phi_node (tmp, bb);
  /* RET_TYPE can be a float when -ffast-math is enabled.  */
  add_phi_arg (phi, fold_convert (ret_type, init), single_pred_edge (bb),
	       UNKNOWN_LOCATION);
  return PHI_RESULT (phi);
}
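
/* For example (a sketch with made-up SSA numbering), for the additive
   accumulator this creates in BB

     add_acc_1 = PHI <0 (single predecessor edge)>

   and adjust_accumulator_values later adds the argument for the back edge
   created by tail recursion elimination, so each iteration of the new
   loop sees the running value of the accumulator.  */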

/* Optimizes tail calls in the function, turning the tail recursion
   into iteration.  */

static unsigned int
tree_optimize_tail_calls_1 (bool opt_tailcalls)
{
  edge e;
  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  tree param;
  gimple *stmt;
  edge_iterator ei;

  if (!suitable_for_tail_opt_p ())
    return 0;
  if (opt_tailcalls)
    opt_tailcalls = suitable_for_tail_call_opt_p ();

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    {
      /* Only traverse the normal exits, i.e. those that end with a return
	 statement.  */
      stmt = last_stmt (e->src);

      if (stmt
	  && gimple_code (stmt) == GIMPLE_RETURN)
	find_tail_calls (e->src, &tailcalls);
    }

  if (live_vars)
    {
      destroy_live_vars (live_vars_vec);
      delete live_vars;
      live_vars = NULL;
    }

  /* Construct the phi nodes and accumulators if necessary.  */
  a_acc = m_acc = NULL_TREE;
  for (act = tailcalls; act; act = act->next)
    {
      if (!act->tail_recursion)
	continue;

      if (!phis_constructed)
	{
	  /* Ensure that the block has only a single predecessor and no
	     existing PHI nodes; otherwise split the edge from the entry
	     block.  */
	  if (!single_pred_p (first)
	      || !gimple_seq_empty_p (phi_nodes (first)))
	    first
	      = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

	  /* Copy the args if needed.  */
	  for (param = DECL_ARGUMENTS (current_function_decl);
	       param;
	       param = DECL_CHAIN (param))
	    if (arg_needs_copy_p (param))
	      {
		tree name = ssa_default_def (cfun, param);
		tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
		gphi *phi;

		set_ssa_default_def (cfun, param, new_name);
		phi = create_phi_node (name, first);
		add_phi_arg (phi, new_name, single_pred_edge (first),
			     EXPR_LOCATION (param));
	      }
	  phis_constructed = true;
	}

      if (act->add && !a_acc)
	a_acc = create_tailcall_accumulator ("add_acc", first,
					     integer_zero_node);

      if (act->mult && !m_acc)
	m_acc = create_tailcall_accumulator ("mult_acc", first,
					     integer_one_node);
    }

  if (a_acc || m_acc)
    {
      /* When the tail call elimination using accumulators is performed,
	 statements adding the accumulated value are inserted at all exits.
	 This turns all other tail calls into non-tail ones.  */
      opt_tailcalls = false;
    }

  for (; tailcalls; tailcalls = next)
    {
      next = tailcalls->next;
      changed |= optimize_tail_call (tailcalls, opt_tailcalls);
      free (tailcalls);
    }

  if (a_acc || m_acc)
    {
      /* Modify the remaining return statements.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	{
	  stmt = last_stmt (e->src);

	  if (stmt
	      && gimple_code (stmt) == GIMPLE_RETURN)
	    adjust_return_value (e->src, m_acc, a_acc);
	}
    }

  if (changed)
    {
      /* We may have created new loops.  Make them magically appear.  */
      loops_state_set (LOOPS_NEED_FIXUP);
      free_dominance_info (CDI_DOMINATORS);
    }

  /* Add phi nodes for the virtual operands defined in the function to the
     header of the loop created by tail recursion elimination.  Do so
     by triggering the SSA renamer.  */
  if (phis_constructed)
    mark_virtual_operands_for_renaming (cfun);

  if (changed)
    return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
  return 0;
}

static bool
gate_tail_calls (void)
{
  return flag_optimize_sibling_calls != 0 && dbg_cnt (tail_call);
}

static unsigned int
execute_tail_calls (void)
{
  return tree_optimize_tail_calls_1 (true);
}

namespace {

const pass_data pass_data_tail_recursion =
{
  GIMPLE_PASS, /* type */
  "tailr", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_tail_recursion : public gimple_opt_pass
{
public:
  pass_tail_recursion (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tail_recursion, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tail_recursion (m_ctxt); }
  virtual bool gate (function *) { return gate_tail_calls (); }
  virtual unsigned int execute (function *)
  {
    return tree_optimize_tail_calls_1 (false);
  }

}; // class pass_tail_recursion

} // anon namespace

gimple_opt_pass *
make_pass_tail_recursion (gcc::context *ctxt)
{
  return new pass_tail_recursion (ctxt);
}

namespace {

const pass_data pass_data_tail_calls =
{
  GIMPLE_PASS, /* type */
  "tailc", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_tail_calls : public gimple_opt_pass
{
public:
  pass_tail_calls (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tail_calls, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return gate_tail_calls (); }
  virtual unsigned int execute (function *) { return execute_tail_calls (); }

}; // class pass_tail_calls

} // anon namespace

gimple_opt_pass *
make_pass_tail_calls (gcc::context *ctxt)
{
  return new pass_tail_calls (ctxt);
}