]>
Commit | Line | Data |
---|---|---|
ac534736 | 1 | /* Tail call optimization on trees. |
ca786352 | 2 | Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 |
66647d44 | 3 | Free Software Foundation, Inc. |
6de9cd9a DN |
4 | |
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify | |
8 | it under the terms of the GNU General Public License as published by | |
9dcd6f09 | 9 | the Free Software Foundation; either version 3, or (at your option) |
6de9cd9a DN |
10 | any later version. |
11 | ||
12 | GCC is distributed in the hope that it will be useful, | |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 | GNU General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
9dcd6f09 NC |
18 | along with GCC; see the file COPYING3. If not see |
19 | <http://www.gnu.org/licenses/>. */ | |
6de9cd9a DN |
20 | |
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "coretypes.h" | |
24 | #include "tm.h" | |
25 | #include "tree.h" | |
6de9cd9a | 26 | #include "tm_p.h" |
6de9cd9a DN |
27 | #include "basic-block.h" |
28 | #include "function.h" | |
29 | #include "tree-flow.h" | |
30 | #include "tree-dump.h" | |
cf835838 | 31 | #include "gimple-pretty-print.h" |
6de9cd9a DN |
32 | #include "except.h" |
33 | #include "tree-pass.h" | |
34 | #include "flags.h" | |
35 | #include "langhooks.h" | |
6fb5fa3c | 36 | #include "dbgcnt.h" |
f0a0390e | 37 | #include "target.h" |
677f3fa8 | 38 | #include "common/common-target.h" |
6de9cd9a DN |
39 | |
40 | /* The file implements the tail recursion elimination. It is also used to | |
1ea7e6ad | 41 | analyze the tail calls in general, passing the results to the rtl level |
6de9cd9a DN |
42 | where they are used for sibcall optimization. |
43 | ||
44 | In addition to the standard tail recursion elimination, we handle the most | |
45 | trivial cases of making the call tail recursive by creating accumulators. | |
46 | For example the following function | |
47 | ||
48 | int sum (int n) | |
49 | { | |
50 | if (n > 0) | |
51 | return n + sum (n - 1); | |
52 | else | |
53 | return 0; | |
54 | } | |
55 | ||
56 | is transformed into | |
57 | ||
58 | int sum (int n) | |
59 | { | |
60 | int acc = 0; | |
61 | ||
62 | while (n > 0) | |
63 | acc += n--; | |
64 | ||
65 | return acc; | |
66 | } | |
67 | ||
b8698a0f | 68 | To do this, we maintain two accumulators (a_acc and m_acc) that indicate |
6de9cd9a DN |
69 | when we reach the return x statement, we should return a_acc + x * m_acc |
70 | instead. They are initially initialized to 0 and 1, respectively, | |
71 | so the semantics of the function is obviously preserved. If we are | |
72 | guaranteed that the value of the accumulator never change, we | |
73 | omit the accumulator. | |
74 | ||
75 | There are three cases how the function may exit. The first one is | |
6ce2bcb7 | 76 | handled in adjust_return_value, the other two in adjust_accumulator_values |
6de9cd9a DN |
77 | (the second case is actually a special case of the third one and we |
78 | present it separately just for clarity): | |
79 | ||
80 | 1) Just return x, where x is not in any of the remaining special shapes. | |
81 | We rewrite this to a gimple equivalent of return m_acc * x + a_acc. | |
b8698a0f | 82 | |
6de9cd9a | 83 | 2) return f (...), where f is the current function, is rewritten in a |
1ea7e6ad | 84 | classical tail-recursion elimination way, into assignment of arguments |
6de9cd9a DN |
85 | and jump to the start of the function. Values of the accumulators |
86 | are unchanged. | |
b8698a0f | 87 | |
6de9cd9a DN |
88 | 3) return a + m * f(...), where a and m do not depend on call to f. |
89 | To preserve the semantics described before we want this to be rewritten | |
90 | in such a way that we finally return | |
91 | ||
92 | a_acc + (a + m * f(...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f(...). | |
93 | ||
94 | I.e. we increase a_acc by a * m_acc, multiply m_acc by m and | |
95 | eliminate the tail call to f. Special cases when the value is just | |
96 | added or just multiplied are obtained by setting a = 0 or m = 1. | |
97 | ||
98 | TODO -- it is possible to do similar tricks for other operations. */ | |
99 | ||
/* A structure that describes the tailcall.  */

struct tailcall
{
  /* The iterator pointing to the call statement.  */
  gimple_stmt_iterator call_gsi;

  /* True if it is a call to the current function.  */
  bool tail_recursion;

  /* The return value of the caller is mult * f + add, where f is the return
     value of the call.  Either may be NULL_TREE, standing for the neutral
     element (1 for mult, 0 for add) -- see find_tail_calls.  */
  tree mult, add;

  /* Next tailcall in the chain.  */
  struct tailcall *next;
};
117 | ||
118 | /* The variables holding the value of multiplicative and additive | |
119 | accumulator. */ | |
120 | static tree m_acc, a_acc; | |
121 | ||
122 | static bool suitable_for_tail_opt_p (void); | |
123 | static bool optimize_tail_call (struct tailcall *, bool); | |
124 | static void eliminate_tail_call (struct tailcall *); | |
125 | static void find_tail_calls (basic_block, struct tailcall **); | |
126 | ||
127 | /* Returns false when the function is not suitable for tail call optimization | |
128 | from some reason (e.g. if it takes variable number of arguments). */ | |
129 | ||
130 | static bool | |
131 | suitable_for_tail_opt_p (void) | |
132 | { | |
e3b5732b | 133 | if (cfun->stdarg) |
6de9cd9a DN |
134 | return false; |
135 | ||
6de9cd9a DN |
136 | return true; |
137 | } | |
/* Returns false when the function is not suitable for tail call optimization
   from some reason (e.g. if it takes variable number of arguments).
   This test must pass in addition to suitable_for_tail_opt_p in order to make
   tail call discovery happen.  */

static bool
suitable_for_tail_call_opt_p (void)
{
  tree param;

  /* alloca (until we have stack slot life analysis) inhibits
     sibling call optimizations, but not tail recursion.  */
  if (cfun->calls_alloca)
    return false;

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at exit of the function.  Which means
     that we cannot do any sibcall transformations.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      && current_function_has_exception_handlers ())
    return false;

  /* Any function that calls setjmp might have longjmp called from
     any called function.  ??? We really should represent this
     properly in the CFG so that this needn't be special cased.  */
  if (cfun->calls_setjmp)
    return false;

  /* ??? It is OK if the argument of a function is taken in some cases,
     but not in all cases.  See PR15387 and PR19616.  Revisit for 4.1.  */
  /* An address-taken parameter may be modified through the pointer by the
     recursive invocation, so copying it via a phi node would be unsafe.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    if (TREE_ADDRESSABLE (param))
      return false;

  return true;
}
176 | ||
/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by GSI (in a sense that we already know EXPR's value
   at GSI).  We use the fact that we are only called from the chain of
   basic blocks that have only single successor.  Returns the expression
   containing the value of EXPR at GSI.  */

static tree
independent_of_stmt_p (tree expr, gimple at, gimple_stmt_iterator gsi)
{
  basic_block bb, call_bb, at_bb;
  edge e;
  edge_iterator ei;

  /* Constants are known everywhere.  */
  if (is_gimple_min_invariant (expr))
    return expr;

  /* Only SSA names can be traced through their definitions.  */
  if (TREE_CODE (expr) != SSA_NAME)
    return NULL_TREE;

  /* Mark the blocks in the chain leading to the end.  The marker is a
     self-pointer stored in bb->aux, cleared again below.  */
  at_bb = gimple_bb (at);
  call_bb = gimple_bb (gsi_stmt (gsi));
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = &bb->aux;
  bb->aux = &bb->aux;

  /* Walk the use-def chain of EXPR backwards, translating through phi
     nodes, until we leave the marked chain of blocks.  */
  while (1)
    {
      at = SSA_NAME_DEF_STMT (expr);
      bb = gimple_bb (at);

      /* The default definition or defined before the chain.  */
      if (!bb || !bb->aux)
	break;

      if (bb == call_bb)
	{
	  /* Defined in the block of the call: EXPR is only known at GSI
	     if its definition appears at or after GSI's position, in which
	     case it is not usable there.  */
	  for (; !gsi_end_p (gsi); gsi_next (&gsi))
	    if (gsi_stmt (gsi) == at)
	      break;

	  if (!gsi_end_p (gsi))
	    expr = NULL_TREE;
	  break;
	}

      /* Any non-phi definition inside the chain makes EXPR dependent.  */
      if (gimple_code (at) != GIMPLE_PHI)
	{
	  expr = NULL_TREE;
	  break;
	}

      /* Find the edge over which the marked chain enters this block and
	 continue with the corresponding phi argument.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	if (e->src->aux)
	  break;
      gcc_assert (e);

      expr = PHI_ARG_DEF_FROM_EDGE (at, e);
      if (TREE_CODE (expr) != SSA_NAME)
	{
	  /* The value is a constant.  */
	  break;
	}
    }

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = NULL;
  bb->aux = NULL;

  return expr;
}
249 | ||
/* Simulates the effect of an assignment STMT on the return value of the tail
   recursive CALL passed in ASS_VAR.  M and A are the multiplicative and the
   additive factor for the real return value.  Returns true if STMT has a
   shape we can handle, updating *M, *A and *ASS_VAR as needed.

   NOTE: the factors recorded here are folded together by find_tail_calls as
   (a + *A) * *M, i.e. the addend is accumulated before the multiplier is
   applied; the values stored for MINUS_EXPR below rely on that order.  */

static bool
process_assignment (gimple stmt, gimple_stmt_iterator call, tree *m,
		    tree *a, tree *ass_var)
{
  tree op0, op1 = NULL_TREE, non_ass_var = NULL_TREE;
  tree dest = gimple_assign_lhs (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
  tree src_var = gimple_assign_rhs1 (stmt);

  /* See if this is a simple copy operation of an SSA name to the function
     result.  In that case we may have a simple tail call.  Ignore type
     conversions that can never produce extra code between the function
     call and the function return.  */
  if ((rhs_class == GIMPLE_SINGLE_RHS || gimple_assign_cast_p (stmt))
      && (TREE_CODE (src_var) == SSA_NAME))
    {
      /* Reject a tailcall if the type conversion might need
	 additional code.  */
      if (gimple_assign_cast_p (stmt)
	  && TYPE_MODE (TREE_TYPE (dest)) != TYPE_MODE (TREE_TYPE (src_var)))
	return false;

      if (src_var != *ass_var)
	return false;

      *ass_var = dest;
      return true;
    }

  /* Pick up the operands; binary ops have a second operand.  */
  switch (rhs_class)
    {
    case GIMPLE_BINARY_RHS:
      op1 = gimple_assign_rhs2 (stmt);

      /* Fall through.  */

    case GIMPLE_UNARY_RHS:
      op0 = gimple_assign_rhs1 (stmt);
      break;

    default:
      return false;
    }

  /* Accumulator optimizations will reverse the order of operations.
     We can only do that for floating-point types if we're assuming
     that addition and multiplication are associative.  */
  if (!flag_associative_math)
    if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
      return false;

  /* For a binary operation, exactly one operand must be the running return
     value and the other must be independent of the statements between the
     call and this statement.  */
  if (rhs_class == GIMPLE_UNARY_RHS)
    ;
  else if (op0 == *ass_var
	   && (non_ass_var = independent_of_stmt_p (op1, stmt, call)))
    ;
  else if (op1 == *ass_var
	   && (non_ass_var = independent_of_stmt_p (op0, stmt, call)))
    ;
  else
    return false;

  switch (code)
    {
    case PLUS_EXPR:
      /* dest = v + c: accumulate c into the addend.  */
      *a = non_ass_var;
      *ass_var = dest;
      return true;

    case MULT_EXPR:
      /* dest = v * c: accumulate c into the multiplier.  */
      *m = non_ass_var;
      *ass_var = dest;
      return true;

    case NEGATE_EXPR:
      /* dest = -v: equivalent to multiplying by -1.  */
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
	*m = build_real (TREE_TYPE (op0), dconstm1);
      else
	*m = build_int_cst (TREE_TYPE (op0), -1);

      *ass_var = dest;
      return true;

    case MINUS_EXPR:
      if (*ass_var == op0)
	/* dest = v - c: just an addend of -c.  */
	*a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
      else
	{
	  /* dest = c - v = -v + c: record *M = -1 and *A = -c.  Combined
	     by the caller as (a + (-c)) * (-1) == -a + c, which is the
	     correct new addend.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (non_ass_var)))
	    *m = build_real (TREE_TYPE (non_ass_var), dconstm1);
	  else
	    *m = build_int_cst (TREE_TYPE (non_ass_var), -1);

	  *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
	}

      *ass_var = dest;
      return true;

      /* TODO -- Handle POINTER_PLUS_EXPR.  */

    default:
      return false;
    }
}
360 | ||
361 | /* Propagate VAR through phis on edge E. */ | |
362 | ||
363 | static tree | |
364 | propagate_through_phis (tree var, edge e) | |
365 | { | |
366 | basic_block dest = e->dest; | |
726a989a | 367 | gimple_stmt_iterator gsi; |
b8698a0f | 368 | |
726a989a RB |
369 | for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi)) |
370 | { | |
371 | gimple phi = gsi_stmt (gsi); | |
372 | if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var) | |
373 | return PHI_RESULT (phi); | |
374 | } | |
6de9cd9a DN |
375 | return var; |
376 | } | |
377 | ||
/* Finds tailcalls falling into basic block BB.  The list of found tailcalls is
   added to the start of RET.  Walks backwards over BB looking for the last
   call, then forwards from the call to the return, verifying that everything
   in between can be expressed as the accumulator transformation
   ret = m * f(...) + a.  */

static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple stmt, call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;
  tree var;
  referenced_var_iterator rvi;

  if (!single_succ_p (bb))
    return;

  /* Scan BB backwards for the last real statement; only a call is
     acceptable there.  */
  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels, returns, clobbers and debug stmts.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
	  || gimple_code (stmt) == GIMPLE_RETURN
	  || gimple_clobber_p (stmt)
	  || is_gimple_debug (stmt))
	continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
	{
	  call = stmt;
	  ass_var = gimple_call_lhs (stmt);
	  break;
	}

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
	  || gimple_has_volatile_ops (stmt))
	return;
    }

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	find_tail_calls (e->src, ret);

      return;
    }

  /* If the LHS of our call is not just a simple register, we can't
     transform this into a tail or sibling call.  This situation happens,
     in (e.g.) "*p = foo()" where foo returns a struct.  In this case
     we won't have a temporary here, but we need to carry out the side
     effect anyway, so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call() will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var && !is_gimple_reg (ass_var))
    return;

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func == current_function_decl)
    {
      tree arg;

      /* A self-call is tail recursion only if each argument can be copied
	 into the corresponding parameter.  */
      for (param = DECL_ARGUMENTS (func), idx = 0;
	   param && idx < gimple_call_num_args (call);
	   param = DECL_CHAIN (param), idx ++)
	{
	  arg = gimple_call_arg (call, idx);
	  if (param != arg)
	    {
	      /* Make sure there are no problems with copying.  The parameter
	         have a copyable type and the two arguments must have reasonably
	         equivalent types.  The latter requirement could be relaxed if
	         we emitted a suitable type conversion statement.  */
	      if (!is_gimple_reg_type (TREE_TYPE (param))
		  || !useless_type_conversion_p (TREE_TYPE (param),
					         TREE_TYPE (arg)))
		break;

	      /* The parameter should be a real operand, so that phi node
		 created for it at the start of the function has the meaning
		 of copying the value.  This test implies is_gimple_reg_type
		 from the previous condition, however this one could be
		 relaxed by being more careful with copying the new value
		 of the parameter (emitting appropriate GIMPLE_ASSIGN and
		 updating the virtual operands).  */
	      if (!is_gimple_reg (param))
		break;
	    }
	}
      /* Tail recursion only if every argument was checked successfully.  */
      if (idx == gimple_call_num_args (call) && !param)
	tail_recursion = true;
    }

  /* Make sure the tail invocation of this function does not refer
     to local variables.  */
  FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
    {
      if (TREE_CODE (var) != PARM_DECL
	  && auto_var_in_fn_p (var, cfun->decl)
	  && (ref_maybe_used_by_stmt_p (call, var)
	      || call_may_clobber_ref_p (call, var)))
	return;
    }

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after dce.  */
  m = NULL_TREE;
  a = NULL_TREE;

  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);

      /* Cross into successor blocks, translating the tracked value
	 through any phi nodes on the way.  */
      while (gsi_end_p (agsi))
	{
	  ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
	  abb = single_succ (abb);
	  agsi = gsi_start_bb (abb);
	}

      stmt = gsi_stmt (agsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      if (gimple_code (stmt) == GIMPLE_RETURN)
	break;

      if (gimple_clobber_p (stmt))
	continue;

      if (is_gimple_debug (stmt))
	continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
	return;

      /* This is a gimple assign. */
      if (! process_assignment (stmt, gsi, &tmp_m, &tmp_a, &ass_var))
	return;

      /* Fold the per-statement factors into the running M and A.  Note the
	 addend is accumulated before being scaled by TMP_M below; this
	 matches the encoding chosen in process_assignment.  */
      if (tmp_a)
	{
	  tree type = TREE_TYPE (tmp_a);
	  if (a)
	    a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);
	  else
	    a = tmp_a;
	}
      if (tmp_m)
	{
	  tree type = TREE_TYPE (tmp_m);
	  if (m)
	    m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);
	  else
	    m = tmp_m;

	  if (a)
	    a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
	}
    }

  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (stmt);

  /* We may proceed if there either is no return value, or the return value
     is identical to the call's return.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  /* Record the candidate at the head of the list.  */
  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}
587 | ||
726a989a | 588 | /* Helper to insert PHI_ARGH to the phi of VAR in the destination of edge E. */ |
6de9cd9a DN |
589 | |
590 | static void | |
726a989a RB |
591 | add_successor_phi_arg (edge e, tree var, tree phi_arg) |
592 | { | |
593 | gimple_stmt_iterator gsi; | |
594 | ||
595 | for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi)) | |
596 | if (PHI_RESULT (gsi_stmt (gsi)) == var) | |
597 | break; | |
598 | ||
599 | gcc_assert (!gsi_end_p (gsi)); | |
9e227d60 | 600 | add_phi_arg (gsi_stmt (gsi), phi_arg, e, UNKNOWN_LOCATION); |
726a989a RB |
601 | } |
602 | ||
/* Creates a GIMPLE statement which computes the operation specified by
   CODE on ACC and OP1 into a new temporary named LABEL, inserts the
   statement before the position specified by GSI, and returns the SSA
   name holding the result.  The temporary is created with the type of
   the current function's return value.  */

static tree
adjust_return_value_with_ops (enum tree_code code, const char *label,
			      tree acc, tree op1, gimple_stmt_iterator gsi)
{

  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree tmp = create_tmp_reg (ret_type, label);
  gimple stmt;
  tree result;

  add_referenced_var (tmp);

  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign_with_ops (code, tmp, acc, op1);
  else
    {
      /* Types differ: perform the operation in OP1's type and convert the
	 result back to ACC's type, gimplifying the folded expression.  */
      tree rhs = fold_convert (TREE_TYPE (acc),
			       fold_build2 (code,
					    TREE_TYPE (op1),
					    fold_convert (TREE_TYPE (op1), acc),
					    op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
				      false, NULL, true, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (NULL_TREE, rhs);
    }

  result = make_ssa_name (tmp, stmt);
  gimple_assign_set_lhs (stmt, result);
  update_stmt (stmt);
  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
  return result;
}
640 | ||
/* Creates a new GIMPLE statement that adjusts the value of accumulator ACC by
   the computation specified by CODE and OP1 and insert the statement
   at the position specified by GSI as a new statement.  Returns new SSA name
   of updated accumulator.  The new name is based on the same underlying
   variable as ACC (SSA_NAME_VAR), unlike adjust_return_value_with_ops which
   creates a fresh temporary.  */

static tree
update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
			     gimple_stmt_iterator gsi)
{
  gimple stmt;
  tree var;
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign_with_ops (code, SSA_NAME_VAR (acc), acc, op1);
  else
    {
      /* Types differ: perform the operation in OP1's type and convert the
	 result back to ACC's type, gimplifying the folded expression.  */
      tree rhs = fold_convert (TREE_TYPE (acc),
			       fold_build2 (code,
					    TREE_TYPE (op1),
					    fold_convert (TREE_TYPE (op1), acc),
					    op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
				      false, NULL, false, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (NULL_TREE, rhs);
    }
  var = make_ssa_name (SSA_NAME_VAR (acc), stmt);
  gimple_assign_set_lhs (stmt, var);
  update_stmt (stmt);
  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
  return var;
}
671 | ||
/* Adjust the accumulator values according to A and M after GSI, and update
   the phi nodes on edge BACK.  Implements the update
   a_acc += a * m_acc; m_acc *= m; described in the file comment.  */

static void
adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back)
{
  tree var, a_acc_arg, m_acc_arg;

  /* M and A may be arbitrary folded trees; gimplify them first.  */
  if (m)
    m = force_gimple_operand_gsi (&gsi, m, true, NULL, true, GSI_SAME_STMT);
  if (a)
    a = force_gimple_operand_gsi (&gsi, a, true, NULL, true, GSI_SAME_STMT);

  a_acc_arg = a_acc;
  m_acc_arg = m_acc;
  if (a)
    {
      if (m_acc)
	{
	  /* a_acc += a * m_acc; skip the multiplication when A is 1.  */
	  if (integer_onep (a))
	    var = m_acc;
	  else
	    var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc,
						a, gsi);
	}
      else
	var = a;

      a_acc_arg = update_accumulator_with_ops (PLUS_EXPR, a_acc, var, gsi);
    }

  /* m_acc *= m.  */
  if (m)
    m_acc_arg = update_accumulator_with_ops (MULT_EXPR, m_acc, m, gsi);

  /* Make the phi nodes on the back edge carry the updated values.  */
  if (a_acc)
    add_successor_phi_arg (back, a_acc, a_acc_arg);

  if (m_acc)
    add_successor_phi_arg (back, m_acc, m_acc_arg);
}
712 | ||
/* Adjust value of the return at the end of BB according to M and A
   accumulators: rewrites "return x" into "return m_acc * x + a_acc".  */

static void
adjust_return_value (basic_block bb, tree m, tree a)
{
  tree retval;
  gimple ret_stmt = gimple_seq_last_stmt (bb_seq (bb));
  gimple_stmt_iterator gsi = gsi_last_bb (bb);

  gcc_assert (gimple_code (ret_stmt) == GIMPLE_RETURN);

  /* Nothing to adjust when there is no (valid) return value.  */
  retval = gimple_return_retval (ret_stmt);
  if (!retval || retval == error_mark_node)
    return;

  if (m)
    retval = adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval,
					   gsi);
  if (a)
    retval = adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval,
					   gsi);
  gimple_return_set_retval (ret_stmt, retval);
  update_stmt (ret_stmt);
}
738 | ||
23504559 JH |
739 | /* Subtract COUNT and FREQUENCY from the basic block and it's |
740 | outgoing edge. */ | |
741 | static void | |
742 | decrease_profile (basic_block bb, gcov_type count, int frequency) | |
743 | { | |
744 | edge e; | |
745 | bb->count -= count; | |
746 | if (bb->count < 0) | |
747 | bb->count = 0; | |
748 | bb->frequency -= frequency; | |
749 | if (bb->frequency < 0) | |
750 | bb->frequency = 0; | |
751 | if (!single_succ_p (bb)) | |
752 | { | |
753 | gcc_assert (!EDGE_COUNT (bb->succs)); | |
754 | return; | |
755 | } | |
756 | e = single_succ_edge (bb); | |
757 | e->count -= count; | |
758 | if (e->count < 0) | |
759 | e->count = 0; | |
760 | } | |
761 | ||
016925bc ZD |
762 | /* Returns true if argument PARAM of the tail recursive call needs to be copied |
763 | when the call is eliminated. */ | |
764 | ||
765 | static bool | |
766 | arg_needs_copy_p (tree param) | |
767 | { | |
768 | tree def; | |
769 | ||
770 | if (!is_gimple_reg (param) || !var_ann (param)) | |
771 | return false; | |
b8698a0f | 772 | |
016925bc | 773 | /* Parameters that are only defined but never used need not be copied. */ |
5cd4ec7f | 774 | def = gimple_default_def (cfun, param); |
016925bc ZD |
775 | if (!def) |
776 | return false; | |
777 | ||
778 | return true; | |
779 | } | |
780 | ||
6de9cd9a DN |
781 | /* Eliminates tail call described by T. TMP_VARS is a list of |
782 | temporary variables used to copy the function arguments. */ | |
783 | ||
784 | static void | |
785 | eliminate_tail_call (struct tailcall *t) | |
786 | { | |
726a989a RB |
787 | tree param, rslt; |
788 | gimple stmt, call; | |
5039610b | 789 | tree arg; |
726a989a | 790 | size_t idx; |
6de9cd9a DN |
791 | basic_block bb, first; |
792 | edge e; | |
726a989a RB |
793 | gimple phi; |
794 | gimple_stmt_iterator gsi; | |
795 | gimple orig_stmt; | |
6de9cd9a | 796 | |
726a989a RB |
797 | stmt = orig_stmt = gsi_stmt (t->call_gsi); |
798 | bb = gsi_bb (t->call_gsi); | |
6de9cd9a DN |
799 | |
800 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
801 | { | |
802 | fprintf (dump_file, "Eliminated tail recursion in bb %d : ", | |
803 | bb->index); | |
726a989a | 804 | print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM); |
6de9cd9a DN |
805 | fprintf (dump_file, "\n"); |
806 | } | |
807 | ||
726a989a | 808 | gcc_assert (is_gimple_call (stmt)); |
6de9cd9a | 809 | |
c5cbcccf | 810 | first = single_succ (ENTRY_BLOCK_PTR); |
6de9cd9a | 811 | |
726a989a | 812 | /* Remove the code after call_gsi that will become unreachable. The |
8d3d51b5 ZD |
813 | possibly unreachable code in other blocks is removed later in |
814 | cfg cleanup. */ | |
726a989a RB |
815 | gsi = t->call_gsi; |
816 | gsi_next (&gsi); | |
817 | while (!gsi_end_p (gsi)) | |
8d3d51b5 | 818 | { |
726a989a | 819 | gimple t = gsi_stmt (gsi); |
8d3d51b5 ZD |
820 | /* Do not remove the return statement, so that redirect_edge_and_branch |
821 | sees how the block ends. */ | |
726a989a | 822 | if (gimple_code (t) == GIMPLE_RETURN) |
8d3d51b5 ZD |
823 | break; |
824 | ||
726a989a | 825 | gsi_remove (&gsi, true); |
87527e4b | 826 | release_defs (t); |
8d3d51b5 ZD |
827 | } |
828 | ||
23504559 | 829 | /* Number of executions of function has reduced by the tailcall. */ |
726a989a | 830 | e = single_succ_edge (gsi_bb (t->call_gsi)); |
23504559 JH |
831 | decrease_profile (EXIT_BLOCK_PTR, e->count, EDGE_FREQUENCY (e)); |
832 | decrease_profile (ENTRY_BLOCK_PTR, e->count, EDGE_FREQUENCY (e)); | |
833 | if (e->dest != EXIT_BLOCK_PTR) | |
834 | decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e)); | |
835 | ||
6de9cd9a | 836 | /* Replace the call by a jump to the start of function. */ |
726a989a RB |
837 | e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)), |
838 | first); | |
1e128c5f | 839 | gcc_assert (e); |
726a989a | 840 | PENDING_STMT (e) = NULL; |
6de9cd9a | 841 | |
016925bc ZD |
842 | /* Add phi node entries for arguments. The ordering of the phi nodes should |
843 | be the same as the ordering of the arguments. */ | |
6de9cd9a | 844 | for (param = DECL_ARGUMENTS (current_function_decl), |
726a989a | 845 | idx = 0, gsi = gsi_start_phis (first); |
6de9cd9a | 846 | param; |
910ad8de | 847 | param = DECL_CHAIN (param), idx++) |
6de9cd9a | 848 | { |
016925bc | 849 | if (!arg_needs_copy_p (param)) |
6de9cd9a | 850 | continue; |
726a989a RB |
851 | |
852 | arg = gimple_call_arg (stmt, idx); | |
853 | phi = gsi_stmt (gsi); | |
016925bc | 854 | gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi))); |
6de9cd9a | 855 | |
9e227d60 | 856 | add_phi_arg (phi, arg, e, gimple_location (stmt)); |
726a989a | 857 | gsi_next (&gsi); |
6de9cd9a DN |
858 | } |
859 | ||
860 | /* Update the values of accumulators. */ | |
726a989a | 861 | adjust_accumulator_values (t->call_gsi, t->mult, t->add, e); |
6de9cd9a | 862 | |
726a989a RB |
863 | call = gsi_stmt (t->call_gsi); |
864 | rslt = gimple_call_lhs (call); | |
865 | if (rslt != NULL_TREE) | |
6de9cd9a | 866 | { |
6de9cd9a DN |
867 | /* Result of the call will no longer be defined. So adjust the |
868 | SSA_NAME_DEF_STMT accordingly. */ | |
726a989a | 869 | SSA_NAME_DEF_STMT (rslt) = gimple_build_nop (); |
6de9cd9a DN |
870 | } |
871 | ||
726a989a | 872 | gsi_remove (&t->call_gsi, true); |
87527e4b | 873 | release_defs (call); |
6de9cd9a DN |
874 | } |
875 | ||
016925bc ZD |
876 | /* Add phi nodes for the virtual operands defined in the function to the |
877 | header of the loop created by tail recursion elimination. | |
878 | ||
879 | Originally, we used to add phi nodes only for call clobbered variables, | |
880 | as the value of the non-call clobbered ones obviously cannot be used | |
881 | or changed within the recursive call. However, the local variables | |
882 | from multiple calls now share the same location, so the virtual ssa form | |
883 | requires us to say that the location dies on further iterations of the loop, | |
884 | which requires adding phi nodes. | |
885 | */ | |
886 | static void | |
887 | add_virtual_phis (void) | |
888 | { | |
889 | referenced_var_iterator rvi; | |
890 | tree var; | |
891 | ||
892 | /* The problematic part is that there is no way how to know what | |
893 | to put into phi nodes (there in fact does not have to be such | |
894 | ssa name available). A solution would be to have an artificial | |
895 | use/kill for all virtual operands in EXIT node. Unless we have | |
896 | this, we cannot do much better than to rebuild the ssa form for | |
897 | possibly affected virtual ssa names from scratch. */ | |
898 | ||
1b9a784a | 899 | FOR_EACH_REFERENCED_VAR (cfun, var, rvi) |
016925bc | 900 | { |
5cd4ec7f | 901 | if (!is_gimple_reg (var) && gimple_default_def (cfun, var) != NULL_TREE) |
016925bc ZD |
902 | mark_sym_for_renaming (var); |
903 | } | |
016925bc ZD |
904 | } |
905 | ||
6de9cd9a DN |
906 | /* Optimizes the tailcall described by T. If OPT_TAILCALLS is true, also |
907 | mark the tailcalls for the sibcall optimization. */ | |
908 | ||
909 | static bool | |
910 | optimize_tail_call (struct tailcall *t, bool opt_tailcalls) | |
911 | { | |
912 | if (t->tail_recursion) | |
913 | { | |
914 | eliminate_tail_call (t); | |
915 | return true; | |
916 | } | |
917 | ||
918 | if (opt_tailcalls) | |
919 | { | |
726a989a | 920 | gimple stmt = gsi_stmt (t->call_gsi); |
6de9cd9a | 921 | |
726a989a | 922 | gimple_call_set_tail (stmt, true); |
6de9cd9a DN |
923 | if (dump_file && (dump_flags & TDF_DETAILS)) |
924 | { | |
925 | fprintf (dump_file, "Found tail call "); | |
726a989a RB |
926 | print_gimple_stmt (dump_file, stmt, 0, dump_flags); |
927 | fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index); | |
6de9cd9a DN |
928 | } |
929 | } | |
930 | ||
931 | return false; | |
932 | } | |
933 | ||
726a989a RB |
934 | /* Creates a tail-call accumulator of the same type as the return type of the |
935 | current function. LABEL is the name used to creating the temporary | |
936 | variable for the accumulator. The accumulator will be inserted in the | |
937 | phis of a basic block BB with single predecessor with an initial value | |
938 | INIT converted to the current function return type. */ | |
939 | ||
940 | static tree | |
941 | create_tailcall_accumulator (const char *label, basic_block bb, tree init) | |
942 | { | |
943 | tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl)); | |
acd63801 | 944 | tree tmp = create_tmp_reg (ret_type, label); |
726a989a RB |
945 | gimple phi; |
946 | ||
947 | add_referenced_var (tmp); | |
948 | phi = create_phi_node (tmp, bb); | |
949 | /* RET_TYPE can be a float when -ffast-maths is enabled. */ | |
f5045c96 | 950 | add_phi_arg (phi, fold_convert (ret_type, init), single_pred_edge (bb), |
9e227d60 | 951 | UNKNOWN_LOCATION); |
726a989a RB |
952 | return PHI_RESULT (phi); |
953 | } | |
b8698a0f | 954 | |
/* Optimizes tail calls in the function, turning the tail recursion
   into iteration.  If OPT_TAILCALLS is true, non-recursive tail calls
   are additionally marked for the rtl sibcall optimization.  Returns a
   TODO_* flag set for the pass manager (0 when nothing changed).  */

static unsigned int
tree_optimize_tail_calls_1 (bool opt_tailcalls)
{
  edge e;
  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = single_succ (ENTRY_BLOCK_PTR);
  tree param;
  gimple stmt;
  edge_iterator ei;

  if (!suitable_for_tail_opt_p ())
    return 0;
  if (opt_tailcalls)
    opt_tailcalls = suitable_for_tail_call_opt_p ();

  /* Collect candidate tail calls from every block that exits via a
     return statement.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      /* Only traverse the normal exits, i.e. those that end with return
	 statement.  */
      stmt = last_stmt (e->src);

      if (stmt
	  && gimple_code (stmt) == GIMPLE_RETURN)
	find_tail_calls (e->src, &tailcalls);
    }

  /* Construct the phi nodes and accumulators if necessary.  */
  a_acc = m_acc = NULL_TREE;
  for (act = tailcalls; act; act = act->next)
    {
      if (!act->tail_recursion)
	continue;

      /* The first tail recursion found triggers creation of the loop
	 header: a block with one PHI per copied argument.  */
      if (!phis_constructed)
	{
	  /* Ensure that there is only one predecessor of the block
	     or if there are existing degenerate PHI nodes.  */
	  if (!single_pred_p (first)
	      || !gimple_seq_empty_p (phi_nodes (first)))
	    first = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));

	  /* Copy the args if needed.  */
	  for (param = DECL_ARGUMENTS (current_function_decl);
	       param;
	       param = DECL_CHAIN (param))
	    if (arg_needs_copy_p (param))
	      {
		tree name = gimple_default_def (cfun, param);
		tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
		gimple phi;

		/* The old default def becomes the PHI result; a fresh
		   ssa name takes over as the incoming value from the
		   function entry.  */
		set_default_def (param, new_name);
		phi = create_phi_node (name, first);
		SSA_NAME_DEF_STMT (name) = phi;
		add_phi_arg (phi, new_name, single_pred_edge (first),
			     EXPR_LOCATION (param));
	      }
	  phis_constructed = true;
	}

      if (act->add && !a_acc)
	a_acc = create_tailcall_accumulator ("add_acc", first,
					     integer_zero_node);

      if (act->mult && !m_acc)
	m_acc = create_tailcall_accumulator ("mult_acc", first,
					     integer_one_node);
    }

  if (a_acc || m_acc)
    {
      /* When the tail call elimination using accumulators is performed,
	 statements adding the accumulated value are inserted at all exits.
	 This turns all other tail calls to non-tail ones.  */
      opt_tailcalls = false;
    }

  /* Apply the transformation to each candidate, freeing the list as we
     go.  */
  for (; tailcalls; tailcalls = next)
    {
      next = tailcalls->next;
      changed |= optimize_tail_call (tailcalls, opt_tailcalls);
      free (tailcalls);
    }

  if (a_acc || m_acc)
    {
      /* Modify the remaining return statements.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  stmt = last_stmt (e->src);

	  if (stmt
	      && gimple_code (stmt) == GIMPLE_RETURN)
	    adjust_return_value (e->src, m_acc, a_acc);
	}
    }

  /* Edges were redirected, so the dominator tree is stale.  */
  if (changed)
    free_dominance_info (CDI_DOMINATORS);

  if (phis_constructed)
    add_virtual_phis ();
  if (changed)
    return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
  return 0;
}
1066 | ||
c2924966 | 1067 | static unsigned int |
6de9cd9a DN |
1068 | execute_tail_recursion (void) |
1069 | { | |
1994bfea | 1070 | return tree_optimize_tail_calls_1 (false); |
6de9cd9a DN |
1071 | } |
1072 | ||
1073 | static bool | |
1074 | gate_tail_calls (void) | |
1075 | { | |
6fb5fa3c | 1076 | return flag_optimize_sibling_calls != 0 && dbg_cnt (tail_call); |
6de9cd9a DN |
1077 | } |
1078 | ||
c2924966 | 1079 | static unsigned int |
6de9cd9a DN |
1080 | execute_tail_calls (void) |
1081 | { | |
1994bfea | 1082 | return tree_optimize_tail_calls_1 (true); |
6de9cd9a DN |
1083 | } |
1084 | ||
/* The "tailr" pass descriptor: turns tail recursion into iteration
   early, without marking other tail calls.  Requires a CFG in SSA
   form; verifies SSA on completion.  */

struct gimple_opt_pass pass_tail_recursion =
{
 {
  GIMPLE_PASS,
  "tailr",				/* name */
  gate_tail_calls,			/* gate */
  execute_tail_recursion,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa			/* todo_flags_finish */
 }
};
1103 | ||
/* The "tailc" pass descriptor: eliminates tail recursion and marks the
   remaining tail calls so the rtl expander can emit sibling calls.
   Requires a CFG in SSA form; verifies SSA on completion.  */

struct gimple_opt_pass pass_tail_calls =
{
 {
  GIMPLE_PASS,
  "tailc",				/* name */
  gate_tail_calls,			/* gate */
  execute_tail_calls,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa			/* todo_flags_finish */
 }
};