]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/cgraphunit.c
c-common.c, [...]: Fix comment typos.
[thirdparty/gcc.git] / gcc / cgraphunit.c
1 /* Callgraph based intraprocedural optimizations.
2 Copyright (C) 2003, 2004 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "tree-inline.h"
28 #include "langhooks.h"
29 #include "hashtab.h"
30 #include "toplev.h"
31 #include "flags.h"
32 #include "ggc.h"
33 #include "debug.h"
34 #include "target.h"
35 #include "cgraph.h"
36 #include "diagnostic.h"
37 #include "timevar.h"
38 #include "params.h"
39 #include "fibheap.h"
40 #include "c-common.h"
41 #include "intl.h"
42
/* Estimated number of instructions consumed by a single function call;
   used when predicting code size change from inlining.  */
#define INSNS_PER_CALL 10

static void cgraph_expand_all_functions (void);
static void cgraph_mark_functions_to_output (void);
static void cgraph_expand_function (struct cgraph_node *);
static tree record_call_1 (tree *, int *, void *);
static void cgraph_mark_local_functions (void);
static void cgraph_optimize_function (struct cgraph_node *);
static bool cgraph_default_inline_p (struct cgraph_node *n);
static void cgraph_analyze_function (struct cgraph_node *node);
static void cgraph_decide_inlining_incrementally (struct cgraph_node *);

/* Statistics we collect about inlining algorithm.  */
static int ncalls_inlined;	/* Call sites replaced by inlined bodies.  */
static int nfunctions_inlined;	/* Offline function copies eliminated.  */
static int initial_insns;	/* Estimated insns before any inlining.  */
static int overall_insns;	/* Running estimate updated as we inline.  */

/* Records tree nodes seen in cgraph_create_edges.  Simply using
   walk_tree_without_duplicates doesn't guarantee each node is visited
   once because it gets a new htab upon each recursive call from
   record_calls_1.  */
static htab_t visited_nodes;
66
/* Determine if function DECL is needed.  That is, visible to something
   either outside this translation unit, something magic in the system
   configury, or (if not doing unit-at-a-time) to something we haven't
   seen yet.  NODE is the callgraph node associated with DECL.  */

static bool
decide_is_function_needed (struct cgraph_node *node, tree decl)
{
  /* If we decided it was needed before, but at the time we didn't have
     the body of the function available, then it's still needed.  We have
     to go back and re-check its dependencies now.  */
  if (node->needed)
    return true;

  /* Externally visible functions must be output.  The exception is
     COMDAT functions that must be output only when they are needed.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    return true;

  /* Constructors and destructors are reachable from the runtime by
     some mechanism.  */
  if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
    return true;

  /* If the user told us it is used, then it must be so.  */
  if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
    return true;

  /* ??? If the assembler name is set by hand, it is possible to assemble
     the name later after finalizing the function and the fact is noticed
     in assemble_name then.  This is arguably a bug.  */
  if (DECL_ASSEMBLER_NAME_SET_P (decl)
      && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
    return true;

  /* In unit-at-a-time mode nothing below applies; deferral is the norm.  */
  if (flag_unit_at_a_time)
    return false;

  /* If not doing unit at a time, then we'll only defer this function
     if it's marked for inlining.  Otherwise we want to emit it now.  */

  /* "extern inline" functions are never output locally.  */
  if (DECL_EXTERNAL (decl))
    return false;
  /* We want to emit COMDAT functions only when absolutely necessary.  */
  if (DECL_COMDAT (decl))
    return false;
  if (!DECL_INLINE (decl)
      || (!node->local.disregard_inline_limits
	  /* When declared inline, defer even the uninlinable functions.
	     This allows them to be eliminated when unused.  */
	  && !DECL_DECLARED_INLINE_P (decl)
	  && (!node->local.inlinable || !cgraph_default_inline_p (node))))
    return true;

  return false;
}
124
125 /* When not doing unit-at-a-time, output all functions enqueued.
126 Return true when such a functions were found. */
127
128 bool
129 cgraph_assemble_pending_functions (void)
130 {
131 bool output = false;
132
133 if (flag_unit_at_a_time)
134 return false;
135
136 while (cgraph_nodes_queue)
137 {
138 struct cgraph_node *n = cgraph_nodes_queue;
139
140 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
141 if (!n->origin && !DECL_EXTERNAL (n->decl))
142 {
143 cgraph_expand_function (n);
144 output = true;
145 }
146 }
147
148 return output;
149 }
150
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NESTED is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool nested)
{
  struct cgraph_node *node = cgraph_node (decl);

  if (node->local.finalized)
    {
      /* As a GCC extension we allow redefinition of the function.  The
	 semantics when both copies of bodies differ is not well defined.
	 We replace the old body with new body so in unit at a time mode
	 we always use new body, while in normal mode we may end up with
	 old body inlined into some functions and new body expanded and
	 inlined in others.

	 ??? It may make more sense to use one body for inlining and other
	 body for expanding the function but this is difficult to do.  */

      /* If node->output is set, then this is a unit-at-a-time compilation
	 and we have already begun whole-unit analysis.  This is *not*
	 testing for whether we've already emitted the function.  That
	 case can be sort-of legitimately seen with real function
	 redefinition errors.  I would argue that the front end should
	 never present us with such a case, but don't enforce that for now.  */
      if (node->output)
	abort ();

      /* Reset our data structures so we can analyze the function again.  */
      memset (&node->local, 0, sizeof (node->local));
      memset (&node->global, 0, sizeof (node->global));
      memset (&node->rtl, 0, sizeof (node->rtl));
      node->analyzed = false;
      node->local.redefined_extern_inline = true;
      /* Drop all outgoing edges; they will be rebuilt from the new body.  */
      while (node->callees)
	cgraph_remove_edge (node, node->callees->callee);

      /* We may need to re-queue the node for assembling in case
	 we already proceeded it and ignored as not needed.  */
      if (node->reachable && !flag_unit_at_a_time)
	{
	  struct cgraph_node *n;

	  /* Clearing reachable only when the node is no longer queued
	     lets cgraph_mark_needed_node re-enqueue it below.  */
	  for (n = cgraph_nodes_queue; n; n = n->next_needed)
	    if (n == node)
	      break;
	  if (!n)
	    node->reachable = 0;
	}
    }

  notice_global_symbol (decl);
  node->decl = decl;
  node->local.finalized = true;

  /* If not unit at a time, then we need to create the call graph
     now, so that called functions can be queued and emitted now.  */
  if (!flag_unit_at_a_time)
    {
      cgraph_analyze_function (node);
      cgraph_decide_inlining_incrementally (node);
    }

  if (decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);

  /* If not unit at a time, go ahead and emit everything we've found
     to be reachable at this time.  (The unit-at-a-time check lives in
     cgraph_assemble_pending_functions itself.)  */
  if (!nested)
    {
      if (!cgraph_assemble_pending_functions ())
	ggc_collect ();
    }

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* We will never really output the function body, clear the SAVED_INSNS array
     early then.  */
  if (DECL_EXTERNAL (decl))
    DECL_STRUCT_FUNCTION (decl) = NULL;
}
237
/* Walk tree and record all calls.  Called via walk_tree; DATA is the
   FUNCTION_DECL of the function whose body is being walked, and is
   passed through to cgraph_record_call as the caller.  */
static tree
record_call_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* ??? Really, we should mark this decl as *potentially* referenced
	 by this function and re-examine whether the decl is actually used
	 after rtl has been generated.  */
      if (TREE_STATIC (t))
	cgraph_varpool_mark_needed_node (cgraph_varpool_node (t));
      break;

    case ADDR_EXPR:
      if (flag_unit_at_a_time)
	{
	  /* Record dereferences to the functions.  This makes the
	     functions reachable unconditionally.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  if (TREE_CODE (decl) == FUNCTION_DECL)
	    cgraph_mark_needed_node (cgraph_node (decl));
	}
      break;

    case CALL_EXPR:
      {
	tree decl = get_callee_fndecl (*tp);
	if (decl && TREE_CODE (decl) == FUNCTION_DECL)
	  {
	    cgraph_record_call (data, decl);

	    /* When we see a function call, we don't want to look at the
	       function reference in the ADDR_EXPR that is hanging from
	       the CALL_EXPR we're examining here, because we would
	       conclude incorrectly that the function's address could be
	       taken by something that is not a function call.  So only
	       walk the function parameter list, skip the other subtrees.  */

	    walk_tree (&TREE_OPERAND (*tp, 1), record_call_1, data,
		       visited_nodes);
	    *walk_subtrees = 0;
	  }
	break;
      }

    default:
      /* Save some cycles by not walking types and declaration as we
	 won't find anything useful there anyway.  */
      if (DECL_P (*tp) || TYPE_P (*tp))
	{
	  *walk_subtrees = 0;
	  break;
	}

      /* Codes at or past LAST_AND_UNUSED_TREE_CODE are front-end
	 specific; let the language hook analyze them.  */
      if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
	return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees, data);
      break;
    }

  return NULL;
}
302
303 /* Create cgraph edges for function calls inside BODY from DECL. */
304
305 void
306 cgraph_create_edges (tree decl, tree body)
307 {
308 /* The nodes we're interested in are never shared, so walk
309 the tree ignoring duplicates. */
310 visited_nodes = htab_create (37, htab_hash_pointer,
311 htab_eq_pointer, NULL);
312 walk_tree (&body, record_call_1, decl, visited_nodes);
313 htab_delete (visited_nodes);
314 visited_nodes = NULL;
315 }
316
/* Analyze the function scheduled to be output: build its outgoing call
   edges, estimate its size, and compute its inlinability flags.  */
static void
cgraph_analyze_function (struct cgraph_node *node)
{
  tree decl = node->decl;
  struct cgraph_edge *e;

  current_function_decl = decl;

  /* First kill forward declaration so reverse inlining works properly.  */
  cgraph_create_edges (decl, DECL_SAVED_TREE (decl));

  node->local.inlinable = tree_inlinable_function_p (decl);
  /* self_insns may survive from a previous analysis; estimate only once.  */
  if (!node->local.self_insns)
    node->local.self_insns
      = lang_hooks.tree_inlining.estimate_num_insns (decl);
  if (node->local.inlinable)
    node->local.disregard_inline_limits
      = lang_hooks.tree_inlining.disregard_inline_limits (decl);
  /* Record an initial not-inlined reason on every pending caller edge.  */
  for (e = node->callers; e; e = e->next_caller)
    if (e->inline_failed)
      {
	if (node->local.redefined_extern_inline)
	  e->inline_failed = N_("redefined extern inline functions are not "
				"considered for inlining");
	else if (!node->local.inlinable)
	  e->inline_failed = N_("function not inlinable");
	else
	  e->inline_failed = N_("function not considered for inlining");
      }
  if (flag_really_no_inline && !node->local.disregard_inline_limits)
    node->local.inlinable = 0;
  /* Inlining characteristics are maintained by the cgraph_mark_inline.  */
  node->global.insns = node->local.self_insns;
  if (!DECL_EXTERNAL (decl))
    {
      node->global.cloned_times = 1;
      node->global.will_be_output = true;
    }

  node->analyzed = true;
  current_function_decl = NULL;
}
360
361 /* Analyze the whole compilation unit once it is parsed completely. */
362
363 void
364 cgraph_finalize_compilation_unit (void)
365 {
366 struct cgraph_node *node;
367
368 if (!flag_unit_at_a_time)
369 {
370 cgraph_assemble_pending_functions ();
371 return;
372 }
373
374 cgraph_varpool_assemble_pending_decls ();
375 if (!quiet_flag)
376 fprintf (stderr, "\nAnalyzing compilation unit\n");
377
378 timevar_push (TV_CGRAPH);
379 if (cgraph_dump_file)
380 {
381 fprintf (cgraph_dump_file, "Initial entry points:");
382 for (node = cgraph_nodes; node; node = node->next)
383 if (node->needed && DECL_SAVED_TREE (node->decl))
384 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
385 fprintf (cgraph_dump_file, "\n");
386 }
387
388 /* Propagate reachability flag and lower representation of all reachable
389 functions. In the future, lowering will introduce new functions and
390 new entry points on the way (by template instantiation and virtual
391 method table generation for instance). */
392 while (cgraph_nodes_queue)
393 {
394 struct cgraph_edge *edge;
395 tree decl = cgraph_nodes_queue->decl;
396
397 node = cgraph_nodes_queue;
398 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
399
400 /* ??? It is possible to create extern inline function and later using
401 weak alas attribute to kill its body. See
402 gcc.c-torture/compile/20011119-1.c */
403 if (!DECL_SAVED_TREE (decl))
404 continue;
405
406 if (node->analyzed || !node->reachable || !DECL_SAVED_TREE (decl))
407 abort ();
408
409 cgraph_analyze_function (node);
410
411 for (edge = node->callees; edge; edge = edge->next_callee)
412 if (!edge->callee->reachable)
413 cgraph_mark_reachable_node (edge->callee);
414
415 cgraph_varpool_assemble_pending_decls ();
416 }
417
418 /* Collect entry points to the unit. */
419
420 if (cgraph_dump_file)
421 {
422 fprintf (cgraph_dump_file, "Unit entry points:");
423 for (node = cgraph_nodes; node; node = node->next)
424 if (node->needed && DECL_SAVED_TREE (node->decl))
425 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
426 fprintf (cgraph_dump_file, "\n\nInitial ");
427 dump_cgraph (cgraph_dump_file);
428 }
429
430 if (cgraph_dump_file)
431 fprintf (cgraph_dump_file, "\nReclaiming functions:");
432
433 for (node = cgraph_nodes; node; node = node->next)
434 {
435 tree decl = node->decl;
436
437 if (!node->reachable && DECL_SAVED_TREE (decl))
438 {
439 cgraph_remove_node (node);
440 if (cgraph_dump_file)
441 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
442 }
443 else
444 node->next_needed = NULL;
445 }
446 if (cgraph_dump_file)
447 {
448 fprintf (cgraph_dump_file, "\n\nReclaimed ");
449 dump_cgraph (cgraph_dump_file);
450 }
451 ggc_collect ();
452 timevar_pop (TV_CGRAPH);
453 }
454
/* Figure out what functions we want to assemble: set NODE->output on
   each node whose body this compilation must emit.  */

static void
cgraph_mark_functions_to_output (void)
{
  struct cgraph_node *node;

  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->decl;
      struct cgraph_edge *e;

      /* No node may be marked twice.  */
      if (node->output)
	abort ();

      /* Look for at least one caller edge that was not fully inlined.  */
      for (e = node->callers; e; e = e->next_caller)
	if (e->inline_failed)
	  break;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (DECL_SAVED_TREE (decl)
	  && (node->needed
	      || (e && node->reachable))
	  && !TREE_ASM_WRITTEN (decl) && !node->origin
	  && !DECL_EXTERNAL (decl))
	node->output = 1;
      else
	/* Body will not be emitted; release the function data early.  */
	DECL_STRUCT_FUNCTION (decl) = NULL;
    }
}
487
/* Optimize the function before expansion: apply the tree-level inline
   substitutions recorded in the callgraph, then recurse into nested
   functions.  */

static void
cgraph_optimize_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  timevar_push (TV_INTEGRATION);
  /* optimize_inline_calls avoids inlining of current_function_decl.  */
  current_function_decl = decl;
  if (flag_inline_trees)
    {
      struct cgraph_edge *e;

      /* Only pay for optimize_inline_calls when some callee edge was
	 actually inlined, or -Winline / always_inline diagnostics are
	 wanted for it.  */
      for (e = node->callees; e; e = e->next_callee)
	if (!e->inline_failed || warn_inline
	    || (DECL_DECLARED_INLINE_P (e->callee->decl)
		&& lookup_attribute ("always_inline",
				     DECL_ATTRIBUTES (e->callee->decl))))
	  break;
      if (e)
	optimize_inline_calls (decl);
    }
  if (node->nested)
    {
      for (node = node->nested; node; node = node->next_nested)
	cgraph_optimize_function (node);
    }
  timevar_pop (TV_INTEGRATION);
}
518
/* Expand function specified by NODE into RTL and assemble it.  */

static void
cgraph_expand_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  /* Announce only in unit-at-a-time mode.  NOTE(review): presumably
     the non-unit path announced the function earlier — confirm against
     the front end drivers.  */
  if (flag_unit_at_a_time)
    announce_function (decl);

  cgraph_optimize_function (node);

  /* Generate RTL for the body of DECL.  Nested functions are expanded
     via lang_expand_decl_stmt.  */
  lang_hooks.callgraph.expand_function (decl);
  /* The callgraph drives output directly; nothing may remain deferred.  */
  if (DECL_DEFER_OUTPUT (decl))
    abort ();

  current_function_decl = NULL;
}
539
/* Fill array ORDER with all nodes in reverse topological order (callees
   before callers) and return the number of nodes stored.

   NODE->aux encodes traversal state: NULL means unvisited, a pointer to
   a caller edge means "next edge still to examine", and the address of
   the local sentinel LAST means the node is fully processed.  */

static int
cgraph_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  /* LAST is never read; only its address is used as the done marker.  */
  struct cgraph_edge *edge, last;

  struct cgraph_node **stack =
    xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right throughout inline functions.  */
  for (node = cgraph_nodes; node; node = node->next)
    node->aux = NULL;
  for (node = cgraph_nodes; node; node = node->next)
    if (!node->aux)
      {
	node2 = node;
	if (!node->callers)
	  node->aux = &last;
	else
	  node->aux = node->callers;
	while (node2)
	  {
	    while (node2->aux != &last)
	      {
		edge = node2->aux;
		/* Advance NODE2's cursor past EDGE before descending.  */
		if (edge->next_caller)
		  node2->aux = edge->next_caller;
		else
		  node2->aux = &last;
		/* Descend into an unvisited caller, remembering where we
		   were on the explicit stack.  */
		if (!edge->caller->aux)
		  {
		    if (!edge->caller->callers)
		      edge->caller->aux = &last;
		    else
		      edge->caller->aux = edge->caller->callers;
		    stack[stack_size++] = node2;
		    node2 = edge->caller;
		    break;
		  }
	      }
	    /* All callers of NODE2 done: emit it and pop the stack.  */
	    if (node2->aux == &last)
	      {
		order[order_pos++] = node2;
		if (stack_size)
		  node2 = stack[--stack_size];
		else
		  node2 = NULL;
	      }
	  }
      }
  free (stack);
  return order_pos;
}
601
/* Number of times NODE has been cloned by inlining during the current
   heuristic pass, stashed in the node's scratch AUX pointer.  Only
   meaningful between cgraph_inlined_into/_callees and the consumer.  */
#define INLINED_TIMES(node) ((size_t)(node)->aux)
#define SET_INLINED_TIMES(node,times) ((node)->aux = (void *)(times))
604
605 /* Return list of nodes we decided to inline NODE into, set their output
606 flag and compute INLINED_TIMES.
607
608 We do simple backtracing to get INLINED_TIMES right. This should not be
609 expensive as we limit the amount of inlining. Alternatively we may first
610 discover set of nodes, topologically sort these and propagate
611 INLINED_TIMES */
612
613 static int
614 cgraph_inlined_into (struct cgraph_node *node, struct cgraph_node **array)
615 {
616 int nfound = 0;
617 struct cgraph_edge **stack;
618 struct cgraph_edge *e, *e1;
619 int sp;
620 int i;
621
622 /* Fast path: since we traverse in mostly topological order, we will likely
623 find no edges. */
624 for (e = node->callers; e; e = e->next_caller)
625 if (!e->inline_failed)
626 break;
627
628 if (!e)
629 return 0;
630
631 /* Allocate stack for back-tracking up callgraph. */
632 stack = xmalloc ((cgraph_n_nodes + 1) * sizeof (struct cgraph_edge));
633 sp = 0;
634
635 /* Push the first edge on to the stack. */
636 stack[sp++] = e;
637
638 while (sp)
639 {
640 struct cgraph_node *caller;
641
642 /* Look at the edge on the top of the stack. */
643 e = stack[sp - 1];
644 caller = e->caller;
645
646 /* Check if the caller destination has been visited yet. */
647 if (!caller->output)
648 {
649 array[nfound++] = e->caller;
650 /* Mark that we have visited the destination. */
651 caller->output = true;
652 SET_INLINED_TIMES (caller, 0);
653 }
654 SET_INLINED_TIMES (caller, INLINED_TIMES (caller) + 1);
655
656 for (e1 = caller->callers; e1; e1 = e1->next_caller)
657 if (!e1->inline_failed)
658 break;
659
660 if (e1)
661 stack[sp++] = e1;
662 else
663 {
664 while (true)
665 {
666 for (e1 = e->next_caller; e1; e1 = e1->next_caller)
667 if (!e1->inline_failed)
668 break;
669
670 if (e1)
671 {
672 stack[sp - 1] = e1;
673 break;
674 }
675 else
676 {
677 sp--;
678 if (!sp)
679 break;
680 e = stack[sp - 1];
681 }
682 }
683 }
684 }
685
686 free (stack);
687
688
689 if (cgraph_dump_file)
690 {
691 fprintf (cgraph_dump_file, " Found inline predecesors of %s:",
692 cgraph_node_name (node));
693 for (i = 0; i < nfound; i++)
694 {
695 fprintf (cgraph_dump_file, " %s", cgraph_node_name (array[i]));
696 if (INLINED_TIMES (array[i]) != 1)
697 fprintf (cgraph_dump_file, " (%i times)",
698 (int)INLINED_TIMES (array[i]));
699 }
700 fprintf (cgraph_dump_file, "\n");
701 }
702
703 return nfound;
704 }
705
706 /* Return list of nodes we decided to inline into NODE, set their output
707 flag and compute INLINED_TIMES.
708
709 This function is identical to cgraph_inlined_into with callers and callees
710 nodes swapped. */
711
712 static int
713 cgraph_inlined_callees (struct cgraph_node *node, struct cgraph_node **array)
714 {
715 int nfound = 0;
716 struct cgraph_edge **stack;
717 struct cgraph_edge *e, *e1;
718 int sp;
719 int i;
720
721 /* Fast path: since we traverse in mostly topological order, we will likely
722 find no edges. */
723 for (e = node->callees; e; e = e->next_callee)
724 if (!e->inline_failed)
725 break;
726
727 if (!e)
728 return 0;
729
730 /* Allocate stack for back-tracking up callgraph. */
731 stack = xmalloc ((cgraph_n_nodes + 1) * sizeof (struct cgraph_edge));
732 sp = 0;
733
734 /* Push the first edge on to the stack. */
735 stack[sp++] = e;
736
737 while (sp)
738 {
739 struct cgraph_node *callee;
740
741 /* Look at the edge on the top of the stack. */
742 e = stack[sp - 1];
743 callee = e->callee;
744
745 /* Check if the callee destination has been visited yet. */
746 if (!callee->output)
747 {
748 array[nfound++] = e->callee;
749 /* Mark that we have visited the destination. */
750 callee->output = true;
751 SET_INLINED_TIMES (callee, 0);
752 }
753 SET_INLINED_TIMES (callee, INLINED_TIMES (callee) + 1);
754
755 for (e1 = callee->callees; e1; e1 = e1->next_callee)
756 if (!e1->inline_failed)
757 break;
758 if (e1)
759 stack[sp++] = e1;
760 else
761 {
762 while (true)
763 {
764 for (e1 = e->next_callee; e1; e1 = e1->next_callee)
765 if (!e1->inline_failed)
766 break;
767
768 if (e1)
769 {
770 stack[sp - 1] = e1;
771 break;
772 }
773 else
774 {
775 sp--;
776 if (!sp)
777 break;
778 e = stack[sp - 1];
779 }
780 }
781 }
782 }
783
784 free (stack);
785
786 if (cgraph_dump_file)
787 {
788 fprintf (cgraph_dump_file, " Found inline successors of %s:",
789 cgraph_node_name (node));
790 for (i = 0; i < nfound; i++)
791 {
792 fprintf (cgraph_dump_file, " %s", cgraph_node_name (array[i]));
793 if (INLINED_TIMES (array[i]) != 1)
794 fprintf (cgraph_dump_file, " (%i times)",
795 (int)INLINED_TIMES (array[i]));
796 }
797 fprintf (cgraph_dump_file, "\n");
798 }
799
800 return nfound;
801 }
802
803 /* Perform reachability analysis and reclaim all unreachable nodes.
804 This function also remove unneeded bodies of extern inline functions
805 and thus needs to be done only after inlining decisions has been made. */
806 static bool
807 cgraph_remove_unreachable_nodes (void)
808 {
809 struct cgraph_node *first = (void *) 1;
810 struct cgraph_node *node;
811 bool changed = false;
812 int insns = 0;
813
814 if (cgraph_dump_file)
815 fprintf (cgraph_dump_file, "\nReclaiming functions:");
816 #ifdef ENABLE_CHECKING
817 for (node = cgraph_nodes; node; node = node->next)
818 if (node->aux)
819 abort ();
820 #endif
821 for (node = cgraph_nodes; node; node = node->next)
822 if (node->needed && (!DECL_EXTERNAL (node->decl) || !node->analyzed))
823 {
824 node->aux = first;
825 first = node;
826 }
827 else if (node->aux)
828 abort ();
829
830 /* Perform reachability analysis. As a special case do not consider
831 extern inline functions not inlined as live because we won't output
832 them at all. */
833 while (first != (void *) 1)
834 {
835 struct cgraph_edge *e;
836 node = first;
837 first = first->aux;
838
839 for (e = node->callees; e; e = e->next_callee)
840 if (!e->callee->aux
841 && node->analyzed
842 && (!e->inline_failed || !e->callee->analyzed
843 || !DECL_EXTERNAL (e->callee->decl)))
844 {
845 e->callee->aux = first;
846 first = e->callee;
847 }
848 }
849
850 /* Remove unreachable nodes. Extern inline functions need special care;
851 Unreachable extern inline functions shall be removed.
852 Reachable extern inline functions we never inlined shall get their bodies
853 eliminated.
854 Reachable extern inline functions we sometimes inlined will be turned into
855 unanalyzed nodes so they look like for true extern functions to the rest
856 of code. */
857 for (node = cgraph_nodes; node; node = node->next)
858 {
859 if (!node->aux)
860 {
861 int local_insns;
862 tree decl = node->decl;
863
864 if (DECL_STRUCT_FUNCTION (decl))
865 local_insns = node->local.self_insns;
866 else
867 local_insns = 0;
868 if (cgraph_dump_file)
869 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
870 if (!node->analyzed || !DECL_EXTERNAL (node->decl))
871 cgraph_remove_node (node);
872 else
873 {
874 struct cgraph_edge *e;
875
876 for (e = node->callers; e; e = e->next_caller)
877 if (e->caller->aux)
878 break;
879 if (e || node->needed)
880 {
881 DECL_SAVED_TREE (node->decl) = NULL_TREE;
882 while (node->callees)
883 cgraph_remove_edge (node, node->callees->callee);
884 node->analyzed = false;
885 }
886 else
887 cgraph_remove_node (node);
888 }
889 if (!DECL_SAVED_TREE (decl))
890 insns += local_insns;
891 changed = true;
892 }
893 }
894 for (node = cgraph_nodes; node; node = node->next)
895 node->aux = NULL;
896 if (cgraph_dump_file)
897 fprintf (cgraph_dump_file, "\nReclaimed %i insns", insns);
898 return changed;
899 }
900
901
902 /* Estimate size of the function after inlining WHAT into TO. */
903
904 static int
905 cgraph_estimate_size_after_inlining (int times, struct cgraph_node *to,
906 struct cgraph_node *what)
907 {
908 return (what->global.insns - INSNS_PER_CALL) * times + to->global.insns;
909 }
910
911 /* Estimate the growth caused by inlining NODE into all callees. */
912
913 static int
914 cgraph_estimate_growth (struct cgraph_node *node)
915 {
916 int growth = 0;
917 int calls_saved = 0;
918 int clones_added = 0;
919 struct cgraph_edge *e;
920
921 for (e = node->callers; e; e = e->next_caller)
922 if (e->inline_failed)
923 {
924 growth += ((cgraph_estimate_size_after_inlining (1, e->caller, node)
925 -
926 e->caller->global.insns) *e->caller->global.cloned_times);
927 calls_saved += e->caller->global.cloned_times;
928 clones_added += e->caller->global.cloned_times;
929 }
930
931 /* ??? Wrong for self recursive functions or cases where we decide to not
932 inline for different reasons, but it is not big deal as in that case
933 we will keep the body around, but we will also avoid some inlining. */
934 if (!node->needed && !node->origin && !DECL_EXTERNAL (node->decl))
935 growth -= node->global.insns, clones_added--;
936
937 if (!calls_saved)
938 calls_saved = 1;
939
940 return growth;
941 }
942
/* Update insn sizes after inlining WHAT into TO that is already inlined into
   all nodes in INLINED array (size NINLINED; the functions TO is
   transitively inlined into) and propagate clone counts through
   INLINED_CALLEES (size NINLINED_CALLEES; the functions inlined into
   WHAT).  Both arrays carry INLINED_TIMES values prepared by
   cgraph_inlined_into / cgraph_inlined_callees.  */

static void
cgraph_mark_inline (struct cgraph_node *to, struct cgraph_node *what,
		    struct cgraph_node **inlined, int ninlined,
		    struct cgraph_node **inlined_callees,
		    int ninlined_callees)
{
  int i;
  int times = 0;		/* TO->WHAT edges newly marked inlined.  */
  int clones = 0;		/* New copies of WHAT's body created.  */
  struct cgraph_edge *e;
  bool called = false;		/* WHAT keeps other non-inlined callers.  */
  int new_insns;

  what->global.inlined = 1;
  /* Mark every pending edge from TO to WHAT as inlined.  */
  for (e = what->callers; e; e = e->next_caller)
    {
      if (e->caller == to)
	{
	  if (!e->inline_failed)
	    continue;
	  e->inline_failed = NULL;
	  times++;
	  clones += e->caller->global.cloned_times;
	}
      else if (e->inline_failed)
	called = true;
    }
  if (!times)
    abort ();
  ncalls_inlined += times;

  new_insns = cgraph_estimate_size_after_inlining (times, to, what);
  if (to->global.will_be_output)
    overall_insns += new_insns - to->global.insns;
  to->global.insns = new_insns;

  /* If no standalone copy of WHAT is needed any more, drop its offline
     body from the unit-size estimate.  */
  if (!called && !what->needed && !what->origin
      && flag_unit_at_a_time
      && !DECL_EXTERNAL (what->decl))
    {
      if (!what->global.will_be_output)
	abort ();
      clones--;
      nfunctions_inlined++;
      what->global.will_be_output = 0;
      overall_insns -= what->global.insns;
    }
  what->global.cloned_times += clones;
  /* Propagate the growth to every function TO is inlined into.  */
  for (i = 0; i < ninlined; i++)
    {
      new_insns =
	cgraph_estimate_size_after_inlining (INLINED_TIMES (inlined[i]) *
					     times, inlined[i], what);
      if (inlined[i]->global.will_be_output)
	overall_insns += new_insns - inlined[i]->global.insns;
      inlined[i]->global.insns = new_insns;
    }
  /* Functions inlined into WHAT get cloned once per new copy of WHAT.  */
  for (i = 0; i < ninlined_callees; i++)
    {
      inlined_callees[i]->global.cloned_times +=
	INLINED_TIMES (inlined_callees[i]) * clones;
    }
}
1009
/* Return false when inlining WHAT into TO is not good idea as it would cause
   too large growth of function bodies.  INLINED (size NINLINED) lists
   the functions TO is already inlined into, with INLINED_TIMES set;
   *REASON receives a translatable message on failure.  */

static bool
cgraph_check_inline_limits (struct cgraph_node *to, struct cgraph_node *what,
			    struct cgraph_node **inlined, int ninlined,
			    const char **reason)
{
  int i;
  int times = 0;
  struct cgraph_edge *e;
  int newsize;
  int limit;

  /* Count the call sites of WHAT inside TO.  */
  for (e = to->callees; e; e = e->next_callee)
    if (e->callee == what)
      times++;

  /* When inlining large function body called once into small function,
     take the inlined function as base for limiting the growth.  */
  if (to->local.self_insns > what->local.self_insns)
    limit = to->local.self_insns;
  else
    limit = what->local.self_insns;

  limit += limit * PARAM_VALUE (PARAM_LARGE_FUNCTION_GROWTH) / 100;

  newsize = cgraph_estimate_size_after_inlining (times, to, what);
  if (newsize > PARAM_VALUE (PARAM_LARGE_FUNCTION_INSNS)
      && newsize > limit)
    {
      *reason = N_("--param large-function-growth limit reached");
      return false;
    }
  /* Apply the same growth limit to every function TO is inlined into.  */
  for (i = 0; i < ninlined; i++)
    {
      newsize =
	cgraph_estimate_size_after_inlining (INLINED_TIMES (inlined[i]) *
					     times, inlined[i], what);
      if (newsize > PARAM_VALUE (PARAM_LARGE_FUNCTION_INSNS)
	  && newsize >
	  inlined[i]->local.self_insns *
	  (100 + PARAM_VALUE (PARAM_LARGE_FUNCTION_GROWTH)) / 100)
	{
	  *reason = N_("--param large-function-growth limit reached while inlining the caller");
	  return false;
	}
    }
  return true;
}
1060
1061 /* Return true when function N is small enough to be inlined. */
1062
1063 static bool
1064 cgraph_default_inline_p (struct cgraph_node *n)
1065 {
1066 if (!DECL_INLINE (n->decl) || !DECL_SAVED_TREE (n->decl))
1067 return false;
1068 if (DECL_DECLARED_INLINE_P (n->decl))
1069 return n->global.insns < MAX_INLINE_INSNS_SINGLE;
1070 else
1071 return n->global.insns < MAX_INLINE_INSNS_AUTO;
1072 }
1073
1074 /* Set inline_failed for all callers of given function to REASON. */
1075
1076 static void
1077 cgraph_set_inline_failed (struct cgraph_node *node, const char *reason)
1078 {
1079 struct cgraph_edge *e;
1080
1081 if (cgraph_dump_file)
1082 fprintf (cgraph_dump_file, "Inlining failed: %s\n", reason);
1083 for (e = node->callers; e; e = e->next_caller)
1084 if (e->inline_failed)
1085 e->inline_failed = reason;
1086 }
1087
/* We use a greedy algorithm for inlining of small functions:
   all inline candidates are put into a heap prioritized by the estimated
   growth of the overall number of instructions, and the estimates are
   updated as inlining proceeds.

   INLINED and INLINED_CALLEES are just pointers to arrays large enough
   to be passed to cgraph_inlined_into and cgraph_inlined_callees. */
1094
static void
cgraph_decide_inlining_of_small_functions (struct cgraph_node **inlined,
					   struct cgraph_node **inlined_callees)
{
  int i;
  struct cgraph_node *node;
  /* Heap of candidates keyed on estimated overall insn growth; the
     candidate whose inlining grows the unit the least comes out first.  */
  fibheap_t heap = fibheap_new ();
  struct fibnode **heap_node =
    xcalloc (cgraph_max_uid, sizeof (struct fibnode *));
  int ninlined, ninlined_callees;
  /* Unit-wide size budget: the initial size scaled up by
     --param inline-unit-growth percent.  */
  int max_insns = ((HOST_WIDEST_INT) initial_insns
		   * (100 + PARAM_VALUE (PARAM_INLINE_UNIT_GROWTH)) / 100);

  /* Put all inline candidates into the heap. */

  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Skip functions that are not inlinable, have no callers, or are
	 always_inline (those were handled by an earlier pass).  */
      if (!node->local.inlinable || !node->callers
	  || node->local.disregard_inline_limits)
	continue;

      if (!cgraph_default_inline_p (node))
	{
	  cgraph_set_inline_failed (node,
	    N_("--param max-inline-insns-single limit reached"));
	  continue;
	}
      heap_node[node->uid] =
	fibheap_insert (heap, cgraph_estimate_growth (node), node);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nDeciding on smaller functions:\n");
  /* Greedily inline the cheapest remaining candidate until either the
     unit growth budget is exhausted or the heap runs dry.  */
  while (overall_insns <= max_insns && (node = fibheap_extract_min (heap)))
    {
      struct cgraph_edge *e;
      int old_insns = overall_insns;

      heap_node[node->uid] = NULL;
      if (cgraph_dump_file)
	fprintf (cgraph_dump_file,
		 "\nConsidering %s with %i insns\n"
		 " Estimated growth is %+i insns.\n",
		 cgraph_node_name (node), node->global.insns,
		 cgraph_estimate_growth (node));
      /* Earlier inlining into this node may have grown it past the
	 single-function limit since its key was last updated; re-check.  */
      if (!cgraph_default_inline_p (node))
	{
	  cgraph_set_inline_failed (node,
	    N_("--param max-inline-insns-single limit reached after inlining into the callee"));
	  continue;
	}
      ninlined_callees = cgraph_inlined_callees (node, inlined_callees);
      for (e = node->callers; e; e = e->next_caller)
	if (e->inline_failed)
	  {
	    /* Marking recursive function inline has sane semantic and
	       thus we should not warn on it. */
	    if (e->caller == node)
	      {
		/* The empty string suppresses the -Winline warning without
		   counting as a successful inline.  */
		e->inline_failed = "";
		continue;
	      }
	    ninlined = cgraph_inlined_into (e->caller, inlined);
	    if (e->callee->output)
	      e->inline_failed = "";
	    if (e->callee->output
		|| !cgraph_check_inline_limits (e->caller, node, inlined,
						ninlined, &e->inline_failed))
	      {
		/* Clear the flags set by cgraph_inlined_into before
		   moving to the next caller.  */
		for (i = 0; i < ninlined; i++)
		  inlined[i]->output = 0, inlined[i]->aux = 0;
		if (cgraph_dump_file)
		  fprintf (cgraph_dump_file, " Not inlining into %s.\n",
			   cgraph_node_name (e->caller));
		continue;
	      }
	    cgraph_mark_inline (e->caller, node, inlined, ninlined,
				inlined_callees, ninlined_callees);
	    /* The caller just grew; refresh its heap position.  */
	    if (heap_node[e->caller->uid])
	      fibheap_replace_key (heap, heap_node[e->caller->uid],
				   cgraph_estimate_growth (e->caller));

	    /* Size of the functions we updated into has changed, so update
	       the keys. */
	    for (i = 0; i < ninlined; i++)
	      {
		inlined[i]->output = 0, inlined[i]->aux = 0;
		if (heap_node[inlined[i]->uid])
		  fibheap_replace_key (heap, heap_node[inlined[i]->uid],
				       cgraph_estimate_growth (inlined[i]));
	      }
	    if (cgraph_dump_file)
	      fprintf (cgraph_dump_file,
		       " Inlined into %s which now has %i insns.\n",
		       cgraph_node_name (e->caller),
		       e->caller->global.insns);
	  }

      /* Similarly all functions called by the function we just inlined
         are now called more times; update keys. */

      for (e = node->callees; e; e = e->next_callee)
	if (e->inline_failed && heap_node[e->callee->uid])
	  fibheap_replace_key (heap, heap_node[e->callee->uid],
			       cgraph_estimate_growth (e->callee));

      for (i = 0; i < ninlined_callees; i++)
	{
	  struct cgraph_edge *e;

	  for (e = inlined_callees[i]->callees; e; e = e->next_callee)
	    if (e->inline_failed && heap_node[e->callee->uid])
	      fibheap_replace_key (heap, heap_node[e->callee->uid],
				   cgraph_estimate_growth (e->callee));

	  /* Clear the flags set by cgraph_inlined_callees.  */
	  inlined_callees[i]->output = 0;
	  inlined_callees[i]->aux = 0;
	}
      if (cgraph_dump_file)
	fprintf (cgraph_dump_file,
		 " Inlined %i times for a net change of %+i insns.\n",
		 node->global.cloned_times, overall_insns - old_insns);
    }
  /* Anything left in the heap did not fit the unit growth budget;
     record the reason for -Winline diagnostics.  */
  while ((node = fibheap_extract_min (heap)) != NULL)
    if (!node->local.disregard_inline_limits)
      cgraph_set_inline_failed (node, N_("--param inline-unit-growth limit reached"));
  fibheap_delete (heap);
  free (heap_node);
}
1224
1225 /* Decide on the inlining. We do so in the topological order to avoid
1226 expenses on updating datastructures. */
1227
static void
cgraph_decide_inlining (void)
{
  struct cgraph_node *node;
  int nnodes;
  /* Scratch arrays shared by the passes below; each is sized to hold
     every node in the callgraph.  */
  struct cgraph_node **order =
    xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
  struct cgraph_node **inlined =
    xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
  struct cgraph_node **inlined_callees =
    xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
  int ninlined;
  int ninlined_callees;
  int old_insns = 0;
  int i, y;

  /* Compute the unit size before any inlining takes place.  */
  for (node = cgraph_nodes; node; node = node->next)
    initial_insns += node->local.self_insns;
  overall_insns = initial_insns;

  nnodes = cgraph_postorder (order);

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file,
	     "\nDeciding on inlining. Starting with %i insns.\n",
	     initial_insns);

  for (node = cgraph_nodes; node; node = node->next)
    node->aux = 0;

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nInlining always_inline functions:\n");
#ifdef ENABLE_CHECKING
  /* The aux and output flags must be clean between passes; they are
     used as scratch markers by cgraph_inlined_into/_callees.  */
  for (node = cgraph_nodes; node; node = node->next)
    if (node->aux || node->output)
      abort ();
#endif

  /* In the first pass mark all always_inline edges. Do this with a priority
     so none of our later choices will make this impossible. */
  for (i = nnodes - 1; i >= 0; i--)
    {
      struct cgraph_edge *e;

      node = order[i];

      /* Find the first always_inline callee, if any.  */
      for (e = node->callees; e; e = e->next_callee)
	if (e->callee->local.disregard_inline_limits)
	  break;
      if (!e)
	continue;
      if (cgraph_dump_file)
	fprintf (cgraph_dump_file,
		 "\nConsidering %s %i insns (always inline)\n",
		 cgraph_node_name (e->callee), e->callee->global.insns);
      ninlined = cgraph_inlined_into (order[i], inlined);
      for (; e; e = e->next_callee)
	{
	  old_insns = overall_insns;
	  if (!e->inline_failed || !e->callee->local.inlinable
	      || !e->callee->local.disregard_inline_limits)
	    continue;
	  /* Refuse direct and (via the output flag) indirect recursion.  */
	  if (e->callee->output || e->callee == node)
	    {
	      e->inline_failed = N_("recursive inlining");
	      continue;
	    }
	  ninlined_callees =
	    cgraph_inlined_callees (e->callee, inlined_callees);
	  cgraph_mark_inline (node, e->callee, inlined, ninlined,
			      inlined_callees, ninlined_callees);
	  /* Clear the scratch flags set by cgraph_inlined_callees.  */
	  for (y = 0; y < ninlined_callees; y++)
	    inlined_callees[y]->output = 0, inlined_callees[y]->aux = 0;
	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file,
		     " Inlined into %s which now has %i insns.\n",
		     cgraph_node_name (node->callees->caller),
		     node->callees->caller->global.insns);
	}
      if (cgraph_dump_file && node->global.cloned_times > 0)
	fprintf (cgraph_dump_file,
		 " Inlined %i times for a net change of %+i insns.\n",
		 node->global.cloned_times, overall_insns - old_insns);
      /* Clear the scratch flags set by cgraph_inlined_into.  */
      for (y = 0; y < ninlined; y++)
	inlined[y]->output = 0, inlined[y]->aux = 0;
    }
#ifdef ENABLE_CHECKING
  for (node = cgraph_nodes; node; node = node->next)
    if (node->aux || node->output)
      abort ();
#endif

  if (!flag_really_no_inline)
    {
      /* Second pass: greedy inlining of small functions within the
	 overall unit growth budget.  */
      cgraph_decide_inlining_of_small_functions (inlined, inlined_callees);
#ifdef ENABLE_CHECKING
      for (node = cgraph_nodes; node; node = node->next)
	if (node->aux || node->output)
	  abort ();
#endif

      if (cgraph_dump_file)
	fprintf (cgraph_dump_file, "\nDeciding on functions called once:\n");

      /* And finally decide what functions are called once. */

      for (i = nnodes - 1; i >= 0; i--)
	{
	  node = order[i];

	  /* A candidate has exactly one caller, is not otherwise needed,
	     and is local to this unit (not external or COMDAT).  */
	  if (node->callers && !node->callers->next_caller && !node->needed
	      && node->local.inlinable && node->callers->inline_failed
	      && !DECL_EXTERNAL (node->decl) && !DECL_COMDAT (node->decl))
	    {
	      bool ok = true;
	      struct cgraph_node *node1;

	      /* Verify that we won't duplicate the caller. */
	      for (node1 = node->callers->caller;
		   node1->callers && !node1->callers->inline_failed
		   && ok; node1 = node1->callers->caller)
		if (node1->callers->next_caller || node1->needed)
		  ok = false;
	      if (ok)
		{
		  const char *dummy_reason;
		  if (cgraph_dump_file)
		    fprintf (cgraph_dump_file,
			     "\nConsidering %s %i insns.\n"
			     " Called once from %s %i insns.\n",
			     cgraph_node_name (node), node->global.insns,
			     cgraph_node_name (node->callers->caller),
			     node->callers->caller->global.insns);
		  ninlined = cgraph_inlined_into (node->callers->caller,
						  inlined);
		  old_insns = overall_insns;

		  /* Inlining functions once would never cause inlining warnings. */
		  if (cgraph_check_inline_limits
		      (node->callers->caller, node, inlined, ninlined,
		       &dummy_reason))
		    {
		      ninlined_callees =
			cgraph_inlined_callees (node, inlined_callees);
		      cgraph_mark_inline (node->callers->caller, node, inlined,
					  ninlined, inlined_callees,
					  ninlined_callees);
		      for (y = 0; y < ninlined_callees; y++)
			inlined_callees[y]->output = 0, inlined_callees[y]->aux = 0;
		      if (cgraph_dump_file)
			fprintf (cgraph_dump_file,
				 " Inlined into %s which now has %i insns"
				 " for a net change of %+i insns.\n",
				 cgraph_node_name (node->callers->caller),
				 node->callers->caller->global.insns,
				 overall_insns - old_insns);
		    }
		  else
		    {
		      if (cgraph_dump_file)
			fprintf (cgraph_dump_file,
				 " Inline limit reached, not inlined.\n");
		    }
		  /* Clear the scratch flags set by cgraph_inlined_into.  */
		  for (y = 0; y < ninlined; y++)
		    inlined[y]->output = 0, inlined[y]->aux = 0;
		}
	    }
	}
    }
  /* Functions fully inlined everywhere may now be unreachable;
     drop them from the callgraph.  */
  cgraph_remove_unreachable_nodes ();

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file,
	     "\nInlined %i calls, eliminated %i functions, "
	     "%i insns turned to %i insns.\n\n",
	     ncalls_inlined, nfunctions_inlined, initial_insns,
	     overall_insns);
  free (order);
  free (inlined);
  free (inlined_callees);
}
1409
/* Decide on inlining of calls made by a single function NODE: first
   inline every always_inline callee, then apply the default limits to
   the remaining inline candidates. */
1412
static void
cgraph_decide_inlining_incrementally (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  /* Scratch arrays for cgraph_inlined_into/_callees; sized to hold
     every node in the callgraph.  */
  struct cgraph_node **inlined =
    xmalloc (sizeof (struct cgraph_node *) * cgraph_n_nodes);
  struct cgraph_node **inlined_callees =
    xmalloc (sizeof (struct cgraph_node *) * cgraph_n_nodes);
  int ninlined;
  int ninlined_callees;
  int y;

  ninlined = cgraph_inlined_into (node, inlined);

  /* First of all look for always inline functions. */
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->local.disregard_inline_limits && e->inline_failed
	/* ??? It is possible that renaming variable removed the function body
	   in duplicate_decls. See gcc.c-torture/compile/20011119-2.c */
	&& DECL_SAVED_TREE (e->callee->decl))
      {
	/* Refuse direct and (via the output flag) indirect recursion.  */
	if (e->callee->output || e->callee == node)
	  {
	    e->inline_failed = N_("recursive inlining");
	    continue;
	  }
	ninlined_callees = cgraph_inlined_callees (e->callee, inlined_callees);
	cgraph_mark_inline (node, e->callee, inlined, ninlined,
			    inlined_callees, ninlined_callees);
	/* Clear the scratch flags set by cgraph_inlined_callees.  */
	for (y = 0; y < ninlined_callees; y++)
	  inlined_callees[y]->output = 0, inlined_callees[y]->aux = 0;
      }

  if (!flag_really_no_inline)
    {
      /* Now do the automatic inlining. */
      for (e = node->callees; e; e = e->next_callee)
	if (e->callee->local.inlinable && e->inline_failed
	    && cgraph_default_inline_p (e->callee)
	    && cgraph_check_inline_limits (node, e->callee, inlined,
					   ninlined, &e->inline_failed)
	    && DECL_SAVED_TREE (e->callee->decl))
	  {
	    /* Marking recursive function inline has sane semantic and thus
	       we should not warn on it. */
	    if (e->callee->output || e->callee == node)
	      {
		/* The empty string suppresses the -Winline warning without
		   counting as a successful inline.  */
		e->inline_failed = "";
		continue;
	      }
	    ninlined_callees = cgraph_inlined_callees (e->callee,
						       inlined_callees);
	    cgraph_mark_inline (node, e->callee, inlined, ninlined,
				inlined_callees, ninlined_callees);
	    for (y = 0; y < ninlined_callees; y++)
	      inlined_callees[y]->output = 0, inlined_callees[y]->aux = 0;
	  }
    }

  /* Clear the flags set by cgraph_inlined_into. */
  for (y = 0; y < ninlined; y++)
    inlined[y]->output = 0, inlined[y]->aux = 0;

  free (inlined);
  free (inlined_callees);
}
1479
1480
/* Return true when CALLEE_DECL should be inlined into CALLER_DECL.
   When returning false and REASON is non-NULL, set it to the reason
   why the call was not inlined. */
1484
1485 bool
1486 cgraph_inline_p (tree caller_decl, tree callee_decl, const char **reason)
1487 {
1488 struct cgraph_node *caller = cgraph_node (caller_decl);
1489 struct cgraph_node *callee = cgraph_node (callee_decl);
1490 struct cgraph_edge *e;
1491
1492 for (e = caller->callees; e; e = e->next_callee)
1493 if (e->callee == callee)
1494 {
1495 if (e->inline_failed && reason)
1496 *reason = e->inline_failed;
1497 return !e->inline_failed;
1498 }
1499 /* We do not record builtins in the callgraph. Perhaps it would make more
1500 sense to do so and then prune out those not overwritten by explicit
1501 function body. */
1502 if (reason)
1503 *reason = "originally indirect function calls never inlined";
1504 return false;
1505 }
1506 /* Expand all functions that must be output.
1507
1508 Attempt to topologically sort the nodes so function is output when
1509 all called functions are already assembled to allow data to be
1510 propagated across the callgraph. Use a stack to get smaller distance
1511 between a function and its callees (later we may choose to use a more
1512 sophisticated algorithm for function reordering; we will likely want
1513 to use subsections to make the output functions appear in top-down
1514 order). */
1515
1516 static void
1517 cgraph_expand_all_functions (void)
1518 {
1519 struct cgraph_node *node;
1520 struct cgraph_node **order =
1521 xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
1522 int order_pos = 0;
1523 int i;
1524
1525 cgraph_mark_functions_to_output ();
1526
1527 order_pos = cgraph_postorder (order);
1528
1529 for (i = order_pos - 1; i >= 0; i--)
1530 {
1531 node = order[i];
1532 if (node->output)
1533 {
1534 if (!node->reachable)
1535 abort ();
1536 node->output = 0;
1537 cgraph_expand_function (node);
1538 }
1539 }
1540 free (order);
1541 }
1542
1543 /* Mark all local functions.
1544
1545 A local function is one whose calls can occur only in the
1546 current compilation unit and all its calls are explicit,
1547 so we can change its calling convention.
1548 We simply mark all static functions whose address is not taken
1549 as local. */
1550
1551 static void
1552 cgraph_mark_local_functions (void)
1553 {
1554 struct cgraph_node *node;
1555
1556 if (cgraph_dump_file)
1557 fprintf (cgraph_dump_file, "\nMarking local functions:");
1558
1559 /* Figure out functions we want to assemble. */
1560 for (node = cgraph_nodes; node; node = node->next)
1561 {
1562 node->local.local = (!node->needed
1563 && DECL_SAVED_TREE (node->decl)
1564 && !TREE_PUBLIC (node->decl));
1565 if (cgraph_dump_file && node->local.local)
1566 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1567 }
1568 if (cgraph_dump_file)
1569 fprintf (cgraph_dump_file, "\n\n");
1570 }
1571
1572 /* Perform simple optimizations based on callgraph. */
1573
1574 void
1575 cgraph_optimize (void)
1576 {
1577 if (!flag_unit_at_a_time)
1578 return;
1579 timevar_push (TV_CGRAPHOPT);
1580 if (!quiet_flag)
1581 fprintf (stderr, "Performing intraprocedural optimizations\n");
1582
1583 cgraph_mark_local_functions ();
1584 if (cgraph_dump_file)
1585 {
1586 fprintf (cgraph_dump_file, "Marked ");
1587 dump_cgraph (cgraph_dump_file);
1588 }
1589
1590 cgraph_decide_inlining ();
1591 cgraph_global_info_ready = true;
1592 if (cgraph_dump_file)
1593 {
1594 fprintf (cgraph_dump_file, "Optimized ");
1595 dump_cgraph (cgraph_dump_file);
1596 }
1597 timevar_pop (TV_CGRAPHOPT);
1598
1599 /* Output everything. */
1600 if (!quiet_flag)
1601 fprintf (stderr, "Assembling functions:\n");
1602 cgraph_expand_all_functions ();
1603 if (cgraph_dump_file)
1604 {
1605 fprintf (cgraph_dump_file, "\nFinal ");
1606 dump_cgraph (cgraph_dump_file);
1607 }
1608 }