[thirdparty/gcc.git] / gcc / ipa.c
65c1a668 1/* Basic IPA optimizations and utilities.
7cf0dbf3 2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
65c1a668 4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
8c4c00c1 9Software Foundation; either version 3, or (at your option) any later
65c1a668 10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
8c4c00c1 18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
65c1a668 20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "cgraph.h"
f37a5008 26#include "tree-pass.h"
27#include "timevar.h"
ccf4ab6b 28#include "gimple.h"
13f90d63 29#include "ggc.h"
8dfbf71d 30#include "flags.h"
65c1a668 31
 32/* Fill the array ORDER with all nodes that have the output flag set, in
 33 reverse topological order. */
34
35int
36cgraph_postorder (struct cgraph_node **order)
37{
38 struct cgraph_node *node, *node2;
39 int stack_size = 0;
40 int order_pos = 0;
41 struct cgraph_edge *edge, last;
2cb64f78 42 int pass;
65c1a668 43
44 struct cgraph_node **stack =
4c36ffe6 45 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
65c1a668 46
47 /* We have to deal with cycles nicely, so use a depth first traversal
48 output algorithm. Ignore the fact that some functions won't need
49 to be output and put them into order as well, so we get dependencies
f0b5f617 50 right through inline functions. */
65c1a668 51 for (node = cgraph_nodes; node; node = node->next)
52 node->aux = NULL;
2cb64f78 53 for (pass = 0; pass < 2; pass++)
54 for (node = cgraph_nodes; node; node = node->next)
55 if (!node->aux
59dd4830 56 && (pass
57 || (!cgraph_only_called_directly_p (node)
58 && !node->address_taken)))
2cb64f78 59 {
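 /* Walk the callers of each node using the AUX pointer as a cursor; the
 address of the local LAST serves as the end-of-callers sentinel. */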
60 node2 = node;
61 if (!node->callers)
62 node->aux = &last;
63 else
64 node->aux = node->callers;
65 while (node2)
66 {
67 while (node2->aux != &last)
68 {
69 edge = (struct cgraph_edge *) node2->aux;
70 if (edge->next_caller)
71 node2->aux = edge->next_caller;
72 else
73 node2->aux = &last;
d160af41 74 /* Break possible cycles involving always-inline
75 functions by ignoring edges from always-inline
76 functions to non-always-inline functions. */
77 if (edge->caller->local.disregard_inline_limits
78 && !edge->callee->local.disregard_inline_limits)
79 continue;
2cb64f78 80 if (!edge->caller->aux)
81 {
82 if (!edge->caller->callers)
83 edge->caller->aux = &last;
84 else
85 edge->caller->aux = edge->caller->callers;
86 stack[stack_size++] = node2;
87 node2 = edge->caller;
88 break;
89 }
90 }
91 if (node2->aux == &last)
92 {
93 order[order_pos++] = node2;
94 if (stack_size)
95 node2 = stack[--stack_size];
96 else
97 node2 = NULL;
98 }
99 }
100 }
65c1a668 101 free (stack);
9e0baf4d 102 for (node = cgraph_nodes; node; node = node->next)
103 node->aux = NULL;
65c1a668 104 return order_pos;
105}
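
/* A minimal usage sketch (illustration only, not part of this file): a caller
 allocates ORDER itself and typically walks the result in reverse, e.g.

 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
 int n = cgraph_postorder (order);
 int i;
 for (i = n - 1; i >= 0; i--)
 process_node (order[i]);
 free (order);

 where process_node stands for a hypothetical per-node action; ipa_profile
 below follows this pattern. */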
106
21f41380 107/* Look for all functions inlined into NODE and update their inlined_to pointers
108 to INLINED_TO. */
109
110static void
111update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
112{
113 struct cgraph_edge *e;
114 for (e = node->callees; e; e = e->next_callee)
115 if (e->callee->global.inlined_to)
116 {
117 e->callee->global.inlined_to = inlined_to;
118 update_inlined_to_pointer (e->callee, inlined_to);
119 }
120}
121
9da87cb8 122/* Add cgraph NODE to queue starting at FIRST.
123
124 The queue is linked via AUX pointers and terminated by pointer to 1.
 125 We enqueue nodes on two occasions: when we find them reachable or when we
 126 find their bodies needed for further cloning. In the second case we mark
 127 them by pointer to 2 after processing so they are re-queued when they
 128 become reachable. */
6f932b06 129
130static void
131enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
132{
9da87cb8 133 /* Node is still in queue; do nothing. */
134 if (node->aux && node->aux != (void *) 2)
135 return;
136 /* Node was already processed as unreachable, re-enqueue
137 only if it became reachable now. */
138 if (node->aux == (void *)2 && !node->reachable)
139 return;
6f932b06 140 node->aux = *first;
141 *first = node;
142}
143
144/* Add varpool NODE to queue starting at FIRST. */
145
146static void
147enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
148{
149 node->aux = *first;
150 *first = node;
151}
152
 153/* Process references in LIST; mark referenced functions reachable and variables needed. */
154
155static void
156process_references (struct ipa_ref_list *list,
157 struct cgraph_node **first,
158 struct varpool_node **first_varpool,
159 bool before_inlining_p)
160{
161 int i;
162 struct ipa_ref *ref;
163 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
164 {
165 if (ref->refered_type == IPA_REF_CGRAPH)
166 {
167 struct cgraph_node *node = ipa_ref_node (ref);
168 if (!node->reachable
169 && (!DECL_EXTERNAL (node->decl)
170 || before_inlining_p))
171 {
172 node->reachable = true;
173 enqueue_cgraph_node (node, first);
174 }
175 }
176 else
177 {
178 struct varpool_node *node = ipa_ref_varpool_node (ref);
179 if (!node->needed)
180 {
181 varpool_mark_needed_node (node);
182 enqueue_varpool_node (node, first_varpool);
183 }
184 }
185 }
186}
187
 188/* Return true when variable NODE can be removed from the variable pool
 189 if all references to it are eliminated. */
190
191static inline bool
192varpool_can_remove_if_no_refs (struct varpool_node *node)
193{
194 return (!node->force_output && !node->used_from_other_partition
195 && (DECL_COMDAT (node->decl) || !node->externally_visible));
196}
197
8dfbf71d 198/* Return true when function can be marked local. */
199
200static bool
201cgraph_local_node_p (struct cgraph_node *node)
202{
203 return (cgraph_only_called_directly_p (node)
204 && node->analyzed
205 && !DECL_EXTERNAL (node->decl)
206 && !node->local.externally_visible
207 && !node->reachable_from_other_partition
208 && !node->in_other_partition);
209}
210
65c1a668 211/* Perform reachability analysis and reclaim all unreachable nodes.
212 If BEFORE_INLINING_P is true this function is called before inlining
48e1416a 213 decisions have been made. If BEFORE_INLINING_P is false this function also
65c1a668 214 removes unneeded bodies of extern inline functions. */
215
216bool
3f5be5f4 217cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
65c1a668 218{
cda6870f 219 struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
6f932b06 220 struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
f4ec5ce1 221 struct cgraph_node *node, *next;
6f932b06 222 struct varpool_node *vnode, *vnext;
65c1a668 223 bool changed = false;
65c1a668 224
225#ifdef ENABLE_CHECKING
226 verify_cgraph ();
227#endif
3f5be5f4 228 if (file)
229 fprintf (file, "\nReclaiming functions:");
65c1a668 230#ifdef ENABLE_CHECKING
231 for (node = cgraph_nodes; node; node = node->next)
232 gcc_assert (!node->aux);
9da87cb8 233 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
234 gcc_assert (!vnode->aux);
65c1a668 235#endif
6f932b06 236 varpool_reset_queue ();
65c1a668 237 for (node = cgraph_nodes; node; node = node->next)
cdedc740 238 if (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
48e1416a 239 && ((!DECL_EXTERNAL (node->decl))
65c1a668 240 || before_inlining_p))
241 {
59dd4830 242 gcc_assert (!node->global.inlined_to);
6f932b06 243 enqueue_cgraph_node (node, &first);
6d1cc52c 244 node->reachable = true;
65c1a668 245 }
246 else
6d1cc52c 247 {
248 gcc_assert (!node->aux);
249 node->reachable = false;
250 }
6f932b06 251 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
252 {
253 vnode->next_needed = NULL;
254 vnode->prev_needed = NULL;
255 if (!varpool_can_remove_if_no_refs (vnode))
256 {
257 vnode->needed = false;
258 varpool_mark_needed_node (vnode);
259 enqueue_varpool_node (vnode, &first_varpool);
260 }
261 else
262 vnode->needed = false;
263 }
65c1a668 264
265 /* Perform reachability analysis. As a special case do not consider
266 extern inline functions not inlined as live because we won't output
9da87cb8 267 them at all.
268
 269 We maintain two worklists, one for cgraph nodes and the other for varpool
 270 nodes, and we are finished once both are empty. */
271
6f932b06 272 while (first != (struct cgraph_node *) (void *) 1
273 || first_varpool != (struct varpool_node *) (void *) 1)
65c1a668 274 {
6f932b06 275 if (first != (struct cgraph_node *) (void *) 1)
276 {
277 struct cgraph_edge *e;
278 node = first;
279 first = (struct cgraph_node *) first->aux;
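 /* A node popped from the queue while still unreachable is marked with
 pointer 2 below, so enqueue_cgraph_node will re-queue it only if it
 later becomes reachable. */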
9da87cb8 280 if (!node->reachable)
281 node->aux = (void *)2;
6f932b06 282
9da87cb8 283 /* If we found this node reachable, first mark its callees
284 reachable too, unless they are direct calls to extern inline functions
285 we decided to not inline. */
6f932b06 286 if (node->reachable)
e12f85b7 287 {
288 for (e = node->callees; e; e = e->next_callee)
289 if (!e->callee->reachable
290 && node->analyzed
291 && (!e->inline_failed || !e->callee->analyzed
292 || (!DECL_EXTERNAL (e->callee->decl))
293 || before_inlining_p))
294 {
295 e->callee->reachable = true;
296 enqueue_cgraph_node (e->callee, &first);
297 }
298 process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
299 }
6f932b06 300
301 /* If any function in a comdat group is reachable, force
302 all other functions in the same comdat group to be
303 also reachable. */
304 if (node->same_comdat_group
305 && node->reachable
306 && !node->global.inlined_to)
6d1cc52c 307 {
6f932b06 308 for (next = node->same_comdat_group;
309 next != node;
310 next = next->same_comdat_group)
311 if (!next->reachable)
312 {
6f932b06 313 next->reachable = true;
9da87cb8 314 enqueue_cgraph_node (next, &first);
6f932b06 315 }
6d1cc52c 316 }
61c2c7b1 317
6f932b06 318 /* We can freely remove inline clones even if they are cloned, however if
 319 the function is a clone of a real clone, we must keep it around in order
 320 to make materialize_clones produce a function body with the changes
 321 applied. */
e12f85b7 322 while (node->clone_of && !node->clone_of->aux
323 && !gimple_has_body_p (node->decl))
ee3f5fc0 324 {
6f932b06 325 bool noninline = node->clone_of->decl != node->decl;
326 node = node->clone_of;
9da87cb8 327 if (noninline && !node->reachable && !node->aux)
6f932b06 328 {
329 enqueue_cgraph_node (node, &first);
330 break;
331 }
ee3f5fc0 332 }
6f932b06 333 }
334 if (first_varpool != (struct varpool_node *) (void *) 1)
335 {
336 vnode = first_varpool;
337 first_varpool = (struct varpool_node *)first_varpool->aux;
338 vnode->aux = NULL;
339 process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
933b10c6 340 /* If any variable in a comdat group is needed, force
 341 all other variables in the same comdat group to be
 342 needed as well. */
343 if (vnode->same_comdat_group)
344 {
345 struct varpool_node *next;
346 for (next = vnode->same_comdat_group;
347 next != vnode;
348 next = next->same_comdat_group)
349 if (!next->needed)
350 {
351 varpool_mark_needed_node (next);
352 enqueue_varpool_node (next, &first_varpool);
353 }
354 }
ccf4ab6b 355 }
65c1a668 356 }
357
9da87cb8 358 /* Remove unreachable nodes.
359
 360 Completely unreachable functions can be fully removed from the callgraph.
 361 Extern inline functions that we decided to not inline need to become unanalyzed
 362 nodes of the callgraph (so we still have edges to them); we then remove their bodies.
 363
 364 We also need to take care of functions that are unreachable but that we need
 365 to keep around for later cloning. In this case we also turn them into
 366 unanalyzed nodes, but keep the body around. */
f4ec5ce1 367 for (node = cgraph_nodes; node; node = next)
65c1a668 368 {
f4ec5ce1 369 next = node->next;
6d1cc52c 370 if (node->aux && !node->reachable)
371 {
372 cgraph_node_remove_callees (node);
e12f85b7 373 ipa_remove_all_references (&node->ref_list);
6d1cc52c 374 node->analyzed = false;
375 node->local.inlinable = false;
376 }
65c1a668 377 if (!node->aux)
378 {
65c1a668 379 node->global.inlined_to = NULL;
3f5be5f4 380 if (file)
381 fprintf (file, " %s", cgraph_node_name (node));
6d1cc52c 382 if (!node->analyzed || !DECL_EXTERNAL (node->decl) || before_inlining_p)
65c1a668 383 cgraph_remove_node (node);
384 else
385 {
386 struct cgraph_edge *e;
387
ccf4ab6b 388 /* See if there is a reachable caller. */
65c1a668 389 for (e = node->callers; e; e = e->next_caller)
9da87cb8 390 if (e->caller->reachable)
65c1a668 391 break;
ccf4ab6b 392
393 /* If so, we need to keep node in the callgraph. */
65c1a668 394 if (e || node->needed)
395 {
396 struct cgraph_node *clone;
397
ccf4ab6b 398 /* If there are still clones, we must keep body around.
399 Otherwise we can just remove the body but keep the clone. */
400 for (clone = node->clones; clone;
401 clone = clone->next_sibling_clone)
65c1a668 402 if (clone->aux)
403 break;
404 if (!clone)
405 {
b62f482d 406 cgraph_release_function_body (node);
65c1a668 407 node->analyzed = false;
ccf4ab6b 408 node->local.inlinable = false;
65c1a668 409 }
ddc90d88 410 else
411 gcc_assert (!clone->in_other_partition);
c596d830 412 cgraph_node_remove_callees (node);
6f932b06 413 ipa_remove_all_references (&node->ref_list);
6d1cc52c 414 if (node->prev_sibling_clone)
415 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
416 else if (node->clone_of)
417 node->clone_of->clones = node->next_sibling_clone;
418 if (node->next_sibling_clone)
419 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
ee3f5fc0 420 node->clone_of = NULL;
421 node->next_sibling_clone = NULL;
422 node->prev_sibling_clone = NULL;
65c1a668 423 }
424 else
425 cgraph_remove_node (node);
426 }
65c1a668 427 changed = true;
428 }
429 }
430 for (node = cgraph_nodes; node; node = node->next)
ccf4ab6b 431 {
 432 /* Inline clones might be kept around so that materializing them allows further
 433 cloning. If the function the clone is inlined into is removed, we need
 434 to turn it into a normal node. */
435 if (node->global.inlined_to
436 && !node->callers)
437 {
438 gcc_assert (node->clones);
21f41380 439 node->global.inlined_to = NULL;
440 update_inlined_to_pointer (node, node);
ccf4ab6b 441 }
442 node->aux = NULL;
443 }
8dfbf71d 444
445 if (file)
446 fprintf (file, "\n");
447
 448 /* We must release unused extern inlines or sanity checking will fail. The rest
 449 of the transformations are undesirable at -O0 since we do not want to remove anything. */
450 if (!optimize)
451 return changed;
452
6f932b06 453 if (file)
8dfbf71d 454 fprintf (file, "Reclaiming variables:");
6f932b06 455 for (vnode = varpool_nodes; vnode; vnode = vnext)
456 {
457 vnext = vnode->next;
458 if (!vnode->needed)
459 {
8dfbf71d 460 if (file)
461 fprintf (file, " %s", varpool_node_name (vnode));
462 varpool_remove_node (vnode);
463 changed = true;
6f932b06 464 }
465 }
8dfbf71d 466
467 /* Now update address_taken flags and try to promote functions to be local. */
468
cdedc740 469 if (file)
470 fprintf (file, "\nClearing address taken flags:");
471 for (node = cgraph_nodes; node; node = node->next)
472 if (node->address_taken
473 && !node->reachable_from_other_partition)
474 {
475 int i;
476 struct ipa_ref *ref;
477 bool found = false;
478 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
479 && !found; i++)
8dfbf71d 480 {
481 gcc_assert (ref->use == IPA_REF_ADDR);
482 found = true;
483 }
cdedc740 484 if (!found)
485 {
486 if (file)
487 fprintf (file, " %s", cgraph_node_name (node));
488 node->address_taken = false;
8dfbf71d 489 changed = true;
490 if (cgraph_local_node_p (node))
491 {
492 node->local.local = true;
493 if (file)
494 fprintf (file, " (local)");
495 }
cdedc740 496 }
497 }
6f932b06 498
09a2e412 499#ifdef ENABLE_CHECKING
500 verify_cgraph ();
501#endif
34e5cced 502
503 /* Reclaim alias pairs for functions that have disappeared from the
504 call graph. */
505 remove_unreachable_alias_pairs ();
506
65c1a668 507 return changed;
508}
f37a5008 509
8dfbf71d 510/* Discover variables whose address is no longer taken or that are read-only
 511 and update their flags.
 512
 513 FIXME: This cannot be done in between gimplify and omp_expand since the
 514 readonly flag plays a role in what is shared and what is not. Currently we do
 515 this transformation as part of the ipa-reference pass, but it would make sense
 516 to do it before early optimizations. */
517
518void
519ipa_discover_readonly_nonaddressable_vars (void)
520{
521 struct varpool_node *vnode;
522 if (dump_file)
523 fprintf (dump_file, "Clearing variable flags:");
524 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
525 if (vnode->finalized && varpool_all_refs_explicit_p (vnode)
526 && (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl)))
527 {
528 bool written = false;
529 bool address_taken = false;
530 int i;
531 struct ipa_ref *ref;
532 for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref)
533 && (!written || !address_taken); i++)
534 switch (ref->use)
535 {
536 case IPA_REF_ADDR:
537 address_taken = true;
538 break;
539 case IPA_REF_LOAD:
540 break;
541 case IPA_REF_STORE:
542 written = true;
543 break;
544 }
545 if (TREE_ADDRESSABLE (vnode->decl) && !address_taken)
546 {
547 if (dump_file)
548 fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
549 TREE_ADDRESSABLE (vnode->decl) = 0;
550 }
551 if (!TREE_READONLY (vnode->decl) && !address_taken && !written
 552 /* Making a variable in an explicit section read-only can cause a
 553 section type conflict.
 554 See e.g. gcc.c-torture/compile/pr23237.c */
555 && DECL_SECTION_NAME (vnode->decl) == NULL)
556 {
557 if (dump_file)
558 fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
559 TREE_READONLY (vnode->decl) = 1;
560 }
561 }
562 if (dump_file)
563 fprintf (dump_file, "\n");
564}
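
/* Illustrative example (hypothetical code, not part of this file): for

 static int counter;
 int get_counter (void) { return counter; }

 the only reference to counter is a load, so the loop above clears
 TREE_ADDRESSABLE (if it was set) and marks the decl TREE_READONLY,
 provided the variable has no explicit section. */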
565
566/* Return true when function NODE should be considered externally visible. */
567
59dd4830 568static bool
569cgraph_externally_visible_p (struct cgraph_node *node, bool whole_program)
570{
e51ee2f1 571 if (!node->local.finalized)
572 return false;
59dd4830 573 if (!DECL_COMDAT (node->decl)
574 && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
575 return false;
576 if (!whole_program)
577 return true;
b1be8e04 578 if (DECL_PRESERVE_P (node->decl))
579 return true;
59dd4830 580 /* COMDAT functions must be shared only if they have their address taken,
581 otherwise we can produce our own private implementation with
582 -fwhole-program. */
61c2c7b1 583 if (DECL_COMDAT (node->decl))
584 {
585 if (node->address_taken || !node->analyzed)
586 return true;
587 if (node->same_comdat_group)
588 {
589 struct cgraph_node *next;
590
591 /* If more than one function is in the same COMDAT group, it must
 592 be shared even if just one function in the comdat group has its
 593 address taken. */
594 for (next = node->same_comdat_group;
595 next != node;
596 next = next->same_comdat_group)
597 if (next->address_taken || !next->analyzed)
598 return true;
599 }
600 }
59dd4830 601 if (MAIN_NAME_P (DECL_NAME (node->decl)))
602 return true;
603 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
604 return true;
605 return false;
606}
607
c524ac5d 608/* Dissolve the same_comdat_group list in which NODE resides. */
609
610static void
611dissolve_same_comdat_group_list (struct cgraph_node *node)
612{
613 struct cgraph_node *n = node, *next;
614 do
615 {
616 next = n->same_comdat_group;
617 n->same_comdat_group = NULL;
618 n = next;
619 }
620 while (n != node);
621}
622
f37a5008 623/* Mark visibility of all functions.
624
625 A local function is one whose calls can occur only in the current
626 compilation unit and all its calls are explicit, so we can change
627 its calling convention. We simply mark all static functions whose
628 address is not taken as local.
629
 630 We also change the TREE_PUBLIC flag of all declarations that are public
 631 from the language point of view but whose default we want to override
 632 via visibilities as far as the backend is concerned. */
633
a757b4dc 634static unsigned int
59dd4830 635function_and_variable_visibility (bool whole_program)
f37a5008 636{
637 struct cgraph_node *node;
638 struct varpool_node *vnode;
639
640 for (node = cgraph_nodes; node; node = node->next)
641 {
59cb1238 642 /* Due to the lack of COMDAT support, the C++ FE creates local COMDAT functions
 643 (that ought to be shared but cannot be due to object format
 644 limitations). It is necessary to keep the flag to make the rest of the C++ FE
 645 happy. Clear the flag here to avoid confusion in the middle end. */
646 if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
647 DECL_COMDAT (node->decl) = 0;
0ca9d009 648 /* For external decls stop tracking same_comdat_group, it doesn't matter
649 what comdat group they are in when they won't be emitted in this TU,
 650 and dropping it simplifies later passes. */
651 if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
652 {
c524ac5d 653#ifdef ENABLE_CHECKING
654 struct cgraph_node *n;
655
656 for (n = node->same_comdat_group;
657 n != node;
658 n = n->same_comdat_group)
0ca9d009 659 /* If at least one function in the same comdat group is external,
660 all of them have to be, otherwise it is a front-end bug. */
661 gcc_assert (DECL_EXTERNAL (n->decl));
c524ac5d 662#endif
663 dissolve_same_comdat_group_list (node);
0ca9d009 664 }
22671757 665 gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
666 || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
59dd4830 667 if (cgraph_externally_visible_p (node, whole_program))
668 {
669 gcc_assert (!node->global.inlined_to);
670 node->local.externally_visible = true;
671 }
672 else
673 node->local.externally_visible = false;
f37a5008 674 if (!node->local.externally_visible && node->analyzed
675 && !DECL_EXTERNAL (node->decl))
676 {
3e7743d4 677 struct cgraph_node *alias;
59dd4830 678 gcc_assert (whole_program || !TREE_PUBLIC (node->decl));
6137cc9f 679 cgraph_make_decl_local (node->decl);
3e7743d4 680 for (alias = node->same_body; alias; alias = alias->next)
681 cgraph_make_decl_local (alias->decl);
c524ac5d 682 if (node->same_comdat_group)
683 /* cgraph_externally_visible_p has already checked all other nodes
684 in the group and they will all be made local. We need to
685 dissolve the group at once so that the predicate does not
686 segfault though. */
687 dissolve_same_comdat_group_list (node);
f37a5008 688 }
8dfbf71d 689 node->local.local = cgraph_local_node_p (node);
f37a5008 690 }
930cff67 691 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
692 {
 693 /* The weak flag makes no sense on local variables. */
694 gcc_assert (!DECL_WEAK (vnode->decl)
695 || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
 696 /* In several cases declarations cannot be common:
 697
 698 - when the declaration has an initializer
 699 - when it is weak
 700 - when it has a specific section
 701 - when it resides in a non-generic address space
 702 - when the declaration is local, it will go into the .local common section,
 703 so the common flag is not needed. Frontends still produce these in
 704 certain cases, such as for:
 705
 706 static int a __attribute__ ((common))
 707
 708 Canonicalize things here and clear the redundant flag. */
709 if (DECL_COMMON (vnode->decl)
710 && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
711 || (DECL_INITIAL (vnode->decl)
712 && DECL_INITIAL (vnode->decl) != error_mark_node)
713 || DECL_WEAK (vnode->decl)
714 || DECL_SECTION_NAME (vnode->decl) != NULL
715 || ! (ADDR_SPACE_GENERIC_P
716 (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
717 DECL_COMMON (vnode->decl) = 0;
718 }
f37a5008 719 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
720 {
e51ee2f1 721 if (!vnode->finalized)
722 continue;
f37a5008 723 if (vnode->needed
59dd4830 724 && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl))
725 && (!whole_program
f7a5b09c 726 /* We can privatize comdat readonly variables whose address is not taken,
 727 but doing so is not going to bring us optimization opportunities until
 728 we start reordering data structures. */
729 || DECL_COMDAT (vnode->decl)
730 || DECL_WEAK (vnode->decl)
59dd4830 731 || lookup_attribute ("externally_visible",
732 DECL_ATTRIBUTES (vnode->decl))))
733 vnode->externally_visible = true;
734 else
735 vnode->externally_visible = false;
f37a5008 736 if (!vnode->externally_visible)
737 {
59dd4830 738 gcc_assert (whole_program || !TREE_PUBLIC (vnode->decl));
6137cc9f 739 cgraph_make_decl_local (vnode->decl);
f37a5008 740 }
741 gcc_assert (TREE_STATIC (vnode->decl));
742 }
743
744 if (dump_file)
745 {
746 fprintf (dump_file, "\nMarking local functions:");
747 for (node = cgraph_nodes; node; node = node->next)
748 if (node->local.local)
749 fprintf (dump_file, " %s", cgraph_node_name (node));
750 fprintf (dump_file, "\n\n");
751 fprintf (dump_file, "\nMarking externally visible functions:");
752 for (node = cgraph_nodes; node; node = node->next)
753 if (node->local.externally_visible)
754 fprintf (dump_file, " %s", cgraph_node_name (node));
755 fprintf (dump_file, "\n\n");
22671757 756 fprintf (dump_file, "\nMarking externally visible variables:");
757 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
758 if (vnode->externally_visible)
759 fprintf (dump_file, " %s", varpool_node_name (vnode));
760 fprintf (dump_file, "\n\n");
f37a5008 761 }
762 cgraph_function_flags_ready = true;
a757b4dc 763 return 0;
f37a5008 764}
765
59dd4830 766/* Local function pass handling visibilities. This happens before LTO streaming
767 so in particular -fwhole-program should be ignored at this level. */
768
769static unsigned int
770local_function_and_variable_visibility (void)
771{
772 return function_and_variable_visibility (flag_whole_program && !flag_lto && !flag_whopr);
773}
774
48e1416a 775struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
f37a5008 776{
20099e35 777 {
778 SIMPLE_IPA_PASS,
f37a5008 779 "visibility", /* name */
780 NULL, /* gate */
59dd4830 781 local_function_and_variable_visibility,/* execute */
f37a5008 782 NULL, /* sub */
783 NULL, /* next */
784 0, /* static_pass_number */
785 TV_CGRAPHOPT, /* tv_id */
786 0, /* properties_required */
787 0, /* properties_provided */
788 0, /* properties_destroyed */
789 0, /* todo_flags_start */
57305941 790 TODO_remove_functions | TODO_dump_cgraph
791 | TODO_ggc_collect /* todo_flags_finish */
20099e35 792 }
f37a5008 793};
13f90d63 794
59dd4830 795/* Do not re-run on ltrans stage. */
796
797static bool
798gate_whole_program_function_and_variable_visibility (void)
799{
800 return !flag_ltrans;
801}
802
 803/* Bring functions local at LTO time with -fwhole-program. */
804
805static unsigned int
806whole_program_function_and_variable_visibility (void)
807{
808 struct cgraph_node *node;
809 struct varpool_node *vnode;
810
811 function_and_variable_visibility (flag_whole_program);
812
813 for (node = cgraph_nodes; node; node = node->next)
f7a5b09c 814 if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
815 && node->local.finalized)
59dd4830 816 cgraph_mark_needed_node (node);
817 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
f7a5b09c 818 if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
59dd4830 819 varpool_mark_needed_node (vnode);
22671757 820 if (dump_file)
821 {
822 fprintf (dump_file, "\nNeeded variables:");
823 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
824 if (vnode->needed)
825 fprintf (dump_file, " %s", varpool_node_name (vnode));
826 fprintf (dump_file, "\n\n");
827 }
59dd4830 828 return 0;
829}
830
831struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
832{
833 {
834 IPA_PASS,
835 "whole-program", /* name */
836 gate_whole_program_function_and_variable_visibility,/* gate */
837 whole_program_function_and_variable_visibility,/* execute */
838 NULL, /* sub */
839 NULL, /* next */
840 0, /* static_pass_number */
841 TV_CGRAPHOPT, /* tv_id */
842 0, /* properties_required */
843 0, /* properties_provided */
844 0, /* properties_destroyed */
845 0, /* todo_flags_start */
57305941 846 TODO_remove_functions | TODO_dump_cgraph
847 | TODO_ggc_collect /* todo_flags_finish */
59dd4830 848 },
849 NULL, /* generate_summary */
850 NULL, /* write_summary */
851 NULL, /* read_summary */
ddc90d88 852 NULL, /* write_optimization_summary */
853 NULL, /* read_optimization_summary */
90464c8b 854 NULL, /* stmt_fixup */
59dd4830 855 0, /* TODOs */
856 NULL, /* function_transform */
857 NULL, /* variable_transform */
858};
13f90d63 859
860/* Hash a cgraph node set element. */
861
862static hashval_t
863hash_cgraph_node_set_element (const void *p)
864{
865 const_cgraph_node_set_element element = (const_cgraph_node_set_element) p;
866 return htab_hash_pointer (element->node);
867}
868
869/* Compare two cgraph node set elements. */
870
871static int
872eq_cgraph_node_set_element (const void *p1, const void *p2)
873{
874 const_cgraph_node_set_element e1 = (const_cgraph_node_set_element) p1;
875 const_cgraph_node_set_element e2 = (const_cgraph_node_set_element) p2;
876
877 return e1->node == e2->node;
878}
879
880/* Create a new cgraph node set. */
881
882cgraph_node_set
883cgraph_node_set_new (void)
884{
885 cgraph_node_set new_node_set;
886
887 new_node_set = GGC_NEW (struct cgraph_node_set_def);
888 new_node_set->hashtab = htab_create_ggc (10,
889 hash_cgraph_node_set_element,
890 eq_cgraph_node_set_element,
891 NULL);
892 new_node_set->nodes = NULL;
893 return new_node_set;
894}
895
896/* Add cgraph_node NODE to cgraph_node_set SET. */
897
898void
899cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
900{
901 void **slot;
902 cgraph_node_set_element element;
903 struct cgraph_node_set_element_def dummy;
904
905 dummy.node = node;
906 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
907
908 if (*slot != HTAB_EMPTY_ENTRY)
909 {
910 element = (cgraph_node_set_element) *slot;
911 gcc_assert (node == element->node
912 && (VEC_index (cgraph_node_ptr, set->nodes, element->index)
913 == node));
914 return;
915 }
916
917 /* Insert node into hash table. */
918 element =
919 (cgraph_node_set_element) GGC_NEW (struct cgraph_node_set_element_def);
920 element->node = node;
921 element->index = VEC_length (cgraph_node_ptr, set->nodes);
922 *slot = element;
923
924 /* Insert into node vector. */
925 VEC_safe_push (cgraph_node_ptr, gc, set->nodes, node);
926}
927
928/* Remove cgraph_node NODE from cgraph_node_set SET. */
929
930void
931cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
932{
933 void **slot, **last_slot;
934 cgraph_node_set_element element, last_element;
935 struct cgraph_node *last_node;
936 struct cgraph_node_set_element_def dummy;
937
938 dummy.node = node;
939 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
940 if (slot == NULL)
941 return;
942
943 element = (cgraph_node_set_element) *slot;
944 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
945 == node);
946
947 /* Remove from vector. We do this by swapping node with the last element
948 of the vector. */
949 last_node = VEC_pop (cgraph_node_ptr, set->nodes);
950 if (last_node != node)
951 {
952 dummy.node = last_node;
953 last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
954 last_element = (cgraph_node_set_element) *last_slot;
955 gcc_assert (last_element);
956
957 /* Move the last element to the original spot of NODE. */
958 last_element->index = element->index;
959 VEC_replace (cgraph_node_ptr, set->nodes, last_element->index,
960 last_node);
961 }
48e1416a 962
13f90d63 963 /* Remove element from hash table. */
964 htab_clear_slot (set->hashtab, slot);
965 ggc_free (element);
966}
967
968/* Find NODE in SET and return an iterator to it if found. A null iterator
969 is returned if NODE is not in SET. */
970
971cgraph_node_set_iterator
972cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node)
973{
974 void **slot;
975 struct cgraph_node_set_element_def dummy;
976 cgraph_node_set_element element;
977 cgraph_node_set_iterator csi;
978
979 dummy.node = node;
980 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
981 if (slot == NULL)
982 csi.index = (unsigned) ~0;
983 else
984 {
985 element = (cgraph_node_set_element) *slot;
986 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
987 == node);
988 csi.index = element->index;
989 }
990 csi.set = set;
991
992 return csi;
993}
994
995/* Dump content of SET to file F. */
996
997void
998dump_cgraph_node_set (FILE *f, cgraph_node_set set)
999{
1000 cgraph_node_set_iterator iter;
1001
1002 for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter))
1003 {
1004 struct cgraph_node *node = csi_node (iter);
ecb08119 1005 fprintf (f, " %s/%i", cgraph_node_name (node), node->uid);
13f90d63 1006 }
ecb08119 1007 fprintf (f, "\n");
13f90d63 1008}
1009
1010/* Dump content of SET to stderr. */
1011
4b987fac 1012DEBUG_FUNCTION void
13f90d63 1013debug_cgraph_node_set (cgraph_node_set set)
1014{
1015 dump_cgraph_node_set (stderr, set);
1016}
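
/* A small usage sketch of the set API above (illustration only, not part of
 this file):

 cgraph_node_set set = cgraph_node_set_new ();
 cgraph_node_set_add (set, node);
 if (!csi_end_p (cgraph_node_set_find (set, node)))
 cgraph_node_set_remove (set, node);

 where NODE is some existing cgraph node. */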
1017
0cddb138 1018/* Hash a varpool node set element. */
1019
1020static hashval_t
1021hash_varpool_node_set_element (const void *p)
1022{
1023 const_varpool_node_set_element element = (const_varpool_node_set_element) p;
1024 return htab_hash_pointer (element->node);
1025}
1026
1027/* Compare two varpool node set elements. */
1028
1029static int
1030eq_varpool_node_set_element (const void *p1, const void *p2)
1031{
1032 const_varpool_node_set_element e1 = (const_varpool_node_set_element) p1;
1033 const_varpool_node_set_element e2 = (const_varpool_node_set_element) p2;
1034
1035 return e1->node == e2->node;
1036}
1037
1038/* Create a new varpool node set. */
1039
1040varpool_node_set
1041varpool_node_set_new (void)
1042{
1043 varpool_node_set new_node_set;
1044
1045 new_node_set = GGC_NEW (struct varpool_node_set_def);
1046 new_node_set->hashtab = htab_create_ggc (10,
1047 hash_varpool_node_set_element,
1048 eq_varpool_node_set_element,
1049 NULL);
1050 new_node_set->nodes = NULL;
1051 return new_node_set;
1052}
1053
1054/* Add varpool_node NODE to varpool_node_set SET. */
1055
1056void
1057varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
1058{
1059 void **slot;
1060 varpool_node_set_element element;
1061 struct varpool_node_set_element_def dummy;
1062
1063 dummy.node = node;
1064 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
1065
1066 if (*slot != HTAB_EMPTY_ENTRY)
1067 {
1068 element = (varpool_node_set_element) *slot;
1069 gcc_assert (node == element->node
1070 && (VEC_index (varpool_node_ptr, set->nodes, element->index)
1071 == node));
1072 return;
1073 }
1074
1075 /* Insert node into hash table. */
1076 element =
1077 (varpool_node_set_element) GGC_NEW (struct varpool_node_set_element_def);
1078 element->node = node;
1079 element->index = VEC_length (varpool_node_ptr, set->nodes);
1080 *slot = element;
1081
1082 /* Insert into node vector. */
1083 VEC_safe_push (varpool_node_ptr, gc, set->nodes, node);
1084}
1085
1086/* Remove varpool_node NODE from varpool_node_set SET. */
1087
1088void
1089varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
1090{
1091 void **slot, **last_slot;
1092 varpool_node_set_element element, last_element;
1093 struct varpool_node *last_node;
1094 struct varpool_node_set_element_def dummy;
1095
1096 dummy.node = node;
1097 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1098 if (slot == NULL)
1099 return;
1100
1101 element = (varpool_node_set_element) *slot;
1102 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1103 == node);
1104
1105 /* Remove from vector. We do this by swapping node with the last element
1106 of the vector. */
1107 last_node = VEC_pop (varpool_node_ptr, set->nodes);
1108 if (last_node != node)
1109 {
1110 dummy.node = last_node;
1111 last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1112 last_element = (varpool_node_set_element) *last_slot;
1113 gcc_assert (last_element);
1114
1115 /* Move the last element to the original spot of NODE. */
1116 last_element->index = element->index;
1117 VEC_replace (varpool_node_ptr, set->nodes, last_element->index,
1118 last_node);
1119 }
1120
1121 /* Remove element from hash table. */
1122 htab_clear_slot (set->hashtab, slot);
1123 ggc_free (element);
1124}
1125
1126/* Find NODE in SET and return an iterator to it if found. A null iterator
1127 is returned if NODE is not in SET. */
1128
1129varpool_node_set_iterator
1130varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
1131{
1132 void **slot;
1133 struct varpool_node_set_element_def dummy;
1134 varpool_node_set_element element;
1135 varpool_node_set_iterator vsi;
1136
1137 dummy.node = node;
1138 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1139 if (slot == NULL)
1140 vsi.index = (unsigned) ~0;
1141 else
1142 {
1143 element = (varpool_node_set_element) *slot;
1144 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1145 == node);
1146 vsi.index = element->index;
1147 }
1148 vsi.set = set;
1149
1150 return vsi;
1151}
1152
1153/* Dump content of SET to file F. */
1154
1155void
1156dump_varpool_node_set (FILE *f, varpool_node_set set)
1157{
1158 varpool_node_set_iterator iter;
1159
1160 for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
1161 {
1162 struct varpool_node *node = vsi_node (iter);
ecb08119 1163 fprintf (f, " %s", varpool_node_name (node));
0cddb138 1164 }
ecb08119 1165 fprintf (f, "\n");
0cddb138 1166}
1167
1168/* Dump content of SET to stderr. */
1169
4b987fac 1170DEBUG_FUNCTION void
0cddb138 1171debug_varpool_node_set (varpool_node_set set)
1172{
1173 dump_varpool_node_set (stderr, set);
1174}
1175
1176
4e2db0ad 1177/* Simple ipa profile pass propagating frequencies across the callgraph. */
1178
1179static unsigned int
1180ipa_profile (void)
1181{
1182 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1183 struct cgraph_edge *e;
1184 int order_pos;
1185 bool something_changed = false;
1186 int i;
1187
1188 order_pos = cgraph_postorder (order);
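 /* First propagation sweep: when cgraph_propagate_frequency changes a node,
 mark its local callees via AUX so they are re-examined by the fixed-point
 loop below. */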
1189 for (i = order_pos - 1; i >= 0; i--)
1190 {
1191 if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
1192 {
1193 for (e = order[i]->callees; e; e = e->next_callee)
1194 if (e->callee->local.local && !e->callee->aux)
1195 {
1196 something_changed = true;
1197 e->callee->aux = (void *)1;
1198 }
1199 }
1200 order[i]->aux = NULL;
1201 }
1202
1203 while (something_changed)
1204 {
1205 something_changed = false;
1206 for (i = order_pos - 1; i >= 0; i--)
1207 {
1208 if (order[i]->aux && cgraph_propagate_frequency (order[i]))
1209 {
1210 for (e = order[i]->callees; e; e = e->next_callee)
1211 if (e->callee->local.local && !e->callee->aux)
1212 {
1213 something_changed = true;
1214 e->callee->aux = (void *)1;
1215 }
1216 }
1217 order[i]->aux = NULL;
1218 }
1219 }
1220 free (order);
1221 return 0;
1222}
1223
1224static bool
1225gate_ipa_profile (void)
1226{
1227 return flag_ipa_profile;
1228}
1229
1230struct ipa_opt_pass_d pass_ipa_profile =
1231{
1232 {
1233 IPA_PASS,
1234 "ipa-profile", /* name */
1235 gate_ipa_profile, /* gate */
1236 ipa_profile, /* execute */
1237 NULL, /* sub */
1238 NULL, /* next */
1239 0, /* static_pass_number */
1240 TV_IPA_PROFILE, /* tv_id */
1241 0, /* properties_required */
1242 0, /* properties_provided */
1243 0, /* properties_destroyed */
1244 0, /* todo_flags_start */
1245 0 /* todo_flags_finish */
1246 },
1247 NULL, /* generate_summary */
1248 NULL, /* write_summary */
1249 NULL, /* read_summary */
1250 NULL, /* write_optimization_summary */
1251 NULL, /* read_optimization_summary */
1252 NULL, /* stmt_fixup */
1253 0, /* TODOs */
1254 NULL, /* function_transform */
1255 NULL /* variable_transform */
1256};