]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa.c
In testsuite/: 2010-10-16 Nicola Pero <nicola.pero@meta-innovation.com>
[thirdparty/gcc.git] / gcc / ipa.c
CommitLineData
ca31b95f 1/* Basic IPA optimizations and utilities.
c75c517d
SB
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
ca31b95f
JH
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9dcd6f09 9Software Foundation; either version 3, or (at your option) any later
ca31b95f
JH
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
9dcd6f09
NC
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
ca31b95f
JH
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "cgraph.h"
f4b3ca72
JH
26#include "tree-pass.h"
27#include "timevar.h"
9187e02d 28#include "gimple.h"
fed5ae11 29#include "ggc.h"
4a444e58 30#include "flags.h"
5dde3b01 31#include "pointer-set.h"
9e97ff61
JH
32#include "target.h"
33#include "tree-iterator.h"
ca31b95f
JH
34
/* Fill array ORDER with all nodes in reverse topological order of the
   callgraph (callees before callers).  Returns the number of nodes
   stored.  ORDER must have room for cgraph_n_nodes entries.

   The traversal encodes its state in each node's AUX field:
     - NULL: not yet visited;
     - &last: all callers of the node have been walked;
     - otherwise: pointer to the next caller edge still to be walked.  */

int
cgraph_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  struct cgraph_edge *edge, last;   /* LAST is only used as a unique sentinel
				       address; its contents are never read.  */
  int pass;

  struct cgraph_node **stack =
    XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right through inline functions.  */
  for (node = cgraph_nodes; node; node = node->next)
    node->aux = NULL;
  /* Pass 0 seeds the DFS only from "root" nodes (address not taken and not
     only called directly); pass 1 sweeps up everything left, including
     nodes that sit on caller cycles unreachable from any root.  */
  for (pass = 0; pass < 2; pass++)
    for (node = cgraph_nodes; node; node = node->next)
      if (!node->aux
	  && (pass
	      || (!cgraph_only_called_directly_p (node)
		  && !node->address_taken)))
	{
	  node2 = node;
	  if (!node->callers)
	    node->aux = &last;
	  else
	    node->aux = node->callers;
	  /* Iterative DFS over caller edges, using STACK instead of
	     recursion.  */
	  while (node2)
	    {
	      while (node2->aux != &last)
		{
		  edge = (struct cgraph_edge *) node2->aux;
		  /* Advance the caller cursor before possibly descending.  */
		  if (edge->next_caller)
		    node2->aux = edge->next_caller;
		  else
		    node2->aux = &last;
		  /* Break possible cycles involving always-inline
		     functions by ignoring edges from always-inline
		     functions to non-always-inline functions.  */
		  if (edge->caller->local.disregard_inline_limits
		      && !edge->callee->local.disregard_inline_limits)
		    continue;
		  if (!edge->caller->aux)
		    {
		      /* Unvisited caller: initialize its cursor and
			 descend into it.  */
		      if (!edge->caller->callers)
			edge->caller->aux = &last;
		      else
			edge->caller->aux = edge->caller->callers;
		      stack[stack_size++] = node2;
		      node2 = edge->caller;
		      break;
		    }
		}
	      /* All callers done: emit the node and pop back.  */
	      if (node2->aux == &last)
		{
		  order[order_pos++] = node2;
		  if (stack_size)
		    node2 = stack[--stack_size];
		  else
		    node2 = NULL;
		}
	    }
	}
  free (stack);
  /* Clear AUX so later passes find the fields in their expected NULL
     state.  */
  for (node = cgraph_nodes; node; node = node->next)
    node->aux = NULL;
  return order_pos;
}
109
d563610d
JH
110/* Look for all functions inlined to NODE and update their inlined_to pointers
111 to INLINED_TO. */
112
113static void
114update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
115{
116 struct cgraph_edge *e;
117 for (e = node->callees; e; e = e->next_callee)
118 if (e->callee->global.inlined_to)
119 {
120 e->callee->global.inlined_to = inlined_to;
121 update_inlined_to_pointer (e->callee, inlined_to);
122 }
123}
124
19fb0b86
JH
125/* Add cgraph NODE to queue starting at FIRST.
126
127 The queue is linked via AUX pointers and terminated by pointer to 1.
128 We enqueue nodes at two occasions: when we find them reachable or when we find
129 their bodies needed for further clonning. In the second case we mark them
130 by pointer to 2 after processing so they are re-queue when they become
131 reachable. */
b34fd25c
JH
132
133static void
134enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
135{
19fb0b86
JH
136 /* Node is still in queue; do nothing. */
137 if (node->aux && node->aux != (void *) 2)
138 return;
139 /* Node was already processed as unreachable, re-enqueue
140 only if it became reachable now. */
141 if (node->aux == (void *)2 && !node->reachable)
142 return;
b34fd25c
JH
143 node->aux = *first;
144 *first = node;
145}
146
/* Add varpool NODE to the head of the work list starting at FIRST.
   Unlike enqueue_cgraph_node, no duplicate check is done here; callers
   are expected to test the node's "needed" flag first.  */

static void
enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
{
  node->aux = *first;
  *first = node;
}
155
156/* Process references. */
157
158static void
159process_references (struct ipa_ref_list *list,
160 struct cgraph_node **first,
161 struct varpool_node **first_varpool,
162 bool before_inlining_p)
163{
164 int i;
165 struct ipa_ref *ref;
166 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
167 {
168 if (ref->refered_type == IPA_REF_CGRAPH)
169 {
170 struct cgraph_node *node = ipa_ref_node (ref);
171 if (!node->reachable
172 && (!DECL_EXTERNAL (node->decl)
173 || before_inlining_p))
174 {
175 node->reachable = true;
176 enqueue_cgraph_node (node, first);
177 }
178 }
179 else
180 {
181 struct varpool_node *node = ipa_ref_varpool_node (ref);
182 if (!node->needed)
183 {
184 varpool_mark_needed_node (node);
185 enqueue_varpool_node (node, first_varpool);
186 }
187 }
188 }
189}
190
/* Return true when variable NODE can be removed from the varpool
   if all references to it are eliminated.  */

static inline bool
varpool_can_remove_if_no_refs (struct varpool_node *node)
{
  return (!node->force_output && !node->used_from_other_partition
  	  && (DECL_COMDAT (node->decl) || !node->externally_visible));
}
200
4a444e58
JH
201/* Return true when function can be marked local. */
202
203static bool
204cgraph_local_node_p (struct cgraph_node *node)
205{
206 return (cgraph_only_called_directly_p (node)
207 && node->analyzed
208 && !DECL_EXTERNAL (node->decl)
209 && !node->local.externally_visible
210 && !node->reachable_from_other_partition
211 && !node->in_other_partition);
212}
213
/* Perform reachability analysis and reclaim all unreachable nodes.
   If BEFORE_INLINING_P is true this function is called before inlining
   decisions has been made.  If BEFORE_INLINING_P is false this function also
   removes unneeded bodies of extern inline functions.  Returns true if
   anything was removed or changed.  */

bool
cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
  /* Work lists are linked through AUX pointers and terminated by a
     pointer to 1 (see enqueue_cgraph_node/enqueue_varpool_node).  */
  struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
  struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
  struct cgraph_node *node, *next;
  struct varpool_node *vnode, *vnext;
  bool changed = false;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  /* All AUX fields must start out clear; the algorithm owns them.  */
  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->aux);
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    gcc_assert (!vnode->aux);
#endif
  varpool_reset_queue ();

  /* Seed the cgraph work list with the roots: nodes that cannot be
     removed even with no direct calls/refs, plus (after inlining)
     virtual functions kept for possible devirtualization.  */
  for (node = cgraph_nodes; node; node = node->next)
    if ((!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
	 /* Keep around virtual functions for possible devirtualization.  */
	 || (!before_inlining_p
	     && !node->global.inlined_to
	     && DECL_VIRTUAL_P (node->decl)
	     && (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))))
	&& ((!DECL_EXTERNAL (node->decl))
	    || before_inlining_p))
      {
	gcc_assert (!node->global.inlined_to);
	enqueue_cgraph_node (node, &first);
	node->reachable = true;
      }
    else
      {
	gcc_assert (!node->aux);
	node->reachable = false;
      }

  /* Seed the varpool work list with variables that must be kept even
     without references.  */
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      vnode->next_needed = NULL;
      vnode->prev_needed = NULL;
      if (!varpool_can_remove_if_no_refs (vnode))
	{
	  vnode->needed = false;
	  varpool_mark_needed_node (vnode);
	  enqueue_varpool_node (vnode, &first_varpool);
	}
      else
	vnode->needed = false;
    }

  /* Perform reachability analysis.  As a special case do not consider
     extern inline functions not inlined as live because we won't output
     them at all.

     We maintain two worklist, one for cgraph nodes other for varpools and
     are finished once both are empty.  */

  while (first != (struct cgraph_node *) (void *) 1
	 || first_varpool != (struct varpool_node *) (void *) 1)
    {
      if (first != (struct cgraph_node *) (void *) 1)
	{
	  struct cgraph_edge *e;
	  node = first;
	  first = (struct cgraph_node *) first->aux;
	  /* Mark processed-but-unreachable nodes with 2 so they can be
	     re-queued later if they become reachable (see
	     enqueue_cgraph_node).  */
	  if (!node->reachable)
	    node->aux = (void *)2;

	  /* If we found this node reachable, first mark on the callees
	     reachable too, unless they are direct calls to extern inline functions
	     we decided to not inline.  */
	  if (node->reachable)
	    {
	      for (e = node->callees; e; e = e->next_callee)
		if (!e->callee->reachable
		    && node->analyzed
		    && (!e->inline_failed || !e->callee->analyzed
			|| (!DECL_EXTERNAL (e->callee->decl))
			|| before_inlining_p))
		  {
		    e->callee->reachable = true;
		    enqueue_cgraph_node (e->callee, &first);
		  }
	      process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
	    }

	  /* If any function in a comdat group is reachable, force
	     all other functions in the same comdat group to be
	     also reachable.  */
	  if (node->same_comdat_group
	      && node->reachable
	      && !node->global.inlined_to)
	    {
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->reachable)
		  {
		    next->reachable = true;
		    enqueue_cgraph_node (next, &first);
		  }
	    }

	  /* We can freely remove inline clones even if they are cloned, however if
	     function is clone of real clone, we must keep it around in order to
	     make materialize_clones produce function body with the changes
	     applied.  */
	  while (node->clone_of && !node->clone_of->aux
		 && !gimple_has_body_p (node->decl))
	    {
	      bool noninline = node->clone_of->decl != node->decl;
	      node = node->clone_of;
	      if (noninline && !node->reachable && !node->aux)
		{
		  enqueue_cgraph_node (node, &first);
		  break;
		}
	    }
	}
      if (first_varpool != (struct varpool_node *) (void *) 1)
	{
	  vnode = first_varpool;
	  first_varpool = (struct varpool_node *)first_varpool->aux;
	  vnode->aux = NULL;
	  process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
	  /* If any function in a comdat group is reachable, force
	     all other functions in the same comdat group to be
	     also reachable.  */
	  if (vnode->same_comdat_group)
	    {
	      struct varpool_node *next;
	      for (next = vnode->same_comdat_group;
		   next != vnode;
		   next = next->same_comdat_group)
		if (!next->needed)
		  {
		    varpool_mark_needed_node (next);
		    enqueue_varpool_node (next, &first_varpool);
		  }
	    }
	}
    }

  /* Remove unreachable nodes.

     Completely unreachable functions can be fully removed from the callgraph.
     Extern inline functions that we decided to not inline need to become unanalyzed nodes of
     callgraph (so we still have edges to them).  We remove function body then.

     Also we need to care functions that are unreachable but we need to keep them around
     for later cloning.  In this case we also turn them to unanalyzed nodes, but
     keep the body around.  */
  for (node = cgraph_nodes; node; node = next)
    {
      next = node->next;
      /* AUX set but not reachable: body is only needed for cloning;
	 drop edges/references and demote to unanalyzed.  */
      if (node->aux && !node->reachable)
	{
	  cgraph_node_remove_callees (node);
	  ipa_remove_all_references (&node->ref_list);
	  node->analyzed = false;
	  node->local.inlinable = false;
	}
      if (!node->aux)
	{
	  node->global.inlined_to = NULL;
	  if (file)
	    fprintf (file, " %s", cgraph_node_name (node));
	  if (!node->analyzed || !DECL_EXTERNAL (node->decl) || before_inlining_p)
	    cgraph_remove_node (node);
	  else
	    {
	      struct cgraph_edge *e;

	      /* See if there is reachable caller.  */
	      for (e = node->callers; e; e = e->next_caller)
		if (e->caller->reachable)
		  break;

	      /* If so, we need to keep node in the callgraph.  */
	      if (e || node->needed)
		{
		  struct cgraph_node *clone;

		  /* If there are still clones, we must keep body around.
		     Otherwise we can just remove the body but keep the clone.  */
		  for (clone = node->clones; clone;
		       clone = clone->next_sibling_clone)
		    if (clone->aux)
		      break;
		  if (!clone)
		    {
		      cgraph_release_function_body (node);
		      node->local.inlinable = false;
		      /* Unlink the node from the sibling-clone list of its
			 clone_of parent.  */
		      if (node->prev_sibling_clone)
			node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
		      else if (node->clone_of)
			node->clone_of->clones = node->next_sibling_clone;
		      if (node->next_sibling_clone)
			node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
#ifdef ENABLE_CHECKING
		      if (node->clone_of)
			node->former_clone_of = node->clone_of->decl;
#endif
		      node->clone_of = NULL;
		      node->next_sibling_clone = NULL;
		      node->prev_sibling_clone = NULL;
		    }
		  else
		    gcc_assert (!clone->in_other_partition);
		  node->analyzed = false;
		  cgraph_node_remove_callees (node);
		  ipa_remove_all_references (&node->ref_list);
		}
	      else
		cgraph_remove_node (node);
	    }
	  changed = true;
	}
    }
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Inline clones might be kept around so their materializing allows further
	 cloning.  If the function the clone is inlined into is removed, we need
	 to turn it into normal clone.  */
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      /* Hand the AUX fields back in their expected NULL state.  */
      node->aux = NULL;
    }

  if (file)
    fprintf (file, "\n");

  /* We must release unused extern inlines or sanity checking will fail.  Rest of transformations
     are undesirable at -O0 since we do not want to remove anything.  */
  if (!optimize)
    return changed;

  if (file)
    fprintf (file, "Reclaiming variables:");
  for (vnode = varpool_nodes; vnode; vnode = vnext)
    {
      vnext = vnode->next;
      if (!vnode->needed)
	{
	  if (file)
	    fprintf (file, " %s", varpool_node_name (vnode));
	  varpool_remove_node (vnode);
	  changed = true;
	}
    }

  /* Now update address_taken flags and try to promote functions to be local.  */

  if (file)
    fprintf (file, "\nClearing address taken flags:");
  for (node = cgraph_nodes; node; node = node->next)
    if (node->address_taken
	&& !node->reachable_from_other_partition)
      {
	int i;
	struct ipa_ref *ref;
	bool found = false;
	/* NOTE(review): the loop sets FOUND on the first reference and the
	   condition then stops it, so effectively only one referring ref is
	   examined; the assert checks it is an address reference.  */
	for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
		    && !found; i++)
	  {
	    gcc_assert (ref->use == IPA_REF_ADDR);
	    found = true;
	  }
	if (!found)
	  {
	    if (file)
	      fprintf (file, " %s", cgraph_node_name (node));
	    node->address_taken = false;
	    changed = true;
	    if (cgraph_local_node_p (node))
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Reclaim alias pairs for functions that have disappeared from the
     call graph.  */
  remove_unreachable_alias_pairs ();

  return changed;
}
f4b3ca72 521
/* Discover variables that have no longer address taken or that are read only
   and update their flags.

   FIXME: This can not be done in between gimplify and omp_expand since
   readonly flag plays role on what is shared and what is not.  Currently we do
   this transformation as part of whole program visibility and re-do at
   ipa-reference pass (to take into account cloning), but it would
   make sense to do it before early optimizations.  */

void
ipa_discover_readonly_nonaddressable_vars (void)
{
  struct varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    /* Only consider finalized variables whose every reference is visible
       to IPA and that still carry one of the flags we might clear.  */
    if (vnode->finalized && varpool_all_refs_explicit_p (vnode)
	&& (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl)))
      {
	bool written = false;
	bool address_taken = false;
	int i;
	struct ipa_ref *ref;
	/* Scan all references; stop early once both flags are decided.  */
	for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref)
		    && (!written || !address_taken); i++)
	  switch (ref->use)
	    {
	    case IPA_REF_ADDR:
	      address_taken = true;
	      break;
	    case IPA_REF_LOAD:
	      break;
	    case IPA_REF_STORE:
	      written = true;
	      break;
	    }
	if (TREE_ADDRESSABLE (vnode->decl) && !address_taken)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
	    TREE_ADDRESSABLE (vnode->decl) = 0;
	  }
	if (!TREE_READONLY (vnode->decl) && !address_taken && !written
	    /* Making variable in explicit section readonly can cause section
	       type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && DECL_SECTION_NAME (vnode->decl) == NULL)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
	    TREE_READONLY (vnode->decl) = 1;
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
}
578
/* Return true when function NODE should be considered externally visible.
   WHOLE_PROGRAM is the -fwhole-program setting in effect for this
   invocation; ALIASED is true when NODE is the target of an alias pair.  */

static bool
cgraph_externally_visible_p (struct cgraph_node *node, bool whole_program, bool aliased)
{
  if (!node->local.finalized)
    return false;
  /* Non-public, non-COMDAT symbols can never be externally visible.  */
  if (!DECL_COMDAT (node->decl)
      && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
    return false;

  /* Do not even try to be smart about aliased nodes.  Until we properly
     represent everything by same body alias, these are just evil.  */
  if (aliased)
    return true;

  /* If linker counts on us, we must preserve the function.  */
  if (cgraph_used_from_object_file_p (node))
    return true;
  /* When doing link time optimizations, hidden symbols become local.  */
  if (in_lto_p
      && (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
	  || DECL_VISIBILITY (node->decl) == VISIBILITY_INTERNAL)
      /* Be sure that node is defined in IR file, not in other object
	 file.  In that case we don't set used_from_other_object_file.  */
      && node->analyzed)
    ;   /* Fall through to the remaining tests below.  */
  else if (!whole_program)
    return true;
  /* COMDAT functions must be shared only if they have address taken,
     otherwise we can produce our own private implementation with
     -fwhole-program.  */
  else if (DECL_COMDAT (node->decl))
    {
      if (node->address_taken || !node->analyzed)
	return true;
      if (node->same_comdat_group)
	{
	  struct cgraph_node *next;

	  /* If more than one function is in the same COMDAT group, it must
	     be shared even if just one function in the comdat group has
	     address taken.  */
	  for (next = node->same_comdat_group;
	       next != node;
	       next = next->same_comdat_group)
	    if (next->address_taken || !next->analyzed)
	      return true;
	}
    }
  /* User-requested preservation (e.g. "used" attribute) keeps the symbol
     visible.  */
  if (DECL_PRESERVE_P (node->decl))
    return true;
  if (MAIN_NAME_P (DECL_NAME (node->decl)))
    return true;
  if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
    return true;
  return false;
}
637
78eaf7bf
MJ
638/* Dissolve the same_comdat_group list in which NODE resides. */
639
640static void
641dissolve_same_comdat_group_list (struct cgraph_node *node)
642{
643 struct cgraph_node *n = node, *next;
644 do
645 {
646 next = n->same_comdat_group;
647 n->same_comdat_group = NULL;
648 n = next;
649 }
650 while (n != node);
651}
652
/* Mark visibility of all functions.

   A local function is one whose calls can occur only in the current
   compilation unit and all its calls are explicit, so we can change
   its calling convention.  We simply mark all static functions whose
   address is not taken as local.

   We also change the TREE_PUBLIC flag of all declarations that are public
   in language point of view but we want to overwrite this default
   via visibilities for the backend point of view.

   WHOLE_PROGRAM is the effective -fwhole-program setting.  Always
   returns 0 (pass-todo flags).  */

static unsigned int
function_and_variable_visibility (bool whole_program)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;
  struct pointer_set_t *aliased_nodes = pointer_set_create ();
  struct pointer_set_t *aliased_vnodes = pointer_set_create ();
  unsigned i;
  alias_pair *p;

  /* Discover aliased nodes.  */
  FOR_EACH_VEC_ELT (alias_pair, alias_pairs, i, p)
    {
      if (dump_file)
	fprintf (dump_file, "Alias %s->%s",
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
		 IDENTIFIER_POINTER (p->target));

      if ((node = cgraph_node_for_asm (p->target)) != NULL)
	{
	  gcc_assert (node->needed);
	  pointer_set_insert (aliased_nodes, node);
	  if (dump_file)
	    fprintf (dump_file, "  node %s/%i",
		     cgraph_node_name (node), node->uid);
	}
      else if ((vnode = varpool_node_for_asm (p->target)) != NULL)
	{
	  gcc_assert (vnode->needed);
	  pointer_set_insert (aliased_vnodes, vnode);
	  if (dump_file)
	    fprintf (dump_file, "  varpool node %s",
		     varpool_node_name (vnode));
	}
      if (dump_file)
	fprintf (dump_file, "\n");
    }

  for (node = cgraph_nodes; node; node = node->next)
    {
      /* C++ FE on lack of COMDAT support create local COMDAT functions
	 (that ought to be shared but can not due to object format
	 limitations).  It is necessary to keep the flag to make rest of C++ FE
	 happy.  Clear the flag here to avoid confusion in middle-end.  */
      if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
	DECL_COMDAT (node->decl) = 0;
      /* For external decls stop tracking same_comdat_group, it doesn't matter
	 what comdat group they are in when they won't be emitted in this TU,
	 and simplifies later passes.  */
      if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
	{
#ifdef ENABLE_CHECKING
	  struct cgraph_node *n;

	  for (n = node->same_comdat_group;
	       n != node;
	       n = n->same_comdat_group)
	    /* If at least one of same comdat group functions is external,
	       all of them have to be, otherwise it is a front-end bug.  */
	    gcc_assert (DECL_EXTERNAL (n->decl));
#endif
	  dissolve_same_comdat_group_list (node);
	}
      gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
      	          || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
      if (cgraph_externally_visible_p (node, whole_program,
				       pointer_set_contains (aliased_nodes,
							     node)))
        {
	  gcc_assert (!node->global.inlined_to);
	  node->local.externally_visible = true;
	}
      else
	node->local.externally_visible = false;
      /* Symbols that lost external visibility get localized, together
	 with their same-body aliases.  */
      if (!node->local.externally_visible && node->analyzed
	  && !DECL_EXTERNAL (node->decl))
	{
	  struct cgraph_node *alias;
	  gcc_assert (whole_program || in_lto_p || !TREE_PUBLIC (node->decl));
	  cgraph_make_decl_local (node->decl);
	  node->resolution = LDPR_PREVAILING_DEF_IRONLY;
	  for (alias = node->same_body; alias; alias = alias->next)
	    cgraph_make_decl_local (alias->decl);
	  if (node->same_comdat_group)
	    /* cgraph_externally_visible_p has already checked all other nodes
	       in the group and they will all be made local.  We need to
	       dissolve the group at once so that the predicate does not
	       segfault though. */
	    dissolve_same_comdat_group_list (node);
	}
      node->local.local = cgraph_local_node_p (node);
    }
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      /* weak flag makes no sense on local variables.  */
      gcc_assert (!DECL_WEAK (vnode->decl)
      		  || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
      /* In several cases declarations can not be common:

	 - when declaration has initializer
	 - when it is in weak
	 - when it has specific section
	 - when it resides in non-generic address space.
	 - if declaration is local, it will get into .local common section
	   so common flag is not needed.  Frontends still produce these in
	   certain cases, such as for:

	     static int a __attribute__ ((common))

	 Canonicalize things here and clear the redundant flag.  */
      if (DECL_COMMON (vnode->decl)
	  && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
	      || (DECL_INITIAL (vnode->decl)
		  && DECL_INITIAL (vnode->decl) != error_mark_node)
	      || DECL_WEAK (vnode->decl)
	      || DECL_SECTION_NAME (vnode->decl) != NULL
	      || ! (ADDR_SPACE_GENERIC_P
		    (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
	DECL_COMMON (vnode->decl) = 0;
    }
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    {
      if (!vnode->finalized)
        continue;
      if (vnode->needed
	  && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl))
	  && (((!whole_program
		/* We can privatize comdat readonly variables whose address is
		   not taken, but doing so is not going to bring us
		   optimization opportunities until we start reordering
		   datastructures.  */
		|| DECL_COMDAT (vnode->decl)
		|| DECL_WEAK (vnode->decl))
	       /* When doing linktime optimizations, all hidden symbols will
		  become local.  */
	       && (!in_lto_p
		   || (DECL_VISIBILITY (vnode->decl) != VISIBILITY_HIDDEN
		       && DECL_VISIBILITY (vnode->decl) != VISIBILITY_INTERNAL)
		   /* We can get prevailing decision in other object file.
		      In this case we do not set used_from_object_file.  */
		   || !vnode->finalized))
	      || DECL_PRESERVE_P (vnode->decl)
	      || varpool_used_from_object_file_p (vnode)
	      || pointer_set_contains (aliased_vnodes, vnode)
	      || lookup_attribute ("externally_visible",
				   DECL_ATTRIBUTES (vnode->decl))))
	vnode->externally_visible = true;
      else
	vnode->externally_visible = false;
      if (!vnode->externally_visible)
	{
	  gcc_assert (in_lto_p || whole_program || !TREE_PUBLIC (vnode->decl));
	  cgraph_make_decl_local (vnode->decl);
	  vnode->resolution = LDPR_PREVAILING_DEF_IRONLY;
	}
     gcc_assert (TREE_STATIC (vnode->decl));
    }
  pointer_set_destroy (aliased_nodes);
  pointer_set_destroy (aliased_vnodes);

  if (dump_file)
    {
      fprintf (dump_file, "\nMarking local functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.local)
	  fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.externally_visible)
	  fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible variables:");
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
	if (vnode->externally_visible)
	  fprintf (dump_file, " %s", varpool_node_name (vnode));
      fprintf (dump_file, "\n\n");
    }
  cgraph_function_flags_ready = true;
  return 0;
}
845
b20996ff
JH
846/* Local function pass handling visibilities. This happens before LTO streaming
847 so in particular -fwhole-program should be ignored at this level. */
848
849static unsigned int
850local_function_and_variable_visibility (void)
851{
852 return function_and_variable_visibility (flag_whole_program && !flag_lto && !flag_whopr);
853}
854
/* Pass descriptor for the local visibility pass run before LTO
   streaming; see local_function_and_variable_visibility.  */

struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
{
 {
  SIMPLE_IPA_PASS,
  "visibility",				/* name */
  NULL,					/* gate */
  local_function_and_variable_visibility,/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect			/* todo_flags_finish */
 }
};
fed5ae11 874
/* Gate for the whole-program visibility pass: do not re-run on the
   ltrans (local transformation) stage of WHOPR compilation.  */

static bool
gate_whole_program_function_and_variable_visibility (void)
{
  return !flag_ltrans;
}
882
/* Bring functions local at LTO time with -fwhole-program.  Runs the
   shared visibility computation, then marks every still-visible
   function/variable as needed so it is not reclaimed.  */

static unsigned int
whole_program_function_and_variable_visibility (void)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  function_and_variable_visibility (flag_whole_program);

  for (node = cgraph_nodes; node; node = node->next)
    if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
	&& node->local.finalized)
      cgraph_mark_needed_node (node);
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
      varpool_mark_needed_node (vnode);
  if (dump_file)
    {
      fprintf (dump_file, "\nNeeded variables:");
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
	if (vnode->needed)
	  fprintf (dump_file, " %s", varpool_node_name (vnode));
      fprintf (dump_file, "\n\n");
    }
  /* Readonly/nonaddressable discovery changes optimization-relevant
     flags, so skip it at -O0.  */
  if (optimize)
    ipa_discover_readonly_nonaddressable_vars ();
  return 0;
}
912
/* Pass descriptor for the whole-program visibility pass run at LTO
   time; see whole_program_function_and_variable_visibility.  */

struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
{
 {
  IPA_PASS,
  "whole-program",			/* name */
  gate_whole_program_function_and_variable_visibility,/* gate */
  whole_program_function_and_variable_visibility,/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect			/* todo_flags_finish */
 },
 NULL,					/* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,					/* function_transform */
 NULL,					/* variable_transform */
};
fed5ae11
DK
941
942/* Hash a cgraph node set element. */
943
944static hashval_t
945hash_cgraph_node_set_element (const void *p)
946{
947 const_cgraph_node_set_element element = (const_cgraph_node_set_element) p;
948 return htab_hash_pointer (element->node);
949}
950
951/* Compare two cgraph node set elements. */
952
953static int
954eq_cgraph_node_set_element (const void *p1, const void *p2)
955{
956 const_cgraph_node_set_element e1 = (const_cgraph_node_set_element) p1;
957 const_cgraph_node_set_element e2 = (const_cgraph_node_set_element) p2;
958
959 return e1->node == e2->node;
960}
961
/* Create a new, empty cgraph node set.  The set is GC-allocated and pairs
   a hash table (for O(1) membership tests) with a vector (for ordered
   iteration); both structures are kept in sync by add/remove below.  */

cgraph_node_set
cgraph_node_set_new (void)
{
  cgraph_node_set new_node_set;

  new_node_set = ggc_alloc_cgraph_node_set_def ();
  new_node_set->hashtab = htab_create_ggc (10,
					   hash_cgraph_node_set_element,
					   eq_cgraph_node_set_element,
					   NULL);
  new_node_set->nodes = NULL;
  return new_node_set;
}
977
/* Add cgraph_node NODE to cgraph_node_set SET.  Adding a node already in
   the set is a no-op (checked, not an error).  */

void
cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
{
  void **slot;
  cgraph_node_set_element element;
  struct cgraph_node_set_element_def dummy;

  /* A stack dummy is enough for lookup: hashing/equality only read
     the NODE field.  */
  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, INSERT);

  if (*slot != HTAB_EMPTY_ENTRY)
    {
      /* Already present; verify the hash table and the vector agree.  */
      element = (cgraph_node_set_element) *slot;
      gcc_assert (node == element->node
		  && (VEC_index (cgraph_node_ptr, set->nodes, element->index)
		      == node));
      return;
    }

  /* Insert node into hash table.  The element remembers the vector index
     so removal can find the slot to compact.  */
  element = ggc_alloc_cgraph_node_set_element_def ();
  element->node = node;
  element->index = VEC_length (cgraph_node_ptr, set->nodes);
  *slot = element;

  /* Insert into node vector.  */
  VEC_safe_push (cgraph_node_ptr, gc, set->nodes, node);
}
1008
/* Remove cgraph_node NODE from cgraph_node_set SET.  Removing a node that
   is not in the set is a no-op.  */

void
cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
{
  void **slot, **last_slot;
  cgraph_node_set_element element, last_element;
  struct cgraph_node *last_node;
  struct cgraph_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  if (slot == NULL)
    return;

  element = (cgraph_node_set_element) *slot;
  gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
	      == node);

  /* Remove from vector. We do this by swapping node with the last element
     of the vector.  */
  last_node = VEC_pop (cgraph_node_ptr, set->nodes);
  if (last_node != node)
    {
      /* NODE was not last, so the popped element must be re-homed into
	 NODE's old slot and its cached index updated to match.  */
      dummy.node = last_node;
      last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
      last_element = (cgraph_node_set_element) *last_slot;
      gcc_assert (last_element);

      /* Move the last element to the original spot of NODE.  */
      last_element->index = element->index;
      VEC_replace (cgraph_node_ptr, set->nodes, last_element->index,
		   last_node);
    }

  /* Remove element from hash table.  */
  htab_clear_slot (set->hashtab, slot);
  ggc_free (element);
}
1048
1049/* Find NODE in SET and return an iterator to it if found. A null iterator
1050 is returned if NODE is not in SET. */
1051
1052cgraph_node_set_iterator
1053cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node)
1054{
1055 void **slot;
1056 struct cgraph_node_set_element_def dummy;
1057 cgraph_node_set_element element;
1058 cgraph_node_set_iterator csi;
1059
1060 dummy.node = node;
1061 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1062 if (slot == NULL)
1063 csi.index = (unsigned) ~0;
1064 else
1065 {
1066 element = (cgraph_node_set_element) *slot;
1067 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
1068 == node);
1069 csi.index = element->index;
1070 }
1071 csi.set = set;
1072
1073 return csi;
1074}
1075
1076/* Dump content of SET to file F. */
1077
1078void
1079dump_cgraph_node_set (FILE *f, cgraph_node_set set)
1080{
1081 cgraph_node_set_iterator iter;
1082
1083 for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter))
1084 {
1085 struct cgraph_node *node = csi_node (iter);
0a5fa5a1 1086 fprintf (f, " %s/%i", cgraph_node_name (node), node->uid);
fed5ae11 1087 }
0a5fa5a1 1088 fprintf (f, "\n");
fed5ae11
DK
1089}
1090
/* Dump content of SET to stderr.  Convenience wrapper meant to be called
   from the debugger.  */

DEBUG_FUNCTION void
debug_cgraph_node_set (cgraph_node_set set)
{
  dump_cgraph_node_set (stderr, set);
}
1098
2942c502
JH
1099/* Hash a varpool node set element. */
1100
1101static hashval_t
1102hash_varpool_node_set_element (const void *p)
1103{
1104 const_varpool_node_set_element element = (const_varpool_node_set_element) p;
1105 return htab_hash_pointer (element->node);
1106}
1107
1108/* Compare two varpool node set elements. */
1109
1110static int
1111eq_varpool_node_set_element (const void *p1, const void *p2)
1112{
1113 const_varpool_node_set_element e1 = (const_varpool_node_set_element) p1;
1114 const_varpool_node_set_element e2 = (const_varpool_node_set_element) p2;
1115
1116 return e1->node == e2->node;
1117}
1118
/* Create a new, empty varpool node set.  Mirrors cgraph_node_set_new:
   GC-allocated, with a hash table for membership and a vector for
   ordered iteration.  */

varpool_node_set
varpool_node_set_new (void)
{
  varpool_node_set new_node_set;

  new_node_set = ggc_alloc_varpool_node_set_def ();
  new_node_set->hashtab = htab_create_ggc (10,
					   hash_varpool_node_set_element,
					   eq_varpool_node_set_element,
					   NULL);
  new_node_set->nodes = NULL;
  return new_node_set;
}
1134
/* Add varpool_node NODE to varpool_node_set SET.  Adding a node already
   in the set is a no-op (checked, not an error).  */

void
varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
{
  void **slot;
  varpool_node_set_element element;
  struct varpool_node_set_element_def dummy;

  /* A stack dummy suffices for lookup: hash/equality only read NODE.  */
  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, INSERT);

  if (*slot != HTAB_EMPTY_ENTRY)
    {
      /* Already present; verify hash table and vector agree.  */
      element = (varpool_node_set_element) *slot;
      gcc_assert (node == element->node
		  && (VEC_index (varpool_node_ptr, set->nodes, element->index)
		      == node));
      return;
    }

  /* Insert node into hash table.  The element records the vector index
     so removal can compact the vector.  */
  element = ggc_alloc_varpool_node_set_element_def ();
  element->node = node;
  element->index = VEC_length (varpool_node_ptr, set->nodes);
  *slot = element;

  /* Insert into node vector.  */
  VEC_safe_push (varpool_node_ptr, gc, set->nodes, node);
}
1165
/* Remove varpool_node NODE from varpool_node_set SET.  Removing a node
   that is not in the set is a no-op.  */

void
varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
{
  void **slot, **last_slot;
  varpool_node_set_element element, last_element;
  struct varpool_node *last_node;
  struct varpool_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  if (slot == NULL)
    return;

  element = (varpool_node_set_element) *slot;
  gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
	      == node);

  /* Remove from vector. We do this by swapping node with the last element
     of the vector.  */
  last_node = VEC_pop (varpool_node_ptr, set->nodes);
  if (last_node != node)
    {
      /* NODE was not last: re-home the popped element into NODE's slot
	 and refresh its cached index.  */
      dummy.node = last_node;
      last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
      last_element = (varpool_node_set_element) *last_slot;
      gcc_assert (last_element);

      /* Move the last element to the original spot of NODE.  */
      last_element->index = element->index;
      VEC_replace (varpool_node_ptr, set->nodes, last_element->index,
		   last_node);
    }

  /* Remove element from hash table.  */
  htab_clear_slot (set->hashtab, slot);
  ggc_free (element);
}
1205
/* Find NODE in SET and return an iterator to it if found.  A null iterator
   (index == (unsigned) ~0) is returned if NODE is not in SET.  */

varpool_node_set_iterator
varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
{
  void **slot;
  struct varpool_node_set_element_def dummy;
  varpool_node_set_element element;
  varpool_node_set_iterator vsi;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  if (slot == NULL)
    vsi.index = (unsigned) ~0;
  else
    {
      /* Cross-check the element's cached index against the vector.  */
      element = (varpool_node_set_element) *slot;
      gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
		  == node);
      vsi.index = element->index;
    }
  vsi.set = set;

  return vsi;
}
1232
1233/* Dump content of SET to file F. */
1234
1235void
1236dump_varpool_node_set (FILE *f, varpool_node_set set)
1237{
1238 varpool_node_set_iterator iter;
1239
1240 for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
1241 {
1242 struct varpool_node *node = vsi_node (iter);
0a5fa5a1 1243 fprintf (f, " %s", varpool_node_name (node));
2942c502 1244 }
0a5fa5a1 1245 fprintf (f, "\n");
2942c502
JH
1246}
1247
/* Dump content of SET to stderr.  Convenience wrapper meant to be called
   from the debugger.  */

DEBUG_FUNCTION void
debug_varpool_node_set (varpool_node_set set)
{
  dump_varpool_node_set (stderr, set);
}
1255
1256
e65bb9be
JH
/* Simple ipa profile pass propagating frequencies across the callgraph.  */

static unsigned int
ipa_profile (void)
{
  struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  struct cgraph_edge *e;
  int order_pos;
  bool something_changed = false;
  int i;

  order_pos = cgraph_postorder (order);
  /* First sweep: walk the postorder array backwards and propagate
     frequencies into every local function.  When propagation changes a
     node, mark its local callees via the aux pointer so they get
     reconsidered.  */
  for (i = order_pos - 1; i >= 0; i--)
    {
      if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
	{
	  for (e = order[i]->callees; e; e = e->next_callee)
	    if (e->callee->local.local && !e->callee->aux)
	      {
		something_changed = true;
		e->callee->aux = (void *)1;
	      }
	}
      /* Clear aux once this node is processed.  Marks placed on nodes
	 already behind the walk survive into the next sweep; marks ahead
	 of the walk are consumed here, but such nodes are visited anyway
	 in this sweep (NOTE(review): this appears intentional — the
	 fixpoint loop below relies on it; confirm before restructuring).  */
      order[i]->aux = NULL;
    }

  /* Fixpoint sweeps: revisit only nodes that were marked, until no sweep
     produces a new mark.  */
  while (something_changed)
    {
      something_changed = false;
      for (i = order_pos - 1; i >= 0; i--)
	{
	  if (order[i]->aux && cgraph_propagate_frequency (order[i]))
	    {
	      for (e = order[i]->callees; e; e = e->next_callee)
		if (e->callee->local.local && !e->callee->aux)
		  {
		    something_changed = true;
		    e->callee->aux = (void *)1;
		  }
	    }
	  order[i]->aux = NULL;
	}
    }
  free (order);
  return 0;
}
1303
1304static bool
1305gate_ipa_profile (void)
1306{
1307 return flag_ipa_profile;
1308}
1309
/* Pass descriptor for the IPA profile propagation pass above.  */

struct ipa_opt_pass_d pass_ipa_profile =
{
 {
  IPA_PASS,
  "ipa-profile", /* name */
  gate_ipa_profile, /* gate */
  ipa_profile, /* execute */
  NULL, /* sub */
  NULL, /* next */
  0, /* static_pass_number */
  TV_IPA_PROFILE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0 /* todo_flags_finish */
 },
 NULL, /* generate_summary */
 NULL, /* write_summary */
 NULL, /* read_summary */
 NULL, /* write_optimization_summary */
 NULL, /* read_optimization_summary */
 NULL, /* stmt_fixup */
 0, /* TODOs */
 NULL, /* function_transform */
 NULL /* variable_transform */
};
9e97ff61
JH
1337
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  static int counter = 0;  /* Makes each generated name unique.  */
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  /* Build a "void fn (void)" declaration with a void RESULT_DECL.  */
  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  /* Without target ctor/dtor support, collect2 must be able to see the
     function, so make it public and keep it from being removed.  */
  if (!targetm.have_ctors_dtors)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  /* Record the ctor/dtor marking and its priority on the decl.  */
  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_add_new_function (decl, false);

  /* Restore global state changed above.  */
  set_cfun (NULL);
  current_function_decl = NULL;
}
1407
1408
/* A vector of FUNCTION_DECLs declared as static constructors;
   populated by record_cdtor_fn, consumed and freed by ipa_cdtor_merge.  */
static VEC(tree, heap) *static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors;
   populated by record_cdtor_fn, consumed and freed by ipa_cdtor_merge.  */
static VEC(tree, heap) *static_dtors;
1413
1414/* When target does not have ctors and dtors, we call all constructor
1415 and destructor by special initialization/destruction function
1416 recognized by collect2.
1417
1418 When we are going to build this function, collect all constructors and
1419 destructors and turn them into normal functions. */
1420
1421static void
1422record_cdtor_fn (struct cgraph_node *node)
1423{
1424 if (DECL_STATIC_CONSTRUCTOR (node->decl))
1425 VEC_safe_push (tree, heap, static_ctors, node->decl);
1426 if (DECL_STATIC_DESTRUCTOR (node->decl))
1427 VEC_safe_push (tree, heap, static_dtors, node->decl);
1428 node = cgraph_node (node->decl);
1429 node->local.disregard_inline_limits = 1;
1430}
1431
/* Define global constructors/destructor functions for the CDTORS, of
   which they are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, VEC (tree, heap) *cdtors)
{
  size_t i,j;
  size_t len = VEC_length (tree, cdtors);

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      /* Advance J past the batch [I, J) of cdtors sharing the priority
	 of the first one.  */
      do
	{
	  priority_type p;
	  fn = VEC_index (tree, cdtors, j);
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and target supports them, do nothing.  */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Build a statement list calling each function of the batch in
	 order, stripping its ctor/dtor marking (the merged function
	 created below takes over that role).  */
      for (;i < j; i++)
	{
	  tree call;
	  fn = VEC_index (tree, cdtors, i);
	  call = build_call_expr (fn, 0);
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them.  */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the function of like
	 priority.  */
      cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
    }
}
1496
1497/* Comparison function for qsort. P1 and P2 are actually of type
1498 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1499 used to determine the sort order. */
1500
1501static int
1502compare_ctor (const void *p1, const void *p2)
1503{
1504 tree f1;
1505 tree f2;
1506 int priority1;
1507 int priority2;
1508
1509 f1 = *(const tree *)p1;
1510 f2 = *(const tree *)p2;
1511 priority1 = DECL_INIT_PRIORITY (f1);
1512 priority2 = DECL_INIT_PRIORITY (f2);
1513
1514 if (priority1 < priority2)
1515 return -1;
1516 else if (priority1 > priority2)
1517 return 1;
1518 else
1519 /* Ensure a stable sort. Constructors are executed in backwarding
1520 order to make LTO initialize braries first. */
1521 return DECL_UID (f2) - DECL_UID (f1);
1522}
1523
1524/* Comparison function for qsort. P1 and P2 are actually of type
1525 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1526 used to determine the sort order. */
1527
1528static int
1529compare_dtor (const void *p1, const void *p2)
1530{
1531 tree f1;
1532 tree f2;
1533 int priority1;
1534 int priority2;
1535
1536 f1 = *(const tree *)p1;
1537 f2 = *(const tree *)p2;
1538 priority1 = DECL_FINI_PRIORITY (f1);
1539 priority2 = DECL_FINI_PRIORITY (f2);
1540
1541 if (priority1 < priority2)
1542 return -1;
1543 else if (priority1 > priority2)
1544 return 1;
1545 else
1546 /* Ensure a stable sort. */
1547 return DECL_UID (f1) - DECL_UID (f2);
1548}
1549
/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
build_cdtor_fns (void)
{
  if (!VEC_empty (tree, static_ctors))
    {
      /* Only reachable when the target lacks ctor/dtor support or when
	 merging at LTO time (see gate_ipa_cdtor_merge).  */
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      VEC_qsort (tree, static_ctors, compare_ctor);
      build_cdtor (/*ctor_p=*/true, static_ctors);
    }

  if (!VEC_empty (tree, static_dtors))
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      VEC_qsort (tree, static_dtors, compare_dtor);
      build_cdtor (/*ctor_p=*/false, static_dtors);
    }
}
1571
/* Look for constructors and destructors and produce function calling them.
   This is needed for targets not supporting ctors or dtors, but we perform the
   transformation also at linktime to merge possibly numerous
   constructors/destructors into single function to improve code locality and
   reduce size.  */

static unsigned int
ipa_cdtor_merge (void)
{
  struct cgraph_node *node;
  for (node = cgraph_nodes; node; node = node->next)
    if (node->analyzed
	&& (DECL_STATIC_CONSTRUCTOR (node->decl)
	    || DECL_STATIC_DESTRUCTOR (node->decl)))
       record_cdtor_fn (node);
  build_cdtor_fns ();
  /* The worklists are only needed for the duration of this pass.  */
  VEC_free (tree, heap, static_ctors);
  VEC_free (tree, heap, static_dtors);
  return 0;
}
1592
1593/* Perform the pass when we have no ctors/dtors support
1594 or at LTO time to merge multiple constructors into single
1595 function. */
1596
1597static bool
1598gate_ipa_cdtor_merge (void)
1599{
1600 return !targetm.have_ctors_dtors || (optimize && in_lto_p);
1601}
1602
/* Pass descriptor for the constructor/destructor merging pass above.  */

struct ipa_opt_pass_d pass_ipa_cdtor_merge =
{
 {
  IPA_PASS,
  "cdtor", /* name */
  gate_ipa_cdtor_merge, /* gate */
  ipa_cdtor_merge, /* execute */
  NULL, /* sub */
  NULL, /* next */
  0, /* static_pass_number */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0 /* todo_flags_finish */
 },
 NULL, /* generate_summary */
 NULL, /* write_summary */
 NULL, /* read_summary */
 NULL, /* write_optimization_summary */
 NULL, /* read_optimization_summary */
 NULL, /* stmt_fixup */
 0, /* TODOs */
 NULL, /* function_transform */
 NULL /* variable_transform */
};