1 /* LTO partitioning logic routines.
2 Copyright (C) 2009-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
27 #include "lto-streamer.h"
30 #include "ipa-inline.h"
31 #include "ipa-utils.h"
32 #include "lto-partition.h"
34 /* Classification of symbols into classes WRT partitioning. */
37 /* External declarations are ignored by partitioning algorithms and they are
38 added into the boundary later via compute_ltrans_boundary. */
40 /* Partitioned symbols are put into one of partitions. */
42 /* Duplicated symbols (such as comdat or constant pool references) are
43 copied into every node needing them via add_symbol_to_partition. */
/* Global vector of all LTRANS partitions produced by the partitioners
   below; consumed later by the WPA streaming code.  */
47 vec
<ltrans_partition
> ltrans_partitions
;
/* Forward declaration (add_symbol_to_partition and
   add_references_to_partition are mutually recursive).  */
49 static void add_symbol_to_partition (ltrans_partition part
, symtab_node node
);
51 /* Classify symbol NODE. */
/* Returns one of SYMBOL_EXTERNAL (ignored by partitioning; added to the
   boundary later), SYMBOL_DUPLICATE (copied into every partition that
   needs it) or SYMBOL_PARTITION (placed into exactly one partition).
   NOTE(review): this listing appears to have lost brace-only and some
   interior lines during extraction; code text is kept byte-identical.  */
54 get_symbol_class (symtab_node node
)
56 /* Inline clones are always duplicated.
57 This include external declarations. */
58 cgraph_node
*cnode
= dyn_cast
<cgraph_node
> (node
)
;
59 if (cnode
&& cnode
->global
.inlined_to
)
60 return SYMBOL_DUPLICATE
;
62 /* Weakref aliases are always duplicated. */
63 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (node
->symbol
.decl
)))
64 return SYMBOL_DUPLICATE
;
66 /* External declarations are external. */
67 if (DECL_EXTERNAL (node
->symbol
.decl
))
68 return SYMBOL_EXTERNAL
;
70 if (varpool_node
*vnode
= dyn_cast
<varpool_node
> (node
))
72 /* Constant pool references use local symbol names that can not
73 be promoted global. We should never put into a constant pool
74 objects that can not be duplicated across partitions. */
75 if (DECL_IN_CONSTANT_POOL (node
->symbol
.decl
))
76 return SYMBOL_DUPLICATE
;
77 gcc_checking_assert (vnode
->analyzed
);
79 /* Functions that are cloned may stay in callgraph even if they are unused.
80 Handle them as external; compute_ltrans_boundary take care to make
81 proper things to happen (i.e. to make them appear in the boundary but
82 with body streamed, so clone can be materialized). */
83 else if (!cgraph (node
)->analyzed
)
84 return SYMBOL_EXTERNAL
;
86 /* Comdats are duplicated to every use unless they are keyed.
87 Those do not need duplication. */
88 if (DECL_COMDAT (node
->symbol
.decl
)
89 && !node
->symbol
.force_output
90 && !symtab_used_from_object_file_p ((symtab_node
) node
))
91 return SYMBOL_DUPLICATE
;
93 return SYMBOL_PARTITION
;
96 /* Create new partition with name NAME. */
/* Allocates a fresh ltrans_partition_def, gives it a new symtab encoder
   and registers it in the global LTRANS_PARTITIONS vector.
   NOTE(review): some interior lines (presumably the name/insns field
   initialization and the return statement) are missing from this
   extraction; remaining text kept byte-identical.  */
98 static ltrans_partition
99 new_partition (const char *name
)
101 ltrans_partition part
= XCNEW (struct ltrans_partition_def
)
;
102 part
->encoder
= lto_symtab_encoder_new (false);
105 ltrans_partitions
.safe_push (part
);
109 /* Free memory used by ltrans datastructures. */
/* Destroys every partition's initializers_visited pointer set and then
   releases the global partition vector.  The symtab encoders themselves
   are deliberately NOT freed here (see comment below).  */
112 free_ltrans_partitions (void)
115 ltrans_partition part
;
116 for (idx
= 0; ltrans_partitions
.iterate (idx
, &part
); idx
++)
118 if (part
->initializers_visited
)
119 pointer_set_destroy (part
->initializers_visited
);
120 /* Symtab encoder is freed after streaming. */
123 ltrans_partitions
.release ();
126 /* Return true if symbol is already in some partition. */
/* The symbol's AUX field is used as a counter of how many partitions the
   symbol was added to, so a non-NULL AUX means "already partitioned".  */
129 symbol_partitioned_p (symtab_node node
)
131 return node
->symbol
.aux
;
134 /* Add references into the partition. */
/* Walks NODE's IPA reference list: references to SYMBOL_DUPLICATE
   symbols are copied into PART; references to known-constant variables
   are walked recursively (guarded by PART->initializers_visited to
   terminate on cycles).  */
136 add_references_to_partition (ltrans_partition part
, symtab_node node
)
141 /* Add all duplicated references to the partition. */
142 for (i
= 0; ipa_ref_list_reference_iterate (&node
->symbol
.ref_list
, i
, ref
); i
++)
143 if (get_symbol_class (ref
->referred
) == SYMBOL_DUPLICATE
)
144 add_symbol_to_partition (part
, ref
->referred
);
145 /* References to a readonly variable may be constant folded into its value.
146 Recursively look into the initializers of the constant variable and add
148 else if (is_a
<varpool_node
> (ref
->referred
)
149 && const_value_known_p (ref
->referred
->symbol
.decl
)
150 && !lto_symtab_encoder_in_partition_p (part
->encoder
, ref
->referred
))
152 if (!part
->initializers_visited
)
153 part
->initializers_visited
= pointer_set_create ();
154 if (!pointer_set_insert (part
->initializers_visited
, ref
->referred
))
155 add_references_to_partition (part
, ref
->referred
);
159 /* Helper function for add_symbol_to_partition doing the actual dirty work
160 of adding NODE to PART. */
/* Marks NODE as part of PART's encoder, bumps its partition use counter
   (symbol.aux), and transitively pulls in inline clones, duplicated
   callees, thunks, aliases, references and the whole same-comdat group.
   NOTE(review): brace-only and several short lines were lost in this
   extraction; code text is kept byte-identical.  */
163 add_symbol_to_partition_1 (ltrans_partition part
, symtab_node node
)
165 enum symbol_class c
= get_symbol_class (node
)
;
170 /* If NODE is already there, we have nothing to do. */
171 if (lto_symtab_encoder_in_partition_p (part
->encoder
, (symtab_node
) node
))
174 /* non-duplicated aliases or thunks of a duplicated symbol needs to be output
177 Be lax about comdats; they may or may not be duplicated and we may
178 end up in need to duplicate keyed comdat because it has unkeyed alias. */
179 if (c
== SYMBOL_PARTITION
&& !DECL_COMDAT (node
->symbol
.decl
)
180 && symbol_partitioned_p (node
))
183 /* Be sure that we never try to duplicate partitioned symbol
184 or add external symbol. */
185 gcc_assert (c
!= SYMBOL_EXTERNAL
186 && (c
== SYMBOL_DUPLICATE
|| !symbol_partitioned_p (node
)));
188 lto_set_symtab_encoder_in_partition (part
->encoder
, (symtab_node
) node
);
190 if (symbol_partitioned_p (node
))
192 node
->symbol
.in_other_partition
= 1;
193 if (cgraph_dump_file
)
194 fprintf (cgraph_dump_file
, "Symbol node %s now used in multiple partitions\n",
195 symtab_node_name (node
));
/* AUX doubles as a use counter: incremented once per partition added.  */
197 node
->symbol
.aux
= (void *)((size_t)node
->symbol
.aux
+ 1);
199 if (cgraph_node
*cnode
= dyn_cast
<cgraph_node
> (node
))
201 struct cgraph_edge
*e
;
202 part
->insns
+= inline_summary (cnode
)->self_size
;
204 /* Add all inline clones and callees that are duplicated. */
205 for (e
= cnode
->callees
; e
; e
= e
->next_callee
)
206 if (!e
->inline_failed
)
207 add_symbol_to_partition_1 (part
, (symtab_node
) e
->callee
);
208 else if (get_symbol_class ((symtab_node
) e
->callee
) == SYMBOL_DUPLICATE
)
209 add_symbol_to_partition (part
, (symtab_node
) e
->callee
);
211 /* Add all thunks associated with the function. */
212 for (e
= cnode
->callers
; e
; e
= e
->next_caller
)
213 if (e
->caller
->thunk
.thunk_p
)
214 add_symbol_to_partition_1 (part
, (symtab_node
) e
->caller
);
217 add_references_to_partition (part
, node
);
219 /* Add all aliases associated with the symbol. */
220 for (i
= 0; ipa_ref_list_referring_iterate (&node
->symbol
.ref_list
, i
, ref
); i
++)
221 if (ref
->use
== IPA_REF_ALIAS
222 && !lookup_attribute ("weakref",
224 (ref
->referring
->symbol
.decl
)))
225 add_symbol_to_partition_1 (part
, ref
->referring
);
227 /* Ensure that SAME_COMDAT_GROUP lists all always added in a group. */
228 if (node
->symbol
.same_comdat_group
)
229 for (node1
= node
->symbol
.same_comdat_group
;
230 node1
!= node
; node1
= node1
->symbol
.same_comdat_group
)
232 bool added
= add_symbol_to_partition_1 (part
, node1
);
238 /* If symbol NODE is really part of other symbol's definition (i.e. it is
239 internal label, thunk, alias or so), return the outer symbol.
240 When add_symbol_to_partition_1 is called on the outer symbol it must
241 eventually add NODE, too. */
/* For functions: walk to the alias target and then to the inline-clone
   root.  For variables: walk to the ultimate alias target.  Weakrefs
   stand on their own (the missing early-return line for that case was
   apparently dropped by the extraction).  */
243 contained_in_symbol (symtab_node node
)
245 /* Weakrefs are never contained in anything. */
246 if (lookup_attribute ("weakref",
247 DECL_ATTRIBUTES (node
->symbol
.decl
)))
249 if (cgraph_node
*cnode
= dyn_cast
<cgraph_node
> (node
))
251 cnode
= cgraph_function_node (cnode
, NULL
)
;
252 if (cnode
->global
.inlined_to
)
253 cnode
= cnode
->global
.inlined_to
;
254 return (symtab_node
) cnode
;
256 else if (varpool_node
*vnode
= dyn_cast
<varpool_node
> (node
))
257 return (symtab_node
) varpool_variable_node (vnode
, NULL
);
261 /* Add symbol NODE to partition. When definition of NODE is part
262 of other symbol definition, add the other symbol, too. */
/* Public entry point: climbs to the outermost containing symbol via
   contained_in_symbol before delegating to add_symbol_to_partition_1.  */
265 add_symbol_to_partition (ltrans_partition part
, symtab_node node
)
269 /* Verify that we do not try to duplicate something that can not be. */
270 gcc_checking_assert (get_symbol_class (node
) == SYMBOL_DUPLICATE
271 || !symbol_partitioned_p (node
));
273 while ((node1
= contained_in_symbol (node
)) != node
)
276 /* If we have duplicated symbol contained in something we can not duplicate,
277 we are very badly screwed. The other way is possible, so we do not
278 assert this in add_symbol_to_partition_1.
280 Be lax about comdats; they may or may not be duplicated and we may
281 end up in need to duplicate keyed comdat because it has unkeyed alias. */
282 gcc_assert (get_symbol_class (node
) == SYMBOL_DUPLICATE
283 || DECL_COMDAT (node
->symbol
.decl
)
284 || !symbol_partitioned_p (node
));
285 add_symbol_to_partition_1 (part
, node
);
288 /* Undo all additions until number of cgraph nodes in PARTITION is N_CGRAPH_NODES
289 and number of varpool nodes is N_VARPOOL_NODES. */
/* Pops symbols off the partition's encoder until only N_NODES remain,
   reversing the bookkeeping done by add_symbol_to_partition_1
   (insns estimate, AUX use counter).  Used by the balanced-map
   algorithm to rewind past a cost optimum.  */
292 undo_partition (ltrans_partition partition
, unsigned int n_nodes
)
294 while (lto_symtab_encoder_size (partition
->encoder
) > (int)n_nodes
)
296 symtab_node node
= lto_symtab_encoder_deref (partition
->encoder
,
299 /* After UNDO we no longer know what was visited. */
300 if (partition
->initializers_visited
)
301 pointer_set_destroy (partition
->initializers_visited
);
302 partition
->initializers_visited
= NULL
;
304 if (cgraph_node
*cnode
= dyn_cast
<cgraph_node
> (node
))
305 partition
->insns
-= inline_summary (cnode
)->self_size
;
306 lto_symtab_encoder_delete_node (partition
->encoder
, node
);
307 node
->symbol
.aux
= (void *)((size_t)node
->symbol
.aux
- 1);
311 /* Group cgraph nodes by input files. This is used mainly for testing
/* -flto-partition=1to1: one LTRANS partition per original input file,
   keyed by each symbol's lto_file_data through a pointer map.  Symbols
   without file data fall back to the first (or an anonymous) partition.
   NOTE(review): several short lines (continue;, if (!slot), etc.) are
   missing from this extraction; remaining text kept byte-identical.  */
315 lto_1_to_1_map (void)
318 struct lto_file_decl_data
*file_data
;
319 struct pointer_map_t
*pmap
;
320 ltrans_partition partition
;
324 pmap
= pointer_map_create ();
326 FOR_EACH_SYMBOL (node
)
328 if (get_symbol_class (node
) != SYMBOL_PARTITION
329 || symbol_partitioned_p (node
))
332 file_data
= node
->symbol
.lto_file_data
;
336 slot
= pointer_map_contains (pmap
, file_data
)
;
338 partition
= (ltrans_partition
) *slot
;
341 partition
= new_partition (file_data
->file_name
)
;
342 slot
= pointer_map_insert (pmap
, file_data
)
;
347 else if (!file_data
&& ltrans_partitions
.length ())
348 partition
= ltrans_partitions
[0];
351 partition
= new_partition ("");
352 slot
= pointer_map_insert (pmap
, NULL
)
;
357 add_symbol_to_partition (partition
, (symtab_node
) node
);
360 /* If the cgraph is empty, create one cgraph node set so that there is still
361 an output file for any variables that need to be exported in a DSO. */
363 new_partition ("empty");
365 pointer_map_destroy (pmap
);
369 /* Maximal partitioning. Put every new symbol into new partition if possible. */
/* NOTE(review): the function's defining line is missing from this
   extraction (presumably `static void lto_max_map (void)` per the
   -flto-partition=max mode); remaining text kept byte-identical.  */
375 ltrans_partition partition
;
378 FOR_EACH_SYMBOL (node
)
380 if (get_symbol_class (node
) != SYMBOL_PARTITION
381 || symbol_partitioned_p (node
))
383 partition
= new_partition (symtab_node_asm_name (node
));
384 add_symbol_to_partition (partition
, (symtab_node
) node
);
/* Guarantee at least one output partition exists.  */
388 new_partition ("empty");
391 /* Helper function for qsort; sort nodes by order. */
/* Compares two cgraph_node** by symbol.order.  Note the B - A form:
   subtraction-based comparators can overflow for extreme values —
   presumably safe here because symbol orders are small; verify.  */
393 node_cmp (const void *pa
, const void *pb
)
395 const struct cgraph_node
*a
= *(const struct cgraph_node
* const *) pa
;
396 const struct cgraph_node
*b
= *(const struct cgraph_node
* const *) pb
;
397 return b
->symbol
.order
- a
->symbol
.order
;
400 /* Helper function for qsort; sort nodes by order. */
/* Same comparator as node_cmp, but for varpool_node**.  */
402 varpool_node_cmp (const void *pa
, const void *pb
)
404 const struct varpool_node
*a
= *(const struct varpool_node
* const *) pa
;
405 const struct varpool_node
*b
= *(const struct varpool_node
* const *) pb
;
406 return b
->symbol
.order
- a
->symbol
.order
;
409 /* Group cgraph nodes into equally-sized partitions.
411 The partitioning algorithm is simple: nodes are taken in predefined order.
412 The order corresponds to the order we want functions to have in the final
413 output. In the future this will be given by function reordering pass, but
414 at the moment we use the topological order, which is a good approximation.
416 The goal is to partition this linear order into intervals (partitions) so
417 that all the partitions have approximately the same size and the number of
418 callgraph or IPA reference edges crossing boundaries is minimal.
420 This is a lot faster (O(n) in size of callgraph) than algorithms doing
421 priority-based graph clustering that are generally O(n^2) and, since
422 WHOPR is designed to make things go well across partitions, it leads
425 We compute the expected size of a partition as:
427 max (total_size / lto_partitions, min_partition_size)
429 We use dynamic expected size of partition so small programs are partitioned
430 into enough partitions to allow use of multiple CPUs, while large programs
431 are not partitioned too much. Creating too many partitions significantly
432 increases the streaming overhead.
434 In the future, we would like to bound the maximal size of partitions so as
435 to prevent the LTRANS stage from consuming too much memory. At the moment,
436 however, the WPA stage is the most memory intensive for large benchmarks,
437 since too many types and declarations are read into memory.
439 The function implements a simple greedy algorithm. Nodes are being added
440 to the current partition until after 3/4 of the expected partition size is
441 reached. Past this threshold, we keep track of boundary size (number of
442 edges going to other partitions) and continue adding functions until after
443 the current partition has grown to twice the expected partition size. Then
444 the process is undone to the point where the minimal ratio of boundary size
445 and in-partition calls was reached. */
/* NOTE(review): many brace/short lines (gotos, breaks, several locals)
   are missing from this extraction — the control flow below cannot be
   fully reconstructed from what is visible; text kept byte-identical.  */
448 lto_balanced_map (void)
451 int n_varpool_nodes
= 0, varpool_pos
= 0;
452 struct cgraph_node
**postorder
=
453 XCNEWVEC (struct cgraph_node
*, cgraph_n_nodes
)
;
454 struct cgraph_node
**order
= XNEWVEC (struct cgraph_node
*, cgraph_max_uid
)
;
455 struct varpool_node
**varpool_order
= NULL
;
456 int i
, postorder_len
;
457 struct cgraph_node
*node
;
458 int total_size
= 0, best_total_size
= 0;
460 ltrans_partition partition
;
461 int last_visited_node
= 0;
462 struct varpool_node
*vnode
;
463 int cost
= 0, internal
= 0;
464 int best_n_nodes
= 0, best_i
= 0, best_cost
=
465 INT_MAX
, best_internal
= 0;
467 int current_order
= -1;
469 FOR_EACH_VARIABLE (vnode
)
470 gcc_assert (!vnode
->symbol
.aux
);
471 /* Until we have better ordering facility, use topological order.
472 Include only nodes we will partition and compute estimate of program
473 size. Note that since nodes that are not partitioned might be put into
474 multiple partitions, this is just an estimate of real size. This is why
475 we keep partition_size updated after every partition is finalized. */
476 postorder_len
= ipa_reverse_postorder (postorder
)
;
478 for (i
= 0; i
< postorder_len
; i
++)
481 if (get_symbol_class ((symtab_node
) node
) == SYMBOL_PARTITION
)
483 order
[n_nodes
++] = node
;
484 total_size
+= inline_summary (node
)->size
;
489 if (!flag_toplevel_reorder
)
491 qsort (order
, n_nodes
, sizeof (struct cgraph_node
*), node_cmp
);
493 FOR_EACH_VARIABLE (vnode
)
494 if (get_symbol_class ((symtab_node
) vnode
) == SYMBOL_PARTITION
)
496 varpool_order
= XNEWVEC (struct varpool_node
*, n_varpool_nodes
)
;
499 FOR_EACH_VARIABLE (vnode
)
500 if (get_symbol_class ((symtab_node
) vnode
) == SYMBOL_PARTITION
)
501 varpool_order
[n_varpool_nodes
++] = vnode
;
502 qsort (varpool_order
, n_varpool_nodes
, sizeof (struct varpool_node
*),
506 /* Compute partition size and create the first partition. */
507 partition_size
= total_size
/ PARAM_VALUE (PARAM_LTO_PARTITIONS
)
;
508 if (partition_size
< PARAM_VALUE (MIN_PARTITION_SIZE
))
509 partition_size
= PARAM_VALUE (MIN_PARTITION_SIZE
)
;
511 partition
= new_partition ("");
512 if (cgraph_dump_file
)
513 fprintf (cgraph_dump_file
, "Total unit size: %i, partition size: %i\n",
514 total_size
, partition_size
);
516 for (i
= 0; i
< n_nodes
; i
++)
518 if (symbol_partitioned_p ((symtab_node
) order
[i
]))
521 current_order
= order
[i
]->symbol
.order
;
523 if (!flag_toplevel_reorder
)
524 while (varpool_pos
< n_varpool_nodes
525 && varpool_order
[varpool_pos
]->symbol
.order
< current_order
)
527 if (!symbol_partitioned_p ((symtab_node
) varpool_order
[varpool_pos
]))
528 add_symbol_to_partition (partition
, (symtab_node
) varpool_order
[varpool_pos
]);
532 add_symbol_to_partition (partition
, (symtab_node
) order
[i
]);
533 total_size
-= inline_summary (order
[i
])->size
;
536 /* Once we added a new node to the partition, we also want to add
537 all referenced variables unless they were already added into some
539 add_symbol_to_partition adds possibly multiple nodes and
540 variables that are needed to satisfy needs of ORDER[i].
541 We remember last visited cgraph and varpool node from last iteration
542 of outer loop that allows us to process every new addition.
544 At the same time we compute size of the boundary into COST. Every
545 callgraph or IPA reference edge leaving the partition contributes into
546 COST. Every edge inside partition was earlier computed as one leaving
547 it and thus we need to subtract it from COST. */
548 while (last_visited_node
< lto_symtab_encoder_size (partition
->encoder
))
550 struct ipa_ref_list
*refs
;
553 symtab_node snode
= lto_symtab_encoder_deref (partition
->encoder
,
556 if (cgraph_node
*node
= dyn_cast
<cgraph_node
> (snode
))
558 struct cgraph_edge
*edge
;
560 refs
= &node
->symbol
.ref_list
;
564 gcc_assert (node
->analyzed
565 || lookup_attribute ("weakref", DECL_ATTRIBUTES (node
->symbol
.decl
)));
567 /* Compute boundary cost of callgraph edges. */
568 for (edge
= node
->callees
; edge
; edge
= edge
->next_callee
)
569 if (edge
->callee
->analyzed
)
571 int edge_cost
= edge
->frequency
;
576 gcc_assert (edge_cost
> 0);
577 index
= lto_symtab_encoder_lookup (partition
->encoder
,
578 (symtab_node
)edge
->callee
);
579 if (index
!= LCC_NOT_FOUND
580 && index
< last_visited_node
- 1)
581 cost
-= edge_cost
, internal
+= edge_cost
;
585 for (edge
= node
->callers
; edge
; edge
= edge
->next_caller
)
587 int edge_cost
= edge
->frequency
;
590 gcc_assert (edge
->caller
->analyzed
);
593 gcc_assert (edge_cost
> 0);
594 index
= lto_symtab_encoder_lookup (partition
->encoder
,
595 (symtab_node
)edge
->caller
);
596 if (index
!= LCC_NOT_FOUND
597 && index
< last_visited_node
- 1)
605 refs
= &snode
->symbol
.ref_list
;
609 /* Compute boundary cost of IPA REF edges and at the same time look into
610 variables referenced from current partition and try to add them. */
611 for (j
= 0; ipa_ref_list_reference_iterate (refs
, j
, ref
); j
++)
612 if (is_a
<varpool_node
> (ref
->referred
))
616 vnode
= ipa_ref_varpool_node (ref
)
;
617 if (!vnode
->finalized
)
619 if (!symbol_partitioned_p ((symtab_node
) vnode
) && flag_toplevel_reorder
620 && get_symbol_class ((symtab_node
) vnode
) == SYMBOL_PARTITION
)
621 add_symbol_to_partition (partition
, (symtab_node
) vnode
);
622 index
= lto_symtab_encoder_lookup (partition
->encoder
,
624 if (index
!= LCC_NOT_FOUND
625 && index
< last_visited_node
- 1)
634 node
= ipa_ref_node (ref
)
;
637 index
= lto_symtab_encoder_lookup (partition
->encoder
,
639 if (index
!= LCC_NOT_FOUND
640 && index
< last_visited_node
- 1)
645 for (j
= 0; ipa_ref_list_referring_iterate (refs
, j
, ref
); j
++)
646 if (is_a
<varpool_node
> (ref
->referring
))
650 vnode
= ipa_ref_referring_varpool_node (ref
)
;
651 gcc_assert (vnode
->finalized
);
652 if (!symbol_partitioned_p ((symtab_node
) vnode
) && flag_toplevel_reorder
653 && get_symbol_class ((symtab_node
) vnode
) == SYMBOL_PARTITION
)
654 add_symbol_to_partition (partition
, (symtab_node
) vnode
);
655 index
= lto_symtab_encoder_lookup (partition
->encoder
,
657 if (index
!= LCC_NOT_FOUND
658 && index
< last_visited_node
- 1)
667 node
= ipa_ref_referring_node (ref
)
;
668 gcc_assert (node
->analyzed
);
669 index
= lto_symtab_encoder_lookup (partition
->encoder
,
671 if (index
!= LCC_NOT_FOUND
672 && index
< last_visited_node
- 1)
679 /* If the partition is large enough, start looking for smallest boundary cost. */
680 if (partition
->insns
< partition_size
* 3 / 4
681 || best_cost
== INT_MAX
683 || (best_internal
* (HOST_WIDE_INT
) cost
684 > (internal
* (HOST_WIDE_INT
)best_cost
)))
685 && partition
->insns
< partition_size
* 5 / 4))
688 best_internal
= internal
;
690 best_n_nodes
= lto_symtab_encoder_size (partition
->encoder
)
;
691 best_total_size
= total_size
;
693 if (cgraph_dump_file
)
694 fprintf (cgraph_dump_file
, "Step %i: added %s/%i, size %i, cost %i/%i "
695 "best %i/%i, step %i\n", i
,
696 cgraph_node_name (order
[i
]), order
[i
]->symbol
.order
,
697 partition
->insns
, cost
, internal
,
698 best_cost
, best_internal
, best_i
);
699 /* Partition is too large, unwind into step when best cost was reached and
700 start new partition. */
701 if (partition
->insns
> 2 * partition_size
)
705 if (cgraph_dump_file
)
706 fprintf (cgraph_dump_file
, "Unwinding %i insertions to step %i\n",
708 undo_partition (partition
, best_n_nodes
);
711 /* When we are finished, avoid creating empty partition. */
712 while (i
< n_nodes
- 1 && symbol_partitioned_p ((symtab_node
) order
[i
+ 1]))
714 if (i
== n_nodes
- 1)
716 partition
= new_partition ("");
717 last_visited_node
= 0;
718 total_size
= best_total_size
;
721 if (cgraph_dump_file
)
722 fprintf (cgraph_dump_file
, "New partition\n");
726 /* Since the size of partitions is just approximate, update the size after
727 we finished current one. */
728 if (npartitions
< PARAM_VALUE (PARAM_LTO_PARTITIONS
))
729 partition_size
= total_size
730 / (PARAM_VALUE (PARAM_LTO_PARTITIONS
) - npartitions
)
;
732 partition_size
= INT_MAX
;
734 if (partition_size
< PARAM_VALUE (MIN_PARTITION_SIZE
))
735 partition_size
= PARAM_VALUE (MIN_PARTITION_SIZE
)
;
740 /* Variables that are not reachable from the code go into last partition. */
741 if (flag_toplevel_reorder
)
743 FOR_EACH_VARIABLE (vnode
)
744 if (get_symbol_class ((symtab_node
) vnode
) == SYMBOL_PARTITION
745 && !symbol_partitioned_p ((symtab_node
) vnode
))
746 add_symbol_to_partition (partition
, (symtab_node
) vnode
);
750 while (varpool_pos
< n_varpool_nodes
)
752 if (!symbol_partitioned_p ((symtab_node
) varpool_order
[varpool_pos
]))
753 add_symbol_to_partition (partition
, (symtab_node
) varpool_order
[varpool_pos
]);
756 free (varpool_order
);
761 /* Mangle NODE symbol name into a local name.
762 This is necessary to do
763 1) if two or more static vars of same assembler name
764 are merged into single ltrans unit.
765 2) if previously static var was promoted hidden to avoid possible conflict
766 with symbols defined out of the LTO world.
/* Renames the decl via clone_function_name (suffix "lto_priv") and
   records the mapping in the symbol's lto_file_data.  Returns early
   (early-return lines missing from this extraction) when the symbol was
   already renamed or carries a unique name.  */
770 privatize_symbol_name (symtab_node node
)
772 tree decl
= node
->symbol
.decl
;
773 const char *name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
775 /* Our renaming machinery do not handle more than one change of assembler name.
776 We should not need more than one anyway. */
777 if (node
->symbol
.lto_file_data
778 && lto_get_decl_name_mapping (node
->symbol
.lto_file_data
, name
) != name
)
780 if (cgraph_dump_file
)
781 fprintf (cgraph_dump_file
,
782 "Not privatizing symbol name: %s. It privatized already.\n",
786 /* Avoid mangling of already mangled clones.
787 ??? should have a flag whether a symbol has a 'private' name already,
788 since we produce some symbols like that i.e. for global constructors
789 that are not really clones. */
790 if (node
->symbol
.unique_name
)
792 if (cgraph_dump_file
)
793 fprintf (cgraph_dump_file
,
794 "Not privatizing symbol name: %s. Has unique name.\n",
798 change_decl_assembler_name (decl
, clone_function_name (decl
, "lto_priv"));
799 if (node
->symbol
.lto_file_data
)
800 lto_record_renamed_decl (node
->symbol
.lto_file_data
, name
,
802 (DECL_ASSEMBLER_NAME (decl
)));
803 if (cgraph_dump_file
)
804 fprintf (cgraph_dump_file
,
805 "Privatizing symbol name: %s -> %s\n",
806 name
, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
)));
810 /* Promote variable VNODE to be static. */
/* Despite the (stale) comment above, this promotes symbol NODE to a
   global with hidden visibility so it can be referenced across LTRANS
   partitions, after privatizing its name to avoid clashes with the
   non-LTO part of the program.  */
813 promote_symbol (symtab_node node
)
815 /* We already promoted ... */
816 if (DECL_VISIBILITY (node
->symbol
.decl
) == VISIBILITY_HIDDEN
817 && DECL_VISIBILITY_SPECIFIED (node
->symbol
.decl
)
818 && TREE_PUBLIC (node
->symbol
.decl
))
821 gcc_checking_assert (!TREE_PUBLIC (node
->symbol
.decl
)
822 && !DECL_EXTERNAL (node
->symbol
.decl
));
823 /* Be sure that newly public symbol does not conflict with anything already
824 defined by the non-LTO part. */
825 privatize_symbol_name (node
);
826 TREE_PUBLIC (node
->symbol
.decl
) = 1;
827 DECL_VISIBILITY (node
->symbol
.decl
) = VISIBILITY_HIDDEN
;
828 DECL_VISIBILITY_SPECIFIED (node
->symbol
.decl
) = true;
829 if (cgraph_dump_file
)
830 fprintf (cgraph_dump_file
,
831 "Promoting as hidden: %s\n", symtab_node_name (node
));
834 /* Return true if NODE needs named section even if it won't land in the partition
836 FIXME: we should really not use named sections for inline clones and master clones. */
/* NOTE(review): several condition/return lines are missing from this
   extraction, so the exact predicate cannot be reconstructed here;
   visible text kept byte-identical.  */
839 may_need_named_section_p (lto_symtab_encoder_t encoder
, symtab_node node
)
841 struct cgraph_node
*cnode
= dyn_cast
<cgraph_node
> (node
)
;
844 if (symtab_real_symbol_p (node
))
846 if (!cnode
->global
.inlined_to
&& !cnode
->clones
)
849 || (lto_symtab_encoder_lookup (encoder
, node
) != LCC_NOT_FOUND
850 && lto_symtab_encoder_encode_body_p (encoder
,
854 /* If NODE represents a static variable. See if there are other variables
855 of the same name in partition ENCODER (or in whole compilation unit if
856 ENCODER is NULL) and if so, mangle the statics. Always mangle all
857 conflicting statics, so we reduce changes of silently miscompiling
858 asm statements referring to them by symbol name. */
861 rename_statics (lto_symtab_encoder_t encoder
, symtab_node node
)
863 tree decl
= node
->symbol
.decl
;
865 tree name
= DECL_ASSEMBLER_NAME (decl
)
;
867 /* See if this is static symbol. */
868 if ((node
->symbol
.externally_visible
869 /* FIXME: externally_visible is somewhat illogically not set for
870 external symbols (i.e. those not defined). Remove this test
871 once this is fixed. */
872 || DECL_EXTERNAL (node
->symbol
.decl
)
873 || !symtab_real_symbol_p (node
))
874 && !may_need_named_section_p (encoder
, node
)
875 && !lookup_attribute ("weakref", DECL_ATTRIBUTES (node
->symbol
.decl
)))
878 /* Now walk symbols sharing the same name and see if there are any conflicts.
879 (all types of symbols counts here, since we can not have static of the
880 same name as external or public symbol.) */
881 for (s
= symtab_node_for_asm (name
);
882 s
; s
= s
->symbol
.next_sharing_asm_name
)
883 if ((symtab_real_symbol_p (s
) || may_need_named_section_p (encoder
, s
))
884 && s
->symbol
.decl
!= node
->symbol
.decl
886 || lto_symtab_encoder_lookup (encoder
, s
) != LCC_NOT_FOUND
))
889 /* OK, no conflict, so we have nothing to do. */
893 if (cgraph_dump_file
)
894 fprintf (cgraph_dump_file
,
895 "Renaming statics with asm name: %s\n", symtab_node_name (node
));
897 /* Assign every symbol in the set that shares the same ASM name an unique
899 for (s
= symtab_node_for_asm (name
); s
;)
900 if ((!s
->symbol
.externally_visible
901 || lookup_attribute ("weakref", DECL_ATTRIBUTES (node
->symbol
.decl
)))
902 && ((symtab_real_symbol_p (s
)
903 && (!DECL_EXTERNAL (node
->symbol
.decl
)
904 || lookup_attribute ("weakref", DECL_ATTRIBUTES (node
->symbol
.decl
)))
905 && !TREE_PUBLIC (node
->symbol
.decl
))
906 || may_need_named_section_p (encoder
, s
))
908 || lto_symtab_encoder_lookup (encoder
, s
) != LCC_NOT_FOUND
))
910 if (privatize_symbol_name (s
))
911 /* Re-start from beginning since we do not know how many symbols changed a name. */
912 s
= symtab_node_for_asm (name
)
;
913 else s
= s
->symbol
.next_sharing_asm_name
;
915 else s
= s
->symbol
.next_sharing_asm_name
;
918 /* Find out all static decls that need to be promoted to global because
919 of cross file sharing. This function must be run in the WPA mode after
920 all inlinees are added. */
/* Two passes: first compute each partition's LTRANS boundary, then walk
   every encoded symbol, rename clashing statics and promote to hidden
   any symbol referenced outside its home partition.  */
923 lto_promote_cross_file_statics (void)
927 gcc_assert (flag_wpa
);
929 /* First compute boundaries. */
930 n_sets
= ltrans_partitions
.length ();
931 for (i
= 0; i
< n_sets
; i
++)
933 ltrans_partition part
934 = ltrans_partitions
[i
];
935 part
->encoder
= compute_ltrans_boundary (part
->encoder
)
;
938 /* Look at boundaries and promote symbols as needed. */
939 for (i
= 0; i
< n_sets
; i
++)
941 lto_symtab_encoder_iterator lsei
;
942 lto_symtab_encoder_t encoder
= ltrans_partitions
[i
]->encoder
;
944 for (lsei
= lsei_start (encoder
); !lsei_end_p (lsei
);
947 symtab_node node
= lsei_node (lsei
)
;
949 /* If symbol is static, rename it if its assembler name clash with
950 anything else in this unit. */
951 rename_statics (encoder
, node
);
953 /* No need to promote if symbol already is externally visible ... */
954 if (node
->symbol
.externally_visible
955 /* ... or if it is part of current partition ... */
956 || lto_symtab_encoder_in_partition_p (encoder
, node
)
957 /* ... or if we do not partition it. This mean that it will
958 appear in every partition referencing it. */
959 || get_symbol_class ((symtab_node
) node
) != SYMBOL_PARTITION
)
962 promote_symbol (node
);
967 /* Rename statics in the whole unit in the case that
968 we do -flto-partition=none. */
/* With no partitioning there is nothing to promote; only clashing
   static names still need renaming, checked against the whole unit
   (NULL encoder).  */
971 lto_promote_statics_nonwpa (void)
974 FOR_EACH_SYMBOL (node
)
975 rename_statics (NULL
, node
);