/* LTO partitioning logic routines.
   Copyright (C) 2009-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "toplev.h"
#include "tree.h"
#include "gcc-symtab.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "tm.h"
#include "cgraph.h"
#include "lto-streamer.h"
#include "timevar.h"
#include "params.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
#include "lto-partition.h"

vec<ltrans_partition> ltrans_partitions;

static void add_symbol_to_partition (ltrans_partition part, symtab_node *node);


/* Create new partition with name NAME.  */

static ltrans_partition
new_partition (const char *name)
{
  ltrans_partition part = XCNEW (struct ltrans_partition_def);
  part->encoder = lto_symtab_encoder_new (false);
  part->name = name;
  part->insns = 0;
  ltrans_partitions.safe_push (part);
  return part;
}

/* Free memory used by ltrans data structures.  */

void
free_ltrans_partitions (void)
{
  unsigned int idx;
  ltrans_partition part;
  for (idx = 0; ltrans_partitions.iterate (idx, &part); idx++)
    {
      if (part->initializers_visited)
        pointer_set_destroy (part->initializers_visited);
      /* Symtab encoder is freed after streaming.  */
      free (part);
    }
  ltrans_partitions.release ();
}

/* Return true if symbol is already in some partition.  */

static inline bool
symbol_partitioned_p (symtab_node *node)
{
  return node->aux;
}

/* Add references into the partition.  */
static void
add_references_to_partition (ltrans_partition part, symtab_node *node)
{
  int i;
  struct ipa_ref *ref;

  /* Add all duplicated references to the partition.  */
  for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
    if (symtab_get_symbol_partitioning_class (ref->referred) == SYMBOL_DUPLICATE)
      add_symbol_to_partition (part, ref->referred);
    /* References to a readonly variable may be constant folded into its value.
       Recursively look into the initializers of the constant variable and add
       references, too.  */
    else if (is_a <varpool_node> (ref->referred)
             && ctor_for_folding (ref->referred->decl) != error_mark_node
             && !lto_symtab_encoder_in_partition_p (part->encoder, ref->referred))
      {
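        /* The INITIALIZERS_VISITED pointer set guards against walking the
           same constant initializer more than once (and against cycles
           among readonly initializers that refer to each other).  */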
        if (!part->initializers_visited)
          part->initializers_visited = pointer_set_create ();
        if (!pointer_set_insert (part->initializers_visited, ref->referred))
          add_references_to_partition (part, ref->referred);
      }
}

/* Helper function for add_symbol_to_partition doing the actual dirty work
   of adding NODE to PART.  */

static bool
add_symbol_to_partition_1 (ltrans_partition part, symtab_node *node)
{
  enum symbol_partitioning_class c = symtab_get_symbol_partitioning_class (node);
  int i;
  struct ipa_ref *ref;
  symtab_node *node1;

  /* If NODE is already there, we have nothing to do.  */
  if (lto_symtab_encoder_in_partition_p (part->encoder, node))
    return true;

  /* Non-duplicated aliases or thunks of a duplicated symbol need to be output
     just once.

     Be lax about comdats; they may or may not be duplicated and we may
     end up needing to duplicate a keyed comdat because it has an unkeyed
     alias.  */
  if (c == SYMBOL_PARTITION && !DECL_COMDAT (node->decl)
      && symbol_partitioned_p (node))
    return false;

  /* Be sure that we never try to duplicate a partitioned symbol
     or add an external symbol.  */
  gcc_assert (c != SYMBOL_EXTERNAL
              && (c == SYMBOL_DUPLICATE || !symbol_partitioned_p (node)));

  lto_set_symtab_encoder_in_partition (part->encoder, node);

  if (symbol_partitioned_p (node))
    {
      node->in_other_partition = 1;
      if (cgraph_dump_file)
        fprintf (cgraph_dump_file, "Symbol node %s now used in multiple partitions\n",
                 node->name ());
    }
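  /* AUX doubles as a reference counter here: it records in how many
     partitions the symbol has been placed, symbol_partitioned_p simply
     tests it for non-zero, and undo_partition decrements it again.  */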
  node->aux = (void *)((size_t)node->aux + 1);

  if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
    {
      struct cgraph_edge *e;
      if (!node->alias)
        part->insns += inline_summary (cnode)->self_size;

      /* Add all inline clones and callees that are duplicated.  */
      for (e = cnode->callees; e; e = e->next_callee)
        if (!e->inline_failed)
          add_symbol_to_partition_1 (part, e->callee);
        else if (symtab_get_symbol_partitioning_class (e->callee) == SYMBOL_DUPLICATE)
          add_symbol_to_partition (part, e->callee);

      /* Add all thunks associated with the function.  */
      for (e = cnode->callers; e; e = e->next_caller)
        if (e->caller->thunk.thunk_p)
          add_symbol_to_partition_1 (part, e->caller);
    }

  add_references_to_partition (part, node);

  /* Add all aliases associated with the symbol.  */
  for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list, i, ref); i++)
    if (ref->use == IPA_REF_ALIAS && !node->weakref)
      add_symbol_to_partition_1 (part, ref->referring);

  /* Ensure that members of a SAME_COMDAT_GROUP list are always added
     as a group.  */
  if (node->same_comdat_group)
    for (node1 = node->same_comdat_group;
         node1 != node; node1 = node1->same_comdat_group)
      if (!node->alias)
        {
          bool added = add_symbol_to_partition_1 (part, node1);
          gcc_assert (added);
        }
  return true;
}

/* If symbol NODE is really part of another symbol's definition (i.e. it is
   an internal label, thunk, alias or the like), return the outer symbol.
   When add_symbol_to_partition_1 is called on the outer symbol it must
   eventually add NODE, too.  */
static symtab_node *
contained_in_symbol (symtab_node *node)
{
  /* Weakrefs are never contained in anything.  */
  if (node->weakref)
    return node;
  if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
    {
      cnode = cgraph_function_node (cnode, NULL);
      if (cnode->global.inlined_to)
        cnode = cnode->global.inlined_to;
      return cnode;
    }
  else if (varpool_node *vnode = dyn_cast <varpool_node> (node))
    return varpool_variable_node (vnode, NULL);
  return node;
}

/* Add symbol NODE to partition.  When the definition of NODE is part
   of another symbol's definition, add the other symbol, too.  */

static void
add_symbol_to_partition (ltrans_partition part, symtab_node *node)
{
  symtab_node *node1;

  /* Verify that we do not try to duplicate something that cannot be.  */
  gcc_checking_assert (symtab_get_symbol_partitioning_class (node) == SYMBOL_DUPLICATE
                       || !symbol_partitioned_p (node));

  while ((node1 = contained_in_symbol (node)) != node)
    node = node1;

  /* If we have a duplicated symbol contained in something we cannot duplicate,
     we are very badly screwed.  The other way is possible, so we do not
     assert this in add_symbol_to_partition_1.

     Be lax about comdats; they may or may not be duplicated and we may
     end up needing to duplicate a keyed comdat because it has an unkeyed
     alias.  */

  gcc_assert (symtab_get_symbol_partitioning_class (node) == SYMBOL_DUPLICATE
              || DECL_COMDAT (node->decl)
              || !symbol_partitioned_p (node));

  add_symbol_to_partition_1 (part, node);
}

/* Undo all additions until the number of symbols encoded in PARTITION
   is back to N_NODES.  */

static void
undo_partition (ltrans_partition partition, unsigned int n_nodes)
{
  while (lto_symtab_encoder_size (partition->encoder) > (int)n_nodes)
    {
      symtab_node *node = lto_symtab_encoder_deref (partition->encoder,
                                                    n_nodes);
      cgraph_node *cnode;

      /* After UNDO we no longer know what was visited.  */
      if (partition->initializers_visited)
        pointer_set_destroy (partition->initializers_visited);
      partition->initializers_visited = NULL;

      if (!node->alias && (cnode = dyn_cast <cgraph_node> (node)))
        partition->insns -= inline_summary (cnode)->self_size;
      lto_symtab_encoder_delete_node (partition->encoder, node);
      node->aux = (void *)((size_t)node->aux - 1);
    }
}

/* Group cgraph nodes by input files.  This is used mainly for testing
   right now.  */

void
lto_1_to_1_map (void)
{
  symtab_node *node;
  struct lto_file_decl_data *file_data;
  struct pointer_map_t *pmap;
  ltrans_partition partition;
  void **slot;
  int npartitions = 0;

  pmap = pointer_map_create ();

  FOR_EACH_SYMBOL (node)
    {
      if (symtab_get_symbol_partitioning_class (node) != SYMBOL_PARTITION
          || symbol_partitioned_p (node))
        continue;

      file_data = node->lto_file_data;

      if (file_data)
        {
          slot = pointer_map_contains (pmap, file_data);
          if (slot)
            partition = (ltrans_partition) *slot;
          else
            {
              partition = new_partition (file_data->file_name);
              slot = pointer_map_insert (pmap, file_data);
              *slot = partition;
              npartitions++;
            }
        }
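      /* Symbols with no originating LTO file (presumably nodes created
         during WPA itself) are lumped into the first partition, or into
         a fresh anonymous one if no partition exists yet.  */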
      else if (!file_data && ltrans_partitions.length ())
        partition = ltrans_partitions[0];
      else
        {
          partition = new_partition ("");
          slot = pointer_map_insert (pmap, NULL);
          *slot = partition;
          npartitions++;
        }

      add_symbol_to_partition (partition, node);
    }

  /* If the cgraph is empty, create one cgraph node set so that there is still
     an output file for any variables that need to be exported in a DSO.  */
  if (!npartitions)
    new_partition ("empty");

  pointer_map_destroy (pmap);
}

/* Maximal partitioning.  Put every new symbol into a new partition if possible.  */

void
lto_max_map (void)
{
  symtab_node *node;
  ltrans_partition partition;
  int npartitions = 0;

  FOR_EACH_SYMBOL (node)
    {
      if (symtab_get_symbol_partitioning_class (node) != SYMBOL_PARTITION
          || symbol_partitioned_p (node))
        continue;
      partition = new_partition (node->asm_name ());
      add_symbol_to_partition (partition, node);
      npartitions++;
    }
  if (!npartitions)
    new_partition ("empty");
}

/* Helper function for qsort; sort cgraph nodes by order.  When profile
   reordering is enabled, functions with a time profile sort first, by
   ascending first-run time.  */
static int
node_cmp (const void *pa, const void *pb)
{
  const struct cgraph_node *a = *(const struct cgraph_node * const *) pa;
  const struct cgraph_node *b = *(const struct cgraph_node * const *) pb;

  /* The profile reorder flag enables function reordering based on the first
     execution of a function.  All functions with a time profile are placed
     in ascending order at the beginning.  */

  if (flag_profile_reorder_functions)
    {
      /* Functions with time profile are sorted in ascending order.  */
      if (a->tp_first_run && b->tp_first_run)
        return a->tp_first_run != b->tp_first_run
               ? a->tp_first_run - b->tp_first_run
               : a->order - b->order;

      /* Functions with time profile are sorted before the functions
         that do not have the profile.  */
      if (a->tp_first_run || b->tp_first_run)
        return b->tp_first_run - a->tp_first_run;
    }

  return b->order - a->order;
}

/* Helper function for qsort; sort varpool nodes by order.  */
static int
varpool_node_cmp (const void *pa, const void *pb)
{
  const varpool_node *a = *(const varpool_node * const *) pa;
  const varpool_node *b = *(const varpool_node * const *) pb;
  return b->order - a->order;
}

/* Group cgraph nodes into equally-sized partitions.

   The partitioning algorithm is simple: nodes are taken in predefined order.
   The order corresponds to the order we want functions to have in the final
   output.  In the future this will be given by function reordering pass, but
   at the moment we use the topological order, which is a good approximation.

   The goal is to partition this linear order into intervals (partitions) so
   that all the partitions have approximately the same size and the number of
   callgraph or IPA reference edges crossing boundaries is minimal.

   This is a lot faster (O(n) in size of callgraph) than algorithms doing
   priority-based graph clustering that are generally O(n^2) and, since
   WHOPR is designed to make things go well across partitions, it leads
   to good results.

   We compute the expected size of a partition as:

     max (total_size / lto_partitions, min_partition_size)

   We use dynamic expected size of partition so small programs are partitioned
   into enough partitions to allow use of multiple CPUs, while large programs
   are not partitioned too much.  Creating too many partitions significantly
   increases the streaming overhead.

   In the future, we would like to bound the maximal size of partitions so as
   to prevent the LTRANS stage from consuming too much memory.  At the moment,
   however, the WPA stage is the most memory intensive for large benchmarks,
   since too many types and declarations are read into memory.

   The function implements a simple greedy algorithm.  Nodes are being added
   to the current partition until after 3/4 of the expected partition size is
   reached.  Past this threshold, we keep track of boundary size (number of
   edges going to other partitions) and continue adding functions until after
   the current partition has grown to twice the expected partition size.  Then
   the process is undone to the point where the minimal ratio of boundary size
   and in-partition calls was reached.  */

void
lto_balanced_map (int n_lto_partitions)
{
  int n_nodes = 0;
  int n_varpool_nodes = 0, varpool_pos = 0, best_varpool_pos = 0;
  struct cgraph_node **order = XNEWVEC (struct cgraph_node *, cgraph_max_uid);
  varpool_node **varpool_order = NULL;
  int i;
  struct cgraph_node *node;
  int total_size = 0, best_total_size = 0;
  int partition_size;
  ltrans_partition partition;
  int last_visited_node = 0;
  varpool_node *vnode;
  int cost = 0, internal = 0;
  int best_n_nodes = 0, best_i = 0, best_cost = INT_MAX, best_internal = 0;
  int npartitions;
  int current_order = -1;

  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->aux);

  FOR_EACH_DEFINED_FUNCTION (node)
    if (symtab_get_symbol_partitioning_class (node) == SYMBOL_PARTITION)
      {
        order[n_nodes++] = node;
        if (!node->alias)
          total_size += inline_summary (node)->size;
      }

  /* Streaming works best when the source units do not cross partition
     boundaries much.  This is because importing a function from a source
     unit tends to import a lot of global trees defined there.  We should
     get better about minimizing the function boundary, but until then
     things work more smoothly if we keep the source order.  */
  qsort (order, n_nodes, sizeof (struct cgraph_node *), node_cmp);

  if (cgraph_dump_file)
    for (i = 0; i < n_nodes; i++)
      fprintf (cgraph_dump_file, "Balanced map symbol order:%s:%u\n",
               order[i]->name (), order[i]->tp_first_run);

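  /* When top-level reordering is not allowed, variables have to keep their
     original order; collect them sorted by order so the main loop below can
     interleave them with functions at the right points.  */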
  if (!flag_toplevel_reorder)
    {
      FOR_EACH_VARIABLE (vnode)
        if (symtab_get_symbol_partitioning_class (vnode) == SYMBOL_PARTITION)
          n_varpool_nodes++;
      varpool_order = XNEWVEC (varpool_node *, n_varpool_nodes);

      n_varpool_nodes = 0;
      FOR_EACH_VARIABLE (vnode)
        if (symtab_get_symbol_partitioning_class (vnode) == SYMBOL_PARTITION)
          varpool_order[n_varpool_nodes++] = vnode;
      qsort (varpool_order, n_varpool_nodes, sizeof (varpool_node *),
             varpool_node_cmp);
    }

  /* Compute partition size and create the first partition.  */
  partition_size = total_size / n_lto_partitions;
  if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
    partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
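  /* Illustrative example (made-up numbers): with total_size == 400000 and
     n_lto_partitions == 32 the expected partition size is 12500 units, while
     with a tiny total_size of 1000 the MIN_PARTITION_SIZE parameter takes
     over, so we end up with fewer, reasonably sized partitions instead of
     32 tiny ones.  */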
  npartitions = 1;
  partition = new_partition ("");
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Total unit size: %i, partition size: %i\n",
             total_size, partition_size);

  for (i = 0; i < n_nodes; i++)
    {
      if (symbol_partitioned_p (order[i]))
        continue;

      current_order = order[i]->order;

      if (!flag_toplevel_reorder)
        while (varpool_pos < n_varpool_nodes
               && varpool_order[varpool_pos]->order < current_order)
          {
            if (!symbol_partitioned_p (varpool_order[varpool_pos]))
              add_symbol_to_partition (partition, varpool_order[varpool_pos]);
            varpool_pos++;
          }

      add_symbol_to_partition (partition, order[i]);
      if (!order[i]->alias)
        total_size -= inline_summary (order[i])->size;

      /* Once we added a new node to the partition, we also want to add
         all referenced variables unless they were already added into some
         earlier partition.
         add_symbol_to_partition adds possibly multiple nodes and
         variables that are needed to satisfy needs of ORDER[i].
         We remember the last visited cgraph and varpool node from the last
         iteration of the outer loop; that allows us to process every new
         addition.

         At the same time we compute size of the boundary into COST.  Every
         callgraph or IPA reference edge leaving the partition contributes to
         COST.  Every edge inside the partition was earlier computed as one
         leaving it and thus we need to subtract it from COST.  */
      while (last_visited_node < lto_symtab_encoder_size (partition->encoder))
        {
          struct ipa_ref_list *refs;
          int j;
          struct ipa_ref *ref;
          symtab_node *snode = lto_symtab_encoder_deref (partition->encoder,
                                                         last_visited_node);

          if (cgraph_node *node = dyn_cast <cgraph_node> (snode))
            {
              struct cgraph_edge *edge;

              refs = &node->ref_list;

              last_visited_node++;

              gcc_assert (node->definition || node->weakref);

              /* Compute boundary cost of callgraph edges.  */
              for (edge = node->callees; edge; edge = edge->next_callee)
                if (edge->callee->definition)
                  {
                    int edge_cost = edge->frequency;
                    int index;

                    if (!edge_cost)
                      edge_cost = 1;
                    gcc_assert (edge_cost > 0);
                    index = lto_symtab_encoder_lookup (partition->encoder,
                                                       edge->callee);
                    if (index != LCC_NOT_FOUND
                        && index < last_visited_node - 1)
                      cost -= edge_cost, internal += edge_cost;
                    else
                      cost += edge_cost;
                  }
              for (edge = node->callers; edge; edge = edge->next_caller)
                {
                  int edge_cost = edge->frequency;
                  int index;

                  gcc_assert (edge->caller->definition);
                  if (!edge_cost)
                    edge_cost = 1;
                  gcc_assert (edge_cost > 0);
                  index = lto_symtab_encoder_lookup (partition->encoder,
                                                     edge->caller);
                  if (index != LCC_NOT_FOUND
                      && index < last_visited_node - 1)
                    cost -= edge_cost;
                  else
                    cost += edge_cost;
                }
            }
          else
            {
              refs = &snode->ref_list;
              last_visited_node++;
            }

          /* Compute boundary cost of IPA REF edges and at the same time look
             into variables referenced from the current partition and try to
             add them.  */
          for (j = 0; ipa_ref_list_reference_iterate (refs, j, ref); j++)
            if (is_a <varpool_node> (ref->referred))
              {
                int index;

                vnode = ipa_ref_varpool_node (ref);
                if (!vnode->definition)
                  continue;
                if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
                    && symtab_get_symbol_partitioning_class (vnode) == SYMBOL_PARTITION)
                  add_symbol_to_partition (partition, vnode);
                index = lto_symtab_encoder_lookup (partition->encoder,
                                                   vnode);
                if (index != LCC_NOT_FOUND
                    && index < last_visited_node - 1)
                  cost--, internal++;
                else
                  cost++;
              }
            else
              {
                int index;

                node = ipa_ref_node (ref);
                if (!node->definition)
                  continue;
                index = lto_symtab_encoder_lookup (partition->encoder,
                                                   node);
                if (index != LCC_NOT_FOUND
                    && index < last_visited_node - 1)
                  cost--, internal++;
                else
                  cost++;
              }
          for (j = 0; ipa_ref_list_referring_iterate (refs, j, ref); j++)
            if (is_a <varpool_node> (ref->referring))
              {
                int index;

                vnode = ipa_ref_referring_varpool_node (ref);
                gcc_assert (vnode->definition);
                /* It is better to couple variables with their users, because
                   it allows them to be removed.  Coupling with objects they
                   refer to only helps to reduce the number of symbols
                   promoted to hidden.  */
                if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
                    && !varpool_can_remove_if_no_refs (vnode)
                    && symtab_get_symbol_partitioning_class (vnode) == SYMBOL_PARTITION)
                  add_symbol_to_partition (partition, vnode);
                index = lto_symtab_encoder_lookup (partition->encoder,
                                                   vnode);
                if (index != LCC_NOT_FOUND
                    && index < last_visited_node - 1)
                  cost--;
                else
                  cost++;
              }
            else
              {
                int index;

                node = ipa_ref_referring_node (ref);
                gcc_assert (node->definition);
                index = lto_symtab_encoder_lookup (partition->encoder,
                                                   node);
                if (index != LCC_NOT_FOUND
                    && index < last_visited_node - 1)
                  cost--;
                else
                  cost++;
              }
        }

      /* If the partition is large enough, start looking for the smallest
         boundary cost.  */
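      /* The cross-multiplied products below stand in for a comparison of the
         boundary-cost to in-partition-call ratios of the current point and
         of the recorded best one (see the function comment); writing it this
         way keeps the test in integer arithmetic, with HOST_WIDE_INT
         guarding the products against overflow.  */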
      if (partition->insns < partition_size * 3 / 4
          || best_cost == INT_MAX
          || ((!cost
               || (best_internal * (HOST_WIDE_INT) cost
                   > (internal * (HOST_WIDE_INT)best_cost)))
              && partition->insns < partition_size * 5 / 4))
        {
          best_cost = cost;
          best_internal = internal;
          best_i = i;
          best_n_nodes = lto_symtab_encoder_size (partition->encoder);
          best_total_size = total_size;
          best_varpool_pos = varpool_pos;
        }
      if (cgraph_dump_file)
        fprintf (cgraph_dump_file, "Step %i: added %s/%i, size %i, cost %i/%i "
                 "best %i/%i, step %i\n", i,
                 order[i]->name (), order[i]->order,
                 partition->insns, cost, internal,
                 best_cost, best_internal, best_i);
      /* The partition is too large; unwind to the step where the best cost
         was reached and start a new partition.  */
      if (partition->insns > 2 * partition_size)
        {
          if (best_i != i)
            {
              if (cgraph_dump_file)
                fprintf (cgraph_dump_file, "Unwinding %i insertions to step %i\n",
                         i - best_i, best_i);
              undo_partition (partition, best_n_nodes);
              varpool_pos = best_varpool_pos;
            }
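          /* Resume the outer loop right after the best split point; the
             symbols that were just unwound are no longer marked as
             partitioned, so they will be picked up again for the next
             partition.  */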
          i = best_i;
          /* When we are finished, avoid creating an empty partition.  */
          while (i < n_nodes - 1 && symbol_partitioned_p (order[i + 1]))
            i++;
          if (i == n_nodes - 1)
            break;
          partition = new_partition ("");
          last_visited_node = 0;
          total_size = best_total_size;
          cost = 0;

          if (cgraph_dump_file)
            fprintf (cgraph_dump_file, "New partition\n");
          best_n_nodes = 0;
          best_cost = INT_MAX;

          /* Since the size of partitions is just approximate, update the
             expected size after we finish the current one.  */
          if (npartitions < n_lto_partitions)
            partition_size = total_size / (n_lto_partitions - npartitions);
          else
            partition_size = INT_MAX;

          if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
            partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
          npartitions ++;
        }
    }

  /* Variables that are not reachable from the code go into the last
     partition.  */
  if (flag_toplevel_reorder)
    {
      FOR_EACH_VARIABLE (vnode)
        if (symtab_get_symbol_partitioning_class (vnode) == SYMBOL_PARTITION
            && !symbol_partitioned_p (vnode))
          add_symbol_to_partition (partition, vnode);
    }
  else
    {
      while (varpool_pos < n_varpool_nodes)
        {
          if (!symbol_partitioned_p (varpool_order[varpool_pos]))
            add_symbol_to_partition (partition, varpool_order[varpool_pos]);
          varpool_pos++;
        }
      free (varpool_order);
    }
  free (order);
}

/* Mangle NODE's symbol name into a local name.
   This is necessary
   1) if two or more static vars of the same assembler name
      are merged into a single ltrans unit, or
   2) if a previously static var was promoted to hidden to avoid a possible
      conflict with symbols defined outside of the LTO world.  */

static bool
privatize_symbol_name (symtab_node *node)
{
  tree decl = node->decl;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));

  /* Our renaming machinery does not handle more than one change of assembler
     name.  We should not need more than one anyway.  */
  if (node->lto_file_data
      && lto_get_decl_name_mapping (node->lto_file_data, name) != name)
    {
      if (cgraph_dump_file)
        fprintf (cgraph_dump_file,
                 "Not privatizing symbol name: %s. It was privatized already.\n",
                 name);
      return false;
    }
  /* Avoid mangling of already mangled clones.
     ??? We should have a flag telling whether a symbol has a 'private' name
     already, since we produce some symbols like that, e.g. for global
     constructors, that are not really clones.  */
  if (node->unique_name)
    {
      if (cgraph_dump_file)
        fprintf (cgraph_dump_file,
                 "Not privatizing symbol name: %s. Has unique name.\n",
                 name);
      return false;
    }
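  /* The privatized name is derived from the original one by
     clone_function_name; the result typically looks like
     "foo.lto_priv.<N>" (the exact suffix numbering is up to that helper).  */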
  change_decl_assembler_name (decl, clone_function_name (decl, "lto_priv"));
  if (node->lto_file_data)
    lto_record_renamed_decl (node->lto_file_data, name,
                             IDENTIFIER_POINTER
                             (DECL_ASSEMBLER_NAME (decl)));
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file,
             "Privatizing symbol name: %s -> %s\n",
             name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
  return true;
}

/* Promote symbol NODE to be global with hidden visibility.  */

static void
promote_symbol (symtab_node *node)
{
  /* We already promoted ... */
  if (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
      && DECL_VISIBILITY_SPECIFIED (node->decl)
      && TREE_PUBLIC (node->decl))
    return;

  gcc_checking_assert (!TREE_PUBLIC (node->decl)
                       && !DECL_EXTERNAL (node->decl));
  /* Be sure that the newly public symbol does not conflict with anything
     already defined by the non-LTO part.  */
  privatize_symbol_name (node);
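  /* Hidden visibility keeps the promoted symbol from being exported from the
     resulting DSO while still allowing the other ltrans units to refer
     to it.  */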
  TREE_PUBLIC (node->decl) = 1;
  DECL_VISIBILITY (node->decl) = VISIBILITY_HIDDEN;
  DECL_VISIBILITY_SPECIFIED (node->decl) = true;
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file,
             "Promoting as hidden: %s\n", node->name ());
}

/* Return true if NODE needs a named section even if it won't land in
   the partition symbol table.
   FIXME: we should really not use named sections for inline clones
   and master clones.  */

static bool
may_need_named_section_p (lto_symtab_encoder_t encoder, symtab_node *node)
{
  struct cgraph_node *cnode = dyn_cast <cgraph_node> (node);
  if (!cnode)
    return false;
  if (symtab_real_symbol_p (node))
    return false;
  return (!encoder
          || (lto_symtab_encoder_lookup (encoder, node) != LCC_NOT_FOUND
              && lto_symtab_encoder_encode_body_p (encoder,
                                                   cnode)));
}

/* If NODE represents a static variable, see if there are other variables
   of the same name in partition ENCODER (or in the whole compilation unit
   if ENCODER is NULL) and if so, mangle the statics.  Always mangle all
   conflicting statics, so we reduce chances of silently miscompiling
   asm statements referring to them by symbol name.  */

static void
rename_statics (lto_symtab_encoder_t encoder, symtab_node *node)
{
  tree decl = node->decl;
  symtab_node *s;
  tree name = DECL_ASSEMBLER_NAME (decl);

  /* See if this is a static symbol.  */
  if ((node->externally_visible
       /* FIXME: externally_visible is somewhat illogically not set for
          external symbols (i.e. those not defined).  Remove this test
          once this is fixed.  */
       || DECL_EXTERNAL (node->decl)
       || !symtab_real_symbol_p (node))
      && !may_need_named_section_p (encoder, node))
    return;

  /* Now walk symbols sharing the same name and see if there are any conflicts.
     (All kinds of symbols count here, since we cannot have a static of the
     same name as an external or public symbol.)  */
  for (s = symtab_node_for_asm (name);
       s; s = s->next_sharing_asm_name)
    if ((symtab_real_symbol_p (s) || may_need_named_section_p (encoder, s))
        && s->decl != node->decl
        && (!encoder
            || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
      break;

  /* OK, no conflict, so we have nothing to do.  */
  if (!s)
    return;

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file,
             "Renaming statics with asm name: %s\n", node->name ());

  /* Assign every symbol in the set that shares the same ASM name a unique
     mangled name.  */
  for (s = symtab_node_for_asm (name); s;)
    if (!s->externally_visible
        && ((symtab_real_symbol_p (s)
             && !DECL_EXTERNAL (node->decl)
             && !TREE_PUBLIC (node->decl))
            || may_need_named_section_p (encoder, s))
        && (!encoder
            || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
      {
        if (privatize_symbol_name (s))
          /* Restart from the beginning since we do not know how many
             symbols changed their names.  */
          s = symtab_node_for_asm (name);
        else
          s = s->next_sharing_asm_name;
      }
    else
      s = s->next_sharing_asm_name;
}

/* Find out all static decls that need to be promoted to global because
   of cross-file sharing.  This function must be run in WPA mode after
   all inlinees are added.  */

void
lto_promote_cross_file_statics (void)
{
  unsigned i, n_sets;

  gcc_assert (flag_wpa);

  /* First compute boundaries.  */
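  /* The boundary of a partition is, roughly, the set of symbols the
     partition references but that are defined or duplicated elsewhere;
     compute_ltrans_boundary extends each encoder with those nodes so the
     promotion decisions below can see them.  */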
  n_sets = ltrans_partitions.length ();
  for (i = 0; i < n_sets; i++)
    {
      ltrans_partition part = ltrans_partitions[i];
      part->encoder = compute_ltrans_boundary (part->encoder);
    }

  /* Look at boundaries and promote symbols as needed.  */
  for (i = 0; i < n_sets; i++)
    {
      lto_symtab_encoder_iterator lsei;
      lto_symtab_encoder_t encoder = ltrans_partitions[i]->encoder;

      for (lsei = lsei_start (encoder); !lsei_end_p (lsei);
           lsei_next (&lsei))
        {
          symtab_node *node = lsei_node (lsei);

          /* If the symbol is static, rename it if its assembler name clashes
             with anything else in this unit.  */
          rename_statics (encoder, node);

          /* No need to promote if the symbol is already externally visible ... */
          if (node->externally_visible
              /* ... or if it is part of the current partition ... */
              || lto_symtab_encoder_in_partition_p (encoder, node)
              /* ... or if we do not partition it.  This means that it will
                 appear in every partition referencing it.  */
              || symtab_get_symbol_partitioning_class (node) != SYMBOL_PARTITION)
            continue;

          promote_symbol (node);
        }
    }
}

/* Rename statics in the whole unit in the case that
   we do -flto-partition=none.  */

void
lto_promote_statics_nonwpa (void)
{
  symtab_node *node;
  FOR_EACH_SYMBOL (node)
    rename_statics (NULL, node);
}