/* Source: gcc/lto/lto-partition.c (GCC LTO partitioning logic),
   as captured from the git.ipfire.org mirror, revision
   "IPA C++ refactoring 4/N".  */

/* LTO partitioning logic routines.
   Copyright (C) 2009-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "toplev.h"
24 #include "tree.h"
25 #include "basic-block.h"
26 #include "tree-ssa-alias.h"
27 #include "internal-fn.h"
28 #include "gimple-expr.h"
29 #include "is-a.h"
30 #include "gimple.h"
31 #include "tm.h"
32 #include "cgraph.h"
33 #include "lto-streamer.h"
34 #include "timevar.h"
35 #include "params.h"
36 #include "ipa-inline.h"
37 #include "ipa-utils.h"
38 #include "lto-partition.h"
39
/* All LTRANS partitions created so far, in creation order.  */
vec<ltrans_partition> ltrans_partitions;

/* Forward declaration; defined below.  */
static void add_symbol_to_partition (ltrans_partition part, symtab_node *node);
43
44
45 /* Create new partition with name NAME. */
46
47 static ltrans_partition
48 new_partition (const char *name)
49 {
50 ltrans_partition part = XCNEW (struct ltrans_partition_def);
51 part->encoder = lto_symtab_encoder_new (false);
52 part->name = name;
53 part->insns = 0;
54 ltrans_partitions.safe_push (part);
55 return part;
56 }
57
58 /* Free memory used by ltrans datastructures. */
59
60 void
61 free_ltrans_partitions (void)
62 {
63 unsigned int idx;
64 ltrans_partition part;
65 for (idx = 0; ltrans_partitions.iterate (idx, &part); idx++)
66 {
67 if (part->initializers_visited)
68 delete part->initializers_visited;
69 /* Symtab encoder is freed after streaming. */
70 free (part);
71 }
72 ltrans_partitions.release ();
73 }
74
75 /* Return true if symbol is already in some partition. */
76
77 static inline bool
78 symbol_partitioned_p (symtab_node *node)
79 {
80 return node->aux;
81 }
82
/* Add into PART all referenced symbols of NODE that must be duplicated,
   recursing through the initializers of foldable read-only variables.  */
static void
add_references_to_partition (ltrans_partition part, symtab_node *node)
{
  int i;
  struct ipa_ref *ref = NULL;

  /* Add all duplicated references to the partition.  */
  for (i = 0; node->iterate_reference (i, ref); i++)
    if (ref->referred->get_partitioning_class () == SYMBOL_DUPLICATE)
      add_symbol_to_partition (part, ref->referred);
    /* References to a readonly variable may be constant folded into its value.
       Recursively look into the initializers of the constant variable and add
       references, too.  */
    else if (is_a <varpool_node *> (ref->referred)
	     && dyn_cast <varpool_node *> (ref->referred)
	     ->ctor_useable_for_folding_p ()
	     && !lto_symtab_encoder_in_partition_p (part->encoder, ref->referred))
      {
	/* INITIALIZERS_VISITED is allocated lazily; it guards against
	   revisiting the same constant (hash_set::add returns true when
	   the entry was already present), which also breaks cycles.  */
	if (!part->initializers_visited)
	  part->initializers_visited = new hash_set<symtab_node *>;
	if (!part->initializers_visited->add (ref->referred))
	  add_references_to_partition (part, ref->referred);
      }
}
108
/* Helper function for add_symbol_to_partition doing the actual dirty work
   of adding NODE to PART.  Return true when NODE ends up in PART; return
   false only for a non-comdat, non-duplicated symbol that already lives in
   another partition.  */

static bool
add_symbol_to_partition_1 (ltrans_partition part, symtab_node *node)
{
  enum symbol_partitioning_class c = node->get_partitioning_class ();
  struct ipa_ref *ref;
  symtab_node *node1;

  /* If NODE is already there, we have nothing to do.  */
  if (lto_symtab_encoder_in_partition_p (part->encoder, node))
    return true;

  /* non-duplicated aliases or thunks of a duplicated symbol need to be output
     just once.

     Be lax about comdats; they may or may not be duplicated and we may
     end up in need to duplicate keyed comdat because it has unkeyed alias.  */
  if (c == SYMBOL_PARTITION && !DECL_COMDAT (node->decl)
      && symbol_partitioned_p (node))
    return false;

  /* Be sure that we never try to duplicate partitioned symbol
     or add external symbol.  */
  gcc_assert (c != SYMBOL_EXTERNAL
	      && (c == SYMBOL_DUPLICATE || !symbol_partitioned_p (node)));

  lto_set_symtab_encoder_in_partition (part->encoder, node);

  /* A symbol that was partitioned before is now duplicated; record that so
     streaming knows its body also exists elsewhere.  */
  if (symbol_partitioned_p (node))
    {
      node->in_other_partition = 1;
      if (symtab->dump_file)
	fprintf (symtab->dump_file,
		 "Symbol node %s now used in multiple partitions\n",
		 node->name ());
    }
  /* AUX is used as a per-symbol partition reference count.  */
  node->aux = (void *)((size_t)node->aux + 1);

  if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
    {
      struct cgraph_edge *e;
      /* Account the function body size once per real (non-alias) symbol.  */
      if (!node->alias)
	part->insns += inline_summary (cnode)->self_size;

      /* Add all inline clones and callees that are duplicated.  */
      for (e = cnode->callees; e; e = e->next_callee)
	if (!e->inline_failed)
	  add_symbol_to_partition_1 (part, e->callee);
	else if (e->callee->get_partitioning_class () == SYMBOL_DUPLICATE)
	  add_symbol_to_partition (part, e->callee);

      /* Add all thunks associated with the function.  */
      for (e = cnode->callers; e; e = e->next_caller)
	if (e->caller->thunk.thunk_p)
	  add_symbol_to_partition_1 (part, e->caller);
    }

  add_references_to_partition (part, node);

  /* Add all aliases associated with the symbol.  */

  FOR_EACH_ALIAS (node, ref)
    if (!node->weakref)
      add_symbol_to_partition_1 (part, ref->referring);

  /* Ensure that SAME_COMDAT_GROUP lists are always added in a group.  */
  if (node->same_comdat_group)
    for (node1 = node->same_comdat_group;
	 node1 != node; node1 = node1->same_comdat_group)
      /* NOTE(review): this guard tests NODE->alias, not NODE1->alias, for
	 every member of the group — matches upstream, but looks suspicious;
	 confirm intent before changing.  */
      if (!node->alias)
	{
	  bool added = add_symbol_to_partition_1 (part, node1);
	  gcc_assert (added);
	}
  return true;
}
187
188 /* If symbol NODE is really part of other symbol's definition (i.e. it is
189 internal label, thunk, alias or so), return the outer symbol.
190 When add_symbol_to_partition_1 is called on the outer symbol it must
191 eventually add NODE, too. */
192 static symtab_node *
193 contained_in_symbol (symtab_node *node)
194 {
195 /* Weakrefs are never contained in anything. */
196 if (node->weakref)
197 return node;
198 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
199 {
200 cnode = cnode->function_symbol ();
201 if (cnode->global.inlined_to)
202 cnode = cnode->global.inlined_to;
203 return cnode;
204 }
205 else if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
206 return vnode->ultimate_alias_target ();
207 return node;
208 }
209
210 /* Add symbol NODE to partition. When definition of NODE is part
211 of other symbol definition, add the other symbol, too. */
212
213 static void
214 add_symbol_to_partition (ltrans_partition part, symtab_node *node)
215 {
216 symtab_node *node1;
217
218 /* Verify that we do not try to duplicate something that can not be. */
219 gcc_checking_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
220 || !symbol_partitioned_p (node));
221
222 while ((node1 = contained_in_symbol (node)) != node)
223 node = node1;
224
225 /* If we have duplicated symbol contained in something we can not duplicate,
226 we are very badly screwed. The other way is possible, so we do not
227 assert this in add_symbol_to_partition_1.
228
229 Be lax about comdats; they may or may not be duplicated and we may
230 end up in need to duplicate keyed comdat because it has unkeyed alias. */
231
232 gcc_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
233 || DECL_COMDAT (node->decl)
234 || !symbol_partitioned_p (node));
235
236 add_symbol_to_partition_1 (part, node);
237 }
238
/* Undo additions to PARTITION until the number of encoded symtab nodes
   drops back to N_NODES.  Nodes are removed from the tail of the
   encoder, reversing the order in which they were added.  */

static void
undo_partition (ltrans_partition partition, unsigned int n_nodes)
{
  while (lto_symtab_encoder_size (partition->encoder) > (int)n_nodes)
    {
      symtab_node *node = lto_symtab_encoder_deref (partition->encoder,
						    n_nodes);
      cgraph_node *cnode;

      /* After UNDO we no longer know what was visited.  */
      if (partition->initializers_visited)
	delete partition->initializers_visited;
      partition->initializers_visited = NULL;

      /* Subtract the body size accounted in add_symbol_to_partition_1.  */
      if (!node->alias && (cnode = dyn_cast <cgraph_node *> (node)))
	partition->insns -= inline_summary (cnode)->self_size;
      lto_symtab_encoder_delete_node (partition->encoder, node);
      /* Decrement the per-symbol partition reference count kept in AUX.  */
      node->aux = (void *)((size_t)node->aux - 1);
    }
}
262
263 /* Group cgrah nodes by input files. This is used mainly for testing
264 right now. */
265
266 void
267 lto_1_to_1_map (void)
268 {
269 symtab_node *node;
270 struct lto_file_decl_data *file_data;
271 hash_map<lto_file_decl_data *, ltrans_partition> pmap;
272 ltrans_partition partition;
273 int npartitions = 0;
274
275 FOR_EACH_SYMBOL (node)
276 {
277 if (node->get_partitioning_class () != SYMBOL_PARTITION
278 || symbol_partitioned_p (node))
279 continue;
280
281 file_data = node->lto_file_data;
282
283 if (file_data)
284 {
285 ltrans_partition *slot = &pmap.get_or_insert (file_data);
286 if (*slot)
287 partition = *slot;
288 else
289 {
290 partition = new_partition (file_data->file_name);
291 *slot = partition;
292 npartitions++;
293 }
294 }
295 else if (!file_data && ltrans_partitions.length ())
296 partition = ltrans_partitions[0];
297 else
298 {
299 partition = new_partition ("");
300 pmap.put (NULL, partition);
301 npartitions++;
302 }
303
304 add_symbol_to_partition (partition, node);
305 }
306
307 /* If the cgraph is empty, create one cgraph node set so that there is still
308 an output file for any variables that need to be exported in a DSO. */
309 if (!npartitions)
310 new_partition ("empty");
311
312 }
313
314 /* Maximal partitioning. Put every new symbol into new partition if possible. */
315
316 void
317 lto_max_map (void)
318 {
319 symtab_node *node;
320 ltrans_partition partition;
321 int npartitions = 0;
322
323 FOR_EACH_SYMBOL (node)
324 {
325 if (node->get_partitioning_class () != SYMBOL_PARTITION
326 || symbol_partitioned_p (node))
327 continue;
328 partition = new_partition (node->asm_name ());
329 add_symbol_to_partition (partition, node);
330 npartitions++;
331 }
332 if (!npartitions)
333 new_partition ("empty");
334 }
335
336 /* Helper function for qsort; sort nodes by order. */
337 static int
338 node_cmp (const void *pa, const void *pb)
339 {
340 const struct cgraph_node *a = *(const struct cgraph_node * const *) pa;
341 const struct cgraph_node *b = *(const struct cgraph_node * const *) pb;
342
343 /* Profile reorder flag enables function reordering based on first execution
344 of a function. All functions with profile are placed in ascending
345 order at the beginning. */
346
347 if (flag_profile_reorder_functions)
348 {
349 /* Functions with time profile are sorted in ascending order. */
350 if (a->tp_first_run && b->tp_first_run)
351 return a->tp_first_run != b->tp_first_run
352 ? a->tp_first_run - b->tp_first_run
353 : a->order - b->order;
354
355 /* Functions with time profile are sorted before the functions
356 that do not have the profile. */
357 if (a->tp_first_run || b->tp_first_run)
358 return b->tp_first_run - a->tp_first_run;
359 }
360
361 return b->order - a->order;
362 }
363
364 /* Helper function for qsort; sort nodes by order. */
365 static int
366 varpool_node_cmp (const void *pa, const void *pb)
367 {
368 const varpool_node *a = *(const varpool_node * const *) pa;
369 const varpool_node *b = *(const varpool_node * const *) pb;
370 return b->order - a->order;
371 }
372
/* Group cgraph nodes into equally-sized partitions.

   The partitioning algorithm is simple: nodes are taken in predefined order.
   The order corresponds to the order we want functions to have in the final
   output.  In the future this will be given by function reordering pass, but
   at the moment we use the topological order, which is a good approximation.

   The goal is to partition this linear order into intervals (partitions) so
   that all the partitions have approximately the same size and the number of
   callgraph or IPA reference edges crossing boundaries is minimal.

   This is a lot faster (O(n) in size of callgraph) than algorithms doing
   priority-based graph clustering that are generally O(n^2) and, since
   WHOPR is designed to make things go well across partitions, it leads
   to good results.

   We compute the expected size of a partition as:

     max (total_size / lto_partitions, min_partition_size)

   We use dynamic expected size of partition so small programs are partitioned
   into enough partitions to allow use of multiple CPUs, while large programs
   are not partitioned too much.  Creating too many partitions significantly
   increases the streaming overhead.

   In the future, we would like to bound the maximal size of partitions so as
   to prevent the LTRANS stage from consuming too much memory.  At the moment,
   however, the WPA stage is the most memory intensive for large benchmarks,
   since too many types and declarations are read into memory.

   The function implements a simple greedy algorithm.  Nodes are being added
   to the current partition until after 3/4 of the expected partition size is
   reached.  Past this threshold, we keep track of boundary size (number of
   edges going to other partitions) and continue adding functions until after
   the current partition has grown to twice the expected partition size.  Then
   the process is undone to the point where the minimal ratio of boundary size
   and in-partition calls was reached.

   N_LTO_PARTITIONS is the requested number of partitions.  */

void
lto_balanced_map (int n_lto_partitions)
{
  int n_nodes = 0;
  int n_varpool_nodes = 0, varpool_pos = 0, best_varpool_pos = 0;
  struct cgraph_node **order = XNEWVEC (cgraph_node *, symtab->cgraph_max_uid);
  varpool_node **varpool_order = NULL;
  int i;
  struct cgraph_node *node;
  int total_size = 0, best_total_size = 0;
  int partition_size;
  ltrans_partition partition;
  int last_visited_node = 0;
  varpool_node *vnode;
  /* COST is the running boundary cost of the current partition; INTERNAL
     counts edges fully inside it.  BEST_* snapshot the state at the point
     with the best INTERNAL/COST ratio seen so far.  */
  int cost = 0, internal = 0;
  int best_n_nodes = 0, best_i = 0, best_cost =
    INT_MAX, best_internal = 0;
  int npartitions;
  int current_order = -1;

  /* AUX (the partition reference count) must start at zero everywhere.  */
  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->aux);

  /* Collect all partitionable functions and their total body size.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->get_partitioning_class () == SYMBOL_PARTITION)
      {
	order[n_nodes++] = node;
	if (!node->alias)
	  total_size += inline_summary (node)->size;
      }

  /* Streaming works best when the source units do not cross partition
     boundaries much.  This is because importing function from a source
     unit tends to import a lot of global trees defined there.  We should
     get better about minimizing the function boundary, but until then
     things work smoother if we order in source order.  */
  qsort (order, n_nodes, sizeof (struct cgraph_node *), node_cmp);

  if (symtab->dump_file)
    for(i = 0; i < n_nodes; i++)
      fprintf (symtab->dump_file, "Balanced map symbol order:%s:%u\n",
	       order[i]->name (), order[i]->tp_first_run);

  /* Without toplevel reorder, variables must be emitted interleaved with
     functions in their original order; collect and sort them up front.  */
  if (!flag_toplevel_reorder)
    {
      FOR_EACH_VARIABLE (vnode)
	if (vnode->get_partitioning_class () == SYMBOL_PARTITION)
	  n_varpool_nodes++;
      varpool_order = XNEWVEC (varpool_node *, n_varpool_nodes);

      n_varpool_nodes = 0;
      FOR_EACH_VARIABLE (vnode)
	if (vnode->get_partitioning_class () == SYMBOL_PARTITION)
	  varpool_order[n_varpool_nodes++] = vnode;
      qsort (varpool_order, n_varpool_nodes, sizeof (varpool_node *),
	     varpool_node_cmp);
    }

  /* Compute partition size and create the first partition.  */
  partition_size = total_size / n_lto_partitions;
  if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
    partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
  npartitions = 1;
  partition = new_partition ("");
  if (symtab->dump_file)
    fprintf (symtab->dump_file, "Total unit size: %i, partition size: %i\n",
	     total_size, partition_size);

  for (i = 0; i < n_nodes; i++)
    {
      if (symbol_partitioned_p (order[i]))
	continue;

      current_order = order[i]->order;

      /* Flush variables that precede ORDER[i] in the original order.  */
      if (!flag_toplevel_reorder)
	while (varpool_pos < n_varpool_nodes
	       && varpool_order[varpool_pos]->order < current_order)
	  {
	    if (!symbol_partitioned_p (varpool_order[varpool_pos]))
	      add_symbol_to_partition (partition, varpool_order[varpool_pos]);
	    varpool_pos++;
	  }

      add_symbol_to_partition (partition, order[i]);
      if (!order[i]->alias)
	total_size -= inline_summary (order[i])->size;


      /* Once we added a new node to the partition, we also want to add
	 all referenced variables unless they were already added into some
	 earlier partition.
	 add_symbol_to_partition adds possibly multiple nodes and
	 variables that are needed to satisfy needs of ORDER[i].
	 We remember last visited cgraph and varpool node from last iteration
	 of outer loop that allows us to process every new addition.

	 At the same time we compute size of the boundary into COST.  Every
	 callgraph or IPA reference edge leaving the partition contributes into
	 COST.  Every edge inside partition was earlier computed as one leaving
	 it and thus we need to subtract it from COST.  */
      while (last_visited_node < lto_symtab_encoder_size (partition->encoder))
	{
	  symtab_node *refs_node;
	  int j;
	  struct ipa_ref *ref = NULL;
	  symtab_node *snode = lto_symtab_encoder_deref (partition->encoder,
							 last_visited_node);

	  if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
	    {
	      struct cgraph_edge *edge;

	      refs_node = node;

	      last_visited_node++;

	      gcc_assert (node->definition || node->weakref);

	      /* Compute boundary cost of callgraph edges.  */
	      for (edge = node->callees; edge; edge = edge->next_callee)
		if (edge->callee->definition)
		  {
		    int edge_cost = edge->frequency;
		    int index;

		    /* Never let an edge count for nothing.  */
		    if (!edge_cost)
		      edge_cost = 1;
		    gcc_assert (edge_cost > 0);
		    index = lto_symtab_encoder_lookup (partition->encoder,
						       edge->callee);
		    /* Callee already processed in this partition: the edge
		       is internal — undo its earlier boundary charge.  */
		    if (index != LCC_NOT_FOUND
			&& index < last_visited_node - 1)
		      cost -= edge_cost, internal += edge_cost;
		    else
		      cost += edge_cost;
		  }
	      for (edge = node->callers; edge; edge = edge->next_caller)
		{
		  int edge_cost = edge->frequency;
		  int index;

		  gcc_assert (edge->caller->definition);
		  if (!edge_cost)
		    edge_cost = 1;
		  gcc_assert (edge_cost > 0);
		  index = lto_symtab_encoder_lookup (partition->encoder,
						     edge->caller);
		  if (index != LCC_NOT_FOUND
		      && index < last_visited_node - 1)
		    cost -= edge_cost;
		  else
		    cost += edge_cost;
		}
	    }
	  else
	    {
	      refs_node = snode;
	      last_visited_node++;
	    }

	  /* Compute boundary cost of IPA REF edges and at the same time look into
	     variables referenced from current partition and try to add them.  */
	  for (j = 0; refs_node->iterate_reference (j, ref); j++)
	    if (is_a <varpool_node *> (ref->referred))
	      {
		int index;

		vnode = dyn_cast <varpool_node *> (ref->referred);
		if (!vnode->definition)
		  continue;
		if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
		    && vnode->get_partitioning_class () == SYMBOL_PARTITION)
		  add_symbol_to_partition (partition, vnode);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   vnode);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	    else
	      {
		int index;

		node = dyn_cast <cgraph_node *> (ref->referred);
		if (!node->definition)
		  continue;
		index = lto_symtab_encoder_lookup (partition->encoder,
						   node);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	  for (j = 0; refs_node->iterate_referring (j, ref); j++)
	    if (is_a <varpool_node *> (ref->referring))
	      {
		int index;

		vnode = dyn_cast <varpool_node *> (ref->referring);
		gcc_assert (vnode->definition);
		/* It is better to couple variables with their users, because it allows them
		   to be removed.  Coupling with objects they refer to only helps to reduce
		   number of symbols promoted to hidden.  */
		if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
		    && !vnode->can_remove_if_no_refs_p ()
		    && vnode->get_partitioning_class () == SYMBOL_PARTITION)
		  add_symbol_to_partition (partition, vnode);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   vnode);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--;
		else
		  cost++;
	      }
	    else
	      {
		int index;

		node = dyn_cast <cgraph_node *> (ref->referring);
		gcc_assert (node->definition);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   node);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--;
		else
		  cost++;
	      }
	}

      /* If the partition is large enough, start looking for smallest boundary cost.
	 Snapshot whenever the INTERNAL/COST ratio improves (compared via
	 cross-multiplication in HOST_WIDE_INT to avoid overflow).  */
      if (partition->insns < partition_size * 3 / 4
	  || best_cost == INT_MAX
	  || ((!cost
	       || (best_internal * (HOST_WIDE_INT) cost
		   > (internal * (HOST_WIDE_INT)best_cost)))
	      && partition->insns < partition_size * 5 / 4))
	{
	  best_cost = cost;
	  best_internal = internal;
	  best_i = i;
	  best_n_nodes = lto_symtab_encoder_size (partition->encoder);
	  best_total_size = total_size;
	  best_varpool_pos = varpool_pos;
	}
      if (symtab->dump_file)
	fprintf (symtab->dump_file, "Step %i: added %s/%i, size %i, cost %i/%i "
		 "best %i/%i, step %i\n", i,
		 order[i]->name (), order[i]->order,
		 partition->insns, cost, internal,
		 best_cost, best_internal, best_i);
      /* Partition is too large, unwind into step when best cost was reached and
	 start new partition.  */
      if (partition->insns > 2 * partition_size)
	{
	  if (best_i != i)
	    {
	      if (symtab->dump_file)
		fprintf (symtab->dump_file, "Unwinding %i insertions to step %i\n",
			 i - best_i, best_i);
	      undo_partition (partition, best_n_nodes);
	      varpool_pos = best_varpool_pos;
	    }
	  i = best_i;
	  /* When we are finished, avoid creating empty partition.  */
	  while (i < n_nodes - 1 && symbol_partitioned_p (order[i + 1]))
	    i++;
	  if (i == n_nodes - 1)
	    break;
	  partition = new_partition ("");
	  last_visited_node = 0;
	  total_size = best_total_size;
	  cost = 0;

	  if (symtab->dump_file)
	    fprintf (symtab->dump_file, "New partition\n");
	  best_n_nodes = 0;
	  best_cost = INT_MAX;

	  /* Since the size of partitions is just approximate, update the size after
	     we finished current one.  */
	  if (npartitions < n_lto_partitions)
	    partition_size = total_size / (n_lto_partitions - npartitions);
	  else
	    partition_size = INT_MAX;

	  if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
	    partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
	  npartitions ++;
	}
    }

  /* Variables that are not reachable from the code go into last partition.  */
  if (flag_toplevel_reorder)
    {
      FOR_EACH_VARIABLE (vnode)
	if (vnode->get_partitioning_class () == SYMBOL_PARTITION
	    && !symbol_partitioned_p (vnode))
	  add_symbol_to_partition (partition, vnode);
    }
  else
    {
      while (varpool_pos < n_varpool_nodes)
	{
	  if (!symbol_partitioned_p (varpool_order[varpool_pos]))
	    add_symbol_to_partition (partition, varpool_order[varpool_pos]);
	  varpool_pos++;
	}
      free (varpool_order);
    }
  free (order);
}
728
/* Mangle NODE's symbol name into a local name and return true on success.
   This is necessary to do
   1) if two or more static vars of same assembler name
      are merged into single ltrans unit.
   2) if previously static var was promoted hidden to avoid possible conflict
      with symbols defined out of the LTO world.  */

static bool
privatize_symbol_name (symtab_node *node)
{
  tree decl = node->decl;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));

  /* Our renaming machinery does not handle more than one change of assembler
     name.  We should not need more than one anyway.  */
  if (node->lto_file_data
      && lto_get_decl_name_mapping (node->lto_file_data, name) != name)
    {
      if (symtab->dump_file)
	fprintf (symtab->dump_file,
		 "Not privatizing symbol name: %s. It privatized already.\n",
		 name);
      return false;
    }
  /* Avoid mangling of already mangled clones.
     ??? should have a flag whether a symbol has a 'private' name already,
     since we produce some symbols like that i.e. for global constructors
     that are not really clones.  */
  if (node->unique_name)
    {
      if (symtab->dump_file)
	fprintf (symtab->dump_file,
		 "Not privatizing symbol name: %s. Has unique name.\n",
		 name);
      return false;
    }
  /* Assign the new ".lto_priv" clone name and record the mapping so the
     streamer can translate the old name to the new one.  */
  symtab->change_decl_assembler_name (decl,
				      clone_function_name (decl, "lto_priv"));
  if (node->lto_file_data)
    lto_record_renamed_decl (node->lto_file_data, name,
			     IDENTIFIER_POINTER
			     (DECL_ASSEMBLER_NAME (decl)));
  if (symtab->dump_file)
    fprintf (symtab->dump_file,
	     "Privatizing symbol name: %s -> %s\n",
	     name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
  return true;
}
778
779 /* Promote variable VNODE to be static. */
780
781 static void
782 promote_symbol (symtab_node *node)
783 {
784 /* We already promoted ... */
785 if (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
786 && DECL_VISIBILITY_SPECIFIED (node->decl)
787 && TREE_PUBLIC (node->decl))
788 return;
789
790 gcc_checking_assert (!TREE_PUBLIC (node->decl)
791 && !DECL_EXTERNAL (node->decl));
792 /* Be sure that newly public symbol does not conflict with anything already
793 defined by the non-LTO part. */
794 privatize_symbol_name (node);
795 TREE_PUBLIC (node->decl) = 1;
796 DECL_VISIBILITY (node->decl) = VISIBILITY_HIDDEN;
797 DECL_VISIBILITY_SPECIFIED (node->decl) = true;
798 if (symtab->dump_file)
799 fprintf (symtab->dump_file,
800 "Promoting as hidden: %s\n", node->name ());
801 }
802
803 /* Return true if NODE needs named section even if it won't land in the partition
804 symbol table.
805 FIXME: we should really not use named sections for inline clones and master clones. */
806
807 static bool
808 may_need_named_section_p (lto_symtab_encoder_t encoder, symtab_node *node)
809 {
810 struct cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
811 if (!cnode)
812 return false;
813 if (node->real_symbol_p ())
814 return false;
815 return (!encoder
816 || (lto_symtab_encoder_lookup (encoder, node) != LCC_NOT_FOUND
817 && lto_symtab_encoder_encode_body_p (encoder,
818 cnode)));
819 }
820
/* If NODE represents a static variable.  See if there are other variables
   of the same name in partition ENCODER (or in whole compilation unit if
   ENCODER is NULL) and if so, mangle the statics.  Always mangle all
   conflicting statics, so we reduce chances of silently miscompiling
   asm statements referring to them by symbol name.  */

static void
rename_statics (lto_symtab_encoder_t encoder, symtab_node *node)
{
  tree decl = node->decl;
  symtab_node *s;
  tree name = DECL_ASSEMBLER_NAME (decl);

  /* See if this is static symbol.  */
  if ((node->externally_visible
      /* FIXME: externally_visible is somewhat illogically not set for
	 external symbols (i.e. those not defined).  Remove this test
	 once this is fixed.  */
        || DECL_EXTERNAL (node->decl)
	|| !node->real_symbol_p ())
       && !may_need_named_section_p (encoder, node))
    return;

  /* Now walk symbols sharing the same name and see if there are any conflicts.
     (all types of symbols counts here, since we can not have static of the
     same name as external or public symbol.)  */
  for (s = symtab_node::get_for_asmname (name);
       s; s = s->next_sharing_asm_name)
    if ((s->real_symbol_p () || may_need_named_section_p (encoder, s))
	&& s->decl != node->decl
	&& (!encoder
	    || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
       break;

  /* OK, no conflict, so we have nothing to do.  */
  if (!s)
    return;

  if (symtab->dump_file)
    fprintf (symtab->dump_file,
	    "Renaming statics with asm name: %s\n", node->name ());

  /* Assign every symbol in the set that shares the same ASM name an unique
     mangled name.  */
  for (s = symtab_node::get_for_asmname (name); s;)
    if (!s->externally_visible
	&& ((s->real_symbol_p ()
             && !DECL_EXTERNAL (node->decl)
	     && !TREE_PUBLIC (node->decl))
 	    || may_need_named_section_p (encoder, s))
	&& (!encoder
	    || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
      {
	if (privatize_symbol_name (s))
	  /* Re-start from beginning since we do not know how many
	     symbols changed a name.  */
	  s = symtab_node::get_for_asmname (name);
        else s = s->next_sharing_asm_name;
      }
    else s = s->next_sharing_asm_name;
}
881
882 /* Find out all static decls that need to be promoted to global because
883 of cross file sharing. This function must be run in the WPA mode after
884 all inlinees are added. */
885
886 void
887 lto_promote_cross_file_statics (void)
888 {
889 unsigned i, n_sets;
890
891 gcc_assert (flag_wpa);
892
893 /* First compute boundaries. */
894 n_sets = ltrans_partitions.length ();
895 for (i = 0; i < n_sets; i++)
896 {
897 ltrans_partition part
898 = ltrans_partitions[i];
899 part->encoder = compute_ltrans_boundary (part->encoder);
900 }
901
902 /* Look at boundaries and promote symbols as needed. */
903 for (i = 0; i < n_sets; i++)
904 {
905 lto_symtab_encoder_iterator lsei;
906 lto_symtab_encoder_t encoder = ltrans_partitions[i]->encoder;
907
908 for (lsei = lsei_start (encoder); !lsei_end_p (lsei);
909 lsei_next (&lsei))
910 {
911 symtab_node *node = lsei_node (lsei);
912
913 /* If symbol is static, rename it if its assembler name clash with
914 anything else in this unit. */
915 rename_statics (encoder, node);
916
917 /* No need to promote if symbol already is externally visible ... */
918 if (node->externally_visible
919 /* ... or if it is part of current partition ... */
920 || lto_symtab_encoder_in_partition_p (encoder, node)
921 /* ... or if we do not partition it. This mean that it will
922 appear in every partition refernecing it. */
923 || node->get_partitioning_class () != SYMBOL_PARTITION)
924 continue;
925
926 promote_symbol (node);
927 }
928 }
929 }
930
931 /* Rename statics in the whole unit in the case that
932 we do -flto-partition=none. */
933
934 void
935 lto_promote_statics_nonwpa (void)
936 {
937 symtab_node *node;
938 FOR_EACH_SYMBOL (node)
939 rename_statics (NULL, node);
940 }