/* Source file: gcc/lto/lto-partition.c (thirdparty/gcc.git mirror).  */
1 /* LTO partitioning logic routines.
2 Copyright (C) 2009-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "toplev.h"
24 #include "tree.h"
25 #include "predict.h"
26 #include "vec.h"
27 #include "hashtab.h"
28 #include "hash-set.h"
29 #include "machmode.h"
30 #include "tm.h"
31 #include "hard-reg-set.h"
32 #include "input.h"
33 #include "function.h"
34 #include "basic-block.h"
35 #include "tree-ssa-alias.h"
36 #include "internal-fn.h"
37 #include "gimple-expr.h"
38 #include "is-a.h"
39 #include "gimple.h"
40 #include "hash-map.h"
41 #include "plugin-api.h"
42 #include "ipa-ref.h"
43 #include "cgraph.h"
44 #include "lto-streamer.h"
45 #include "timevar.h"
46 #include "params.h"
47 #include "alloc-pool.h"
48 #include "symbol-summary.h"
49 #include "ipa-prop.h"
50 #include "ipa-inline.h"
51 #include "ipa-utils.h"
52 #include "lto-partition.h"
53
/* All LTRANS partitions created so far, in creation order.  */
vec<ltrans_partition> ltrans_partitions;

/* Forward declaration; defined below, mutually recursive with
   add_symbol_to_partition_1 and add_references_to_partition.  */
static void add_symbol_to_partition (ltrans_partition part, symtab_node *node);
57
58
59 /* Create new partition with name NAME. */
60
61 static ltrans_partition
62 new_partition (const char *name)
63 {
64 ltrans_partition part = XCNEW (struct ltrans_partition_def);
65 part->encoder = lto_symtab_encoder_new (false);
66 part->name = name;
67 part->insns = 0;
68 ltrans_partitions.safe_push (part);
69 return part;
70 }
71
72 /* Free memory used by ltrans datastructures. */
73
74 void
75 free_ltrans_partitions (void)
76 {
77 unsigned int idx;
78 ltrans_partition part;
79 for (idx = 0; ltrans_partitions.iterate (idx, &part); idx++)
80 {
81 if (part->initializers_visited)
82 delete part->initializers_visited;
83 /* Symtab encoder is freed after streaming. */
84 free (part);
85 }
86 ltrans_partitions.release ();
87 }
88
89 /* Return true if symbol is already in some partition. */
90
91 static inline bool
92 symbol_partitioned_p (symtab_node *node)
93 {
94 return node->aux;
95 }
96
/* Add references of NODE into partition PART.  */
static void
add_references_to_partition (ltrans_partition part, symtab_node *node)
{
  int i;
  struct ipa_ref *ref = NULL;

  /* Add all duplicated references to the partition.  */
  for (i = 0; node->iterate_reference (i, ref); i++)
    if (ref->referred->get_partitioning_class () == SYMBOL_DUPLICATE)
      add_symbol_to_partition (part, ref->referred);
    /* References to a readonly variable may be constant folded into its value.
       Recursively look into the initializers of the constant variable and add
       references, too.  */
    else if (is_a <varpool_node *> (ref->referred)
	     && (dyn_cast <varpool_node *> (ref->referred)
		 ->ctor_useable_for_folding_p ()
		 || POINTER_BOUNDS_P (ref->referred->decl))
	     && !lto_symtab_encoder_in_partition_p (part->encoder, ref->referred))
      {
	if (!part->initializers_visited)
	  part->initializers_visited = new hash_set<symtab_node *>;
	/* hash_set::add returns true when the element was already present;
	   recurse only on first visit so cyclic initializers terminate.  */
	if (!part->initializers_visited->add (ref->referred))
	  add_references_to_partition (part, ref->referred);
      }
}
123
/* Helper function for add_symbol_to_partition doing the actual dirty work
   of adding NODE to PART.  Returns true when NODE ends up in PART; false
   when a non-duplicated, non-comdat symbol already lives elsewhere.  */

static bool
add_symbol_to_partition_1 (ltrans_partition part, symtab_node *node)
{
  enum symbol_partitioning_class c = node->get_partitioning_class ();
  struct ipa_ref *ref;
  symtab_node *node1;

  /* If NODE is already there, we have nothing to do.  */
  if (lto_symtab_encoder_in_partition_p (part->encoder, node))
    return true;

  /* Non-duplicated aliases or thunks of a duplicated symbol need to be output
     just once.

     Be lax about comdats; they may or may not be duplicated and we may
     end up in need to duplicate keyed comdat because it has unkeyed alias.  */
  if (c == SYMBOL_PARTITION && !DECL_COMDAT (node->decl)
      && symbol_partitioned_p (node))
    return false;

  /* Be sure that we never try to duplicate partitioned symbol
     or add external symbol.  */
  gcc_assert (c != SYMBOL_EXTERNAL
	      && (c == SYMBOL_DUPLICATE || !symbol_partitioned_p (node)));

  lto_set_symtab_encoder_in_partition (part->encoder, node);

  if (symbol_partitioned_p (node))
    {
      node->in_other_partition = 1;
      if (symtab->dump_file)
	fprintf (symtab->dump_file,
		 "Symbol node %s now used in multiple partitions\n",
		 node->name ());
    }
  /* AUX counts the number of partitions NODE was added to; see also
     symbol_partitioned_p and undo_partition.  */
  node->aux = (void *)((size_t)node->aux + 1);

  if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
    {
      struct cgraph_edge *e;
      if (!node->alias)
	part->insns += inline_summaries->get (cnode)->self_size;

      /* Add all inline clones and callees that are duplicated.  */
      for (e = cnode->callees; e; e = e->next_callee)
	if (!e->inline_failed)
	  add_symbol_to_partition_1 (part, e->callee);
	else if (e->callee->get_partitioning_class () == SYMBOL_DUPLICATE)
	  add_symbol_to_partition (part, e->callee);

      /* Add all thunks associated with the function.  */
      for (e = cnode->callers; e; e = e->next_caller)
	if (e->caller->thunk.thunk_p)
	  add_symbol_to_partition_1 (part, e->caller);

      /* Instrumented version is actually the same function.
	 Therefore put it into the same partition.  */
      if (cnode->instrumented_version)
	add_symbol_to_partition_1 (part, cnode->instrumented_version);
    }

  add_references_to_partition (part, node);

  /* Add all aliases associated with the symbol.  */

  FOR_EACH_ALIAS (node, ref)
    if (!node->weakref)
      add_symbol_to_partition_1 (part, ref->referring);

  /* Ensure that SAME_COMDAT_GROUP lists are always added in a group.  */
  if (node->same_comdat_group)
    for (node1 = node->same_comdat_group;
	 node1 != node; node1 = node1->same_comdat_group)
      if (!node->alias)
	{
	  bool added = add_symbol_to_partition_1 (part, node1);
	  gcc_assert (added);
	}
  return true;
}
207
208 /* If symbol NODE is really part of other symbol's definition (i.e. it is
209 internal label, thunk, alias or so), return the outer symbol.
210 When add_symbol_to_partition_1 is called on the outer symbol it must
211 eventually add NODE, too. */
212 static symtab_node *
213 contained_in_symbol (symtab_node *node)
214 {
215 /* Weakrefs are never contained in anything. */
216 if (node->weakref)
217 return node;
218 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
219 {
220 cnode = cnode->function_symbol ();
221 if (cnode->global.inlined_to)
222 cnode = cnode->global.inlined_to;
223 return cnode;
224 }
225 else if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
226 return vnode->ultimate_alias_target ();
227 return node;
228 }
229
230 /* Add symbol NODE to partition. When definition of NODE is part
231 of other symbol definition, add the other symbol, too. */
232
233 static void
234 add_symbol_to_partition (ltrans_partition part, symtab_node *node)
235 {
236 symtab_node *node1;
237
238 /* Verify that we do not try to duplicate something that can not be. */
239 gcc_checking_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
240 || !symbol_partitioned_p (node));
241
242 while ((node1 = contained_in_symbol (node)) != node)
243 node = node1;
244
245 /* If we have duplicated symbol contained in something we can not duplicate,
246 we are very badly screwed. The other way is possible, so we do not
247 assert this in add_symbol_to_partition_1.
248
249 Be lax about comdats; they may or may not be duplicated and we may
250 end up in need to duplicate keyed comdat because it has unkeyed alias. */
251
252 gcc_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
253 || DECL_COMDAT (node->decl)
254 || !symbol_partitioned_p (node));
255
256 add_symbol_to_partition_1 (part, node);
257 }
258
/* Undo all additions until the number of symtab nodes encoded in PARTITION
   is back down to N_NODES.  (The head comment used to mention separate
   cgraph/varpool counts; the encoder holds both kinds in one sequence.)  */

static void
undo_partition (ltrans_partition partition, unsigned int n_nodes)
{
  while (lto_symtab_encoder_size (partition->encoder) > (int)n_nodes)
    {
      /* Nodes are removed from the tail; index N_NODES is always the first
	 node to undo.  */
      symtab_node *node = lto_symtab_encoder_deref (partition->encoder,
						    n_nodes);
      cgraph_node *cnode;

      /* After UNDO we no longer know what was visited.  */
      if (partition->initializers_visited)
	delete partition->initializers_visited;
      partition->initializers_visited = NULL;

      if (!node->alias && (cnode = dyn_cast <cgraph_node *> (node)))
	partition->insns -= inline_summaries->get (cnode)->self_size;
      lto_symtab_encoder_delete_node (partition->encoder, node);
      /* Decrement the per-partition use count kept in AUX.  */
      node->aux = (void *)((size_t)node->aux - 1);
    }
}
282
283 /* Group cgrah nodes by input files. This is used mainly for testing
284 right now. */
285
286 void
287 lto_1_to_1_map (void)
288 {
289 symtab_node *node;
290 struct lto_file_decl_data *file_data;
291 hash_map<lto_file_decl_data *, ltrans_partition> pmap;
292 ltrans_partition partition;
293 int npartitions = 0;
294
295 FOR_EACH_SYMBOL (node)
296 {
297 if (node->get_partitioning_class () != SYMBOL_PARTITION
298 || symbol_partitioned_p (node))
299 continue;
300
301 file_data = node->lto_file_data;
302
303 if (file_data)
304 {
305 ltrans_partition *slot = &pmap.get_or_insert (file_data);
306 if (*slot)
307 partition = *slot;
308 else
309 {
310 partition = new_partition (file_data->file_name);
311 *slot = partition;
312 npartitions++;
313 }
314 }
315 else if (!file_data && ltrans_partitions.length ())
316 partition = ltrans_partitions[0];
317 else
318 {
319 partition = new_partition ("");
320 pmap.put (NULL, partition);
321 npartitions++;
322 }
323
324 add_symbol_to_partition (partition, node);
325 }
326
327 /* If the cgraph is empty, create one cgraph node set so that there is still
328 an output file for any variables that need to be exported in a DSO. */
329 if (!npartitions)
330 new_partition ("empty");
331
332 }
333
334 /* Maximal partitioning. Put every new symbol into new partition if possible. */
335
336 void
337 lto_max_map (void)
338 {
339 symtab_node *node;
340 ltrans_partition partition;
341 int npartitions = 0;
342
343 FOR_EACH_SYMBOL (node)
344 {
345 if (node->get_partitioning_class () != SYMBOL_PARTITION
346 || symbol_partitioned_p (node))
347 continue;
348 partition = new_partition (node->asm_name ());
349 add_symbol_to_partition (partition, node);
350 npartitions++;
351 }
352 if (!npartitions)
353 new_partition ("empty");
354 }
355
356 /* Helper function for qsort; sort nodes by order. noreorder functions must have
357 been removed earlier. */
358 static int
359 node_cmp (const void *pa, const void *pb)
360 {
361 const struct cgraph_node *a = *(const struct cgraph_node * const *) pa;
362 const struct cgraph_node *b = *(const struct cgraph_node * const *) pb;
363
364 /* Profile reorder flag enables function reordering based on first execution
365 of a function. All functions with profile are placed in ascending
366 order at the beginning. */
367
368 if (flag_profile_reorder_functions)
369 {
370 /* Functions with time profile are sorted in ascending order. */
371 if (a->tp_first_run && b->tp_first_run)
372 return a->tp_first_run != b->tp_first_run
373 ? a->tp_first_run - b->tp_first_run
374 : a->order - b->order;
375
376 /* Functions with time profile are sorted before the functions
377 that do not have the profile. */
378 if (a->tp_first_run || b->tp_first_run)
379 return b->tp_first_run - a->tp_first_run;
380 }
381
382 return b->order - a->order;
383 }
384
385 /* Helper function for qsort; sort nodes by order. */
386 static int
387 varpool_node_cmp (const void *pa, const void *pb)
388 {
389 const symtab_node *a = *static_cast<const symtab_node * const *> (pa);
390 const symtab_node *b = *static_cast<const symtab_node * const *> (pb);
391 return b->order - a->order;
392 }
393
394 /* Add all symtab nodes from NEXT_NODE to PARTITION in order. */
395
396 static void
397 add_sorted_nodes (vec<symtab_node *> &next_nodes, ltrans_partition partition)
398 {
399 unsigned i;
400 symtab_node *node;
401
402 next_nodes.qsort (varpool_node_cmp);
403 FOR_EACH_VEC_ELT (next_nodes, i, node)
404 if (!symbol_partitioned_p (node))
405 add_symbol_to_partition (partition, node);
406 }
407
408
409 /* Group cgraph nodes into equally-sized partitions.
410
411 The partitioning algorithm is simple: nodes are taken in predefined order.
412 The order corresponds to the order we want functions to have in the final
413 output. In the future this will be given by function reordering pass, but
414 at the moment we use the topological order, which is a good approximation.
415
416 The goal is to partition this linear order into intervals (partitions) so
417 that all the partitions have approximately the same size and the number of
418 callgraph or IPA reference edges crossing boundaries is minimal.
419
420 This is a lot faster (O(n) in size of callgraph) than algorithms doing
421 priority-based graph clustering that are generally O(n^2) and, since
422 WHOPR is designed to make things go well across partitions, it leads
423 to good results.
424
425 We compute the expected size of a partition as:
426
427 max (total_size / lto_partitions, min_partition_size)
428
429 We use dynamic expected size of partition so small programs are partitioned
430 into enough partitions to allow use of multiple CPUs, while large programs
431 are not partitioned too much. Creating too many partitions significantly
432 increases the streaming overhead.
433
434 In the future, we would like to bound the maximal size of partitions so as
435 to prevent the LTRANS stage from consuming too much memory. At the moment,
436 however, the WPA stage is the most memory intensive for large benchmarks,
437 since too many types and declarations are read into memory.
438
439 The function implements a simple greedy algorithm. Nodes are being added
440 to the current partition until after 3/4 of the expected partition size is
441 reached. Past this threshold, we keep track of boundary size (number of
442 edges going to other partitions) and continue adding functions until after
443 the current partition has grown to twice the expected partition size. Then
444 the process is undone to the point where the minimal ratio of boundary size
445 and in-partition calls was reached. */
446
void
lto_balanced_map (int n_lto_partitions)
{
  int n_nodes = 0;
  int n_varpool_nodes = 0, varpool_pos = 0, best_varpool_pos = 0;
  struct cgraph_node **order = XNEWVEC (cgraph_node *, symtab->cgraph_max_uid);
  auto_vec<cgraph_node *> noreorder;
  auto_vec<varpool_node *> varpool_order;
  int i;
  struct cgraph_node *node;
  int total_size = 0, best_total_size = 0;
  int partition_size;
  ltrans_partition partition;
  int last_visited_node = 0;
  varpool_node *vnode;
  /* COST is the number of edges crossing the current partition boundary;
     INTERNAL counts edges fully inside it.  BEST_* snapshot the state at
     the step with the best INTERNAL/COST ratio seen so far.  */
  int cost = 0, internal = 0;
  int best_n_nodes = 0, best_i = 0, best_cost =
    INT_MAX, best_internal = 0;
  int npartitions;
  int current_order = -1;
  int noreorder_pos = 0;

  /* AUX must be clear; it is used as the per-partition use count.  */
  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->aux);

  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->get_partitioning_class () == SYMBOL_PARTITION)
      {
	if (node->no_reorder)
	  noreorder.safe_push (node);
	else
	  order[n_nodes++] = node;
	if (!node->alias)
	  total_size += inline_summaries->get (node)->size;
      }

  /* Streaming works best when the source units do not cross partition
     boundaries much.  This is because importing function from a source
     unit tends to import a lot of global trees defined there.  We should
     get better about minimizing the function boundary, but until then
     things work more smoothly if we order in source order.  */
  qsort (order, n_nodes, sizeof (struct cgraph_node *), node_cmp);
  noreorder.qsort (node_cmp);

  if (symtab->dump_file)
    {
      for (i = 0; i < n_nodes; i++)
	fprintf (symtab->dump_file, "Balanced map symbol order:%s:%u\n",
		 order[i]->name (), order[i]->tp_first_run);
      for (i = 0; i < (int)noreorder.length(); i++)
	fprintf (symtab->dump_file, "Balanced map symbol no_reorder:%s:%u\n",
		 noreorder[i]->name (), noreorder[i]->tp_first_run);
    }

  /* Collect all variables that should not be reordered.  */
  FOR_EACH_VARIABLE (vnode)
    if (vnode->get_partitioning_class () == SYMBOL_PARTITION
	&& (!flag_toplevel_reorder || vnode->no_reorder))
      varpool_order.safe_push (vnode);
  n_varpool_nodes = varpool_order.length ();
  varpool_order.qsort (varpool_node_cmp);

  /* Compute partition size and create the first partition.  */
  partition_size = total_size / n_lto_partitions;
  if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
    partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
  npartitions = 1;
  partition = new_partition ("");
  if (symtab->dump_file)
    fprintf (symtab->dump_file, "Total unit size: %i, partition size: %i\n",
	     total_size, partition_size);

  auto_vec<symtab_node *> next_nodes;

  for (i = 0; i < n_nodes; i++)
    {
      if (symbol_partitioned_p (order[i]))
	continue;

      current_order = order[i]->order;

      /* Output noreorder and varpool in program order first.  */
      next_nodes.truncate (0);
      while (varpool_pos < n_varpool_nodes
	     && varpool_order[varpool_pos]->order < current_order)
	next_nodes.safe_push (varpool_order[varpool_pos++]);
      while (noreorder_pos < (int)noreorder.length ()
	     && noreorder[noreorder_pos]->order < current_order)
	{
	  if (!noreorder[noreorder_pos]->alias)
	    total_size -= inline_summaries->get (noreorder[noreorder_pos])->size;
	  next_nodes.safe_push (noreorder[noreorder_pos++]);
	}
      add_sorted_nodes (next_nodes, partition);

      add_symbol_to_partition (partition, order[i]);
      if (!order[i]->alias)
	total_size -= inline_summaries->get (order[i])->size;


      /* Once we added a new node to the partition, we also want to add
	 all referenced variables unless they were already added into some
	 earlier partition.
	 add_symbol_to_partition adds possibly multiple nodes and
	 variables that are needed to satisfy needs of ORDER[i].
	 We remember last visited cgraph and varpool node from last iteration
	 of outer loop that allows us to process every new addition.

	 At the same time we compute size of the boundary into COST.  Every
	 callgraph or IPA reference edge leaving the partition contributes into
	 COST.  Every edge inside partition was earlier computed as one leaving
	 it and thus we need to subtract it from COST.  */
      while (last_visited_node < lto_symtab_encoder_size (partition->encoder))
	{
	  symtab_node *refs_node;
	  int j;
	  struct ipa_ref *ref = NULL;
	  symtab_node *snode = lto_symtab_encoder_deref (partition->encoder,
							 last_visited_node);

	  if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
	    {
	      struct cgraph_edge *edge;

	      refs_node = node;

	      last_visited_node++;

	      gcc_assert (node->definition || node->weakref);

	      /* Compute boundary cost of callgraph edges.  */
	      for (edge = node->callees; edge; edge = edge->next_callee)
		if (edge->callee->definition)
		  {
		    int edge_cost = edge->frequency;
		    int index;

		    if (!edge_cost)
		      edge_cost = 1;
		    gcc_assert (edge_cost > 0);
		    index = lto_symtab_encoder_lookup (partition->encoder,
						       edge->callee);
		    /* index < last_visited_node - 1 means the callee is
		       already inside the partition: the edge is internal.  */
		    if (index != LCC_NOT_FOUND
			&& index < last_visited_node - 1)
		      cost -= edge_cost, internal += edge_cost;
		    else
		      cost += edge_cost;
		  }
	      for (edge = node->callers; edge; edge = edge->next_caller)
		{
		  int edge_cost = edge->frequency;
		  int index;

		  gcc_assert (edge->caller->definition);
		  if (!edge_cost)
		    edge_cost = 1;
		  gcc_assert (edge_cost > 0);
		  index = lto_symtab_encoder_lookup (partition->encoder,
						     edge->caller);
		  if (index != LCC_NOT_FOUND
		      && index < last_visited_node - 1)
		    cost -= edge_cost;
		  else
		    cost += edge_cost;
		}
	    }
	  else
	    {
	      refs_node = snode;
	      last_visited_node++;
	    }

	  /* Compute boundary cost of IPA REF edges and at the same time look into
	     variables referenced from current partition and try to add them.  */
	  for (j = 0; refs_node->iterate_reference (j, ref); j++)
	    if (is_a <varpool_node *> (ref->referred))
	      {
		int index;

		vnode = dyn_cast <varpool_node *> (ref->referred);
		if (!vnode->definition)
		  continue;
		if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
		    && !vnode->no_reorder
		    && vnode->get_partitioning_class () == SYMBOL_PARTITION)
		  add_symbol_to_partition (partition, vnode);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   vnode);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	    else
	      {
		int index;

		node = dyn_cast <cgraph_node *> (ref->referred);
		if (!node->definition)
		  continue;
		index = lto_symtab_encoder_lookup (partition->encoder,
						   node);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	  for (j = 0; refs_node->iterate_referring (j, ref); j++)
	    if (is_a <varpool_node *> (ref->referring))
	      {
		int index;

		vnode = dyn_cast <varpool_node *> (ref->referring);
		gcc_assert (vnode->definition);
		/* It is better to couple variables with their users, because it allows them
		   to be removed.  Coupling with objects they refer to only helps to reduce
		   number of symbols promoted to hidden.  */
		if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
		    && !vnode->no_reorder
		    && !vnode->can_remove_if_no_refs_p ()
		    && vnode->get_partitioning_class () == SYMBOL_PARTITION)
		  add_symbol_to_partition (partition, vnode);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   vnode);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--;
		else
		  cost++;
	      }
	    else
	      {
		int index;

		node = dyn_cast <cgraph_node *> (ref->referring);
		gcc_assert (node->definition);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   node);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--;
		else
		  cost++;
	      }
	}

      /* If the partition is large enough, start looking for smallest boundary cost.  */
      if (partition->insns < partition_size * 3 / 4
	  || best_cost == INT_MAX
	  || ((!cost
	       || (best_internal * (HOST_WIDE_INT) cost
		   > (internal * (HOST_WIDE_INT)best_cost)))
	      && partition->insns < partition_size * 5 / 4))
	{
	  best_cost = cost;
	  best_internal = internal;
	  best_i = i;
	  best_n_nodes = lto_symtab_encoder_size (partition->encoder);
	  best_total_size = total_size;
	  best_varpool_pos = varpool_pos;
	}
      if (symtab->dump_file)
	fprintf (symtab->dump_file, "Step %i: added %s/%i, size %i, cost %i/%i "
		 "best %i/%i, step %i\n", i,
		 order[i]->name (), order[i]->order,
		 partition->insns, cost, internal,
		 best_cost, best_internal, best_i);
      /* Partition is too large, unwind into step when best cost was reached and
	 start new partition.  */
      if (partition->insns > 2 * partition_size)
	{
	  if (best_i != i)
	    {
	      if (symtab->dump_file)
		fprintf (symtab->dump_file, "Unwinding %i insertions to step %i\n",
			 i - best_i, best_i);
	      undo_partition (partition, best_n_nodes);
	      varpool_pos = best_varpool_pos;
	    }
	  i = best_i;
	  /* When we are finished, avoid creating empty partition.  */
	  while (i < n_nodes - 1 && symbol_partitioned_p (order[i + 1]))
	    i++;
	  if (i == n_nodes - 1)
	    break;
	  partition = new_partition ("");
	  last_visited_node = 0;
	  total_size = best_total_size;
	  cost = 0;

	  if (symtab->dump_file)
	    fprintf (symtab->dump_file, "New partition\n");
	  best_n_nodes = 0;
	  best_cost = INT_MAX;

	  /* Since the size of partitions is just approximate, update the size after
	     we finished current one.  */
	  if (npartitions < n_lto_partitions)
	    partition_size = total_size / (n_lto_partitions - npartitions);
	  else
	    partition_size = INT_MAX;

	  if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
	    partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
	  npartitions ++;
	}
    }

  next_nodes.truncate (0);

  /* Variables that are not reachable from the code go into last partition.  */
  if (flag_toplevel_reorder)
    {
      FOR_EACH_VARIABLE (vnode)
	if (vnode->get_partitioning_class () == SYMBOL_PARTITION
	    && !symbol_partitioned_p (vnode)
	    && !vnode->no_reorder)
	  next_nodes.safe_push (vnode);
    }

  /* Output remaining ordered symbols.  */
  while (varpool_pos < n_varpool_nodes)
    next_nodes.safe_push (varpool_order[varpool_pos++]);
  while (noreorder_pos < (int)noreorder.length ())
    next_nodes.safe_push (noreorder[noreorder_pos++]);
  add_sorted_nodes (next_nodes, partition);

  free (order);
}
778
/* Mangle NODE symbol name into a local name.
   This is necessary to do
   1) if two or more static vars of same assembler name
      are merged into single ltrans unit.
   2) if previously static var was promoted hidden to avoid possible conflict
      with symbols defined out of the LTO world.
   Returns true if the name was changed; false if the symbol was left alone
   (already renamed once, or carries a unique name).  */

static bool
privatize_symbol_name (symtab_node *node)
{
  tree decl = node->decl;
  cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
  const char *name;

  /* If we want to privatize instrumentation clone
     then we need to change original function name
     which is used via transparent alias chain.  */
  if (cnode && cnode->instrumentation_clone)
    decl = cnode->orig_decl;

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));

  /* Our renaming machinery does not handle more than one change of
     assembler name.  We should not need more than one anyway.  */
  if (node->lto_file_data
      && lto_get_decl_name_mapping (node->lto_file_data, name) != name)
    {
      if (symtab->dump_file)
	fprintf (symtab->dump_file,
		 "Not privatizing symbol name: %s. It privatized already.\n",
		 name);
      return false;
    }
  /* Avoid mangling of already mangled clones.
     ??? should have a flag whether a symbol has a 'private' name already,
     since we produce some symbols like that i.e. for global constructors
     that are not really clones.  */
  if (node->unique_name)
    {
      if (symtab->dump_file)
	fprintf (symtab->dump_file,
		 "Not privatizing symbol name: %s. Has unique name.\n",
		 name);
      return false;
    }
  symtab->change_decl_assembler_name (decl,
				      clone_function_name (decl, "lto_priv"));
  /* Record the rename so later lookups can map the old name to the new.  */
  if (node->lto_file_data)
    lto_record_renamed_decl (node->lto_file_data, name,
			     IDENTIFIER_POINTER
			     (DECL_ASSEMBLER_NAME (decl)));
  /* We could change name which is a target of transparent alias
     chain of instrumented function name.  Fix the alias chain if so.  */
  if (cnode)
    {
      tree iname = NULL_TREE;
      if (cnode->instrumentation_clone)
	iname = DECL_ASSEMBLER_NAME (cnode->decl);
      else if (cnode->instrumented_version
	       && cnode->instrumented_version->orig_decl == decl)
	iname = DECL_ASSEMBLER_NAME (cnode->instrumented_version->decl);

      if (iname)
	{
	  gcc_assert (IDENTIFIER_TRANSPARENT_ALIAS (iname));
	  TREE_CHAIN (iname) = DECL_ASSEMBLER_NAME (decl);
	}
    }
  if (symtab->dump_file)
    fprintf (symtab->dump_file,
	     "Privatizing symbol name: %s -> %s\n",
	     name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
  return true;
}
854
/* Promote symbol NODE to be visible across partitions: make it public with
   hidden visibility.  (The old comment said "variable VNODE"; the function
   handles any symtab node.)  */

static void
promote_symbol (symtab_node *node)
{
  /* We already promoted ... */
  if (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
      && DECL_VISIBILITY_SPECIFIED (node->decl)
      && TREE_PUBLIC (node->decl))
    return;

  gcc_checking_assert (!TREE_PUBLIC (node->decl)
		       && !DECL_EXTERNAL (node->decl));
  /* Be sure that newly public symbol does not conflict with anything already
     defined by the non-LTO part.  */
  privatize_symbol_name (node);
  TREE_PUBLIC (node->decl) = 1;
  DECL_VISIBILITY (node->decl) = VISIBILITY_HIDDEN;
  DECL_VISIBILITY_SPECIFIED (node->decl) = true;
  if (symtab->dump_file)
    fprintf (symtab->dump_file,
	     "Promoting as hidden: %s\n", node->name ());
}
878
879 /* Return true if NODE needs named section even if it won't land in the partition
880 symbol table.
881 FIXME: we should really not use named sections for inline clones and master clones. */
882
883 static bool
884 may_need_named_section_p (lto_symtab_encoder_t encoder, symtab_node *node)
885 {
886 struct cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
887 if (!cnode)
888 return false;
889 if (node->real_symbol_p ())
890 return false;
891 return (!encoder
892 || (lto_symtab_encoder_lookup (encoder, node) != LCC_NOT_FOUND
893 && lto_symtab_encoder_encode_body_p (encoder,
894 cnode)));
895 }
896
/* If NODE represents a static variable, see if there are other variables
   of the same name in partition ENCODER (or in whole compilation unit if
   ENCODER is NULL) and if so, mangle the statics.  Always mangle all
   conflicting statics, so we reduce chances of silently miscompiling
   asm statements referring to them by symbol name.  */

static void
rename_statics (lto_symtab_encoder_t encoder, symtab_node *node)
{
  tree decl = node->decl;
  symtab_node *s;
  tree name = DECL_ASSEMBLER_NAME (decl);

  /* See if this is static symbol.  */
  if ((node->externally_visible
      /* FIXME: externally_visible is somewhat illogically not set for
	 external symbols (i.e. those not defined).  Remove this test
	 once this is fixed.  */
        || DECL_EXTERNAL (node->decl)
	|| !node->real_symbol_p ())
      && !may_need_named_section_p (encoder, node))
    return;

  /* Now walk symbols sharing the same name and see if there are any conflicts.
     (all types of symbols counts here, since we can not have static of the
     same name as external or public symbol.)  */
  for (s = symtab_node::get_for_asmname (name);
       s; s = s->next_sharing_asm_name)
    if ((s->real_symbol_p () || may_need_named_section_p (encoder, s))
	&& s->decl != node->decl
	&& (!encoder
	    || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
       break;

  /* OK, no conflict, so we have nothing to do.  */
  if (!s)
    return;

  if (symtab->dump_file)
    fprintf (symtab->dump_file,
	    "Renaming statics with asm name: %s\n", node->name ());

  /* Assign every symbol in the set that shares the same ASM name an unique
     mangled name.  */
  for (s = symtab_node::get_for_asmname (name); s;)
    if (!s->externally_visible
	&& ((s->real_symbol_p ()
             && !DECL_EXTERNAL (node->decl)
	     && !TREE_PUBLIC (node->decl))
 	    || may_need_named_section_p (encoder, s))
	&& (!encoder
	    || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
      {
        if (privatize_symbol_name (s))
	  /* Re-start from beginning since we do not know how many
	     symbols changed a name.  */
	  s = symtab_node::get_for_asmname (name);
        else s = s->next_sharing_asm_name;
      }
    else s = s->next_sharing_asm_name;
}
957
/* Find out all static decls that need to be promoted to global because
   of cross file sharing.  This function must be run in the WPA mode after
   all inlinees are added.  */

void
lto_promote_cross_file_statics (void)
{
  unsigned i, n_sets;

  gcc_assert (flag_wpa);

  select_what_to_stream (false);

  /* First compute boundaries.  */
  n_sets = ltrans_partitions.length ();
  for (i = 0; i < n_sets; i++)
    {
      ltrans_partition part
	= ltrans_partitions[i];
      part->encoder = compute_ltrans_boundary (part->encoder);
    }

  /* Look at boundaries and promote symbols as needed.  */
  for (i = 0; i < n_sets; i++)
    {
      lto_symtab_encoder_iterator lsei;
      lto_symtab_encoder_t encoder = ltrans_partitions[i]->encoder;

      for (lsei = lsei_start (encoder); !lsei_end_p (lsei);
	   lsei_next (&lsei))
        {
          symtab_node *node = lsei_node (lsei);

	  /* If symbol is static, rename it if its assembler name
	     clashes with anything else in this unit.  */
	  rename_statics (encoder, node);

	  /* No need to promote if symbol already is externally visible ... */
	  if (node->externally_visible
	      /* ... or if it is part of current partition ... */
	      || lto_symtab_encoder_in_partition_p (encoder, node)
	      /* ... or if we do not partition it.  This means that it will
		 appear in every partition referencing it.  */
	      || node->get_partitioning_class () != SYMBOL_PARTITION)
	    continue;

          promote_symbol (node);
        }
    }
}
1008
1009 /* Rename statics in the whole unit in the case that
1010 we do -flto-partition=none. */
1011
1012 void
1013 lto_promote_statics_nonwpa (void)
1014 {
1015 symtab_node *node;
1016 FOR_EACH_SYMBOL (node)
1017 rename_statics (NULL, node);
1018 }