1 /* LTO partitioning logic routines.
2 Copyright (C) 2009-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "toplev.h"
24 #include "alias.h"
25 #include "symtab.h"
26 #include "options.h"
27 #include "tree.h"
28 #include "fold-const.h"
29 #include "predict.h"
30 #include "tm.h"
31 #include "hard-reg-set.h"
32 #include "function.h"
33 #include "basic-block.h"
34 #include "tree-ssa-alias.h"
35 #include "internal-fn.h"
36 #include "gimple-expr.h"
37 #include "gimple.h"
38 #include "plugin-api.h"
39 #include "ipa-ref.h"
40 #include "cgraph.h"
41 #include "lto-streamer.h"
42 #include "timevar.h"
43 #include "params.h"
44 #include "alloc-pool.h"
45 #include "symbol-summary.h"
46 #include "ipa-prop.h"
47 #include "ipa-inline.h"
48 #include "ipa-utils.h"
49 #include "lto-partition.h"
50 #include "stringpool.h"
51
52 vec<ltrans_partition> ltrans_partitions;
53
54 static void add_symbol_to_partition (ltrans_partition part, symtab_node *node);
55
56
57 /* Create new partition with name NAME. */
58
59 static ltrans_partition
60 new_partition (const char *name)
61 {
62 ltrans_partition part = XCNEW (struct ltrans_partition_def);
63 part->encoder = lto_symtab_encoder_new (false);
64 part->name = name;
65 part->insns = 0;
66 part->symbols = 0;
67 ltrans_partitions.safe_push (part);
68 return part;
69 }
70
71 /* Free memory used by ltrans data structures.  */
72
73 void
74 free_ltrans_partitions (void)
75 {
76 unsigned int idx;
77 ltrans_partition part;
78 for (idx = 0; ltrans_partitions.iterate (idx, &part); idx++)
79 {
80 if (part->initializers_visited)
81 delete part->initializers_visited;
82 /* Symtab encoder is freed after streaming. */
83 free (part);
84 }
85 ltrans_partitions.release ();
86 }
87
88 /* Return true if symbol is already in some partition. */
89
90 static inline bool
91 symbol_partitioned_p (symtab_node *node)
92 {
93 return node->aux;
94 }
95
96 /* Add references into the partition. */
97 static void
98 add_references_to_partition (ltrans_partition part, symtab_node *node)
99 {
100 int i;
101 struct ipa_ref *ref = NULL;
102
103 /* Add all duplicated references to the partition. */
104 for (i = 0; node->iterate_reference (i, ref); i++)
105 if (ref->referred->get_partitioning_class () == SYMBOL_DUPLICATE)
106 add_symbol_to_partition (part, ref->referred);
107 /* References to a readonly variable may be constant folded into its value.
108 Recursively look into the initializers of the constant variable and add
109 references, too. */
110 else if (is_a <varpool_node *> (ref->referred)
111 && (dyn_cast <varpool_node *> (ref->referred)
112 ->ctor_useable_for_folding_p ()
113 || POINTER_BOUNDS_P (ref->referred->decl))
114 && !lto_symtab_encoder_in_partition_p (part->encoder, ref->referred))
115 {
116 if (!part->initializers_visited)
117 part->initializers_visited = new hash_set<symtab_node *>;
118 if (!part->initializers_visited->add (ref->referred))
119 add_references_to_partition (part, ref->referred);
120 }
121 }
122
123 /* Helper function for add_symbol_to_partition doing the actual dirty work
124 of adding NODE to PART. */
125
126 static bool
127 add_symbol_to_partition_1 (ltrans_partition part, symtab_node *node)
128 {
129 enum symbol_partitioning_class c = node->get_partitioning_class ();
130 struct ipa_ref *ref;
131 symtab_node *node1;
132
133 /* If NODE is already there, we have nothing to do. */
134 if (lto_symtab_encoder_in_partition_p (part->encoder, node))
135 return true;
136
137 /* Non-duplicated aliases or thunks of a duplicated symbol need to be output
138 just once.
139
140 Be lax about comdats; they may or may not be duplicated and we may
141 end up needing to duplicate a keyed comdat because it has an unkeyed alias. */
142 if (c == SYMBOL_PARTITION && !DECL_COMDAT (node->decl)
143 && symbol_partitioned_p (node))
144 return false;
145
146 /* Be sure that we never try to duplicate a partitioned symbol
147 or add an external symbol. */
148 gcc_assert (c != SYMBOL_EXTERNAL
149 && (c == SYMBOL_DUPLICATE || !symbol_partitioned_p (node)));
150
151 part->symbols++;
152
153 lto_set_symtab_encoder_in_partition (part->encoder, node);
154
155 if (symbol_partitioned_p (node))
156 {
157 node->in_other_partition = 1;
158 if (symtab->dump_file)
159 fprintf (symtab->dump_file,
160 "Symbol node %s now used in multiple partitions\n",
161 node->name ());
162 }
163 node->aux = (void *)((size_t)node->aux + 1);
164
165 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
166 {
167 struct cgraph_edge *e;
168 if (!node->alias)
169 part->insns += inline_summaries->get (cnode)->self_size;
170
171 /* Add all inline clones and callees that are duplicated. */
172 for (e = cnode->callees; e; e = e->next_callee)
173 if (!e->inline_failed)
174 add_symbol_to_partition_1 (part, e->callee);
175 else if (e->callee->get_partitioning_class () == SYMBOL_DUPLICATE)
176 add_symbol_to_partition (part, e->callee);
177
178 /* Add all thunks associated with the function. */
179 for (e = cnode->callers; e; e = e->next_caller)
180 if (e->caller->thunk.thunk_p)
181 add_symbol_to_partition_1 (part, e->caller);
182
183 /* The instrumented version is actually the same function.
184 Therefore put it into the same partition. */
185 if (cnode->instrumented_version)
186 add_symbol_to_partition_1 (part, cnode->instrumented_version);
187 }
188
189 add_references_to_partition (part, node);
190
191 /* Add all aliases associated with the symbol. */
192
193 FOR_EACH_ALIAS (node, ref)
194 if (!node->weakref)
195 add_symbol_to_partition_1 (part, ref->referring);
196
197 /* Ensure that all members of a SAME_COMDAT_GROUP list are always added as a group. */
198 if (node->same_comdat_group)
199 for (node1 = node->same_comdat_group;
200 node1 != node; node1 = node1->same_comdat_group)
201 if (!node->alias)
202 {
203 bool added = add_symbol_to_partition_1 (part, node1);
204 gcc_assert (added);
205 }
206 return true;
207 }
208
209 /* If symbol NODE is really part of another symbol's definition (i.e. it is
210 an internal label, thunk, alias or the like), return the outer symbol.
211 When add_symbol_to_partition_1 is called on the outer symbol it must
212 eventually add NODE, too. */
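/* For instance, a thunk or a non-weakref alias is treated as contained in
   the function it ultimately resolves to, and a function inlined into
   another is treated as contained in that outer function; adding the
   outer symbol then pulls the contained one back in.  */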
213 static symtab_node *
214 contained_in_symbol (symtab_node *node)
215 {
216 /* Weakrefs are never contained in anything. */
217 if (node->weakref)
218 return node;
219 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
220 {
221 cnode = cnode->function_symbol ();
222 if (cnode->global.inlined_to)
223 cnode = cnode->global.inlined_to;
224 return cnode;
225 }
226 else if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
227 return vnode->ultimate_alias_target ();
228 return node;
229 }
230
231 /* Add symbol NODE to the partition. When the definition of NODE is part
232 of another symbol's definition, add the other symbol, too. */
233
234 static void
235 add_symbol_to_partition (ltrans_partition part, symtab_node *node)
236 {
237 symtab_node *node1;
238
239 /* Verify that we do not try to duplicate something that cannot be duplicated. */
240 gcc_checking_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
241 || !symbol_partitioned_p (node));
242
243 while ((node1 = contained_in_symbol (node)) != node)
244 node = node1;
245
246 /* If we have a duplicated symbol contained in something we cannot duplicate,
247 we are in serious trouble. The other way around is possible, so we do not
248 assert this in add_symbol_to_partition_1.
249
250 Be lax about comdats; they may or may not be duplicated and we may
251 end up needing to duplicate a keyed comdat because it has an unkeyed alias. */
252
253 gcc_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
254 || DECL_COMDAT (node->decl)
255 || !symbol_partitioned_p (node));
256
257 add_symbol_to_partition_1 (part, node);
258 }
259
260 /* Undo all additions to PARTITION until the number of nodes in its
261 encoder is back down to N_NODES. */
262
263 static void
264 undo_partition (ltrans_partition partition, unsigned int n_nodes)
265 {
266 while (lto_symtab_encoder_size (partition->encoder) > (int)n_nodes)
267 {
268 symtab_node *node = lto_symtab_encoder_deref (partition->encoder,
269 n_nodes);
270 partition->symbols--;
271 cgraph_node *cnode;
272
273 /* After UNDO we no longer know what was visited. */
274 if (partition->initializers_visited)
275 delete partition->initializers_visited;
276 partition->initializers_visited = NULL;
277
278 if (!node->alias && (cnode = dyn_cast <cgraph_node *> (node)))
279 partition->insns -= inline_summaries->get (cnode)->self_size;
280 lto_symtab_encoder_delete_node (partition->encoder, node);
281 node->aux = (void *)((size_t)node->aux - 1);
282 }
283 }
284
285 /* Group cgraph nodes by input files. This is used mainly for testing
286 right now. */
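/* Illustrative effect, assuming this map is selected via
   -flto-partition=1to1: linking a.o and b.o produces one ltrans
   partition per input object file, each named after that file.  */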
287
288 void
289 lto_1_to_1_map (void)
290 {
291 symtab_node *node;
292 struct lto_file_decl_data *file_data;
293 hash_map<lto_file_decl_data *, ltrans_partition> pmap;
294 ltrans_partition partition;
295 int npartitions = 0;
296
297 FOR_EACH_SYMBOL (node)
298 {
299 if (node->get_partitioning_class () != SYMBOL_PARTITION
300 || symbol_partitioned_p (node))
301 continue;
302
303 file_data = node->lto_file_data;
304
305 if (file_data)
306 {
307 ltrans_partition *slot = &pmap.get_or_insert (file_data);
308 if (*slot)
309 partition = *slot;
310 else
311 {
312 partition = new_partition (file_data->file_name);
313 *slot = partition;
314 npartitions++;
315 }
316 }
317 else if (!file_data && ltrans_partitions.length ())
318 partition = ltrans_partitions[0];
319 else
320 {
321 partition = new_partition ("");
322 pmap.put (NULL, partition);
323 npartitions++;
324 }
325
326 add_symbol_to_partition (partition, node);
327 }
328
329 /* If the cgraph is empty, create one cgraph node set so that there is still
330 an output file for any variables that need to be exported in a DSO. */
331 if (!npartitions)
332 new_partition ("empty");
333
334 }
335
336 /* Maximal partitioning. Put every new symbol into a new partition if possible. */
337
338 void
339 lto_max_map (void)
340 {
341 symtab_node *node;
342 ltrans_partition partition;
343 int npartitions = 0;
344
345 FOR_EACH_SYMBOL (node)
346 {
347 if (node->get_partitioning_class () != SYMBOL_PARTITION
348 || symbol_partitioned_p (node))
349 continue;
350 partition = new_partition (node->asm_name ());
351 add_symbol_to_partition (partition, node);
352 npartitions++;
353 }
354 if (!npartitions)
355 new_partition ("empty");
356 }
357
358 /* Helper function for qsort; sort nodes by order. noreorder functions must have
359 been removed earlier. */
360 static int
361 node_cmp (const void *pa, const void *pb)
362 {
363 const struct cgraph_node *a = *(const struct cgraph_node * const *) pa;
364 const struct cgraph_node *b = *(const struct cgraph_node * const *) pb;
365
366 /* The profile reorder flag enables function reordering based on the first
367 execution of a function. All functions with a profile are placed in
368 ascending order at the beginning. */
369
370 if (flag_profile_reorder_functions)
371 {
372 /* Functions with time profile are sorted in ascending order. */
373 if (a->tp_first_run && b->tp_first_run)
374 return a->tp_first_run != b->tp_first_run
375 ? a->tp_first_run - b->tp_first_run
376 : a->order - b->order;
377
378 /* Functions with time profile are sorted before the functions
379 that do not have the profile. */
380 if (a->tp_first_run || b->tp_first_run)
381 return b->tp_first_run - a->tp_first_run;
382 }
383
384 return b->order - a->order;
385 }
386
387 /* Helper function for qsort; sort nodes by order. */
388 static int
389 varpool_node_cmp (const void *pa, const void *pb)
390 {
391 const symtab_node *a = *static_cast<const symtab_node * const *> (pa);
392 const symtab_node *b = *static_cast<const symtab_node * const *> (pb);
393 return b->order - a->order;
394 }
395
396 /* Add all symtab nodes from NEXT_NODES to PARTITION in order. */
397
398 static void
399 add_sorted_nodes (vec<symtab_node *> &next_nodes, ltrans_partition partition)
400 {
401 unsigned i;
402 symtab_node *node;
403
404 next_nodes.qsort (varpool_node_cmp);
405 FOR_EACH_VEC_ELT (next_nodes, i, node)
406 if (!symbol_partitioned_p (node))
407 add_symbol_to_partition (partition, node);
408 }
409
410
411 /* Group cgraph nodes into equally-sized partitions.
412
413 The partitioning algorithm is simple: nodes are taken in predefined order.
414 The order corresponds to the order we want functions to have in the final
415 output. In the future this will be given by function reordering pass, but
416 at the moment we use the topological order, which is a good approximation.
417
418 The goal is to partition this linear order into intervals (partitions) so
419 that all the partitions have approximately the same size and the number of
420 callgraph or IPA reference edges crossing boundaries is minimal.
421
422 This is a lot faster (O(n) in size of callgraph) than algorithms doing
423 priority-based graph clustering that are generally O(n^2) and, since
424 WHOPR is designed to make things go well across partitions, it leads
425 to good results.
426
427 We compute the expected size of a partition as:
428
429 max (total_size / lto_partitions, min_partition_size)
430
431 We use dynamic expected size of partition so small programs are partitioned
432 into enough partitions to allow use of multiple CPUs, while large programs
433 are not partitioned too much. Creating too many partitions significantly
434 increases the streaming overhead.
435
436 In the future, we would like to bound the maximal size of partitions so as
437 to prevent the LTRANS stage from consuming too much memory. At the moment,
438 however, the WPA stage is the most memory intensive for large benchmarks,
439 since too many types and declarations are read into memory.
440
441 The function implements a simple greedy algorithm. Nodes are being added
442 to the current partition until after 3/4 of the expected partition size is
443 reached. Past this threshold, we keep track of boundary size (number of
444 edges going to other partitions) and continue adding functions until after
445 the current partition has grown to twice the expected partition size. Then
446 the process is undone to the point where the minimal ratio of boundary size
447 and in-partition calls was reached. */
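/* A worked example with purely illustrative numbers: for a program of
   1000000 instructions split into 32 requested partitions, the expected
   partition size is max (1000000 / 32, MIN_PARTITION_SIZE) = 31250
   (assuming the configured minimum is smaller than that).  Best-cut
   bookkeeping then starts once the current partition exceeds 3/4 of this
   (23437 insns), and the partition is closed, unwinding to the best
   recorded cut, once it grows past twice the expected size (62500 insns).  */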
448
449 void
450 lto_balanced_map (int n_lto_partitions)
451 {
452 int n_nodes = 0;
453 int n_varpool_nodes = 0, varpool_pos = 0, best_varpool_pos = 0;
454 struct cgraph_node **order = XNEWVEC (cgraph_node *, symtab->cgraph_max_uid);
455 auto_vec<cgraph_node *> noreorder;
456 auto_vec<varpool_node *> varpool_order;
457 int i;
458 struct cgraph_node *node;
459 int original_total_size, total_size = 0, best_total_size = 0;
460 int partition_size;
461 ltrans_partition partition;
462 int last_visited_node = 0;
463 varpool_node *vnode;
464 int cost = 0, internal = 0;
465 int best_n_nodes = 0, best_i = 0, best_cost =
466 INT_MAX, best_internal = 0;
467 int npartitions;
468 int current_order = -1;
469 int noreorder_pos = 0;
470
471 FOR_EACH_VARIABLE (vnode)
472 gcc_assert (!vnode->aux);
473
474 FOR_EACH_DEFINED_FUNCTION (node)
475 if (node->get_partitioning_class () == SYMBOL_PARTITION)
476 {
477 if (node->no_reorder)
478 noreorder.safe_push (node);
479 else
480 order[n_nodes++] = node;
481 if (!node->alias)
482 total_size += inline_summaries->get (node)->size;
483 }
484
485 original_total_size = total_size;
486
487 /* Streaming works best when the source units do not cross partition
488 boundaries much. This is because importing a function from a source
489 unit tends to import a lot of global trees defined there. We should
490 get better at minimizing the function boundary, but until then
491 things work more smoothly if we keep the source order. */
492 qsort (order, n_nodes, sizeof (struct cgraph_node *), node_cmp);
493 noreorder.qsort (node_cmp);
494
495 if (symtab->dump_file)
496 {
497 for(i = 0; i < n_nodes; i++)
498 fprintf (symtab->dump_file, "Balanced map symbol order:%s:%u\n",
499 order[i]->name (), order[i]->tp_first_run);
500 for(i = 0; i < (int)noreorder.length(); i++)
501 fprintf (symtab->dump_file, "Balanced map symbol no_reorder:%s:%u\n",
502 noreorder[i]->name (), noreorder[i]->tp_first_run);
503 }
504
505 /* Collect all variables that should not be reordered. */
506 FOR_EACH_VARIABLE (vnode)
507 if (vnode->get_partitioning_class () == SYMBOL_PARTITION
508 && (!flag_toplevel_reorder || vnode->no_reorder))
509 varpool_order.safe_push (vnode);
510 n_varpool_nodes = varpool_order.length ();
511 varpool_order.qsort (varpool_node_cmp);
512
513 /* Compute partition size and create the first partition. */
514 partition_size = total_size / n_lto_partitions;
515 if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
516 partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
517 npartitions = 1;
518 partition = new_partition ("");
519 if (symtab->dump_file)
520 fprintf (symtab->dump_file, "Total unit size: %i, partition size: %i\n",
521 total_size, partition_size);
522
523 auto_vec<symtab_node *> next_nodes;
524
525 for (i = 0; i < n_nodes; i++)
526 {
527 if (symbol_partitioned_p (order[i]))
528 continue;
529
530 current_order = order[i]->order;
531
532 /* Output noreorder and varpool in program order first. */
533 next_nodes.truncate (0);
534 while (varpool_pos < n_varpool_nodes
535 && varpool_order[varpool_pos]->order < current_order)
536 next_nodes.safe_push (varpool_order[varpool_pos++]);
537 while (noreorder_pos < (int)noreorder.length ()
538 && noreorder[noreorder_pos]->order < current_order)
539 {
540 if (!noreorder[noreorder_pos]->alias)
541 total_size -= inline_summaries->get (noreorder[noreorder_pos])->size;
542 next_nodes.safe_push (noreorder[noreorder_pos++]);
543 }
544 add_sorted_nodes (next_nodes, partition);
545
546 add_symbol_to_partition (partition, order[i]);
547 if (!order[i]->alias)
548 total_size -= inline_summaries->get (order[i])->size;
549
550
551 /* Once we add a new node to the partition, we also want to add
552 all referenced variables unless they were already added to some
553 earlier partition.
554 add_symbol_to_partition may add multiple nodes and
555 variables that are needed to satisfy the needs of ORDER[i].
556 We remember the last visited cgraph and varpool node from the previous
557 iteration of the outer loop, which allows us to process every new addition.
558
559 At the same time we compute the size of the boundary into COST. Every
560 callgraph or IPA reference edge leaving the partition contributes to
561 COST. Every edge inside the partition was earlier counted as one leaving
562 it, and thus we need to subtract it from COST. */
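/* Illustrative bookkeeping example: a call edge with frequency 3 whose
   other end is still outside the partition first contributes +3 to COST.
   Once both ends are inside, the edge is rescanned and the 3 is
   subtracted again, so only edges with exactly one endpoint inside the
   partition remain counted as boundary; callee edges whose target is
   already inside additionally add their cost to INTERNAL.  */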
563 while (last_visited_node < lto_symtab_encoder_size (partition->encoder))
564 {
565 symtab_node *refs_node;
566 int j;
567 struct ipa_ref *ref = NULL;
568 symtab_node *snode = lto_symtab_encoder_deref (partition->encoder,
569 last_visited_node);
570
571 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
572 {
573 struct cgraph_edge *edge;
574
575 refs_node = node;
576
577 last_visited_node++;
578
579 gcc_assert (node->definition || node->weakref);
580
581 /* Compute boundary cost of callgraph edges. */
582 for (edge = node->callees; edge; edge = edge->next_callee)
583 if (edge->callee->definition)
584 {
585 int edge_cost = edge->frequency;
586 int index;
587
588 if (!edge_cost)
589 edge_cost = 1;
590 gcc_assert (edge_cost > 0);
591 index = lto_symtab_encoder_lookup (partition->encoder,
592 edge->callee);
593 if (index != LCC_NOT_FOUND
594 && index < last_visited_node - 1)
595 cost -= edge_cost, internal += edge_cost;
596 else
597 cost += edge_cost;
598 }
599 for (edge = node->callers; edge; edge = edge->next_caller)
600 {
601 int edge_cost = edge->frequency;
602 int index;
603
604 gcc_assert (edge->caller->definition);
605 if (!edge_cost)
606 edge_cost = 1;
607 gcc_assert (edge_cost > 0);
608 index = lto_symtab_encoder_lookup (partition->encoder,
609 edge->caller);
610 if (index != LCC_NOT_FOUND
611 && index < last_visited_node - 1)
612 cost -= edge_cost;
613 else
614 cost += edge_cost;
615 }
616 }
617 else
618 {
619 refs_node = snode;
620 last_visited_node++;
621 }
622
623 /* Compute the boundary cost of IPA REF edges and at the same time look into
624 variables referenced from the current partition and try to add them. */
625 for (j = 0; refs_node->iterate_reference (j, ref); j++)
626 if (is_a <varpool_node *> (ref->referred))
627 {
628 int index;
629
630 vnode = dyn_cast <varpool_node *> (ref->referred);
631 if (!vnode->definition)
632 continue;
633 if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
634 && !vnode->no_reorder
635 && vnode->get_partitioning_class () == SYMBOL_PARTITION)
636 add_symbol_to_partition (partition, vnode);
637 index = lto_symtab_encoder_lookup (partition->encoder,
638 vnode);
639 if (index != LCC_NOT_FOUND
640 && index < last_visited_node - 1)
641 cost--, internal++;
642 else
643 cost++;
644 }
645 else
646 {
647 int index;
648
649 node = dyn_cast <cgraph_node *> (ref->referred);
650 if (!node->definition)
651 continue;
652 index = lto_symtab_encoder_lookup (partition->encoder,
653 node);
654 if (index != LCC_NOT_FOUND
655 && index < last_visited_node - 1)
656 cost--, internal++;
657 else
658 cost++;
659 }
660 for (j = 0; refs_node->iterate_referring (j, ref); j++)
661 if (is_a <varpool_node *> (ref->referring))
662 {
663 int index;
664
665 vnode = dyn_cast <varpool_node *> (ref->referring);
666 gcc_assert (vnode->definition);
667 /* It is better to couple variables with their users, because it allows them
668 to be removed. Coupling with the objects they refer to only helps to reduce
669 the number of symbols promoted to hidden. */
670 if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
671 && !vnode->no_reorder
672 && !vnode->can_remove_if_no_refs_p ()
673 && vnode->get_partitioning_class () == SYMBOL_PARTITION)
674 add_symbol_to_partition (partition, vnode);
675 index = lto_symtab_encoder_lookup (partition->encoder,
676 vnode);
677 if (index != LCC_NOT_FOUND
678 && index < last_visited_node - 1)
679 cost--;
680 else
681 cost++;
682 }
683 else
684 {
685 int index;
686
687 node = dyn_cast <cgraph_node *> (ref->referring);
688 gcc_assert (node->definition);
689 index = lto_symtab_encoder_lookup (partition->encoder,
690 node);
691 if (index != LCC_NOT_FOUND
692 && index < last_visited_node - 1)
693 cost--;
694 else
695 cost++;
696 }
697 }
698
699 /* If the partition is large enough, start looking for the smallest boundary cost. */
700 if (partition->insns < partition_size * 3 / 4
701 || best_cost == INT_MAX
702 || ((!cost
703 || (best_internal * (HOST_WIDE_INT) cost
704 > (internal * (HOST_WIDE_INT)best_cost)))
705 && partition->insns < partition_size * 5 / 4))
706 {
707 best_cost = cost;
708 best_internal = internal;
709 best_i = i;
710 best_n_nodes = lto_symtab_encoder_size (partition->encoder);
711 best_total_size = total_size;
712 best_varpool_pos = varpool_pos;
713 }
714 if (symtab->dump_file)
715 fprintf (symtab->dump_file, "Step %i: added %s/%i, size %i, cost %i/%i "
716 "best %i/%i, step %i\n", i,
717 order[i]->name (), order[i]->order,
718 partition->insns, cost, internal,
719 best_cost, best_internal, best_i);
720 /* The partition is too large; unwind to the step where the best cost was
721 reached and start a new partition. */
722 if (partition->insns > 2 * partition_size)
723 {
724 if (best_i != i)
725 {
726 if (symtab->dump_file)
727 fprintf (symtab->dump_file, "Unwinding %i insertions to step %i\n",
728 i - best_i, best_i);
729 undo_partition (partition, best_n_nodes);
730 varpool_pos = best_varpool_pos;
731 }
732 i = best_i;
733 /* When we are finished, avoid creating an empty partition. */
734 while (i < n_nodes - 1 && symbol_partitioned_p (order[i + 1]))
735 i++;
736 if (i == n_nodes - 1)
737 break;
738 partition = new_partition ("");
739 last_visited_node = 0;
740 total_size = best_total_size;
741 cost = 0;
742
743 if (symtab->dump_file)
744 fprintf (symtab->dump_file, "New partition\n");
745 best_n_nodes = 0;
746 best_cost = INT_MAX;
747
748 /* Since the size of partitions is just approximate, update the size after
749 we finish the current one. */
750 if (npartitions < n_lto_partitions)
751 partition_size = total_size / (n_lto_partitions - npartitions);
752 else
753 partition_size = INT_MAX;
754
755 if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
756 partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
757 npartitions ++;
758 }
759 }
760
761 next_nodes.truncate (0);
762
763 /* Variables that are not reachable from the code go into the last partition. */
764 if (flag_toplevel_reorder)
765 {
766 FOR_EACH_VARIABLE (vnode)
767 if (vnode->get_partitioning_class () == SYMBOL_PARTITION
768 && !symbol_partitioned_p (vnode)
769 && !vnode->no_reorder)
770 next_nodes.safe_push (vnode);
771 }
772
773 /* Output remaining ordered symbols. */
774 while (varpool_pos < n_varpool_nodes)
775 next_nodes.safe_push (varpool_order[varpool_pos++]);
776 while (noreorder_pos < (int)noreorder.length ())
777 next_nodes.safe_push (noreorder[noreorder_pos++]);
778 add_sorted_nodes (next_nodes, partition);
779
780 free (order);
781
782 if (symtab->dump_file)
783 {
784 fprintf (symtab->dump_file, "\nPartition sizes:\n");
785 unsigned partitions = ltrans_partitions.length ();
786
787 for (unsigned i = 0; i < partitions ; i++)
788 {
789 ltrans_partition p = ltrans_partitions[i];
790 fprintf (symtab->dump_file, "partition %d contains %d (%2.2f%%)"
791 " symbols and %d (%2.2f%%) insns\n", i, p->symbols,
792 100.0 * p->symbols / n_nodes, p->insns,
793 100.0 * p->insns / original_total_size);
794 }
795
796 fprintf (symtab->dump_file, "\n");
797 }
798 }
799
800 /* Return true if we must not change the name of NODE. The name as
801 extracted from the corresponding decl should be passed in NAME. */
802
803 static bool
804 must_not_rename (symtab_node *node, const char *name)
805 {
806 /* Our renaming machinery does not handle more than one change of assembler name.
807 We should not need more than one anyway. */
808 if (node->lto_file_data
809 && lto_get_decl_name_mapping (node->lto_file_data, name) != name)
810 {
811 if (symtab->dump_file)
812 fprintf (symtab->dump_file,
813 "Not privatizing symbol name: %s. It is privatized already.\n",
814 name);
815 return true;
816 }
817 /* Avoid mangling of already mangled clones.
818 ??? We should have a flag saying whether a symbol already has a 'private'
819 name, since we produce some symbols like that (e.g. for global constructors)
820 that are not really clones. */
821 if (node->unique_name)
822 {
823 if (symtab->dump_file)
824 fprintf (symtab->dump_file,
825 "Not privatizing symbol name: %s. Has unique name.\n",
826 name);
827 return true;
828 }
829 return false;
830 }
831
832 /* If we are an offload compiler, we may have to rewrite symbols to be
833 valid on this target. Return either PTR or a modified version of it. */
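/* As an illustration (the exact characters depend on the target macros
   below): on an offload target that defines NO_DOT_IN_LABEL but still
   accepts '$' in labels, a name such as "foo.constprop.0" would be
   rewritten to "foo$constprop$0".  */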
834
835 static const char *
836 maybe_rewrite_identifier (const char *ptr)
837 {
838 #if defined ACCEL_COMPILER && (defined NO_DOT_IN_LABEL || defined NO_DOLLAR_IN_LABEL)
839 #ifndef NO_DOT_IN_LABEL
840 char valid = '.';
841 const char reject[] = "$";
842 #elif !defined NO_DOLLAR_IN_LABEL
843 char valid = '$';
844 const char reject[] = ".";
845 #else
846 char valid = '_';
847 const char reject[] = ".$";
848 #endif
849
850 char *copy = NULL;
851 const char *match = ptr;
852 for (;;)
853 {
854 size_t off = strcspn (match, reject);
855 if (match[off] == '\0')
856 break;
857 if (copy == NULL)
858 {
859 copy = xstrdup (ptr);
860 match = copy;
861 }
862 copy[off] = valid;
863 }
864 return match;
865 #else
866 return ptr;
867 #endif
868 }
869
870 /* Ensure that the symbol in NODE is valid for the target, and if not,
871 rewrite it. */
872
873 static void
874 validize_symbol_for_target (symtab_node *node)
875 {
876 tree decl = node->decl;
877 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
878
879 if (must_not_rename (node, name))
880 return;
881
882 const char *name2 = maybe_rewrite_identifier (name);
883 if (name2 != name)
884 {
885 symtab->change_decl_assembler_name (decl, get_identifier (name2));
886 if (node->lto_file_data)
887 lto_record_renamed_decl (node->lto_file_data, name,
888 IDENTIFIER_POINTER
889 (DECL_ASSEMBLER_NAME (decl)));
890 }
891 }
892
893 /* Helper for privatize_symbol_name. Mangle NODE symbol name
894 represented by DECL. */
895
896 static bool
897 privatize_symbol_name_1 (symtab_node *node, tree decl)
898 {
899 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
900
901 if (must_not_rename (node, name))
902 return false;
903
904 name = maybe_rewrite_identifier (name);
905 symtab->change_decl_assembler_name (decl,
906 clone_function_name_1 (name,
907 "lto_priv"));
908
909 if (node->lto_file_data)
910 lto_record_renamed_decl (node->lto_file_data, name,
911 IDENTIFIER_POINTER
912 (DECL_ASSEMBLER_NAME (decl)));
913
914 if (symtab->dump_file)
915 fprintf (symtab->dump_file,
916 "Privatizing symbol name: %s -> %s\n",
917 name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
918
919 return true;
920 }
921
922 /* Mangle the symbol name of NODE into a local name.
923 This is necessary
924 1) if two or more static vars with the same assembler name
925 are merged into a single ltrans unit, or
926 2) if a previously static var was promoted to hidden to avoid a possible
927 conflict with symbols defined outside of the LTO world. */
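/* As an example of the effect: a file-local function originally named
   "foo" typically ends up with an assembler name of the form
   "foo.lto_priv.<N>"; the exact separator and counter are chosen by
   clone_function_name_1 and the target's label syntax.  */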
928
929 static bool
930 privatize_symbol_name (symtab_node *node)
931 {
932 if (!privatize_symbol_name_1 (node, node->decl))
933 return false;
934
935 /* We may have changed a name that is the target of a transparent alias
936 chain of an instrumented function name. Fix the alias chain if so. */
937 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
938 {
939 tree iname = NULL_TREE;
940 if (cnode->instrumentation_clone)
941 {
942 /* If we want to privatize instrumentation clone
943 then we also need to privatize original function. */
944 if (cnode->instrumented_version)
945 privatize_symbol_name (cnode->instrumented_version);
946 else
947 privatize_symbol_name_1 (cnode, cnode->orig_decl);
948 iname = DECL_ASSEMBLER_NAME (cnode->decl);
949 TREE_CHAIN (iname) = DECL_ASSEMBLER_NAME (cnode->orig_decl);
950 }
951 else if (cnode->instrumented_version
952 && cnode->instrumented_version->orig_decl == cnode->decl)
953 {
954 iname = DECL_ASSEMBLER_NAME (cnode->instrumented_version->decl);
955 TREE_CHAIN (iname) = DECL_ASSEMBLER_NAME (cnode->decl);
956 }
957 }
958
959 return true;
960 }
961
962 /* Promote symbol NODE to be public with hidden visibility. */
963
964 static void
965 promote_symbol (symtab_node *node)
966 {
967 /* We already promoted ... */
968 if (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
969 && DECL_VISIBILITY_SPECIFIED (node->decl)
970 && TREE_PUBLIC (node->decl))
971 {
972 validize_symbol_for_target (node);
973 return;
974 }
975
976 gcc_checking_assert (!TREE_PUBLIC (node->decl)
977 && !DECL_EXTERNAL (node->decl));
978 /* Be sure that the newly public symbol does not conflict with anything already
979 defined by the non-LTO part. */
980 privatize_symbol_name (node);
981 TREE_PUBLIC (node->decl) = 1;
982 DECL_VISIBILITY (node->decl) = VISIBILITY_HIDDEN;
983 DECL_VISIBILITY_SPECIFIED (node->decl) = true;
984 if (symtab->dump_file)
985 fprintf (symtab->dump_file,
986 "Promoting as hidden: %s\n", node->name ());
987 }
988
989 /* Return true if NODE needs a named section even if it won't land in the
990 partition symbol table.
991 FIXME: we should really not use named sections for inline clones and master clones. */
992
993 static bool
994 may_need_named_section_p (lto_symtab_encoder_t encoder, symtab_node *node)
995 {
996 struct cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
997 if (!cnode)
998 return false;
999 if (node->real_symbol_p ())
1000 return false;
1001 return (!encoder
1002 || (lto_symtab_encoder_lookup (encoder, node) != LCC_NOT_FOUND
1003 && lto_symtab_encoder_encode_body_p (encoder,
1004 cnode)));
1005 }
1006
1007 /* If NODE represents a static variable, see if there are other variables
1008 of the same name in partition ENCODER (or in the whole compilation unit if
1009 ENCODER is NULL) and, if so, mangle the statics. Always mangle all
1010 conflicting statics, so we reduce the chances of silently miscompiling
1011 asm statements referring to them by symbol name. */
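/* For example, if two translation units each define their own
   "static int counter;" and both copies become visible in the same
   ltrans partition, every conflicting copy is given its own private
   assembler name, so an asm statement naming "counter" cannot silently
   bind to the wrong definition.  */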
1012
1013 static void
1014 rename_statics (lto_symtab_encoder_t encoder, symtab_node *node)
1015 {
1016 tree decl = node->decl;
1017 symtab_node *s;
1018 tree name = DECL_ASSEMBLER_NAME (decl);
1019
1020 /* See if this is a static symbol. */
1021 if ((node->externally_visible
1022 /* FIXME: externally_visible is somewhat illogically not set for
1023 external symbols (i.e. those not defined). Remove this test
1024 once this is fixed. */
1025 || DECL_EXTERNAL (node->decl)
1026 || !node->real_symbol_p ())
1027 && !may_need_named_section_p (encoder, node))
1028 return;
1029
1030 /* Now walk symbols sharing the same name and see if there are any conflicts.
1031 (All kinds of symbols count here, since we cannot have a static with the
1032 same name as an external or public symbol.) */
1033 for (s = symtab_node::get_for_asmname (name);
1034 s; s = s->next_sharing_asm_name)
1035 if ((s->real_symbol_p () || may_need_named_section_p (encoder, s))
1036 && s->decl != node->decl
1037 && (!encoder
1038 || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
1039 break;
1040
1041 /* OK, no conflict, so we have nothing to do. */
1042 if (!s)
1043 return;
1044
1045 if (symtab->dump_file)
1046 fprintf (symtab->dump_file,
1047 "Renaming statics with asm name: %s\n", node->name ());
1048
1049 /* Assign every symbol in the set that shares the same ASM name a unique
1050 mangled name. */
1051 for (s = symtab_node::get_for_asmname (name); s;)
1052 if (!s->externally_visible
1053 && ((s->real_symbol_p ()
1054 && !DECL_EXTERNAL (node->decl)
1055 && !TREE_PUBLIC (node->decl))
1056 || may_need_named_section_p (encoder, s))
1057 && (!encoder
1058 || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
1059 {
1060 if (privatize_symbol_name (s))
1061 /* Re-start from the beginning since we do not know how many symbols changed names. */
1062 s = symtab_node::get_for_asmname (name);
1063 else s = s->next_sharing_asm_name;
1064 }
1065 else s = s->next_sharing_asm_name;
1066 }
1067
1068 /* Find all static decls that need to be promoted to global because
1069 of cross-file sharing. This function must be run in WPA mode after
1070 all inlinees are added. */
1071
1072 void
1073 lto_promote_cross_file_statics (void)
1074 {
1075 unsigned i, n_sets;
1076
1077 gcc_assert (flag_wpa);
1078
1079 lto_stream_offload_p = false;
1080 select_what_to_stream ();
1081
1082 /* First compute boundaries. */
1083 n_sets = ltrans_partitions.length ();
1084 for (i = 0; i < n_sets; i++)
1085 {
1086 ltrans_partition part
1087 = ltrans_partitions[i];
1088 part->encoder = compute_ltrans_boundary (part->encoder);
1089 }
1090
1091 /* Look at boundaries and promote symbols as needed. */
1092 for (i = 0; i < n_sets; i++)
1093 {
1094 lto_symtab_encoder_iterator lsei;
1095 lto_symtab_encoder_t encoder = ltrans_partitions[i]->encoder;
1096
1097 for (lsei = lsei_start (encoder); !lsei_end_p (lsei);
1098 lsei_next (&lsei))
1099 {
1100 symtab_node *node = lsei_node (lsei);
1101
1102 /* If the symbol is static, rename it if its assembler name clashes with
1103 anything else in this unit. */
1104 rename_statics (encoder, node);
1105
1106 /* No need to promote if symbol already is externally visible ... */
1107 if (node->externally_visible
1108 /* ... or if it is part of current partition ... */
1109 || lto_symtab_encoder_in_partition_p (encoder, node)
1110 /* ... or if we do not partition it. This means that it will
1111 appear in every partition referencing it. */
1112 || node->get_partitioning_class () != SYMBOL_PARTITION)
1113 {
1114 validize_symbol_for_target (node);
1115 continue;
1116 }
1117
1118 promote_symbol (node);
1119 }
1120 }
1121 }
1122
1123 /* Rename statics in the whole unit when
1124 -flto-partition=none is used. */
1125
1126 void
1127 lto_promote_statics_nonwpa (void)
1128 {
1129 symtab_node *node;
1130 FOR_EACH_SYMBOL (node)
1131 {
1132 rename_statics (NULL, node);
1133 validize_symbol_for_target (node);
1134 }
1135 }