]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/lto/lto-partition.c
Update copyright years.
[thirdparty/gcc.git] / gcc / lto / lto-partition.c
1 /* LTO partitioning logic routines.
2 Copyright (C) 2009-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "target.h"
24 #include "function.h"
25 #include "basic-block.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "stringpool.h"
30 #include "cgraph.h"
31 #include "lto-streamer.h"
32 #include "params.h"
33 #include "symbol-summary.h"
34 #include "ipa-prop.h"
35 #include "ipa-inline.h"
36 #include "lto-partition.h"
37
38 vec<ltrans_partition> ltrans_partitions;
39
40 static void add_symbol_to_partition (ltrans_partition part, symtab_node *node);
41
42
43 /* Create new partition with name NAME. */
44
45 static ltrans_partition
46 new_partition (const char *name)
47 {
48 ltrans_partition part = XCNEW (struct ltrans_partition_def);
49 part->encoder = lto_symtab_encoder_new (false);
50 part->name = name;
51 part->insns = 0;
52 part->symbols = 0;
53 ltrans_partitions.safe_push (part);
54 return part;
55 }
56
57 /* Free memory used by ltrans datastructures. */
58
59 void
60 free_ltrans_partitions (void)
61 {
62 unsigned int idx;
63 ltrans_partition part;
64 for (idx = 0; ltrans_partitions.iterate (idx, &part); idx++)
65 {
66 if (part->initializers_visited)
67 delete part->initializers_visited;
68 /* Symtab encoder is freed after streaming. */
69 free (part);
70 }
71 ltrans_partitions.release ();
72 }
73
74 /* Return true if symbol is already in some partition. */
75
76 static inline bool
77 symbol_partitioned_p (symtab_node *node)
78 {
79 return node->aux;
80 }
81
/* Add into PART all symbols referenced from NODE that must be duplicated,
   recursing through readonly-variable initializers.  */
static void
add_references_to_partition (ltrans_partition part, symtab_node *node)
{
  int i;
  struct ipa_ref *ref = NULL;

  /* Add all duplicated references to the partition.  */
  for (i = 0; node->iterate_reference (i, ref); i++)
    if (ref->referred->get_partitioning_class () == SYMBOL_DUPLICATE)
      add_symbol_to_partition (part, ref->referred);
    /* References to a readonly variable may be constant folded into its value.
       Recursively look into the initializers of the constant variable and add
       references, too.  */
    else if (is_a <varpool_node *> (ref->referred)
	     && (dyn_cast <varpool_node *> (ref->referred)
		 ->ctor_useable_for_folding_p ()
		 || POINTER_BOUNDS_P (ref->referred->decl))
	     && !lto_symtab_encoder_in_partition_p (part->encoder, ref->referred))
      {
	/* Lazily create the visited set the first time it is needed.  */
	if (!part->initializers_visited)
	  part->initializers_visited = new hash_set<symtab_node *>;
	/* hash_set::add returns true when the entry already existed, so
	   each initializer is walked at most once per partition.  */
	if (!part->initializers_visited->add (ref->referred))
	  add_references_to_partition (part, ref->referred);
      }
}
108
/* Helper function for add_symbol_to_partition doing the actual dirty work
   of adding NODE to PART.  Returns false when NODE is a non-duplicated,
   non-comdat symbol already emitted into another partition; true
   otherwise (including when NODE was already in PART).  */

static bool
add_symbol_to_partition_1 (ltrans_partition part, symtab_node *node)
{
  enum symbol_partitioning_class c = node->get_partitioning_class ();
  struct ipa_ref *ref;
  symtab_node *node1;

  /* If NODE is already there, we have nothing to do.  */
  if (lto_symtab_encoder_in_partition_p (part->encoder, node))
    return true;

  /* non-duplicated aliases or thunks of a duplicated symbol needs to be output
     just once.

     Be lax about comdats; they may or may not be duplicated and we may
     end up in need to duplicate keyed comdat because it has unkeyed alias.  */
  if (c == SYMBOL_PARTITION && !DECL_COMDAT (node->decl)
      && symbol_partitioned_p (node))
    return false;

  /* Be sure that we never try to duplicate partitioned symbol
     or add external symbol.  */
  gcc_assert (c != SYMBOL_EXTERNAL
	      && (c == SYMBOL_DUPLICATE || !symbol_partitioned_p (node)));

  part->symbols++;

  lto_set_symtab_encoder_in_partition (part->encoder, node);

  if (symbol_partitioned_p (node))
    {
      node->in_other_partition = 1;
      if (symtab->dump_file)
	fprintf (symtab->dump_file,
		 "Symbol node %s now used in multiple partitions\n",
		 node->name ());
    }
  /* AUX counts how many partitions contain NODE (see undo_partition).  */
  node->aux = (void *)((size_t)node->aux + 1);

  if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
    {
      struct cgraph_edge *e;
      /* Only real function bodies contribute their size; aliases do not.  */
      if (!node->alias)
	part->insns += inline_summaries->get (cnode)->self_size;

      /* Add all inline clones and callees that are duplicated.  */
      for (e = cnode->callees; e; e = e->next_callee)
	if (!e->inline_failed)
	  add_symbol_to_partition_1 (part, e->callee);
	else if (e->callee->get_partitioning_class () == SYMBOL_DUPLICATE)
	  add_symbol_to_partition (part, e->callee);

      /* Add all thunks associated with the function.  */
      for (e = cnode->callers; e; e = e->next_caller)
	if (e->caller->thunk.thunk_p)
	  add_symbol_to_partition_1 (part, e->caller);

      /* Instrumented version is actually the same function.
	 Therefore put it into the same partition.  */
      if (cnode->instrumented_version)
	add_symbol_to_partition_1 (part, cnode->instrumented_version);
    }

  add_references_to_partition (part, node);

  /* Add all aliases associated with the symbol.  */

  FOR_EACH_ALIAS (node, ref)
    if (!ref->referring->transparent_alias)
      add_symbol_to_partition_1 (part, ref->referring);
    else
      {
	struct ipa_ref *ref2;
	/* We do not need to add transparent aliases if they are not used.
	   However we must add aliases of transparent aliases if they exist.  */
	FOR_EACH_ALIAS (ref->referring, ref2)
	  {
	    /* Nested transparent aliases are not permitted.  */
	    gcc_checking_assert (!ref2->referring->transparent_alias);
	    add_symbol_to_partition_1 (part, ref2->referring);
	  }
      }

  /* Ensure that all members of a SAME_COMDAT_GROUP list are always added
     together.  */
  if (node->same_comdat_group)
    for (node1 = node->same_comdat_group;
	 node1 != node; node1 = node1->same_comdat_group)
      /* NOTE(review): this condition tests NODE->alias, which is invariant
	 across the loop; it looks like NODE1->alias may have been intended.
	 Confirm against upstream history before changing.  */
      if (!node->alias)
	{
	  bool added = add_symbol_to_partition_1 (part, node1);
	  gcc_assert (added);
	}
  return true;
}
206
207 /* If symbol NODE is really part of other symbol's definition (i.e. it is
208 internal label, thunk, alias or so), return the outer symbol.
209 When add_symbol_to_partition_1 is called on the outer symbol it must
210 eventually add NODE, too. */
211 static symtab_node *
212 contained_in_symbol (symtab_node *node)
213 {
214 /* There is no need to consider transparent aliases to be part of the
215 definition: they are only useful insite the partition they are output
216 and thus we will always see an explicit reference to it. */
217 if (node->transparent_alias)
218 return node;
219 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
220 {
221 cnode = cnode->function_symbol ();
222 if (cnode->global.inlined_to)
223 cnode = cnode->global.inlined_to;
224 return cnode;
225 }
226 else if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
227 return vnode->ultimate_alias_target ();
228 return node;
229 }
230
231 /* Add symbol NODE to partition. When definition of NODE is part
232 of other symbol definition, add the other symbol, too. */
233
234 static void
235 add_symbol_to_partition (ltrans_partition part, symtab_node *node)
236 {
237 symtab_node *node1;
238
239 /* Verify that we do not try to duplicate something that can not be. */
240 gcc_checking_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
241 || !symbol_partitioned_p (node));
242
243 while ((node1 = contained_in_symbol (node)) != node)
244 node = node1;
245
246 /* If we have duplicated symbol contained in something we can not duplicate,
247 we are very badly screwed. The other way is possible, so we do not
248 assert this in add_symbol_to_partition_1.
249
250 Be lax about comdats; they may or may not be duplicated and we may
251 end up in need to duplicate keyed comdat because it has unkeyed alias. */
252
253 gcc_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
254 || DECL_COMDAT (node->decl)
255 || !symbol_partitioned_p (node));
256
257 add_symbol_to_partition_1 (part, node);
258 }
259
/* Undo additions to PARTITION until its symtab encoder contains exactly
   N_NODES entries.  (The encoder numbers cgraph and varpool nodes in one
   sequence; entries at index N_NODES and beyond are removed.)  */

static void
undo_partition (ltrans_partition partition, unsigned int n_nodes)
{
  while (lto_symtab_encoder_size (partition->encoder) > (int)n_nodes)
    {
      symtab_node *node = lto_symtab_encoder_deref (partition->encoder,
						    n_nodes);
      partition->symbols--;
      cgraph_node *cnode;

      /* After UNDO we no longer know what was visited.  */
      if (partition->initializers_visited)
	delete partition->initializers_visited;
      partition->initializers_visited = NULL;

      /* Only non-alias function bodies contributed their self size, so only
	 those are subtracted back out.  */
      if (!node->alias && (cnode = dyn_cast <cgraph_node *> (node)))
	partition->insns -= inline_summaries->get (cnode)->self_size;
      lto_symtab_encoder_delete_node (partition->encoder, node);
      /* Drop one reference from the partition count kept in AUX.  */
      node->aux = (void *)((size_t)node->aux - 1);
    }
}
284
/* Group cgraph nodes by input files.  This is used mainly for testing
   right now.  */

void
lto_1_to_1_map (void)
{
  symtab_node *node;
  struct lto_file_decl_data *file_data;
  hash_map<lto_file_decl_data *, ltrans_partition> pmap;
  ltrans_partition partition;
  int npartitions = 0;

  FOR_EACH_SYMBOL (node)
    {
      /* Skip symbols that are not partitionable or already placed.  */
      if (node->get_partitioning_class () != SYMBOL_PARTITION
	  || symbol_partitioned_p (node))
	continue;

      file_data = node->lto_file_data;

      if (file_data)
	{
          ltrans_partition *slot = &pmap.get_or_insert (file_data);
          if (*slot)
	    partition = *slot;
	  else
	    {
	      /* First symbol from this file: create its partition.  */
	      partition = new_partition (file_data->file_name);
	      *slot = partition;
	      npartitions++;
	    }
	}
      /* Symbols with no originating file go into the first existing
	 partition.  (NOTE(review): the !file_data test is redundant here --
	 this branch is only reached when file_data is NULL.)  */
      else if (!file_data && ltrans_partitions.length ())
	partition = ltrans_partitions[0];
      else
	{
	  partition = new_partition ("");
	  pmap.put (NULL, partition);
	  npartitions++;
	}

      add_symbol_to_partition (partition, node);
    }

  /* If the cgraph is empty, create one cgraph node set so that there is still
     an output file for any variables that need to be exported in a DSO.  */
  if (!npartitions)
    new_partition ("empty");

}
335
336 /* Maximal partitioning. Put every new symbol into new partition if possible. */
337
338 void
339 lto_max_map (void)
340 {
341 symtab_node *node;
342 ltrans_partition partition;
343 int npartitions = 0;
344
345 FOR_EACH_SYMBOL (node)
346 {
347 if (node->get_partitioning_class () != SYMBOL_PARTITION
348 || symbol_partitioned_p (node))
349 continue;
350 partition = new_partition (node->asm_name ());
351 add_symbol_to_partition (partition, node);
352 npartitions++;
353 }
354 if (!npartitions)
355 new_partition ("empty");
356 }
357
358 /* Helper function for qsort; sort nodes by order. noreorder functions must have
359 been removed earlier. */
360 static int
361 node_cmp (const void *pa, const void *pb)
362 {
363 const struct cgraph_node *a = *(const struct cgraph_node * const *) pa;
364 const struct cgraph_node *b = *(const struct cgraph_node * const *) pb;
365
366 /* Profile reorder flag enables function reordering based on first execution
367 of a function. All functions with profile are placed in ascending
368 order at the beginning. */
369
370 if (flag_profile_reorder_functions)
371 {
372 /* Functions with time profile are sorted in ascending order. */
373 if (a->tp_first_run && b->tp_first_run)
374 return a->tp_first_run != b->tp_first_run
375 ? a->tp_first_run - b->tp_first_run
376 : a->order - b->order;
377
378 /* Functions with time profile are sorted before the functions
379 that do not have the profile. */
380 if (a->tp_first_run || b->tp_first_run)
381 return b->tp_first_run - a->tp_first_run;
382 }
383
384 return b->order - a->order;
385 }
386
387 /* Helper function for qsort; sort nodes by order. */
388 static int
389 varpool_node_cmp (const void *pa, const void *pb)
390 {
391 const symtab_node *a = *static_cast<const symtab_node * const *> (pa);
392 const symtab_node *b = *static_cast<const symtab_node * const *> (pb);
393 return b->order - a->order;
394 }
395
396 /* Add all symtab nodes from NEXT_NODE to PARTITION in order. */
397
398 static void
399 add_sorted_nodes (vec<symtab_node *> &next_nodes, ltrans_partition partition)
400 {
401 unsigned i;
402 symtab_node *node;
403
404 next_nodes.qsort (varpool_node_cmp);
405 FOR_EACH_VEC_ELT (next_nodes, i, node)
406 if (!symbol_partitioned_p (node))
407 add_symbol_to_partition (partition, node);
408 }
409
410
/* Group cgraph nodes into equally-sized partitions.

   The partitioning algorithm is simple: nodes are taken in predefined order.
   The order corresponds to the order we want functions to have in the final
   output.  In the future this will be given by function reordering pass, but
   at the moment we use the topological order, which is a good approximation.

   The goal is to partition this linear order into intervals (partitions) so
   that all the partitions have approximately the same size and the number of
   callgraph or IPA reference edges crossing boundaries is minimal.

   This is a lot faster (O(n) in size of callgraph) than algorithms doing
   priority-based graph clustering that are generally O(n^2) and, since
   WHOPR is designed to make things go well across partitions, it leads
   to good results.

   We compute the expected size of a partition as:

     max (total_size / lto_partitions, min_partition_size)

   We use dynamic expected size of partition so small programs are partitioned
   into enough partitions to allow use of multiple CPUs, while large programs
   are not partitioned too much.  Creating too many partitions significantly
   increases the streaming overhead.

   In the future, we would like to bound the maximal size of partitions so as
   to prevent the LTRANS stage from consuming too much memory.  At the moment,
   however, the WPA stage is the most memory intensive for large benchmarks,
   since too many types and declarations are read into memory.

   The function implements a simple greedy algorithm.  Nodes are being added
   to the current partition until after 3/4 of the expected partition size is
   reached.  Past this threshold, we keep track of boundary size (number of
   edges going to other partitions) and continue adding functions until after
   the current partition has grown to twice the expected partition size.  Then
   the process is undone to the point where the minimal ratio of boundary size
   and in-partition calls was reached.  */

void
lto_balanced_map (int n_lto_partitions)
{
  int n_nodes = 0;
  int n_varpool_nodes = 0, varpool_pos = 0, best_varpool_pos = 0;
  struct cgraph_node **order = XNEWVEC (cgraph_node *, symtab->cgraph_max_uid);
  auto_vec<cgraph_node *> noreorder;
  auto_vec<varpool_node *> varpool_order;
  int i;
  struct cgraph_node *node;
  int original_total_size, total_size = 0, best_total_size = 0;
  int partition_size;
  ltrans_partition partition;
  int last_visited_node = 0;
  varpool_node *vnode;
  /* COST is the boundary-edge count of the current partition; INTERNAL
     counts call edges kept inside it.  BEST_* snapshot the state at the
     best cut point seen so far.  */
  int cost = 0, internal = 0;
  int best_n_nodes = 0, best_i = 0, best_cost =
    INT_MAX, best_internal = 0;
  int npartitions;
  int current_order = -1;
  int noreorder_pos = 0;

  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->aux);

  /* Split defined functions into the reorderable ORDER array and the
     NOREORDER vector; accumulate total body size.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->get_partitioning_class () == SYMBOL_PARTITION)
      {
	if (node->no_reorder)
	  noreorder.safe_push (node);
	else
	  order[n_nodes++] = node;
	if (!node->alias)
	  total_size += inline_summaries->get (node)->size;
      }

  original_total_size = total_size;

  /* Streaming works best when the source units do not cross partition
     boundaries much.  This is because importing function from a source
     unit tends to import a lot of global trees defined there.  We should
     get better about minimizing the function boundary, but until then
     things work smoother if we order in source order.  */
  qsort (order, n_nodes, sizeof (struct cgraph_node *), node_cmp);
  noreorder.qsort (node_cmp);

  if (symtab->dump_file)
    {
      for(i = 0; i < n_nodes; i++)
	fprintf (symtab->dump_file, "Balanced map symbol order:%s:%u\n",
		 order[i]->name (), order[i]->tp_first_run);
      for(i = 0; i < (int)noreorder.length(); i++)
	fprintf (symtab->dump_file, "Balanced map symbol no_reorder:%s:%u\n",
		 noreorder[i]->name (), noreorder[i]->tp_first_run);
    }

  /* Collect all variables that should not be reordered.  */
  FOR_EACH_VARIABLE (vnode)
    if (vnode->get_partitioning_class () == SYMBOL_PARTITION
	&& (!flag_toplevel_reorder || vnode->no_reorder))
      varpool_order.safe_push (vnode);
  n_varpool_nodes = varpool_order.length ();
  varpool_order.qsort (varpool_node_cmp);

  /* Compute partition size and create the first partition.  */
  partition_size = total_size / n_lto_partitions;
  if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
    partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
  npartitions = 1;
  partition = new_partition ("");
  if (symtab->dump_file)
    fprintf (symtab->dump_file, "Total unit size: %i, partition size: %i\n",
	     total_size, partition_size);

  auto_vec<symtab_node *> next_nodes;

  for (i = 0; i < n_nodes; i++)
    {
      if (symbol_partitioned_p (order[i]))
	continue;

      current_order = order[i]->order;

      /* Output noreorder and varpool in program order first.  */
      next_nodes.truncate (0);
      while (varpool_pos < n_varpool_nodes
	     && varpool_order[varpool_pos]->order < current_order)
	next_nodes.safe_push (varpool_order[varpool_pos++]);
      while (noreorder_pos < (int)noreorder.length ()
	     && noreorder[noreorder_pos]->order < current_order)
	{
	  if (!noreorder[noreorder_pos]->alias)
	    total_size -= inline_summaries->get (noreorder[noreorder_pos])->size;
	  next_nodes.safe_push (noreorder[noreorder_pos++]);
	}
      add_sorted_nodes (next_nodes, partition);

      add_symbol_to_partition (partition, order[i]);
      if (!order[i]->alias)
	total_size -= inline_summaries->get (order[i])->size;


      /* Once we added a new node to the partition, we also want to add
	 all referenced variables unless they were already added into some
	 earlier partition.
	 add_symbol_to_partition adds possibly multiple nodes and
	 variables that are needed to satisfy needs of ORDER[i].
	 We remember last visited cgraph and varpool node from last iteration
	 of outer loop that allows us to process every new addition.

	 At the same time we compute size of the boundary into COST.  Every
	 callgraph or IPA reference edge leaving the partition contributes into
	 COST.  Every edge inside partition was earlier computed as one leaving
	 it and thus we need to subtract it from COST.  */
      while (last_visited_node < lto_symtab_encoder_size (partition->encoder))
	{
	  symtab_node *refs_node;
	  int j;
	  struct ipa_ref *ref = NULL;
	  symtab_node *snode = lto_symtab_encoder_deref (partition->encoder,
							 last_visited_node);

	  if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
	    {
	      struct cgraph_edge *edge;

	      refs_node = node;

	      last_visited_node++;

	      gcc_assert (node->definition || node->weakref);

	      /* Compute boundary cost of callgraph edges.  */
	      for (edge = node->callees; edge; edge = edge->next_callee)
		if (edge->callee->definition)
		  {
		    int edge_cost = edge->frequency;
		    int index;

		    if (!edge_cost)
		      edge_cost = 1;
		    gcc_assert (edge_cost > 0);
		    index = lto_symtab_encoder_lookup (partition->encoder,
						       edge->callee);
		    /* Callee already processed in this partition: the edge
		       was counted as outgoing when the callee was visited,
		       so move its cost from boundary to internal.  */
		    if (index != LCC_NOT_FOUND
			&& index < last_visited_node - 1)
		      cost -= edge_cost, internal += edge_cost;
		    else
		      cost += edge_cost;
		  }
	      for (edge = node->callers; edge; edge = edge->next_caller)
		{
		  int edge_cost = edge->frequency;
		  int index;

		  gcc_assert (edge->caller->definition);
		  if (!edge_cost)
		    edge_cost = 1;
		  gcc_assert (edge_cost > 0);
		  index = lto_symtab_encoder_lookup (partition->encoder,
						     edge->caller);
		  if (index != LCC_NOT_FOUND
		      && index < last_visited_node - 1)
		    cost -= edge_cost;
		  else
		    cost += edge_cost;
		}
	    }
	  else
	    {
	      refs_node = snode;
	      last_visited_node++;
	    }

	  /* Compute boundary cost of IPA REF edges and at the same time look into
	     variables referenced from current partition and try to add them.  */
	  for (j = 0; refs_node->iterate_reference (j, ref); j++)
	    if (is_a <varpool_node *> (ref->referred))
	      {
		int index;

		vnode = dyn_cast <varpool_node *> (ref->referred);
		if (!vnode->definition)
		  continue;
		if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
		    && !vnode->no_reorder
		    && vnode->get_partitioning_class () == SYMBOL_PARTITION)
		  add_symbol_to_partition (partition, vnode);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   vnode);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	    else
	      {
		int index;

		node = dyn_cast <cgraph_node *> (ref->referred);
		if (!node->definition)
		  continue;
		index = lto_symtab_encoder_lookup (partition->encoder,
						   node);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	  for (j = 0; refs_node->iterate_referring (j, ref); j++)
	    if (is_a <varpool_node *> (ref->referring))
	      {
		int index;

		vnode = dyn_cast <varpool_node *> (ref->referring);
		gcc_assert (vnode->definition);
		/* It is better to couple variables with their users, because it allows them
		   to be removed.  Coupling with objects they refer to only helps to reduce
		   number of symbols promoted to hidden.  */
		if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
		    && !vnode->no_reorder
		    && !vnode->can_remove_if_no_refs_p ()
		    && vnode->get_partitioning_class () == SYMBOL_PARTITION)
		  add_symbol_to_partition (partition, vnode);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   vnode);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--;
		else
		  cost++;
	      }
	    else
	      {
		int index;

		node = dyn_cast <cgraph_node *> (ref->referring);
		gcc_assert (node->definition);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   node);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--;
		else
		  cost++;
	      }
	}

      /* If the partition is large enough, start looking for smallest boundary cost.
	 The comparison best_internal/best_cost < internal/cost is done with
	 cross-multiplication in HOST_WIDE_INT to avoid division and overflow.  */
      if (partition->insns < partition_size * 3 / 4
	  || best_cost == INT_MAX
	  || ((!cost
	       || (best_internal * (HOST_WIDE_INT) cost
		   > (internal * (HOST_WIDE_INT)best_cost)))
	      && partition->insns < partition_size * 5 / 4))
	{
	  best_cost = cost;
	  best_internal = internal;
	  best_i = i;
	  best_n_nodes = lto_symtab_encoder_size (partition->encoder);
	  best_total_size = total_size;
	  best_varpool_pos = varpool_pos;
	}
      if (symtab->dump_file)
	fprintf (symtab->dump_file, "Step %i: added %s/%i, size %i, cost %i/%i "
		 "best %i/%i, step %i\n", i,
		 order[i]->name (), order[i]->order,
		 partition->insns, cost, internal,
		 best_cost, best_internal, best_i);
      /* Partition is too large, unwind into step when best cost was reached and
	 start new partition.  */
      if (partition->insns > 2 * partition_size)
	{
	  if (best_i != i)
	    {
	      if (symtab->dump_file)
		fprintf (symtab->dump_file, "Unwinding %i insertions to step %i\n",
			 i - best_i, best_i);
	      undo_partition (partition, best_n_nodes);
	      varpool_pos = best_varpool_pos;
	    }
	  i = best_i;
	  /* When we are finished, avoid creating empty partition.  */
	  while (i < n_nodes - 1 && symbol_partitioned_p (order[i + 1]))
	    i++;
	  if (i == n_nodes - 1)
	    break;
	  partition = new_partition ("");
	  last_visited_node = 0;
	  total_size = best_total_size;
	  cost = 0;

	  if (symtab->dump_file)
	    fprintf (symtab->dump_file, "New partition\n");
	  best_n_nodes = 0;
	  best_cost = INT_MAX;

	  /* Since the size of partitions is just approximate, update the size after
	     we finished current one.  */
	  if (npartitions < n_lto_partitions)
	    partition_size = total_size / (n_lto_partitions - npartitions);
	  else
	    partition_size = INT_MAX;

	  if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
	    partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
	  npartitions ++;
	}
    }

  next_nodes.truncate (0);

  /* Variables that are not reachable from the code go into last partition.  */
  if (flag_toplevel_reorder)
    {
      FOR_EACH_VARIABLE (vnode)
	if (vnode->get_partitioning_class () == SYMBOL_PARTITION
	    && !symbol_partitioned_p (vnode)
	    && !vnode->no_reorder)
	  next_nodes.safe_push (vnode);
    }

  /* Output remaining ordered symbols.  */
  while (varpool_pos < n_varpool_nodes)
    next_nodes.safe_push (varpool_order[varpool_pos++]);
  while (noreorder_pos < (int)noreorder.length ())
    next_nodes.safe_push (noreorder[noreorder_pos++]);
  add_sorted_nodes (next_nodes, partition);

  free (order);

  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "\nPartition sizes:\n");
      unsigned partitions = ltrans_partitions.length ();

      for (unsigned i = 0; i < partitions ; i++)
	{
	  ltrans_partition p = ltrans_partitions[i];
	  fprintf (symtab->dump_file, "partition %d contains %d (%2.2f%%)"
		   " symbols and %d (%2.2f%%) insns\n", i, p->symbols,
		   100.0 * p->symbols / n_nodes, p->insns,
		   100.0 * p->insns / original_total_size);
	}

      fprintf (symtab->dump_file, "\n");
    }
}
799
800 /* Return true if we must not change the name of the NODE. The name as
801 extracted from the corresponding decl should be passed in NAME. */
802
803 static bool
804 must_not_rename (symtab_node *node, const char *name)
805 {
806 /* Our renaming machinery do not handle more than one change of assembler name.
807 We should not need more than one anyway. */
808 if (node->lto_file_data
809 && lto_get_decl_name_mapping (node->lto_file_data, name) != name)
810 {
811 if (symtab->dump_file)
812 fprintf (symtab->dump_file,
813 "Not privatizing symbol name: %s. It privatized already.\n",
814 name);
815 return true;
816 }
817 /* Avoid mangling of already mangled clones.
818 ??? should have a flag whether a symbol has a 'private' name already,
819 since we produce some symbols like that i.e. for global constructors
820 that are not really clones. */
821 if (node->unique_name)
822 {
823 if (symtab->dump_file)
824 fprintf (symtab->dump_file,
825 "Not privatizing symbol name: %s. Has unique name.\n",
826 name);
827 return true;
828 }
829 return false;
830 }
831
/* If we are an offload compiler, we may have to rewrite symbols to be
   valid on this target.  Return either PTR or a modified version of it.  */

static const char *
maybe_rewrite_identifier (const char *ptr)
{
#if defined ACCEL_COMPILER && (defined NO_DOT_IN_LABEL || defined NO_DOLLAR_IN_LABEL)
#ifndef NO_DOT_IN_LABEL
  char valid = '.';
  const char reject[] = "$";
#elif !defined NO_DOLLAR_IN_LABEL
  char valid = '$';
  const char reject[] = ".";
#else
  char valid = '_';
  const char reject[] = ".$";
#endif

  /* Copy lazily: PTR is returned unchanged unless a rejected character is
     found, in which case a duplicate is patched in place.  */
  char *dup = NULL;
  const char *result = ptr;
  size_t pos;
  while (result[pos = strcspn (result, reject)] != '\0')
    {
      if (dup == NULL)
	{
	  dup = xstrdup (ptr);
	  result = dup;
	}
      dup[pos] = valid;
    }
  return result;
#else
  return ptr;
#endif
}
869
870 /* Ensure that the symbol in NODE is valid for the target, and if not,
871 rewrite it. */
872
873 static void
874 validize_symbol_for_target (symtab_node *node)
875 {
876 tree decl = node->decl;
877 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
878
879 if (must_not_rename (node, name))
880 return;
881
882 const char *name2 = maybe_rewrite_identifier (name);
883 if (name2 != name)
884 {
885 symtab->change_decl_assembler_name (decl, get_identifier (name2));
886 if (node->lto_file_data)
887 lto_record_renamed_decl (node->lto_file_data, name,
888 IDENTIFIER_POINTER
889 (DECL_ASSEMBLER_NAME (decl)));
890 }
891 }
892
/* Helper for privatize_symbol_name.  Mangle the assembler name of DECL
   (belonging to NODE) into a private "lto_priv" clone name.  Return false
   if renaming is not allowed for this symbol, true on success.  */

static bool
privatize_symbol_name_1 (symtab_node *node, tree decl)
{
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));

  if (must_not_rename (node, name))
    return false;

  /* First make the name valid for an offload target, then append the
     lto_priv suffix via the clone-naming machinery.  */
  name = maybe_rewrite_identifier (name);
  symtab->change_decl_assembler_name (decl,
				      clone_function_name_1 (name,
							     "lto_priv"));

  /* Record the mapping so later references by the old name can be
     translated.  */
  if (node->lto_file_data)
    lto_record_renamed_decl (node->lto_file_data, name,
			     IDENTIFIER_POINTER
			     (DECL_ASSEMBLER_NAME (decl)));

  if (symtab->dump_file)
    fprintf (symtab->dump_file,
	     "Privatizing symbol name: %s -> %s\n",
	     name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));

  return true;
}
921
/* Mangle NODE symbol name into a local name.
   This is necessary to do
   1) if two or more static vars of same assembler name
   are merged into single ltrans unit.
   2) if previously static var was promoted hidden to avoid possible conflict
   with symbols defined out of the LTO world.
   Return false if the symbol must keep its name.  */

static bool
privatize_symbol_name (symtab_node *node)
{
  if (!privatize_symbol_name_1 (node, node->decl))
    return false;

  /* We could change name which is a target of transparent alias
     chain of instrumented function name.  Fix alias chain if so.  */
  if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
    {
      tree iname = NULL_TREE;
      if (cnode->instrumentation_clone)
	{
	  /* If we want to privatize instrumentation clone
	     then we also need to privatize original function.  */
	  if (cnode->instrumented_version)
	    privatize_symbol_name (cnode->instrumented_version);
	  else
	    privatize_symbol_name_1 (cnode, cnode->orig_decl);
	  /* Re-link the alias chain: the instrumented name's TREE_CHAIN
	     points at the original decl's assembler name.  */
	  iname = DECL_ASSEMBLER_NAME (cnode->decl);
	  TREE_CHAIN (iname) = DECL_ASSEMBLER_NAME (cnode->orig_decl);
	}
      else if (cnode->instrumented_version
	       && cnode->instrumented_version->orig_decl == cnode->decl)
	{
	  iname = DECL_ASSEMBLER_NAME (cnode->instrumented_version->decl);
	  TREE_CHAIN (iname) = DECL_ASSEMBLER_NAME (cnode->decl);
	}
    }

  return true;
}
961
/* Promote symbol NODE to be public with hidden visibility (the code below
   sets TREE_PUBLIC and VISIBILITY_HIDDEN), privatizing its name first so
   the newly public symbol cannot clash with non-LTO definitions.  */

static void
promote_symbol (symtab_node *node)
{
  /* We already promoted ... */
  if (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
      && DECL_VISIBILITY_SPECIFIED (node->decl)
      && TREE_PUBLIC (node->decl))
    {
      /* Still make sure the name is valid for the (offload) target.  */
      validize_symbol_for_target (node);
      return;
    }

  gcc_checking_assert (!TREE_PUBLIC (node->decl)
		       && !DECL_EXTERNAL (node->decl));
  /* Be sure that newly public symbol does not conflict with anything already
     defined by the non-LTO part.  */
  privatize_symbol_name (node);
  TREE_PUBLIC (node->decl) = 1;
  DECL_VISIBILITY (node->decl) = VISIBILITY_HIDDEN;
  DECL_VISIBILITY_SPECIFIED (node->decl) = true;
  ipa_ref *ref;

  /* Promoting a symbol also promotes all transparent aliases with exception
     of weakref where the visibility flags are always wrong and set to
     !PUBLIC.  */
  for (unsigned i = 0; node->iterate_direct_aliases (i, ref); i++)
    {
      struct symtab_node *alias = ref->referring;
      if (alias->transparent_alias && !alias->weakref)
	{
	  TREE_PUBLIC (alias->decl) = 1;
	  DECL_VISIBILITY (alias->decl) = VISIBILITY_HIDDEN;
	  DECL_VISIBILITY_SPECIFIED (alias->decl) = true;
	}
      gcc_assert (!alias->weakref || TREE_PUBLIC (alias->decl));
    }

  if (symtab->dump_file)
    fprintf (symtab->dump_file,
	     "Promoting as hidden: %s\n", node->name ());
}
1005
1006 /* Return true if NODE needs named section even if it won't land in the partition
1007 symbol table.
1008 FIXME: we should really not use named sections for inline clones and master
1009 clones. */
1010
1011 static bool
1012 may_need_named_section_p (lto_symtab_encoder_t encoder, symtab_node *node)
1013 {
1014 struct cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1015 if (!cnode)
1016 return false;
1017 if (node->real_symbol_p ())
1018 return false;
1019 return (!encoder
1020 || (lto_symtab_encoder_lookup (encoder, node) != LCC_NOT_FOUND
1021 && lto_symtab_encoder_encode_body_p (encoder,
1022 cnode)));
1023 }
1024
/* If NODE represents a static variable, see if there are other variables
   of the same name in partition ENCODER (or in the whole compilation unit
   if ENCODER is NULL) and if so, mangle the statics.  Always mangle all
   conflicting statics, so we reduce chances of silently miscompiling
   asm statements referring to them by symbol name.  */
1030
static void
rename_statics (lto_symtab_encoder_t encoder, symtab_node *node)
{
  tree decl = node->decl;
  symtab_node *s;
  tree name = DECL_ASSEMBLER_NAME (decl);

  /* See if this is a static symbol; public/external/non-real symbols need
     no renaming unless they still may get a named section.  */
  if (((node->externally_visible && !node->weakref)
      /* FIXME: externally_visible is somewhat illogically not set for
	 external symbols (i.e. those not defined).  Remove this test
	 once this is fixed.  */
        || DECL_EXTERNAL (node->decl)
        || !node->real_symbol_p ())
       && !may_need_named_section_p (encoder, node))
    return;

  /* Now walk symbols sharing the same name and see if there are any
     conflicts.  (All types of symbols count here, since we cannot have a
     static of the same name as an external or public symbol.)  */
  for (s = symtab_node::get_for_asmname (name);
       s; s = s->next_sharing_asm_name)
    if ((s->real_symbol_p () || may_need_named_section_p (encoder, s))
	&& s->decl != node->decl
	&& (!encoder
	    || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
      break;

  /* OK, no conflict, so we have nothing to do.  */
  if (!s)
    return;

  if (symtab->dump_file)
    fprintf (symtab->dump_file,
	     "Renaming statics with asm name: %s\n", node->name ());

  /* Assign every symbol in the set that shares the same ASM name a unique
     mangled name.  */
  for (s = symtab_node::get_for_asmname (name); s;)
    if ((!s->externally_visible || s->weakref)
	/* Transparent aliases having same name as target are renamed at a
	   time their target gets new name.  Transparent aliases that use
	   separate assembler name require the name to be unique.  */
	&& (!s->transparent_alias || !s->definition || s->weakref
	    || !symbol_table::assembler_names_equal_p
		 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (s->decl)),
		  IDENTIFIER_POINTER
		    (DECL_ASSEMBLER_NAME (s->get_alias_target()->decl))))
	/* NOTE(review): the two tests below read NODE's flags rather than
	   S's — presumably relying on the early-return guard above; verify
	   this is intentional and not a copy-and-paste slip.  */
	&& ((s->real_symbol_p ()
	     && !DECL_EXTERNAL (node->decl)
	     && !TREE_PUBLIC (node->decl))
	    || may_need_named_section_p (encoder, s))
	&& (!encoder
	    || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
      {
	if (privatize_symbol_name (s))
	  /* Re-start from beginning since we do not know how many symbols
	     changed a name.  */
	  s = symtab_node::get_for_asmname (name);
	else s = s->next_sharing_asm_name;
      }
    else s = s->next_sharing_asm_name;
}
1093
1094 /* Find out all static decls that need to be promoted to global because
1095 of cross file sharing. This function must be run in the WPA mode after
1096 all inlinees are added. */
1097
1098 void
1099 lto_promote_cross_file_statics (void)
1100 {
1101 unsigned i, n_sets;
1102
1103 gcc_assert (flag_wpa);
1104
1105 lto_stream_offload_p = false;
1106 select_what_to_stream ();
1107
1108 /* First compute boundaries. */
1109 n_sets = ltrans_partitions.length ();
1110 for (i = 0; i < n_sets; i++)
1111 {
1112 ltrans_partition part
1113 = ltrans_partitions[i];
1114 part->encoder = compute_ltrans_boundary (part->encoder);
1115 }
1116
1117 /* Look at boundaries and promote symbols as needed. */
1118 for (i = 0; i < n_sets; i++)
1119 {
1120 lto_symtab_encoder_iterator lsei;
1121 lto_symtab_encoder_t encoder = ltrans_partitions[i]->encoder;
1122
1123 for (lsei = lsei_start (encoder); !lsei_end_p (lsei);
1124 lsei_next (&lsei))
1125 {
1126 symtab_node *node = lsei_node (lsei);
1127
1128 /* If symbol is static, rename it if its assembler name clash with
1129 anything else in this unit. */
1130 rename_statics (encoder, node);
1131
1132 /* No need to promote if symbol already is externally visible ... */
1133 if (node->externally_visible
1134 /* ... or if it is part of current partition ... */
1135 || lto_symtab_encoder_in_partition_p (encoder, node)
1136 /* ... or if we do not partition it. This mean that it will
1137 appear in every partition refernecing it. */
1138 || node->get_partitioning_class () != SYMBOL_PARTITION)
1139 {
1140 validize_symbol_for_target (node);
1141 continue;
1142 }
1143
1144 promote_symbol (node);
1145 }
1146 }
1147 }
1148
1149 /* Rename statics in the whole unit in the case that
1150 we do -flto-partition=none. */
1151
1152 void
1153 lto_promote_statics_nonwpa (void)
1154 {
1155 symtab_node *node;
1156 FOR_EACH_SYMBOL (node)
1157 {
1158 rename_statics (NULL, node);
1159 validize_symbol_for_target (node);
1160 }
1161 }