/* LTO partitioning logic routines.
   Copyright (C) 2009-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "toplev.h"
#include "alias.h"
#include "tm.h"
#include "function.h"
#include "predict.h"
#include "basic-block.h"
#include "tree.h"
#include "gimple.h"
#include "hard-reg-set.h"
#include "options.h"
#include "fold-const.h"
#include "internal-fn.h"
#include "cgraph.h"
#include "lto-streamer.h"
#include "timevar.h"
#include "params.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
#include "lto-partition.h"
#include "stringpool.h"

vec<ltrans_partition> ltrans_partitions;

static void add_symbol_to_partition (ltrans_partition part, symtab_node *node);


/* Create new partition with name NAME.  */

static ltrans_partition
new_partition (const char *name)
{
  ltrans_partition part = XCNEW (struct ltrans_partition_def);
  part->encoder = lto_symtab_encoder_new (false);
  part->name = name;
  part->insns = 0;
  part->symbols = 0;
  ltrans_partitions.safe_push (part);
  return part;
}

/* Free memory used by ltrans data structures.  */

void
free_ltrans_partitions (void)
{
  unsigned int idx;
  ltrans_partition part;
  for (idx = 0; ltrans_partitions.iterate (idx, &part); idx++)
    {
      if (part->initializers_visited)
        delete part->initializers_visited;
      /* Symtab encoder is freed after streaming.  */
      free (part);
    }
  ltrans_partitions.release ();
}

/* Return true if symbol is already in some partition.  */

static inline bool
symbol_partitioned_p (symtab_node *node)
{
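  /* AUX of a symtab node doubles as a reference count of the partitions
     that contain it; see add_symbol_to_partition_1 and undo_partition.  */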
  return node->aux;
}

/* Add references into the partition.  */
static void
add_references_to_partition (ltrans_partition part, symtab_node *node)
{
  int i;
  struct ipa_ref *ref = NULL;

  /* Add all duplicated references to the partition.  */
  for (i = 0; node->iterate_reference (i, ref); i++)
    if (ref->referred->get_partitioning_class () == SYMBOL_DUPLICATE)
      add_symbol_to_partition (part, ref->referred);
    /* References to a readonly variable may be constant folded into its value.
       Recursively look into the initializers of the constant variable and add
       references, too.  */
    else if (is_a <varpool_node *> (ref->referred)
             && (dyn_cast <varpool_node *> (ref->referred)
                 ->ctor_useable_for_folding_p ()
                 || POINTER_BOUNDS_P (ref->referred->decl))
             && !lto_symtab_encoder_in_partition_p (part->encoder, ref->referred))
      {
        if (!part->initializers_visited)
          part->initializers_visited = new hash_set<symtab_node *>;
        if (!part->initializers_visited->add (ref->referred))
          add_references_to_partition (part, ref->referred);
      }
}

/* Helper function for add_symbol_to_partition doing the actual dirty work
   of adding NODE to PART.  */

static bool
add_symbol_to_partition_1 (ltrans_partition part, symtab_node *node)
{
  enum symbol_partitioning_class c = node->get_partitioning_class ();
  struct ipa_ref *ref;
  symtab_node *node1;

  /* If NODE is already there, we have nothing to do.  */
  if (lto_symtab_encoder_in_partition_p (part->encoder, node))
    return true;

  /* Non-duplicated aliases or thunks of a duplicated symbol need to be output
     just once.

     Be lax about comdats; they may or may not be duplicated and we may end up
     needing to duplicate a keyed comdat because it has an unkeyed alias.  */
  if (c == SYMBOL_PARTITION && !DECL_COMDAT (node->decl)
      && symbol_partitioned_p (node))
    return false;

  /* Be sure that we never try to duplicate a partitioned symbol
     or add an external symbol.  */
  gcc_assert (c != SYMBOL_EXTERNAL
              && (c == SYMBOL_DUPLICATE || !symbol_partitioned_p (node)));

  part->symbols++;

  lto_set_symtab_encoder_in_partition (part->encoder, node);

  if (symbol_partitioned_p (node))
    {
      node->in_other_partition = 1;
      if (symtab->dump_file)
        fprintf (symtab->dump_file,
                 "Symbol node %s now used in multiple partitions\n",
                 node->name ());
    }
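  /* Count one more partition containing NODE.  */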
  node->aux = (void *)((size_t)node->aux + 1);

  if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
    {
      struct cgraph_edge *e;
      if (!node->alias)
        part->insns += inline_summaries->get (cnode)->self_size;

      /* Add all inline clones and callees that are duplicated.  */
      for (e = cnode->callees; e; e = e->next_callee)
        if (!e->inline_failed)
          add_symbol_to_partition_1 (part, e->callee);
        else if (e->callee->get_partitioning_class () == SYMBOL_DUPLICATE)
          add_symbol_to_partition (part, e->callee);

      /* Add all thunks associated with the function.  */
      for (e = cnode->callers; e; e = e->next_caller)
        if (e->caller->thunk.thunk_p)
          add_symbol_to_partition_1 (part, e->caller);

      /* Instrumented version is actually the same function.
         Therefore put it into the same partition.  */
      if (cnode->instrumented_version)
        add_symbol_to_partition_1 (part, cnode->instrumented_version);
    }

  add_references_to_partition (part, node);

  /* Add all aliases associated with the symbol.  */

  FOR_EACH_ALIAS (node, ref)
    if (!node->weakref)
      add_symbol_to_partition_1 (part, ref->referring);

  /* Ensure that members of a SAME_COMDAT_GROUP list are always added as
     a group.  */
  if (node->same_comdat_group)
    for (node1 = node->same_comdat_group;
         node1 != node; node1 = node1->same_comdat_group)
      if (!node->alias)
        {
          bool added = add_symbol_to_partition_1 (part, node1);
          gcc_assert (added);
        }
  return true;
}

/* If symbol NODE is really part of another symbol's definition (i.e. it is
   an internal label, thunk, alias or the like), return the outer symbol.
   When add_symbol_to_partition_1 is called on the outer symbol it must
   eventually add NODE, too.  */
static symtab_node *
contained_in_symbol (symtab_node *node)
{
  /* Weakrefs are never contained in anything.  */
  if (node->weakref)
    return node;
  if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
    {
      cnode = cnode->function_symbol ();
      if (cnode->global.inlined_to)
        cnode = cnode->global.inlined_to;
      return cnode;
    }
  else if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
    return vnode->ultimate_alias_target ();
  return node;
}

/* Add symbol NODE to the partition PART.  When the definition of NODE is
   part of another symbol's definition, add the other symbol, too.  */

static void
add_symbol_to_partition (ltrans_partition part, symtab_node *node)
{
  symtab_node *node1;

  /* Verify that we do not try to duplicate something that cannot be.  */
  gcc_checking_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
                       || !symbol_partitioned_p (node));

  while ((node1 = contained_in_symbol (node)) != node)
    node = node1;

  /* If we have a duplicated symbol contained in something we cannot
     duplicate, we are very badly screwed.  The other way is possible,
     so we do not assert this in add_symbol_to_partition_1.

     Be lax about comdats; they may or may not be duplicated and we may end up
     needing to duplicate a keyed comdat because it has an unkeyed alias.  */

  gcc_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
              || DECL_COMDAT (node->decl)
              || !symbol_partitioned_p (node));

  add_symbol_to_partition_1 (part, node);
}

/* Undo all additions until the number of symbol nodes in PARTITION is
   N_NODES.  */

static void
undo_partition (ltrans_partition partition, unsigned int n_nodes)
{
  while (lto_symtab_encoder_size (partition->encoder) > (int)n_nodes)
    {
      symtab_node *node = lto_symtab_encoder_deref (partition->encoder,
                                                    n_nodes);
      partition->symbols--;
      cgraph_node *cnode;

      /* After UNDO we no longer know what was visited.  */
      if (partition->initializers_visited)
        delete partition->initializers_visited;
      partition->initializers_visited = NULL;

      if (!node->alias && (cnode = dyn_cast <cgraph_node *> (node)))
        partition->insns -= inline_summaries->get (cnode)->self_size;
      lto_symtab_encoder_delete_node (partition->encoder, node);
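      /* Drop the partition reference count kept in AUX.  */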
      node->aux = (void *)((size_t)node->aux - 1);
    }
}

/* Group cgraph nodes by input files.  This is used mainly for testing
   right now.  */

void
lto_1_to_1_map (void)
{
  symtab_node *node;
  struct lto_file_decl_data *file_data;
  hash_map<lto_file_decl_data *, ltrans_partition> pmap;
  ltrans_partition partition;
  int npartitions = 0;

  FOR_EACH_SYMBOL (node)
    {
      if (node->get_partitioning_class () != SYMBOL_PARTITION
          || symbol_partitioned_p (node))
        continue;

      file_data = node->lto_file_data;

      if (file_data)
        {
          ltrans_partition *slot = &pmap.get_or_insert (file_data);
          if (*slot)
            partition = *slot;
          else
            {
              partition = new_partition (file_data->file_name);
              *slot = partition;
              npartitions++;
            }
        }
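      /* Symbols with no LTO file data all share the first partition created,
         if any.  */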
      else if (!file_data && ltrans_partitions.length ())
        partition = ltrans_partitions[0];
      else
        {
          partition = new_partition ("");
          pmap.put (NULL, partition);
          npartitions++;
        }

      add_symbol_to_partition (partition, node);
    }

  /* If the cgraph is empty, create one cgraph node set so that there is still
     an output file for any variables that need to be exported in a DSO.  */
  if (!npartitions)
    new_partition ("empty");

}

/* Maximal partitioning.  Put every new symbol into a new partition if
   possible.  */

void
lto_max_map (void)
{
  symtab_node *node;
  ltrans_partition partition;
  int npartitions = 0;

  FOR_EACH_SYMBOL (node)
    {
      if (node->get_partitioning_class () != SYMBOL_PARTITION
          || symbol_partitioned_p (node))
        continue;
      partition = new_partition (node->asm_name ());
      add_symbol_to_partition (partition, node);
      npartitions++;
    }
  if (!npartitions)
    new_partition ("empty");
}

/* Helper function for qsort; sort nodes by order.  noreorder functions must
   have been removed earlier.  */
static int
node_cmp (const void *pa, const void *pb)
{
  const struct cgraph_node *a = *(const struct cgraph_node * const *) pa;
  const struct cgraph_node *b = *(const struct cgraph_node * const *) pb;

  /* Profile reorder flag enables function reordering based on the first
     execution of a function.  All functions with profile are placed in
     ascending order at the beginning.  */

  if (flag_profile_reorder_functions)
    {
      /* Functions with time profile are sorted in ascending order.  */
      if (a->tp_first_run && b->tp_first_run)
        return a->tp_first_run != b->tp_first_run
               ? a->tp_first_run - b->tp_first_run
               : a->order - b->order;

      /* Functions with time profile are sorted before the functions
         that do not have the profile.  */
      if (a->tp_first_run || b->tp_first_run)
        return b->tp_first_run - a->tp_first_run;
    }

  return b->order - a->order;
}

/* Helper function for qsort; sort nodes by order.  */
static int
varpool_node_cmp (const void *pa, const void *pb)
{
  const symtab_node *a = *static_cast<const symtab_node * const *> (pa);
  const symtab_node *b = *static_cast<const symtab_node * const *> (pb);
  return b->order - a->order;
}

/* Add all symtab nodes from NEXT_NODE to PARTITION in order.  */

static void
add_sorted_nodes (vec<symtab_node *> &next_nodes, ltrans_partition partition)
{
  unsigned i;
  symtab_node *node;

  next_nodes.qsort (varpool_node_cmp);
  FOR_EACH_VEC_ELT (next_nodes, i, node)
    if (!symbol_partitioned_p (node))
      add_symbol_to_partition (partition, node);
}


/* Group cgraph nodes into equally-sized partitions.

   The partitioning algorithm is simple: nodes are taken in predefined order.
   The order corresponds to the order we want functions to have in the final
   output.  In the future this will be given by function reordering pass, but
   at the moment we use the topological order, which is a good approximation.

   The goal is to partition this linear order into intervals (partitions) so
   that all the partitions have approximately the same size and the number of
   callgraph or IPA reference edges crossing boundaries is minimal.

   This is a lot faster (O(n) in size of callgraph) than algorithms doing
   priority-based graph clustering that are generally O(n^2) and, since
   WHOPR is designed to make things go well across partitions, it leads
   to good results.

   We compute the expected size of a partition as:

     max (total_size / lto_partitions, min_partition_size)

   We use dynamic expected size of partition so small programs are partitioned
   into enough partitions to allow use of multiple CPUs, while large programs
   are not partitioned too much.  Creating too many partitions significantly
   increases the streaming overhead.

   In the future, we would like to bound the maximal size of partitions so as
   to prevent the LTRANS stage from consuming too much memory.  At the moment,
   however, the WPA stage is the most memory intensive for large benchmarks,
   since too many types and declarations are read into memory.

   The function implements a simple greedy algorithm.  Nodes are being added
   to the current partition until after 3/4 of the expected partition size is
   reached.  Past this threshold, we keep track of boundary size (number of
   edges going to other partitions) and continue adding functions until after
   the current partition has grown to twice the expected partition size.  Then
   the process is undone to the point where the minimal ratio of boundary size
   and in-partition calls was reached.  */
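/* For example, a unit totalling 100000 instructions split into 32 requested
   partitions gives an expected partition size of 3125 (assuming that this is
   not below MIN_PARTITION_SIZE); the loop below then starts watching the
   boundary cost once a partition reaches 3/4 of that and forces a split once
   it exceeds twice that.  */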

void
lto_balanced_map (int n_lto_partitions)
{
  int n_nodes = 0;
  int n_varpool_nodes = 0, varpool_pos = 0, best_varpool_pos = 0;
  struct cgraph_node **order = XNEWVEC (cgraph_node *, symtab->cgraph_max_uid);
  auto_vec<cgraph_node *> noreorder;
  auto_vec<varpool_node *> varpool_order;
  int i;
  struct cgraph_node *node;
  int original_total_size, total_size = 0, best_total_size = 0;
  int partition_size;
  ltrans_partition partition;
  int last_visited_node = 0;
  varpool_node *vnode;
  int cost = 0, internal = 0;
  int best_n_nodes = 0, best_i = 0, best_cost =
    INT_MAX, best_internal = 0;
  int npartitions;
  int current_order = -1;
  int noreorder_pos = 0;

  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->aux);

  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->get_partitioning_class () == SYMBOL_PARTITION)
      {
        if (node->no_reorder)
          noreorder.safe_push (node);
        else
          order[n_nodes++] = node;
        if (!node->alias)
          total_size += inline_summaries->get (node)->size;
      }

  original_total_size = total_size;

  /* Streaming works best when the source units do not cross partition
     boundaries much.  This is because importing a function from a source
     unit tends to import a lot of global trees defined there.  We should
     get better about minimizing the function boundary, but until then
     things work more smoothly if we order in source order.  */
  qsort (order, n_nodes, sizeof (struct cgraph_node *), node_cmp);
  noreorder.qsort (node_cmp);

  if (symtab->dump_file)
    {
      for (i = 0; i < n_nodes; i++)
        fprintf (symtab->dump_file, "Balanced map symbol order:%s:%u\n",
                 order[i]->name (), order[i]->tp_first_run);
      for (i = 0; i < (int)noreorder.length (); i++)
        fprintf (symtab->dump_file, "Balanced map symbol no_reorder:%s:%u\n",
                 noreorder[i]->name (), noreorder[i]->tp_first_run);
    }

  /* Collect all variables that should not be reordered.  */
  FOR_EACH_VARIABLE (vnode)
    if (vnode->get_partitioning_class () == SYMBOL_PARTITION
        && (!flag_toplevel_reorder || vnode->no_reorder))
      varpool_order.safe_push (vnode);
  n_varpool_nodes = varpool_order.length ();
  varpool_order.qsort (varpool_node_cmp);

  /* Compute partition size and create the first partition.  */
  partition_size = total_size / n_lto_partitions;
  if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
    partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
  npartitions = 1;
  partition = new_partition ("");
  if (symtab->dump_file)
    fprintf (symtab->dump_file, "Total unit size: %i, partition size: %i\n",
             total_size, partition_size);

  auto_vec<symtab_node *> next_nodes;

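  /* Main greedy loop: take functions in the precomputed order, first emitting
     any no_reorder functions and ordered variables whose original position
     precedes the current function, and track the boundary cost as we go.  */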
  for (i = 0; i < n_nodes; i++)
    {
      if (symbol_partitioned_p (order[i]))
        continue;

      current_order = order[i]->order;

      /* Output noreorder and varpool in program order first.  */
      next_nodes.truncate (0);
      while (varpool_pos < n_varpool_nodes
             && varpool_order[varpool_pos]->order < current_order)
        next_nodes.safe_push (varpool_order[varpool_pos++]);
      while (noreorder_pos < (int)noreorder.length ()
             && noreorder[noreorder_pos]->order < current_order)
        {
          if (!noreorder[noreorder_pos]->alias)
            total_size -= inline_summaries->get (noreorder[noreorder_pos])->size;
          next_nodes.safe_push (noreorder[noreorder_pos++]);
        }
      add_sorted_nodes (next_nodes, partition);

      add_symbol_to_partition (partition, order[i]);
      if (!order[i]->alias)
        total_size -= inline_summaries->get (order[i])->size;


      /* Once we added a new node to the partition, we also want to add
         all referenced variables unless they were already added into some
         earlier partition.
         add_symbol_to_partition adds possibly multiple nodes and
         variables that are needed to satisfy needs of ORDER[i].
         We remember the last visited cgraph and varpool node from the last
         iteration of the outer loop that allows us to process every new
         addition.

         At the same time we compute size of the boundary into COST.  Every
         callgraph or IPA reference edge leaving the partition contributes to
         COST.  Every edge inside the partition was earlier computed as one
         leaving it and thus we need to subtract it from COST.  */
      while (last_visited_node < lto_symtab_encoder_size (partition->encoder))
        {
          symtab_node *refs_node;
          int j;
          struct ipa_ref *ref = NULL;
          symtab_node *snode = lto_symtab_encoder_deref (partition->encoder,
                                                         last_visited_node);

          if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
            {
              struct cgraph_edge *edge;

              refs_node = node;

              last_visited_node++;

              gcc_assert (node->definition || node->weakref);

              /* Compute boundary cost of callgraph edges.  */
              for (edge = node->callees; edge; edge = edge->next_callee)
                if (edge->callee->definition)
                  {
                    int edge_cost = edge->frequency;
                    int index;

                    if (!edge_cost)
                      edge_cost = 1;
                    gcc_assert (edge_cost > 0);
                    index = lto_symtab_encoder_lookup (partition->encoder,
                                                       edge->callee);
                    if (index != LCC_NOT_FOUND
                        && index < last_visited_node - 1)
                      cost -= edge_cost, internal += edge_cost;
                    else
                      cost += edge_cost;
                  }
              for (edge = node->callers; edge; edge = edge->next_caller)
                {
                  int edge_cost = edge->frequency;
                  int index;

                  gcc_assert (edge->caller->definition);
                  if (!edge_cost)
                    edge_cost = 1;
                  gcc_assert (edge_cost > 0);
                  index = lto_symtab_encoder_lookup (partition->encoder,
                                                     edge->caller);
                  if (index != LCC_NOT_FOUND
                      && index < last_visited_node - 1)
                    cost -= edge_cost;
                  else
                    cost += edge_cost;
                }
            }
          else
            {
              refs_node = snode;
              last_visited_node++;
            }

          /* Compute boundary cost of IPA REF edges and at the same time look
             into variables referenced from the current partition and try to
             add them.  */
          for (j = 0; refs_node->iterate_reference (j, ref); j++)
            if (is_a <varpool_node *> (ref->referred))
              {
                int index;

                vnode = dyn_cast <varpool_node *> (ref->referred);
                if (!vnode->definition)
                  continue;
                if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
                    && !vnode->no_reorder
                    && vnode->get_partitioning_class () == SYMBOL_PARTITION)
                  add_symbol_to_partition (partition, vnode);
                index = lto_symtab_encoder_lookup (partition->encoder,
                                                   vnode);
                if (index != LCC_NOT_FOUND
                    && index < last_visited_node - 1)
                  cost--, internal++;
                else
                  cost++;
              }
            else
              {
                int index;

                node = dyn_cast <cgraph_node *> (ref->referred);
                if (!node->definition)
                  continue;
                index = lto_symtab_encoder_lookup (partition->encoder,
                                                   node);
                if (index != LCC_NOT_FOUND
                    && index < last_visited_node - 1)
                  cost--, internal++;
                else
                  cost++;
              }
          for (j = 0; refs_node->iterate_referring (j, ref); j++)
            if (is_a <varpool_node *> (ref->referring))
              {
                int index;

                vnode = dyn_cast <varpool_node *> (ref->referring);
                gcc_assert (vnode->definition);
                /* It is better to couple variables with their users,
                   because it allows them to be removed.  Coupling with
                   objects they refer to only helps to reduce the number
                   of symbols promoted to hidden.  */
                if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
                    && !vnode->no_reorder
                    && !vnode->can_remove_if_no_refs_p ()
                    && vnode->get_partitioning_class () == SYMBOL_PARTITION)
                  add_symbol_to_partition (partition, vnode);
                index = lto_symtab_encoder_lookup (partition->encoder,
                                                   vnode);
                if (index != LCC_NOT_FOUND
                    && index < last_visited_node - 1)
                  cost--;
                else
                  cost++;
              }
            else
              {
                int index;

                node = dyn_cast <cgraph_node *> (ref->referring);
                gcc_assert (node->definition);
                index = lto_symtab_encoder_lookup (partition->encoder,
                                                   node);
                if (index != LCC_NOT_FOUND
                    && index < last_visited_node - 1)
                  cost--;
                else
                  cost++;
              }
        }

      /* If the partition is large enough, start looking for the smallest
         boundary cost.  */
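      /* The cross-multiplied comparison below compares the internal/boundary
         cost ratios of the current and the best split point without doing
         integer division.  */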
      if (partition->insns < partition_size * 3 / 4
          || best_cost == INT_MAX
          || ((!cost
               || (best_internal * (HOST_WIDE_INT) cost
                   > (internal * (HOST_WIDE_INT)best_cost)))
              && partition->insns < partition_size * 5 / 4))
        {
          best_cost = cost;
          best_internal = internal;
          best_i = i;
          best_n_nodes = lto_symtab_encoder_size (partition->encoder);
          best_total_size = total_size;
          best_varpool_pos = varpool_pos;
        }
      if (symtab->dump_file)
        fprintf (symtab->dump_file, "Step %i: added %s/%i, size %i, cost %i/%i "
                 "best %i/%i, step %i\n", i,
                 order[i]->name (), order[i]->order,
                 partition->insns, cost, internal,
                 best_cost, best_internal, best_i);
      /* Partition is too large, unwind to the step where the best cost was
         reached and start a new partition.  */
      if (partition->insns > 2 * partition_size)
        {
          if (best_i != i)
            {
              if (symtab->dump_file)
                fprintf (symtab->dump_file, "Unwinding %i insertions to step %i\n",
                         i - best_i, best_i);
              undo_partition (partition, best_n_nodes);
              varpool_pos = best_varpool_pos;
            }
          i = best_i;
          /* When we are finished, avoid creating an empty partition.  */
          while (i < n_nodes - 1 && symbol_partitioned_p (order[i + 1]))
            i++;
          if (i == n_nodes - 1)
            break;
          partition = new_partition ("");
          last_visited_node = 0;
          total_size = best_total_size;
          cost = 0;

          if (symtab->dump_file)
            fprintf (symtab->dump_file, "New partition\n");
          best_n_nodes = 0;
          best_cost = INT_MAX;

          /* Since the size of partitions is just approximate, update the
             size after we finish the current one.  */
          if (npartitions < n_lto_partitions)
            partition_size = total_size / (n_lto_partitions - npartitions);
          else
            partition_size = INT_MAX;

          if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
            partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
          npartitions++;
        }
    }

  next_nodes.truncate (0);

  /* Variables that are not reachable from the code go into the last
     partition.  */
  if (flag_toplevel_reorder)
    {
      FOR_EACH_VARIABLE (vnode)
        if (vnode->get_partitioning_class () == SYMBOL_PARTITION
            && !symbol_partitioned_p (vnode)
            && !vnode->no_reorder)
          next_nodes.safe_push (vnode);
    }

  /* Output remaining ordered symbols.  */
  while (varpool_pos < n_varpool_nodes)
    next_nodes.safe_push (varpool_order[varpool_pos++]);
  while (noreorder_pos < (int)noreorder.length ())
    next_nodes.safe_push (noreorder[noreorder_pos++]);
  add_sorted_nodes (next_nodes, partition);

  free (order);

  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "\nPartition sizes:\n");
      unsigned partitions = ltrans_partitions.length ();

      for (unsigned i = 0; i < partitions; i++)
        {
          ltrans_partition p = ltrans_partitions[i];
          fprintf (symtab->dump_file, "partition %d contains %d (%2.2f%%)"
                   " symbols and %d (%2.2f%%) insns\n", i, p->symbols,
                   100.0 * p->symbols / n_nodes, p->insns,
                   100.0 * p->insns / original_total_size);
        }

      fprintf (symtab->dump_file, "\n");
    }
}

/* Return true if we must not change the name of the NODE.  The name as
   extracted from the corresponding decl should be passed in NAME.  */

static bool
must_not_rename (symtab_node *node, const char *name)
{
  /* Our renaming machinery does not handle more than one change of
     assembler name.  We should not need more than one anyway.  */
  if (node->lto_file_data
      && lto_get_decl_name_mapping (node->lto_file_data, name) != name)
    {
      if (symtab->dump_file)
        fprintf (symtab->dump_file,
                 "Not privatizing symbol name: %s. It is already privatized.\n",
                 name);
      return true;
    }
  /* Avoid mangling of already mangled clones.
     ??? should have a flag whether a symbol has a 'private' name already,
     since we produce some symbols like that i.e. for global constructors
     that are not really clones.  */
  if (node->unique_name)
    {
      if (symtab->dump_file)
        fprintf (symtab->dump_file,
                 "Not privatizing symbol name: %s. Has unique name.\n",
                 name);
      return true;
    }
  return false;
}

/* If we are an offload compiler, we may have to rewrite symbols to be
   valid on this target.  Return either PTR or a modified version of it.  */

static const char *
maybe_rewrite_identifier (const char *ptr)
{
#if defined ACCEL_COMPILER && (defined NO_DOT_IN_LABEL || defined NO_DOLLAR_IN_LABEL)
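  /* Pick a replacement character that is valid in labels on this target
     (VALID) and the set of characters that must be rewritten (REJECT).  */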
#ifndef NO_DOT_IN_LABEL
  char valid = '.';
  const char reject[] = "$";
#elif !defined NO_DOLLAR_IN_LABEL
  char valid = '$';
  const char reject[] = ".";
#else
  char valid = '_';
  const char reject[] = ".$";
#endif

  char *copy = NULL;
  const char *match = ptr;
  for (;;)
    {
      size_t off = strcspn (match, reject);
      if (match[off] == '\0')
        break;
      if (copy == NULL)
        {
          copy = xstrdup (ptr);
          match = copy;
        }
      copy[off] = valid;
    }
  return match;
#else
  return ptr;
#endif
}

/* Ensure that the symbol in NODE is valid for the target, and if not,
   rewrite it.  */

static void
validize_symbol_for_target (symtab_node *node)
{
  tree decl = node->decl;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));

  if (must_not_rename (node, name))
    return;

  const char *name2 = maybe_rewrite_identifier (name);
  if (name2 != name)
    {
      symtab->change_decl_assembler_name (decl, get_identifier (name2));
      if (node->lto_file_data)
        lto_record_renamed_decl (node->lto_file_data, name,
                                 IDENTIFIER_POINTER
                                 (DECL_ASSEMBLER_NAME (decl)));
    }
}

/* Helper for privatize_symbol_name.  Mangle NODE symbol name
   represented by DECL.  */

static bool
privatize_symbol_name_1 (symtab_node *node, tree decl)
{
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));

  if (must_not_rename (node, name))
    return false;

  name = maybe_rewrite_identifier (name);
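  /* Append a "lto_priv" clone suffix and a unique number to the
     assembler name.  */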
  symtab->change_decl_assembler_name (decl,
                                      clone_function_name_1 (name,
                                                             "lto_priv"));

  if (node->lto_file_data)
    lto_record_renamed_decl (node->lto_file_data, name,
                             IDENTIFIER_POINTER
                             (DECL_ASSEMBLER_NAME (decl)));

  if (symtab->dump_file)
    fprintf (symtab->dump_file,
             "Privatizing symbol name: %s -> %s\n",
             name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));

  return true;
}

/* Mangle NODE symbol name into a local name.
   This is necessary to do
   1) if two or more static vars of the same assembler name
      are merged into a single ltrans unit.
   2) if a previously static var was promoted to hidden to avoid possible
      conflicts with symbols defined outside of the LTO world.  */

static bool
privatize_symbol_name (symtab_node *node)
{
  if (!privatize_symbol_name_1 (node, node->decl))
    return false;

  /* We could have changed a name that is the target of the transparent alias
     chain of an instrumented function name.  Fix the alias chain if so.  */
  if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
    {
      tree iname = NULL_TREE;
      if (cnode->instrumentation_clone)
        {
          /* If we want to privatize the instrumentation clone
             then we also need to privatize the original function.  */
          if (cnode->instrumented_version)
            privatize_symbol_name (cnode->instrumented_version);
          else
            privatize_symbol_name_1 (cnode, cnode->orig_decl);
          iname = DECL_ASSEMBLER_NAME (cnode->decl);
          TREE_CHAIN (iname) = DECL_ASSEMBLER_NAME (cnode->orig_decl);
        }
      else if (cnode->instrumented_version
               && cnode->instrumented_version->orig_decl == cnode->decl)
        {
          iname = DECL_ASSEMBLER_NAME (cnode->instrumented_version->decl);
          TREE_CHAIN (iname) = DECL_ASSEMBLER_NAME (cnode->decl);
        }
    }

  return true;
}

/* Promote symbol NODE to be public with hidden visibility.  */

static void
promote_symbol (symtab_node *node)
{
  /* We already promoted ... */
  if (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
      && DECL_VISIBILITY_SPECIFIED (node->decl)
      && TREE_PUBLIC (node->decl))
    {
      validize_symbol_for_target (node);
      return;
    }

  gcc_checking_assert (!TREE_PUBLIC (node->decl)
                       && !DECL_EXTERNAL (node->decl));
  /* Be sure that the newly public symbol does not conflict with anything
     already defined by the non-LTO part.  */
  privatize_symbol_name (node);
  TREE_PUBLIC (node->decl) = 1;
  DECL_VISIBILITY (node->decl) = VISIBILITY_HIDDEN;
  DECL_VISIBILITY_SPECIFIED (node->decl) = true;
  if (symtab->dump_file)
    fprintf (symtab->dump_file,
             "Promoting as hidden: %s\n", node->name ());
}

/* Return true if NODE needs a named section even if it won't land in
   the partition symbol table.
   FIXME: we should really not use named sections for inline clones
   and master clones.  */

static bool
may_need_named_section_p (lto_symtab_encoder_t encoder, symtab_node *node)
{
  struct cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
  if (!cnode)
    return false;
  if (node->real_symbol_p ())
    return false;
  return (!encoder
          || (lto_symtab_encoder_lookup (encoder, node) != LCC_NOT_FOUND
              && lto_symtab_encoder_encode_body_p (encoder,
                                                   cnode)));
}

/* If NODE represents a static variable, see if there are other variables
   of the same name in partition ENCODER (or in the whole compilation unit
   if ENCODER is NULL) and if so, mangle the statics.  Always mangle all
   conflicting statics, so we reduce the chances of silently miscompiling
   asm statements referring to them by symbol name.  */

static void
rename_statics (lto_symtab_encoder_t encoder, symtab_node *node)
{
  tree decl = node->decl;
  symtab_node *s;
  tree name = DECL_ASSEMBLER_NAME (decl);

  /* See if this is a static symbol.  */
  if ((node->externally_visible
       /* FIXME: externally_visible is somewhat illogically not set for
          external symbols (i.e. those not defined).  Remove this test
          once this is fixed.  */
       || DECL_EXTERNAL (node->decl)
       || !node->real_symbol_p ())
      && !may_need_named_section_p (encoder, node))
    return;

  /* Now walk symbols sharing the same name and see if there are any
     conflicts.  (All types of symbols count here, since we cannot have
     a static of the same name as an external or public symbol.)  */
  for (s = symtab_node::get_for_asmname (name);
       s; s = s->next_sharing_asm_name)
    if ((s->real_symbol_p () || may_need_named_section_p (encoder, s))
        && s->decl != node->decl
        && (!encoder
            || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
      break;

  /* OK, no conflict, so we have nothing to do.  */
  if (!s)
    return;

  if (symtab->dump_file)
    fprintf (symtab->dump_file,
             "Renaming statics with asm name: %s\n", node->name ());

  /* Assign every symbol in the set that shares the same ASM name a unique
     mangled name.  */
  for (s = symtab_node::get_for_asmname (name); s;)
    if (!s->externally_visible
        && ((s->real_symbol_p ()
             && !DECL_EXTERNAL (node->decl)
             && !TREE_PUBLIC (node->decl))
            || may_need_named_section_p (encoder, s))
        && (!encoder
            || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
      {
        if (privatize_symbol_name (s))
          /* Re-start from the beginning since we do not know how many
             symbols changed a name.  */
          s = symtab_node::get_for_asmname (name);
        else s = s->next_sharing_asm_name;
      }
    else s = s->next_sharing_asm_name;
}

/* Find out all static decls that need to be promoted to global because
   of cross-file sharing.  This function must be run in WPA mode after
   all inlinees are added.  */

void
lto_promote_cross_file_statics (void)
{
  unsigned i, n_sets;

  gcc_assert (flag_wpa);

  lto_stream_offload_p = false;
  select_what_to_stream ();

  /* First compute boundaries.  */
  n_sets = ltrans_partitions.length ();
  for (i = 0; i < n_sets; i++)
    {
      ltrans_partition part
        = ltrans_partitions[i];
      part->encoder = compute_ltrans_boundary (part->encoder);
    }

  /* Look at boundaries and promote symbols as needed.  */
  for (i = 0; i < n_sets; i++)
    {
      lto_symtab_encoder_iterator lsei;
      lto_symtab_encoder_t encoder = ltrans_partitions[i]->encoder;

      for (lsei = lsei_start (encoder); !lsei_end_p (lsei);
           lsei_next (&lsei))
        {
          symtab_node *node = lsei_node (lsei);

          /* If the symbol is static, rename it if its assembler name
             clashes with anything else in this unit.  */
          rename_statics (encoder, node);

          /* No need to promote if the symbol is already externally
             visible ...  */
          if (node->externally_visible
              /* ... or if it is part of the current partition ...  */
              || lto_symtab_encoder_in_partition_p (encoder, node)
              /* ... or if we do not partition it.  This means that it will
                 appear in every partition referencing it.  */
              || node->get_partitioning_class () != SYMBOL_PARTITION)
            {
              validize_symbol_for_target (node);
              continue;
            }

          promote_symbol (node);
        }
    }
}

/* Rename statics in the whole unit in the case that
   we do -flto-partition=none.  */

void
lto_promote_statics_nonwpa (void)
{
  symtab_node *node;
  FOR_EACH_SYMBOL (node)
    {
      rename_statics (NULL, node);
      validize_symbol_for_target (node);
    }
}