]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/lto/lto-partition.c
* lto-partition.c (lto_balanced_map): Fix sanity check.
[thirdparty/gcc.git] / gcc / lto / lto-partition.c
1 /* LTO partitioning logic routines.
2 Copyright (C) 2009-2018 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "target.h"
24 #include "function.h"
25 #include "basic-block.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "stringpool.h"
30 #include "cgraph.h"
31 #include "lto-streamer.h"
32 #include "params.h"
33 #include "symbol-summary.h"
34 #include "tree-vrp.h"
35 #include "ipa-prop.h"
36 #include "ipa-fnsummary.h"
37 #include "lto-partition.h"
38 #include "sreal.h"
39
40 vec<ltrans_partition> ltrans_partitions;
41
42 static void add_symbol_to_partition (ltrans_partition part, symtab_node *node);
43
44
45 /* Create new partition with name NAME. */
46
47 static ltrans_partition
48 new_partition (const char *name)
49 {
50 ltrans_partition part = XCNEW (struct ltrans_partition_def);
51 part->encoder = lto_symtab_encoder_new (false);
52 part->name = name;
53 part->insns = 0;
54 part->symbols = 0;
55 ltrans_partitions.safe_push (part);
56 return part;
57 }
58
59 /* Free memory used by ltrans datastructures. */
60
61 void
62 free_ltrans_partitions (void)
63 {
64 unsigned int idx;
65 ltrans_partition part;
66 for (idx = 0; ltrans_partitions.iterate (idx, &part); idx++)
67 {
68 if (part->initializers_visited)
69 delete part->initializers_visited;
70 /* Symtab encoder is freed after streaming. */
71 free (part);
72 }
73 ltrans_partitions.release ();
74 }
75
76 /* Return true if symbol is already in some partition. */
77
78 static inline bool
79 symbol_partitioned_p (symtab_node *node)
80 {
81 return node->aux;
82 }
83
/* Add into PART every symbol referenced from NODE that must accompany it.

   Symbols classified SYMBOL_DUPLICATE are copied into each partition that
   uses them.  Readonly variables whose constructors may be constant folded
   are walked recursively, so that folding inside the partition does not
   leave dangling references to their initializers.  */
static void
add_references_to_partition (ltrans_partition part, symtab_node *node)
{
  int i;
  struct ipa_ref *ref = NULL;

  /* Add all duplicated references to the partition.  */
  for (i = 0; node->iterate_reference (i, ref); i++)
    if (ref->referred->get_partitioning_class () == SYMBOL_DUPLICATE)
      add_symbol_to_partition (part, ref->referred);
    /* References to a readonly variable may be constant folded into its value.
       Recursively look into the initializers of the constant variable and add
       references, too.  */
    else if (is_a <varpool_node *> (ref->referred)
	     && (dyn_cast <varpool_node *> (ref->referred)
		 ->ctor_useable_for_folding_p ()
		 || POINTER_BOUNDS_P (ref->referred->decl))
	     && !lto_symtab_encoder_in_partition_p (part->encoder, ref->referred))
      {
	/* INITIALIZERS_VISITED is allocated lazily and guards against
	   infinite recursion on circular initializer references;
	   hash_set::add returns true if the entry was already present.  */
	if (!part->initializers_visited)
	  part->initializers_visited = new hash_set<symtab_node *>;
	if (!part->initializers_visited->add (ref->referred))
	  add_references_to_partition (part, ref->referred);
      }
}
110
/* Helper function for add_symbol_to_partition doing the actual dirty work
   of adding NODE to PART.  Returns true when NODE (and everything it drags
   along: inline clones, duplicated callees, thunks, aliases and comdat
   group members) was added, false when a non-comdat partitioned symbol was
   requested a second time.  */

static bool
add_symbol_to_partition_1 (ltrans_partition part, symtab_node *node)
{
  enum symbol_partitioning_class c = node->get_partitioning_class ();
  struct ipa_ref *ref;
  symtab_node *node1;

  /* If NODE is already there, we have nothing to do.  */
  if (lto_symtab_encoder_in_partition_p (part->encoder, node))
    return true;

  /* non-duplicated aliases or thunks of a duplicated symbol needs to be output
     just once.

     Be lax about comdats; they may or may not be duplicated and we may
     end up in need to duplicate keyed comdat because it has unkeyed alias.  */
  if (c == SYMBOL_PARTITION && !DECL_COMDAT (node->decl)
      && symbol_partitioned_p (node))
    return false;

  /* Be sure that we never try to duplicate partitioned symbol
     or add external symbol.  */
  gcc_assert (c != SYMBOL_EXTERNAL
	      && (c == SYMBOL_DUPLICATE || !symbol_partitioned_p (node)));

  part->symbols++;

  lto_set_symtab_encoder_in_partition (part->encoder, node);

  if (symbol_partitioned_p (node))
    {
      node->in_other_partition = 1;
      if (symtab->dump_file)
	fprintf (symtab->dump_file,
		 "Symbol node %s now used in multiple partitions\n",
		 node->name ());
    }
  /* AUX is used as a counter of partitions containing the symbol.  */
  node->aux = (void *)((size_t)node->aux + 1);

  if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
    {
      struct cgraph_edge *e;
      /* Only real (non-alias) partitioned bodies contribute to the
	 partition's instruction count.  */
      if (!node->alias && c == SYMBOL_PARTITION)
	part->insns += ipa_fn_summaries->get (cnode)->size;

      /* Add all inline clones and callees that are duplicated.  */
      for (e = cnode->callees; e; e = e->next_callee)
	if (!e->inline_failed)
	  add_symbol_to_partition_1 (part, e->callee);
	else if (e->callee->get_partitioning_class () == SYMBOL_DUPLICATE)
	  add_symbol_to_partition (part, e->callee);

      /* Add all thunks associated with the function.  */
      for (e = cnode->callers; e; e = e->next_caller)
	if (e->caller->thunk.thunk_p && !e->caller->global.inlined_to)
	  add_symbol_to_partition_1 (part, e->caller);

      /* Instrumented version is actually the same function.
	 Therefore put it into the same partition.  */
      if (cnode->instrumented_version)
	add_symbol_to_partition_1 (part, cnode->instrumented_version);
    }

  add_references_to_partition (part, node);

  /* Add all aliases associated with the symbol.  */

  FOR_EACH_ALIAS (node, ref)
    if (!ref->referring->transparent_alias)
      add_symbol_to_partition_1 (part, ref->referring);
    else
      {
	struct ipa_ref *ref2;
	/* We do not need to add transparent aliases if they are not used.
	   However we must add aliases of transparent aliases if they exist.  */
	FOR_EACH_ALIAS (ref->referring, ref2)
	  {
	    /* Nested transparent aliases are not permitted.  */
	    gcc_checking_assert (!ref2->referring->transparent_alias);
	    add_symbol_to_partition_1 (part, ref2->referring);
	  }
      }

  /* Ensure that SAME_COMDAT_GROUP lists are always added in a group.  */
  if (node->same_comdat_group)
    for (node1 = node->same_comdat_group;
	 node1 != node; node1 = node1->same_comdat_group)
      if (!node->alias)
	{
	  bool added = add_symbol_to_partition_1 (part, node1);
	  gcc_assert (added);
	}
  return true;
}
208
/* If symbol NODE is really part of other symbol's definition (i.e. it is
   internal label, thunk, alias or so), return the outer symbol.
   When add_symbol_to_partition_1 is called on the outer symbol it must
   eventually add NODE, too.  */
static symtab_node *
contained_in_symbol (symtab_node *node)
{
  /* There is no need to consider transparent aliases to be part of the
     definition: they are only useful inside the partition they are output
     and thus we will always see an explicit reference to it.  */
  if (node->transparent_alias)
    return node;
  if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
    {
      /* For functions, walk to the symbol the body belongs to and then
	 to the function it was inlined into, if any.  */
      cnode = cnode->function_symbol ();
      if (cnode->global.inlined_to)
	cnode = cnode->global.inlined_to;
      return cnode;
    }
  else if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
    /* Variable aliases belong with their ultimate target.  */
    return vnode->ultimate_alias_target ();
  return node;
}
232
/* Add symbol NODE to partition PART.  When the definition of NODE is part
   of another symbol's definition (thunk, alias, inline clone), add the
   outer symbol instead; add_symbol_to_partition_1 will then pull NODE in.  */

static void
add_symbol_to_partition (ltrans_partition part, symtab_node *node)
{
  symtab_node *node1;

  /* Verify that we do not try to duplicate something that can not be.  */
  gcc_checking_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
		       || !symbol_partitioned_p (node));

  /* Walk up to the outermost enclosing symbol.  */
  while ((node1 = contained_in_symbol (node)) != node)
    node = node1;

  /* If we have duplicated symbol contained in something we can not duplicate,
     we are very badly screwed.  The other way is possible, so we do not
     assert this in add_symbol_to_partition_1.

     Be lax about comdats; they may or may not be duplicated and we may
     end up in need to duplicate keyed comdat because it has unkeyed alias.  */

  gcc_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
	      || DECL_COMDAT (node->decl)
	      || !symbol_partitioned_p (node));

  add_symbol_to_partition_1 (part, node);
}
261
/* Undo additions to PARTITION until only the first N_NODES symbols remain
   in its encoder.  Used by lto_balanced_map to roll the partition back to
   the point where the boundary cost was minimal.  */

static void
undo_partition (ltrans_partition partition, unsigned int n_nodes)
{
  while (lto_symtab_encoder_size (partition->encoder) > (int)n_nodes)
    {
      /* Symbols are removed from the tail; N_NODES is the index of the
	 first symbol to drop.  */
      symtab_node *node = lto_symtab_encoder_deref (partition->encoder,
						    n_nodes);
      partition->symbols--;
      cgraph_node *cnode;

      /* After UNDO we no longer know what was visited.  */
      if (partition->initializers_visited)
	delete partition->initializers_visited;
      partition->initializers_visited = NULL;

      /* Non-alias partitioned function bodies accounted their size when
	 added; subtract it back.  */
      if (!node->alias && (cnode = dyn_cast <cgraph_node *> (node))
	  && node->get_partitioning_class () == SYMBOL_PARTITION)
	partition->insns -= ipa_fn_summaries->get (cnode)->size;
      lto_symtab_encoder_delete_node (partition->encoder, node);
      /* AUX counts partitions containing the symbol; decrement for this one.  */
      node->aux = (void *)((size_t)node->aux - 1);
    }
}
287
288 /* Group cgrah nodes by input files. This is used mainly for testing
289 right now. */
290
291 void
292 lto_1_to_1_map (void)
293 {
294 symtab_node *node;
295 struct lto_file_decl_data *file_data;
296 hash_map<lto_file_decl_data *, ltrans_partition> pmap;
297 ltrans_partition partition;
298 int npartitions = 0;
299
300 FOR_EACH_SYMBOL (node)
301 {
302 if (node->get_partitioning_class () != SYMBOL_PARTITION
303 || symbol_partitioned_p (node))
304 continue;
305
306 file_data = node->lto_file_data;
307
308 if (file_data)
309 {
310 ltrans_partition *slot = &pmap.get_or_insert (file_data);
311 if (*slot)
312 partition = *slot;
313 else
314 {
315 partition = new_partition (file_data->file_name);
316 *slot = partition;
317 npartitions++;
318 }
319 }
320 else if (!file_data && ltrans_partitions.length ())
321 partition = ltrans_partitions[0];
322 else
323 {
324 partition = new_partition ("");
325 pmap.put (NULL, partition);
326 npartitions++;
327 }
328
329 add_symbol_to_partition (partition, node);
330 }
331
332 /* If the cgraph is empty, create one cgraph node set so that there is still
333 an output file for any variables that need to be exported in a DSO. */
334 if (!npartitions)
335 new_partition ("empty");
336
337 }
338
339 /* Maximal partitioning. Put every new symbol into new partition if possible. */
340
341 void
342 lto_max_map (void)
343 {
344 symtab_node *node;
345 ltrans_partition partition;
346 int npartitions = 0;
347
348 FOR_EACH_SYMBOL (node)
349 {
350 if (node->get_partitioning_class () != SYMBOL_PARTITION
351 || symbol_partitioned_p (node))
352 continue;
353 partition = new_partition (node->asm_name ());
354 add_symbol_to_partition (partition, node);
355 npartitions++;
356 }
357 if (!npartitions)
358 new_partition ("empty");
359 }
360
361 /* Helper function for qsort; sort nodes by order. noreorder functions must have
362 been removed earlier. */
363 static int
364 node_cmp (const void *pa, const void *pb)
365 {
366 const struct cgraph_node *a = *(const struct cgraph_node * const *) pa;
367 const struct cgraph_node *b = *(const struct cgraph_node * const *) pb;
368
369 /* Profile reorder flag enables function reordering based on first execution
370 of a function. All functions with profile are placed in ascending
371 order at the beginning. */
372
373 if (flag_profile_reorder_functions)
374 {
375 /* Functions with time profile are sorted in ascending order. */
376 if (a->tp_first_run && b->tp_first_run)
377 return a->tp_first_run != b->tp_first_run
378 ? a->tp_first_run - b->tp_first_run
379 : a->order - b->order;
380
381 /* Functions with time profile are sorted before the functions
382 that do not have the profile. */
383 if (a->tp_first_run || b->tp_first_run)
384 return b->tp_first_run - a->tp_first_run;
385 }
386
387 return b->order - a->order;
388 }
389
390 /* Helper function for qsort; sort nodes by order. */
391 static int
392 varpool_node_cmp (const void *pa, const void *pb)
393 {
394 const symtab_node *a = *static_cast<const symtab_node * const *> (pa);
395 const symtab_node *b = *static_cast<const symtab_node * const *> (pb);
396 return b->order - a->order;
397 }
398
399 /* Add all symtab nodes from NEXT_NODE to PARTITION in order. */
400
401 static void
402 add_sorted_nodes (vec<symtab_node *> &next_nodes, ltrans_partition partition)
403 {
404 unsigned i;
405 symtab_node *node;
406
407 next_nodes.qsort (varpool_node_cmp);
408 FOR_EACH_VEC_ELT (next_nodes, i, node)
409 if (!symbol_partitioned_p (node))
410 add_symbol_to_partition (partition, node);
411 }
412
/* Return true if we should account reference from N1 to N2 in cost
   of partition boundary.  */

bool
account_reference_p (symtab_node *n1, symtab_node *n2)
{
  if (cgraph_node *cnode = dyn_cast <cgraph_node *> (n1))
    /* NOTE(review): this assignment appears to be a no-op (CNODE is the
       same pointer as N1) — possibly it was meant to walk to
       cnode->function_symbol () or similar; TODO confirm intent.  */
    n1 = cnode;
  /* Do not account recursion - the code below will handle it incorrectly
     otherwise.  Also do not account references to external symbols.
     They will never become local.  */
  if (n1 == n2
      || DECL_EXTERNAL (n2->decl)
      || !n2->definition)
    return false;
  return true;
}
430
/* Group cgraph nodes into equally-sized partitions.

   The partitioning algorithm is simple: nodes are taken in predefined order.
   The order corresponds to the order we want functions to have in the final
   output.  In the future this will be given by function reordering pass, but
   at the moment we use the topological order, which is a good approximation.

   The goal is to partition this linear order into intervals (partitions) so
   that all the partitions have approximately the same size and the number of
   callgraph or IPA reference edges crossing boundaries is minimal.

   This is a lot faster (O(n) in size of callgraph) than algorithms doing
   priority-based graph clustering that are generally O(n^2) and, since
   WHOPR is designed to make things go well across partitions, it leads
   to good results.

   We compute the expected size of a partition as:

     max (total_size / lto_partitions, min_partition_size)

   We use dynamic expected size of partition so small programs are partitioned
   into enough partitions to allow use of multiple CPUs, while large programs
   are not partitioned too much.  Creating too many partitions significantly
   increases the streaming overhead.

   In the future, we would like to bound the maximal size of partitions so as
   to prevent the LTRANS stage from consuming too much memory.  At the moment,
   however, the WPA stage is the most memory intensive for large benchmarks,
   since too many types and declarations are read into memory.

   The function implements a simple greedy algorithm.  Nodes are being added
   to the current partition until after 3/4 of the expected partition size is
   reached.  Past this threshold, we keep track of boundary size (number of
   edges going to other partitions) and continue adding functions until after
   the current partition has grown to twice the expected partition size.  Then
   the process is undone to the point where the minimal ratio of boundary size
   and in-partition calls was reached.  */

void
lto_balanced_map (int n_lto_partitions, int max_partition_size)
{
  int n_nodes = 0;
  int n_varpool_nodes = 0, varpool_pos = 0, best_varpool_pos = 0;
  struct cgraph_node **order = XNEWVEC (cgraph_node *, symtab->cgraph_max_uid);
  auto_vec<cgraph_node *> noreorder;
  auto_vec<varpool_node *> varpool_order;
  int i;
  struct cgraph_node *node;
  int64_t original_total_size, total_size = 0;
  int64_t partition_size;
  ltrans_partition partition;
  int last_visited_node = 0;
  varpool_node *vnode;
  /* COST is the size of the current partition's boundary; INTERNAL the
     weight of edges fully inside it.  */
  int64_t cost = 0, internal = 0;
  int best_n_nodes = 0, best_i = 0;
  /* BEST_* snapshot the state at the point of minimal boundary/internal
     ratio, to which we roll back when the partition grows too large.
     BEST_COST of -1 means "no snapshot taken yet".  */
  int64_t best_cost = -1, best_internal = 0, best_size = 0;
  int npartitions;
  int current_order = -1;
  int noreorder_pos = 0;

  /* AUX (the partition counter) must start clean for every variable.  */
  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->aux);

  /* Split functions into the reorderable work list ORDER and the
     NOREORDER list, accumulating total instruction size.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->get_partitioning_class () == SYMBOL_PARTITION)
      {
	if (node->no_reorder)
	  noreorder.safe_push (node);
	else
	  order[n_nodes++] = node;
	if (!node->alias)
	  total_size += ipa_fn_summaries->get (node)->size;
      }

  original_total_size = total_size;

  /* Streaming works best when the source units do not cross partition
     boundaries much.  This is because importing function from a source
     unit tends to import a lot of global trees defined there.  We should
     get better about minimizing the function boundary, but until that
     things works smoother if we order in source order.  */
  qsort (order, n_nodes, sizeof (struct cgraph_node *), node_cmp);
  noreorder.qsort (node_cmp);

  if (symtab->dump_file)
    {
      for (i = 0; i < n_nodes; i++)
	fprintf (symtab->dump_file, "Balanced map symbol order:%s:%u\n",
		 order[i]->name (), order[i]->tp_first_run);
      for (i = 0; i < (int)noreorder.length (); i++)
	fprintf (symtab->dump_file, "Balanced map symbol no_reorder:%s:%u\n",
		 noreorder[i]->name (), noreorder[i]->tp_first_run);
    }

  /* Collect all variables that should not be reordered.  */
  FOR_EACH_VARIABLE (vnode)
    if (vnode->get_partitioning_class () == SYMBOL_PARTITION
	&& vnode->no_reorder)
      varpool_order.safe_push (vnode);
  n_varpool_nodes = varpool_order.length ();
  varpool_order.qsort (varpool_node_cmp);

  /* Compute partition size and create the first partition.  */
  if (PARAM_VALUE (MIN_PARTITION_SIZE) > max_partition_size)
    fatal_error (input_location, "min partition size cannot be greater "
		 "than max partition size");

  partition_size = total_size / n_lto_partitions;
  if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
    partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
  npartitions = 1;
  partition = new_partition ("");
  if (symtab->dump_file)
    fprintf (symtab->dump_file, "Total unit size: %" PRId64 ", partition size: %" PRId64 "\n",
	     total_size, partition_size);

  auto_vec<symtab_node *> next_nodes;

  for (i = 0; i < n_nodes; i++)
    {
      if (symbol_partitioned_p (order[i]))
	continue;

      current_order = order[i]->order;

      /* Output noreorder and varpool in program order first.  */
      next_nodes.truncate (0);
      while (varpool_pos < n_varpool_nodes
	     && varpool_order[varpool_pos]->order < current_order)
	next_nodes.safe_push (varpool_order[varpool_pos++]);
      while (noreorder_pos < (int)noreorder.length ()
	     && noreorder[noreorder_pos]->order < current_order)
	next_nodes.safe_push (noreorder[noreorder_pos++]);
      add_sorted_nodes (next_nodes, partition);

      /* add_sorted_nodes above may already have pulled ORDER[i] in.  */
      if (!symbol_partitioned_p (order[i]))
	add_symbol_to_partition (partition, order[i]);

      /* Once we added a new node to the partition, we also want to add
	 all referenced variables unless they were already added into some
	 earlier partition.
	 add_symbol_to_partition adds possibly multiple nodes and
	 variables that are needed to satisfy needs of ORDER[i].
	 We remember last visited cgraph and varpool node from last iteration
	 of outer loop that allows us to process every new addition.

	 At the same time we compute size of the boundary into COST.  Every
	 callgraph or IPA reference edge leaving the partition contributes into
	 COST.  Every edge inside partition was earlier computed as one leaving
	 it and thus we need to subtract it from COST.  */
      while (last_visited_node < lto_symtab_encoder_size (partition->encoder))
	{
	  int j;
	  struct ipa_ref *ref = NULL;
	  symtab_node *snode = lto_symtab_encoder_deref (partition->encoder,
							 last_visited_node);

	  if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
	    {
	      struct cgraph_edge *edge;

	      last_visited_node++;

	      gcc_assert (node->definition || node->weakref);

	      /* Compute boundary cost of callgraph edges.  */
	      for (edge = node->callees; edge; edge = edge->next_callee)
		/* Inline edges will always end up local.  */
		if (edge->inline_failed
		    && account_reference_p (node, edge->callee))
		  {
		    int edge_cost = edge->frequency ();
		    int index;

		    if (!edge_cost)
		      edge_cost = 1;
		    gcc_assert (edge_cost > 0);
		    index = lto_symtab_encoder_lookup (partition->encoder,
						       edge->callee);
		    /* An endpoint already processed in this partition means
		       the edge was counted as crossing before; move its
		       weight from COST to INTERNAL.  */
		    if (index != LCC_NOT_FOUND
			&& index < last_visited_node - 1)
		      cost -= edge_cost, internal += edge_cost;
		    else
		      cost += edge_cost;
		  }
	      for (edge = node->callers; edge; edge = edge->next_caller)
		if (edge->inline_failed
		    && account_reference_p (edge->caller, node))
		  {
		    int edge_cost = edge->frequency ();
		    int index;

		    gcc_assert (edge->caller->definition);
		    if (!edge_cost)
		      edge_cost = 1;
		    gcc_assert (edge_cost > 0);
		    index = lto_symtab_encoder_lookup (partition->encoder,
						       edge->caller);
		    if (index != LCC_NOT_FOUND
			&& index < last_visited_node - 1)
		      cost -= edge_cost, internal += edge_cost;
		    else
		      cost += edge_cost;
		  }
	    }
	  else
	    last_visited_node++;

	  /* Compute boundary cost of IPA REF edges and at the same time look into
	     variables referenced from current partition and try to add them.  */
	  for (j = 0; snode->iterate_reference (j, ref); j++)
	    if (!account_reference_p (snode, ref->referred))
	      ;
	    else if (is_a <varpool_node *> (ref->referred))
	      {
		int index;

		vnode = dyn_cast <varpool_node *> (ref->referred);
		if (!symbol_partitioned_p (vnode)
		    && !vnode->no_reorder
		    && vnode->get_partitioning_class () == SYMBOL_PARTITION)
		  add_symbol_to_partition (partition, vnode);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   vnode);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	    else
	      {
		int index;

		node = dyn_cast <cgraph_node *> (ref->referred);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   node);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	  for (j = 0; snode->iterate_referring (j, ref); j++)
	    if (!account_reference_p (ref->referring, snode))
	      ;
	    else if (is_a <varpool_node *> (ref->referring))
	      {
		int index;

		vnode = dyn_cast <varpool_node *> (ref->referring);
		gcc_assert (vnode->definition);
		/* It is better to couple variables with their users,
		   because it allows them to be removed.  Coupling
		   with objects they refer to only helps to reduce
		   number of symbols promoted to hidden.  */
		if (!symbol_partitioned_p (vnode)
		    && !vnode->no_reorder
		    && !vnode->can_remove_if_no_refs_p ()
		    && vnode->get_partitioning_class () == SYMBOL_PARTITION)
		  add_symbol_to_partition (partition, vnode);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   vnode);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	    else
	      {
		int index;

		node = dyn_cast <cgraph_node *> (ref->referring);
		gcc_assert (node->definition);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   node);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	}

      gcc_assert (cost >= 0 && internal >= 0);

      /* If the partition is large enough, start looking for smallest boundary cost.
	 If partition still seems too small (less than 7/8 of target weight) accept
	 any cost.  If partition has right size, optimize for highest internal/cost.
	 Later we stop building partition if its size is 9/8 of the target weight.  */
      if (partition->insns < partition_size * 7 / 8
	  || best_cost == -1
	  || (!cost
	      || ((sreal)best_internal * (sreal) cost
		  < ((sreal) internal * (sreal)best_cost))))
	{
	  best_cost = cost;
	  best_internal = internal;
	  best_size = partition->insns;
	  best_i = i;
	  best_n_nodes = lto_symtab_encoder_size (partition->encoder);
	  best_varpool_pos = varpool_pos;
	}
      if (symtab->dump_file)
	fprintf (symtab->dump_file, "Step %i: added %s/%i, size %i, "
		 "cost %" PRId64 "/%" PRId64 " "
		 "best %" PRId64 "/%" PRId64", step %i\n", i,
		 order[i]->name (), order[i]->order,
		 partition->insns, cost, internal,
		 best_cost, best_internal, best_i);
      /* Partition is too large, unwind into step when best cost was reached and
	 start new partition.  */
      if (partition->insns > 9 * partition_size / 8
	  || partition->insns > max_partition_size)
	{
	  if (best_i != i)
	    {
	      if (symtab->dump_file)
		fprintf (symtab->dump_file, "Unwinding %i insertions to step %i\n",
			 i - best_i, best_i);
	      undo_partition (partition, best_n_nodes);
	      varpool_pos = best_varpool_pos;
	    }
	  gcc_assert (best_size == partition->insns);
	  i = best_i;
	  if (symtab->dump_file)
	    fprintf (symtab->dump_file,
		     "Partition insns: %i (want %" PRId64 ")\n",
		     partition->insns, partition_size);
	  /* When we are finished, avoid creating empty partition.  */
	  while (i < n_nodes - 1 && symbol_partitioned_p (order[i + 1]))
	    i++;
	  if (i == n_nodes - 1)
	    break;
	  total_size -= partition->insns;
	  partition = new_partition ("");
	  last_visited_node = 0;
	  cost = 0;

	  if (symtab->dump_file)
	    fprintf (symtab->dump_file, "New partition\n");
	  best_n_nodes = 0;
	  best_cost = -1;

	  /* Since the size of partitions is just approximate, update the size after
	     we finished current one.  */
	  if (npartitions < n_lto_partitions)
	    partition_size = total_size / (n_lto_partitions - npartitions);
	  else
	    /* Watch for overflow.  */
	    partition_size = INT_MAX / 16;

	  if (symtab->dump_file)
	    fprintf (symtab->dump_file,
		     "Total size: %" PRId64 " partition_size: %" PRId64 "\n",
		     total_size, partition_size);
	  if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
	    partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
	  npartitions ++;
	}
    }

  next_nodes.truncate (0);

  /* Variables that are not reachable from the code go into last partition.  */
  FOR_EACH_VARIABLE (vnode)
    if (vnode->get_partitioning_class () == SYMBOL_PARTITION
	&& !symbol_partitioned_p (vnode))
      next_nodes.safe_push (vnode);

  /* Output remaining ordered symbols.  */
  while (varpool_pos < n_varpool_nodes)
    next_nodes.safe_push (varpool_order[varpool_pos++]);
  while (noreorder_pos < (int)noreorder.length ())
    next_nodes.safe_push (noreorder[noreorder_pos++]);
  /* For one partition the cost of boundary should be 0 unless we added final
     symbols here (these are not accounted) or we have accounting bug.  */
  gcc_assert (next_nodes.length () || npartitions != 1 || !best_cost || best_cost == -1);
  add_sorted_nodes (next_nodes, partition);

  free (order);

  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "\nPartition sizes:\n");
      unsigned partitions = ltrans_partitions.length ();

      for (unsigned i = 0; i < partitions ; i++)
	{
	  ltrans_partition p = ltrans_partitions[i];
	  fprintf (symtab->dump_file, "partition %d contains %d (%2.2f%%)"
		   " symbols and %d (%2.2f%%) insns\n", i, p->symbols,
		   100.0 * p->symbols / n_nodes, p->insns,
		   100.0 * p->insns / original_total_size);
	}

      fprintf (symtab->dump_file, "\n");
    }
}
834
835 /* Return true if we must not change the name of the NODE. The name as
836 extracted from the corresponding decl should be passed in NAME. */
837
838 static bool
839 must_not_rename (symtab_node *node, const char *name)
840 {
841 /* Our renaming machinery do not handle more than one change of assembler name.
842 We should not need more than one anyway. */
843 if (node->lto_file_data
844 && lto_get_decl_name_mapping (node->lto_file_data, name) != name)
845 {
846 if (symtab->dump_file)
847 fprintf (symtab->dump_file,
848 "Not privatizing symbol name: %s. It privatized already.\n",
849 name);
850 return true;
851 }
852 /* Avoid mangling of already mangled clones.
853 ??? should have a flag whether a symbol has a 'private' name already,
854 since we produce some symbols like that i.e. for global constructors
855 that are not really clones. */
856 if (node->unique_name)
857 {
858 if (symtab->dump_file)
859 fprintf (symtab->dump_file,
860 "Not privatizing symbol name: %s. Has unique name.\n",
861 name);
862 return true;
863 }
864 return false;
865 }
866
/* If we are an offload compiler, we may have to rewrite symbols to be
   valid on this target.  Return either PTR or a modified version of it.

   When a returned copy is made, it is heap-allocated via xstrdup and
   intentionally never freed (it becomes the symbol's name).  */

static const char *
maybe_rewrite_identifier (const char *ptr)
{
#if defined ACCEL_COMPILER && (defined NO_DOT_IN_LABEL || defined NO_DOLLAR_IN_LABEL)
  /* Pick the replacement character VALID and the set REJECT of characters
     the target cannot accept in labels.  */
#ifndef NO_DOT_IN_LABEL
  char valid = '.';
  const char reject[] = "$";
#elif !defined NO_DOLLAR_IN_LABEL
  char valid = '$';
  const char reject[] = ".";
#else
  char valid = '_';
  const char reject[] = ".$";
#endif

  char *copy = NULL;
  const char *match = ptr;
  for (;;)
    {
      size_t off = strcspn (match, reject);
      if (match[off] == '\0')
	break;
      /* Copy lazily: allocate only when a rejected character is found,
	 so unmodified names are returned as-is.  */
      if (copy == NULL)
	{
	  copy = xstrdup (ptr);
	  match = copy;
	}
      copy[off] = valid;
    }
  return match;
#else
  return ptr;
#endif
}
904
/* Ensure that the symbol in NODE is valid for the target, and if not,
   rewrite it and record the rename for the LTO input machinery.  */

static void
validize_symbol_for_target (symtab_node *node)
{
  tree decl = node->decl;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));

  /* Symbols that were renamed once already, or carry a unique name,
     must keep the name they currently have.  */
  if (must_not_rename (node, name))
    return;

  const char *name2 = maybe_rewrite_identifier (name);
  if (name2 != name)
    {
      symtab->change_decl_assembler_name (decl, get_identifier (name2));
      /* Keep the old -> new mapping so the LTO reader can still resolve
	 references using the original name.  */
      if (node->lto_file_data)
	lto_record_renamed_decl (node->lto_file_data, name,
				 IDENTIFIER_POINTER
				 (DECL_ASSEMBLER_NAME (decl)));
    }
}
927
/* Helper for privatize_symbol_name.  Mangle the assembler name of DECL
   (belonging to NODE) into a local "lto_priv" clone name.  Return false
   when renaming is not possible or not allowed.  */

static bool
privatize_symbol_name_1 (symtab_node *node, tree decl)
{
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));

  if (must_not_rename (node, name))
    return false;

  /* On offload targets the identifier may first need invalid characters
     replaced; NAME is the (possibly rewritten) pre-privatization name
     from here on.  */
  name = maybe_rewrite_identifier (name);
  symtab->change_decl_assembler_name (decl,
				      clone_function_name_1 (name,
							     "lto_priv"));

  /* Record the mapping so the LTO reader can still resolve references
     that use the original name.  */
  if (node->lto_file_data)
    lto_record_renamed_decl (node->lto_file_data, name,
			     IDENTIFIER_POINTER
			     (DECL_ASSEMBLER_NAME (decl)));

  if (symtab->dump_file)
    fprintf (symtab->dump_file,
	     "Privatizing symbol name: %s -> %s\n",
	     name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));

  return true;
}
956
/* Mangle NODE symbol name into a local name.
   This is necessary to do
   1) if two or more static vars of same assembler name
      are merged into single ltrans unit.
   2) if previously static var was promoted hidden to avoid possible conflict
      with symbols defined out of the LTO world.
   Return true if the symbol was actually renamed.  */

static bool
privatize_symbol_name (symtab_node *node)
{
  if (!privatize_symbol_name_1 (node, node->decl))
    return false;

  /* We could change name which is a target of transparent alias
     chain of instrumented function name.  Fix alias chain if so.  */
  if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
    {
      tree iname = NULL_TREE;
      if (cnode->instrumentation_clone)
	{
	  /* If we want to privatize instrumentation clone
	     then we also need to privatize original function.  */
	  if (cnode->instrumented_version)
	    privatize_symbol_name (cnode->instrumented_version);
	  else
	    privatize_symbol_name_1 (cnode, cnode->orig_decl);
	  /* Re-link the alias chain: the clone's assembler name points at
	     the original's via TREE_CHAIN.  */
	  iname = DECL_ASSEMBLER_NAME (cnode->decl);
	  TREE_CHAIN (iname) = DECL_ASSEMBLER_NAME (cnode->orig_decl);
	}
      else if (cnode->instrumented_version
	       && cnode->instrumented_version->orig_decl == cnode->decl)
	{
	  /* NODE is the original of an instrumented clone; update the
	     clone's chain to the freshly privatized name.  */
	  iname = DECL_ASSEMBLER_NAME (cnode->instrumented_version->decl);
	  TREE_CHAIN (iname) = DECL_ASSEMBLER_NAME (cnode->decl);
	}
    }

  return true;
}
996
997 /* Promote variable VNODE to be static. */
998
999 static void
1000 promote_symbol (symtab_node *node)
1001 {
1002 /* We already promoted ... */
1003 if (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
1004 && DECL_VISIBILITY_SPECIFIED (node->decl)
1005 && TREE_PUBLIC (node->decl))
1006 {
1007 validize_symbol_for_target (node);
1008 return;
1009 }
1010
1011 gcc_checking_assert (!TREE_PUBLIC (node->decl)
1012 && !DECL_EXTERNAL (node->decl));
1013 /* Be sure that newly public symbol does not conflict with anything already
1014 defined by the non-LTO part. */
1015 privatize_symbol_name (node);
1016 TREE_PUBLIC (node->decl) = 1;
1017 DECL_VISIBILITY (node->decl) = VISIBILITY_HIDDEN;
1018 DECL_VISIBILITY_SPECIFIED (node->decl) = true;
1019 if (symtab->dump_file)
1020 fprintf (symtab->dump_file,
1021 "Promoting as hidden: %s (%s)\n", node->name (),
1022 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
1023
1024 /* Promoting a symbol also promotes all transparent aliases with exception
1025 of weakref where the visibility flags are always wrong and set to
1026 !PUBLIC. */
1027 ipa_ref *ref;
1028 for (unsigned i = 0; node->iterate_direct_aliases (i, ref); i++)
1029 {
1030 struct symtab_node *alias = ref->referring;
1031 if (alias->transparent_alias && !alias->weakref)
1032 {
1033 TREE_PUBLIC (alias->decl) = 1;
1034 DECL_VISIBILITY (alias->decl) = VISIBILITY_HIDDEN;
1035 DECL_VISIBILITY_SPECIFIED (alias->decl) = true;
1036 if (symtab->dump_file)
1037 fprintf (symtab->dump_file,
1038 "Promoting alias as hidden: %s\n",
1039 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
1040 }
1041 gcc_assert (!alias->weakref || TREE_PUBLIC (alias->decl));
1042 }
1043 }
1044
1045 /* Return true if NODE needs named section even if it won't land in
1046 the partition symbol table.
1047
1048 FIXME: we should really not use named sections for inline clones
1049 and master clones. */
1050
1051 static bool
1052 may_need_named_section_p (lto_symtab_encoder_t encoder, symtab_node *node)
1053 {
1054 struct cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1055 if (!cnode)
1056 return false;
1057 if (node->real_symbol_p ())
1058 return false;
1059 return (!encoder
1060 || (lto_symtab_encoder_lookup (encoder, node) != LCC_NOT_FOUND
1061 && lto_symtab_encoder_encode_body_p (encoder,
1062 cnode)));
1063 }
1064
/* If NODE represents a static variable.  See if there are other variables
   of the same name in partition ENCODER (or in whole compilation unit if
   ENCODER is NULL) and if so, mangle the statics.  Always mangle all
   conflicting statics, so we reduce changes of silently miscompiling
   asm statements referring to them by symbol name.  */

static void
rename_statics (lto_symtab_encoder_t encoder, symtab_node *node)
{
  tree decl = node->decl;
  symtab_node *s;
  tree name = DECL_ASSEMBLER_NAME (decl);

  /* See if this is static symbol.  Public/external symbols can not
     conflict with statics, so they need no renaming -- unless they are
     clones that still may need a named section.  */
  if (((node->externally_visible && !node->weakref)
       /* FIXME: externally_visible is somewhat illogically not set for
	  external symbols (i.e. those not defined).  Remove this test
	  once this is fixed.  */
       || DECL_EXTERNAL (node->decl)
       || !node->real_symbol_p ())
      && !may_need_named_section_p (encoder, node))
    return;

  /* Now walk symbols sharing the same name and see if there are any conflicts.
     (all types of symbols counts here, since we can not have static of the
     same name as external or public symbol.) */
  for (s = symtab_node::get_for_asmname (name);
       s; s = s->next_sharing_asm_name)
    if ((s->real_symbol_p () || may_need_named_section_p (encoder, s))
	&& s->decl != node->decl
	&& (!encoder
	    || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
      break;

  /* OK, no conflict, so we have nothing to do.  */
  if (!s)
    return;

  if (symtab->dump_file)
    fprintf (symtab->dump_file,
	     "Renaming statics with asm name: %s\n", node->name ());

  /* Assign every symbol in the set that shares the same ASM name an unique
     mangled name.  */
  for (s = symtab_node::get_for_asmname (name); s;)
    /* Rename only local symbols (or weakrefs) ...  */
    if ((!s->externally_visible || s->weakref)
	/* Transparent aliases having same name as target are renamed at a
	   time their target gets new name.  Transparent aliases that use
	   separate assembler name require the name to be unique.  */
	&& (!s->transparent_alias || !s->definition || s->weakref
	    || !symbol_table::assembler_names_equal_p
		 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (s->decl)),
		  IDENTIFIER_POINTER
		    (DECL_ASSEMBLER_NAME (s->get_alias_target()->decl))))
	/* ... that are real local definitions or clones needing a
	   named section ...  */
	&& ((s->real_symbol_p ()
	     && !DECL_EXTERNAL (s->decl)
	     && !TREE_PUBLIC (s->decl))
	    || may_need_named_section_p (encoder, s))
	/* ... and that are part of this partition (if any).  */
	&& (!encoder
	    || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
      {
	if (privatize_symbol_name (s))
	  /* Re-start from beginning since we do not know how many
	     symbols changed a name.  */
	  s = symtab_node::get_for_asmname (name);
	else s = s->next_sharing_asm_name;
      }
    else s = s->next_sharing_asm_name;
}
1134
1135 /* Find out all static decls that need to be promoted to global because
1136 of cross file sharing. This function must be run in the WPA mode after
1137 all inlinees are added. */
1138
1139 void
1140 lto_promote_cross_file_statics (void)
1141 {
1142 unsigned i, n_sets;
1143
1144 gcc_assert (flag_wpa);
1145
1146 lto_stream_offload_p = false;
1147 select_what_to_stream ();
1148
1149 /* First compute boundaries. */
1150 n_sets = ltrans_partitions.length ();
1151 for (i = 0; i < n_sets; i++)
1152 {
1153 ltrans_partition part
1154 = ltrans_partitions[i];
1155 part->encoder = compute_ltrans_boundary (part->encoder);
1156 }
1157
1158 /* Look at boundaries and promote symbols as needed. */
1159 for (i = 0; i < n_sets; i++)
1160 {
1161 lto_symtab_encoder_iterator lsei;
1162 lto_symtab_encoder_t encoder = ltrans_partitions[i]->encoder;
1163
1164 for (lsei = lsei_start (encoder); !lsei_end_p (lsei);
1165 lsei_next (&lsei))
1166 {
1167 symtab_node *node = lsei_node (lsei);
1168
1169 /* If symbol is static, rename it if its assembler name
1170 clashes with anything else in this unit. */
1171 rename_statics (encoder, node);
1172
1173 /* No need to promote if symbol already is externally visible ... */
1174 if (node->externally_visible
1175 /* ... or if it is part of current partition ... */
1176 || lto_symtab_encoder_in_partition_p (encoder, node)
1177 /* ... or if we do not partition it. This mean that it will
1178 appear in every partition referencing it. */
1179 || node->get_partitioning_class () != SYMBOL_PARTITION)
1180 {
1181 validize_symbol_for_target (node);
1182 continue;
1183 }
1184
1185 promote_symbol (node);
1186 }
1187 }
1188 }
1189
1190 /* Rename statics in the whole unit in the case that
1191 we do -flto-partition=none. */
1192
1193 void
1194 lto_promote_statics_nonwpa (void)
1195 {
1196 symtab_node *node;
1197 FOR_EACH_SYMBOL (node)
1198 {
1199 rename_statics (NULL, node);
1200 validize_symbol_for_target (node);
1201 }
1202 }