]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/lto/lto-partition.c
Revert accidental checkin.
[thirdparty/gcc.git] / gcc / lto / lto-partition.c
1 /* LTO partitioning logic routines.
2 Copyright (C) 2009-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "target.h"
24 #include "function.h"
25 #include "basic-block.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "stringpool.h"
30 #include "cgraph.h"
31 #include "lto-streamer.h"
32 #include "params.h"
33 #include "symbol-summary.h"
34 #include "tree-vrp.h"
35 #include "ipa-prop.h"
36 #include "ipa-fnsummary.h"
37 #include "lto-partition.h"
38
39 vec<ltrans_partition> ltrans_partitions;
40
41 static void add_symbol_to_partition (ltrans_partition part, symtab_node *node);
42
43
44 /* Create new partition with name NAME. */
45
46 static ltrans_partition
47 new_partition (const char *name)
48 {
49 ltrans_partition part = XCNEW (struct ltrans_partition_def);
50 part->encoder = lto_symtab_encoder_new (false);
51 part->name = name;
52 part->insns = 0;
53 part->symbols = 0;
54 ltrans_partitions.safe_push (part);
55 return part;
56 }
57
58 /* Free memory used by ltrans datastructures. */
59
60 void
61 free_ltrans_partitions (void)
62 {
63 unsigned int idx;
64 ltrans_partition part;
65 for (idx = 0; ltrans_partitions.iterate (idx, &part); idx++)
66 {
67 if (part->initializers_visited)
68 delete part->initializers_visited;
69 /* Symtab encoder is freed after streaming. */
70 free (part);
71 }
72 ltrans_partitions.release ();
73 }
74
75 /* Return true if symbol is already in some partition. */
76
77 static inline bool
78 symbol_partitioned_p (symtab_node *node)
79 {
80 return node->aux;
81 }
82
/* Add references made by NODE into partition PART.  Duplicated referred
   symbols are added directly; constant-foldable read-only variable
   initializers are walked recursively so their references land in the
   partition too.  */
static void
add_references_to_partition (ltrans_partition part, symtab_node *node)
{
  int i;
  struct ipa_ref *ref = NULL;

  /* Add all duplicated references to the partition.  */
  for (i = 0; node->iterate_reference (i, ref); i++)
    if (ref->referred->get_partitioning_class () == SYMBOL_DUPLICATE)
      add_symbol_to_partition (part, ref->referred);
    /* References to a readonly variable may be constant folded into its value.
       Recursively look into the initializers of the constant variable and add
       references, too.  */
    else if (is_a <varpool_node *> (ref->referred)
	     && (dyn_cast <varpool_node *> (ref->referred)
		 ->ctor_useable_for_folding_p ()
		 || POINTER_BOUNDS_P (ref->referred->decl))
	     && !lto_symtab_encoder_in_partition_p (part->encoder, ref->referred))
      {
	/* INITIALIZERS_VISITED guards against walking the same variable's
	   initializer (and thus recursing endlessly) more than once per
	   partition; it is created lazily on first use.  */
	if (!part->initializers_visited)
	  part->initializers_visited = new hash_set<symtab_node *>;
	if (!part->initializers_visited->add (ref->referred))
	  add_references_to_partition (part, ref->referred);
      }
}
109
/* Helper function for add_symbol_to_partition doing the actual dirty work
   of adding NODE to PART.  Return true if NODE was added; false when a
   non-comdat partitioned symbol was already placed elsewhere.  */

static bool
add_symbol_to_partition_1 (ltrans_partition part, symtab_node *node)
{
  enum symbol_partitioning_class c = node->get_partitioning_class ();
  struct ipa_ref *ref;
  symtab_node *node1;

  /* If NODE is already there, we have nothing to do.  */
  if (lto_symtab_encoder_in_partition_p (part->encoder, node))
    return true;

  /* non-duplicated aliases or thunks of a duplicated symbol needs to be output
     just once.

     Be lax about comdats; they may or may not be duplicated and we may
     end up in need to duplicate keyed comdat because it has unkeyed alias.  */
  if (c == SYMBOL_PARTITION && !DECL_COMDAT (node->decl)
      && symbol_partitioned_p (node))
    return false;

  /* Be sure that we never try to duplicate partitioned symbol
     or add external symbol.  */
  gcc_assert (c != SYMBOL_EXTERNAL
	      && (c == SYMBOL_DUPLICATE || !symbol_partitioned_p (node)));

  part->symbols++;

  lto_set_symtab_encoder_in_partition (part->encoder, node);

  if (symbol_partitioned_p (node))
    {
      node->in_other_partition = 1;
      if (symtab->dump_file)
	fprintf (symtab->dump_file,
		 "Symbol node %s now used in multiple partitions\n",
		 node->name ());
    }
  /* AUX counts the number of partitions the symbol was added to.  */
  node->aux = (void *)((size_t)node->aux + 1);

  if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
    {
      struct cgraph_edge *e;
      if (!node->alias)
	part->insns += ipa_fn_summaries->get (cnode)->self_size;

      /* Add all inline clones and callees that are duplicated.  */
      for (e = cnode->callees; e; e = e->next_callee)
	if (!e->inline_failed)
	  add_symbol_to_partition_1 (part, e->callee);
	else if (e->callee->get_partitioning_class () == SYMBOL_DUPLICATE)
	  add_symbol_to_partition (part, e->callee);

      /* Add all thunks associated with the function.  */
      for (e = cnode->callers; e; e = e->next_caller)
	if (e->caller->thunk.thunk_p && !e->caller->global.inlined_to)
	  add_symbol_to_partition_1 (part, e->caller);

      /* Instrumented version is actually the same function.
	 Therefore put it into the same partition.  */
      if (cnode->instrumented_version)
	add_symbol_to_partition_1 (part, cnode->instrumented_version);
    }

  add_references_to_partition (part, node);

  /* Add all aliases associated with the symbol.  */

  FOR_EACH_ALIAS (node, ref)
    if (!ref->referring->transparent_alias)
      add_symbol_to_partition_1 (part, ref->referring);
    else
      {
	struct ipa_ref *ref2;
	/* We do not need to add transparent aliases if they are not used.
	   However we must add aliases of transparent aliases if they exist.  */
	FOR_EACH_ALIAS (ref->referring, ref2)
	  {
	    /* Nested transparent aliases are not permitted.  */
	    gcc_checking_assert (!ref2->referring->transparent_alias);
	    add_symbol_to_partition_1 (part, ref2->referring);
	  }
      }

  /* Ensure that members of a SAME_COMDAT_GROUP list are always added as
     a group.  */
  if (node->same_comdat_group)
    for (node1 = node->same_comdat_group;
	 node1 != node; node1 = node1->same_comdat_group)
      if (!node->alias)
	{
	  bool added = add_symbol_to_partition_1 (part, node1);
	  gcc_assert (added);
	}
  return true;
}
207
/* If symbol NODE is really part of other symbol's definition (i.e. it is
   internal label, thunk, alias or so), return the outer symbol.
   When add_symbol_to_partition_1 is called on the outer symbol it must
   eventually add NODE, too.  */
static symtab_node *
contained_in_symbol (symtab_node *node)
{
  /* There is no need to consider transparent aliases to be part of the
     definition: they are only useful inside the partition they are output to
     and thus we will always see an explicit reference to it.  */
  if (node->transparent_alias)
    return node;
  if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
    {
      /* For functions, walk to the underlying function symbol and then
	 to the function a clone was inlined into, if any.  */
      cnode = cnode->function_symbol ();
      if (cnode->global.inlined_to)
	cnode = cnode->global.inlined_to;
      return cnode;
    }
  else if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
    return vnode->ultimate_alias_target ();
  return node;
}
231
/* Add symbol NODE to partition PART.  When definition of NODE is part
   of other symbol definition, add the other (outer) symbol, too; adding
   the outer symbol in turn pulls in NODE.  */

static void
add_symbol_to_partition (ltrans_partition part, symtab_node *node)
{
  symtab_node *node1;

  /* Verify that we do not try to duplicate something that can not be.  */
  gcc_checking_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
		       || !symbol_partitioned_p (node));

  /* Walk up to the outermost symbol NODE is contained in.  */
  while ((node1 = contained_in_symbol (node)) != node)
    node = node1;

  /* If we have duplicated symbol contained in something we can not duplicate,
     we are very badly screwed.  The other way is possible, so we do not
     assert this in add_symbol_to_partition_1.

     Be lax about comdats; they may or may not be duplicated and we may
     end up in need to duplicate keyed comdat because it has unkeyed alias.  */

  gcc_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
	      || DECL_COMDAT (node->decl)
	      || !symbol_partitioned_p (node));

  add_symbol_to_partition_1 (part, node);
}
260
/* Undo all additions until the total number of symbol nodes in PARTITION
   is N_NODES.  */

static void
undo_partition (ltrans_partition partition, unsigned int n_nodes)
{
  while (lto_symtab_encoder_size (partition->encoder) > (int)n_nodes)
    {
      /* Nodes were appended in order, so index N_NODES is always the first
	 node to remove until the encoder shrinks to the target size.  */
      symtab_node *node = lto_symtab_encoder_deref (partition->encoder,
						    n_nodes);
      partition->symbols--;
      cgraph_node *cnode;

      /* After UNDO we no longer know what was visited.  */
      if (partition->initializers_visited)
	delete partition->initializers_visited;
      partition->initializers_visited = NULL;

      if (!node->alias && (cnode = dyn_cast <cgraph_node *> (node)))
	partition->insns -= ipa_fn_summaries->get (cnode)->self_size;
      lto_symtab_encoder_delete_node (partition->encoder, node);
      /* AUX counts the partitions the node belongs to; drop this one.  */
      node->aux = (void *)((size_t)node->aux - 1);
    }
}
285
/* Group cgraph nodes by input files.  This is used mainly for testing
   right now.  */

void
lto_1_to_1_map (void)
{
  symtab_node *node;
  struct lto_file_decl_data *file_data;
  hash_map<lto_file_decl_data *, ltrans_partition> pmap;
  ltrans_partition partition;
  int npartitions = 0;

  FOR_EACH_SYMBOL (node)
    {
      if (node->get_partitioning_class () != SYMBOL_PARTITION
	  || symbol_partitioned_p (node))
	continue;

      file_data = node->lto_file_data;

      if (file_data)
	{
	  /* One partition per input file; reuse it if already created.  */
	  ltrans_partition *slot = &pmap.get_or_insert (file_data);
	  if (*slot)
	    partition = *slot;
	  else
	    {
	      partition = new_partition (file_data->file_name);
	      *slot = partition;
	      npartitions++;
	    }
	}
      /* Symbols with no originating file share the first partition when
	 one exists.  */
      else if (!file_data && ltrans_partitions.length ())
	partition = ltrans_partitions[0];
      else
	{
	  partition = new_partition ("");
	  pmap.put (NULL, partition);
	  npartitions++;
	}

      add_symbol_to_partition (partition, node);
    }

  /* If the cgraph is empty, create one cgraph node set so that there is still
     an output file for any variables that need to be exported in a DSO.  */
  if (!npartitions)
    new_partition ("empty");

}
336
337 /* Maximal partitioning. Put every new symbol into new partition if possible. */
338
339 void
340 lto_max_map (void)
341 {
342 symtab_node *node;
343 ltrans_partition partition;
344 int npartitions = 0;
345
346 FOR_EACH_SYMBOL (node)
347 {
348 if (node->get_partitioning_class () != SYMBOL_PARTITION
349 || symbol_partitioned_p (node))
350 continue;
351 partition = new_partition (node->asm_name ());
352 add_symbol_to_partition (partition, node);
353 npartitions++;
354 }
355 if (!npartitions)
356 new_partition ("empty");
357 }
358
359 /* Helper function for qsort; sort nodes by order. noreorder functions must have
360 been removed earlier. */
361 static int
362 node_cmp (const void *pa, const void *pb)
363 {
364 const struct cgraph_node *a = *(const struct cgraph_node * const *) pa;
365 const struct cgraph_node *b = *(const struct cgraph_node * const *) pb;
366
367 /* Profile reorder flag enables function reordering based on first execution
368 of a function. All functions with profile are placed in ascending
369 order at the beginning. */
370
371 if (flag_profile_reorder_functions)
372 {
373 /* Functions with time profile are sorted in ascending order. */
374 if (a->tp_first_run && b->tp_first_run)
375 return a->tp_first_run != b->tp_first_run
376 ? a->tp_first_run - b->tp_first_run
377 : a->order - b->order;
378
379 /* Functions with time profile are sorted before the functions
380 that do not have the profile. */
381 if (a->tp_first_run || b->tp_first_run)
382 return b->tp_first_run - a->tp_first_run;
383 }
384
385 return b->order - a->order;
386 }
387
388 /* Helper function for qsort; sort nodes by order. */
389 static int
390 varpool_node_cmp (const void *pa, const void *pb)
391 {
392 const symtab_node *a = *static_cast<const symtab_node * const *> (pa);
393 const symtab_node *b = *static_cast<const symtab_node * const *> (pb);
394 return b->order - a->order;
395 }
396
397 /* Add all symtab nodes from NEXT_NODE to PARTITION in order. */
398
399 static void
400 add_sorted_nodes (vec<symtab_node *> &next_nodes, ltrans_partition partition)
401 {
402 unsigned i;
403 symtab_node *node;
404
405 next_nodes.qsort (varpool_node_cmp);
406 FOR_EACH_VEC_ELT (next_nodes, i, node)
407 if (!symbol_partitioned_p (node))
408 add_symbol_to_partition (partition, node);
409 }
410
411
412 /* Group cgraph nodes into equally-sized partitions.
413
414 The partitioning algorithm is simple: nodes are taken in predefined order.
415 The order corresponds to the order we want functions to have in the final
416 output. In the future this will be given by function reordering pass, but
417 at the moment we use the topological order, which is a good approximation.
418
419 The goal is to partition this linear order into intervals (partitions) so
420 that all the partitions have approximately the same size and the number of
421 callgraph or IPA reference edges crossing boundaries is minimal.
422
423 This is a lot faster (O(n) in size of callgraph) than algorithms doing
424 priority-based graph clustering that are generally O(n^2) and, since
425 WHOPR is designed to make things go well across partitions, it leads
426 to good results.
427
428 We compute the expected size of a partition as:
429
430 max (total_size / lto_partitions, min_partition_size)
431
432 We use dynamic expected size of partition so small programs are partitioned
433 into enough partitions to allow use of multiple CPUs, while large programs
434 are not partitioned too much. Creating too many partitions significantly
435 increases the streaming overhead.
436
437 In the future, we would like to bound the maximal size of partitions so as
438 to prevent the LTRANS stage from consuming too much memory. At the moment,
439 however, the WPA stage is the most memory intensive for large benchmarks,
440 since too many types and declarations are read into memory.
441
442 The function implements a simple greedy algorithm. Nodes are being added
443 to the current partition until after 3/4 of the expected partition size is
444 reached. Past this threshold, we keep track of boundary size (number of
445 edges going to other partitions) and continue adding functions until after
446 the current partition has grown to twice the expected partition size. Then
447 the process is undone to the point where the minimal ratio of boundary size
448 and in-partition calls was reached. */
449
450 void
451 lto_balanced_map (int n_lto_partitions, int max_partition_size)
452 {
453 int n_nodes = 0;
454 int n_varpool_nodes = 0, varpool_pos = 0, best_varpool_pos = 0;
455 struct cgraph_node **order = XNEWVEC (cgraph_node *, symtab->cgraph_max_uid);
456 auto_vec<cgraph_node *> noreorder;
457 auto_vec<varpool_node *> varpool_order;
458 int i;
459 struct cgraph_node *node;
460 int original_total_size, total_size = 0, best_total_size = 0;
461 int partition_size;
462 ltrans_partition partition;
463 int last_visited_node = 0;
464 varpool_node *vnode;
465 int cost = 0, internal = 0;
466 int best_n_nodes = 0, best_i = 0, best_cost =
467 INT_MAX, best_internal = 0;
468 int npartitions;
469 int current_order = -1;
470 int noreorder_pos = 0;
471
472 FOR_EACH_VARIABLE (vnode)
473 gcc_assert (!vnode->aux);
474
475 FOR_EACH_DEFINED_FUNCTION (node)
476 if (node->get_partitioning_class () == SYMBOL_PARTITION)
477 {
478 if (node->no_reorder)
479 noreorder.safe_push (node);
480 else
481 order[n_nodes++] = node;
482 if (!node->alias)
483 total_size += ipa_fn_summaries->get (node)->size;
484 }
485
486 original_total_size = total_size;
487
488 /* Streaming works best when the source units do not cross partition
489 boundaries much. This is because importing function from a source
490 unit tends to import a lot of global trees defined there. We should
491 get better about minimizing the function bounday, but until that
492 things works smoother if we order in source order. */
493 qsort (order, n_nodes, sizeof (struct cgraph_node *), node_cmp);
494 noreorder.qsort (node_cmp);
495
496 if (symtab->dump_file)
497 {
498 for(i = 0; i < n_nodes; i++)
499 fprintf (symtab->dump_file, "Balanced map symbol order:%s:%u\n",
500 order[i]->name (), order[i]->tp_first_run);
501 for(i = 0; i < (int)noreorder.length(); i++)
502 fprintf (symtab->dump_file, "Balanced map symbol no_reorder:%s:%u\n",
503 noreorder[i]->name (), noreorder[i]->tp_first_run);
504 }
505
506 /* Collect all variables that should not be reordered. */
507 FOR_EACH_VARIABLE (vnode)
508 if (vnode->get_partitioning_class () == SYMBOL_PARTITION
509 && (!flag_toplevel_reorder || vnode->no_reorder))
510 varpool_order.safe_push (vnode);
511 n_varpool_nodes = varpool_order.length ();
512 varpool_order.qsort (varpool_node_cmp);
513
514 /* Compute partition size and create the first partition. */
515 if (PARAM_VALUE (MIN_PARTITION_SIZE) > max_partition_size)
516 fatal_error (input_location, "min partition size cannot be greater than max partition size");
517
518 partition_size = total_size / n_lto_partitions;
519 if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
520 partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
521 npartitions = 1;
522 partition = new_partition ("");
523 if (symtab->dump_file)
524 fprintf (symtab->dump_file, "Total unit size: %i, partition size: %i\n",
525 total_size, partition_size);
526
527 auto_vec<symtab_node *> next_nodes;
528
529 for (i = 0; i < n_nodes; i++)
530 {
531 if (symbol_partitioned_p (order[i]))
532 continue;
533
534 current_order = order[i]->order;
535
536 /* Output noreorder and varpool in program order first. */
537 next_nodes.truncate (0);
538 while (varpool_pos < n_varpool_nodes
539 && varpool_order[varpool_pos]->order < current_order)
540 next_nodes.safe_push (varpool_order[varpool_pos++]);
541 while (noreorder_pos < (int)noreorder.length ()
542 && noreorder[noreorder_pos]->order < current_order)
543 {
544 if (!noreorder[noreorder_pos]->alias)
545 total_size -= ipa_fn_summaries->get (noreorder[noreorder_pos])->size;
546 next_nodes.safe_push (noreorder[noreorder_pos++]);
547 }
548 add_sorted_nodes (next_nodes, partition);
549
550 add_symbol_to_partition (partition, order[i]);
551 if (!order[i]->alias)
552 total_size -= ipa_fn_summaries->get (order[i])->size;
553
554
555 /* Once we added a new node to the partition, we also want to add
556 all referenced variables unless they was already added into some
557 earlier partition.
558 add_symbol_to_partition adds possibly multiple nodes and
559 variables that are needed to satisfy needs of ORDER[i].
560 We remember last visited cgraph and varpool node from last iteration
561 of outer loop that allows us to process every new addition.
562
563 At the same time we compute size of the boundary into COST. Every
564 callgraph or IPA reference edge leaving the partition contributes into
565 COST. Every edge inside partition was earlier computed as one leaving
566 it and thus we need to subtract it from COST. */
567 while (last_visited_node < lto_symtab_encoder_size (partition->encoder))
568 {
569 symtab_node *refs_node;
570 int j;
571 struct ipa_ref *ref = NULL;
572 symtab_node *snode = lto_symtab_encoder_deref (partition->encoder,
573 last_visited_node);
574
575 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
576 {
577 struct cgraph_edge *edge;
578
579 refs_node = node;
580
581 last_visited_node++;
582
583 gcc_assert (node->definition || node->weakref);
584
585 /* Compute boundary cost of callgraph edges. */
586 for (edge = node->callees; edge; edge = edge->next_callee)
587 if (edge->callee->definition)
588 {
589 int edge_cost = edge->frequency;
590 int index;
591
592 if (!edge_cost)
593 edge_cost = 1;
594 gcc_assert (edge_cost > 0);
595 index = lto_symtab_encoder_lookup (partition->encoder,
596 edge->callee);
597 if (index != LCC_NOT_FOUND
598 && index < last_visited_node - 1)
599 cost -= edge_cost, internal += edge_cost;
600 else
601 cost += edge_cost;
602 }
603 for (edge = node->callers; edge; edge = edge->next_caller)
604 {
605 int edge_cost = edge->frequency;
606 int index;
607
608 gcc_assert (edge->caller->definition);
609 if (!edge_cost)
610 edge_cost = 1;
611 gcc_assert (edge_cost > 0);
612 index = lto_symtab_encoder_lookup (partition->encoder,
613 edge->caller);
614 if (index != LCC_NOT_FOUND
615 && index < last_visited_node - 1)
616 cost -= edge_cost;
617 else
618 cost += edge_cost;
619 }
620 }
621 else
622 {
623 refs_node = snode;
624 last_visited_node++;
625 }
626
627 /* Compute boundary cost of IPA REF edges and at the same time look into
628 variables referenced from current partition and try to add them. */
629 for (j = 0; refs_node->iterate_reference (j, ref); j++)
630 if (is_a <varpool_node *> (ref->referred))
631 {
632 int index;
633
634 vnode = dyn_cast <varpool_node *> (ref->referred);
635 if (!vnode->definition)
636 continue;
637 if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
638 && !vnode->no_reorder
639 && vnode->get_partitioning_class () == SYMBOL_PARTITION)
640 add_symbol_to_partition (partition, vnode);
641 index = lto_symtab_encoder_lookup (partition->encoder,
642 vnode);
643 if (index != LCC_NOT_FOUND
644 && index < last_visited_node - 1)
645 cost--, internal++;
646 else
647 cost++;
648 }
649 else
650 {
651 int index;
652
653 node = dyn_cast <cgraph_node *> (ref->referred);
654 if (!node->definition)
655 continue;
656 index = lto_symtab_encoder_lookup (partition->encoder,
657 node);
658 if (index != LCC_NOT_FOUND
659 && index < last_visited_node - 1)
660 cost--, internal++;
661 else
662 cost++;
663 }
664 for (j = 0; refs_node->iterate_referring (j, ref); j++)
665 if (is_a <varpool_node *> (ref->referring))
666 {
667 int index;
668
669 vnode = dyn_cast <varpool_node *> (ref->referring);
670 gcc_assert (vnode->definition);
671 /* It is better to couple variables with their users,
672 because it allows them to be removed. Coupling
673 with objects they refer to only helps to reduce
674 number of symbols promoted to hidden. */
675 if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
676 && !vnode->no_reorder
677 && !vnode->can_remove_if_no_refs_p ()
678 && vnode->get_partitioning_class () == SYMBOL_PARTITION)
679 add_symbol_to_partition (partition, vnode);
680 index = lto_symtab_encoder_lookup (partition->encoder,
681 vnode);
682 if (index != LCC_NOT_FOUND
683 && index < last_visited_node - 1)
684 cost--;
685 else
686 cost++;
687 }
688 else
689 {
690 int index;
691
692 node = dyn_cast <cgraph_node *> (ref->referring);
693 gcc_assert (node->definition);
694 index = lto_symtab_encoder_lookup (partition->encoder,
695 node);
696 if (index != LCC_NOT_FOUND
697 && index < last_visited_node - 1)
698 cost--;
699 else
700 cost++;
701 }
702 }
703
704 /* If the partition is large enough, start looking for smallest boundary cost. */
705 if (partition->insns < partition_size * 3 / 4
706 || best_cost == INT_MAX
707 || ((!cost
708 || (best_internal * (HOST_WIDE_INT) cost
709 > (internal * (HOST_WIDE_INT)best_cost)))
710 && partition->insns < partition_size * 5 / 4))
711 {
712 best_cost = cost;
713 best_internal = internal;
714 best_i = i;
715 best_n_nodes = lto_symtab_encoder_size (partition->encoder);
716 best_total_size = total_size;
717 best_varpool_pos = varpool_pos;
718 }
719 if (symtab->dump_file)
720 fprintf (symtab->dump_file, "Step %i: added %s/%i, size %i, cost %i/%i "
721 "best %i/%i, step %i\n", i,
722 order[i]->name (), order[i]->order,
723 partition->insns, cost, internal,
724 best_cost, best_internal, best_i);
725 /* Partition is too large, unwind into step when best cost was reached and
726 start new partition. */
727 if (partition->insns > 2 * partition_size
728 || partition->insns > max_partition_size)
729 {
730 if (best_i != i)
731 {
732 if (symtab->dump_file)
733 fprintf (symtab->dump_file, "Unwinding %i insertions to step %i\n",
734 i - best_i, best_i);
735 undo_partition (partition, best_n_nodes);
736 varpool_pos = best_varpool_pos;
737 }
738 i = best_i;
739 /* When we are finished, avoid creating empty partition. */
740 while (i < n_nodes - 1 && symbol_partitioned_p (order[i + 1]))
741 i++;
742 if (i == n_nodes - 1)
743 break;
744 partition = new_partition ("");
745 last_visited_node = 0;
746 total_size = best_total_size;
747 cost = 0;
748
749 if (symtab->dump_file)
750 fprintf (symtab->dump_file, "New partition\n");
751 best_n_nodes = 0;
752 best_cost = INT_MAX;
753
754 /* Since the size of partitions is just approximate, update the size after
755 we finished current one. */
756 if (npartitions < n_lto_partitions)
757 partition_size = total_size / (n_lto_partitions - npartitions);
758 else
759 partition_size = INT_MAX;
760
761 if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
762 partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
763 npartitions ++;
764 }
765 }
766
767 next_nodes.truncate (0);
768
769 /* Varables that are not reachable from the code go into last partition. */
770 if (flag_toplevel_reorder)
771 {
772 FOR_EACH_VARIABLE (vnode)
773 if (vnode->get_partitioning_class () == SYMBOL_PARTITION
774 && !symbol_partitioned_p (vnode)
775 && !vnode->no_reorder)
776 next_nodes.safe_push (vnode);
777 }
778
779 /* Output remaining ordered symbols. */
780 while (varpool_pos < n_varpool_nodes)
781 next_nodes.safe_push (varpool_order[varpool_pos++]);
782 while (noreorder_pos < (int)noreorder.length ())
783 next_nodes.safe_push (noreorder[noreorder_pos++]);
784 add_sorted_nodes (next_nodes, partition);
785
786 free (order);
787
788 if (symtab->dump_file)
789 {
790 fprintf (symtab->dump_file, "\nPartition sizes:\n");
791 unsigned partitions = ltrans_partitions.length ();
792
793 for (unsigned i = 0; i < partitions ; i++)
794 {
795 ltrans_partition p = ltrans_partitions[i];
796 fprintf (symtab->dump_file, "partition %d contains %d (%2.2f%%)"
797 " symbols and %d (%2.2f%%) insns\n", i, p->symbols,
798 100.0 * p->symbols / n_nodes, p->insns,
799 100.0 * p->insns / original_total_size);
800 }
801
802 fprintf (symtab->dump_file, "\n");
803 }
804 }
805
806 /* Return true if we must not change the name of the NODE. The name as
807 extracted from the corresponding decl should be passed in NAME. */
808
809 static bool
810 must_not_rename (symtab_node *node, const char *name)
811 {
812 /* Our renaming machinery do not handle more than one change of assembler name.
813 We should not need more than one anyway. */
814 if (node->lto_file_data
815 && lto_get_decl_name_mapping (node->lto_file_data, name) != name)
816 {
817 if (symtab->dump_file)
818 fprintf (symtab->dump_file,
819 "Not privatizing symbol name: %s. It privatized already.\n",
820 name);
821 return true;
822 }
823 /* Avoid mangling of already mangled clones.
824 ??? should have a flag whether a symbol has a 'private' name already,
825 since we produce some symbols like that i.e. for global constructors
826 that are not really clones. */
827 if (node->unique_name)
828 {
829 if (symtab->dump_file)
830 fprintf (symtab->dump_file,
831 "Not privatizing symbol name: %s. Has unique name.\n",
832 name);
833 return true;
834 }
835 return false;
836 }
837
/* If we are an offload compiler, we may have to rewrite symbols to be
   valid on this target.  Return either PTR or a modified version of it:
   PTR itself when no character needed replacing, otherwise a fresh
   xstrdup'ed copy with offending characters substituted.  */

static const char *
maybe_rewrite_identifier (const char *ptr)
{
#if defined ACCEL_COMPILER && (defined NO_DOT_IN_LABEL || defined NO_DOLLAR_IN_LABEL)
#ifndef NO_DOT_IN_LABEL
  char valid = '.';
  const char reject[] = "$";
#elif !defined NO_DOLLAR_IN_LABEL
  char valid = '$';
  const char reject[] = ".";
#else
  char valid = '_';
  const char reject[] = ".$";
#endif

  char *copy = NULL;
  const char *match = ptr;
  for (;;)
    {
      /* Find the next character the target cannot accept in labels.  */
      size_t off = strcspn (match, reject);
      if (match[off] == '\0')
	break;
      /* Copy lazily: allocate only once the first rejected character
	 is actually found, then patch the copy in place.  */
      if (copy == NULL)
	{
	  copy = xstrdup (ptr);
	  match = copy;
	}
      copy[off] = valid;
    }
  return match;
#else
  return ptr;
#endif
}
875
876 /* Ensure that the symbol in NODE is valid for the target, and if not,
877 rewrite it. */
878
879 static void
880 validize_symbol_for_target (symtab_node *node)
881 {
882 tree decl = node->decl;
883 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
884
885 if (must_not_rename (node, name))
886 return;
887
888 const char *name2 = maybe_rewrite_identifier (name);
889 if (name2 != name)
890 {
891 symtab->change_decl_assembler_name (decl, get_identifier (name2));
892 if (node->lto_file_data)
893 lto_record_renamed_decl (node->lto_file_data, name,
894 IDENTIFIER_POINTER
895 (DECL_ASSEMBLER_NAME (decl)));
896 }
897 }
898
/* Helper for privatize_symbol_name.  Mangle NODE symbol name
   represented by DECL into an "lto_priv" clone name.  Return false when
   the name must be kept unchanged, true after a successful rename.  */

static bool
privatize_symbol_name_1 (symtab_node *node, tree decl)
{
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));

  if (must_not_rename (node, name))
    return false;

  name = maybe_rewrite_identifier (name);
  symtab->change_decl_assembler_name (decl,
				      clone_function_name_1 (name,
							     "lto_priv"));

  /* Record the mapping from the old to the new assembler name so later
     lookups can resolve it.  */
  if (node->lto_file_data)
    lto_record_renamed_decl (node->lto_file_data, name,
			     IDENTIFIER_POINTER
			     (DECL_ASSEMBLER_NAME (decl)));

  if (symtab->dump_file)
    fprintf (symtab->dump_file,
	     "Privatizing symbol name: %s -> %s\n",
	     name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));

  return true;
}
927
/* Mangle NODE symbol name into a local name.
   This is necessary to do
   1) if two or more static vars of same assembler name
      are merged into single ltrans unit.
   2) if previously static var was promoted hidden to avoid possible conflict
      with symbols defined out of the LTO world.
   Return true if the symbol was renamed.  */

static bool
privatize_symbol_name (symtab_node *node)
{
  if (!privatize_symbol_name_1 (node, node->decl))
    return false;

  /* We could change name which is a target of transparent alias
     chain of instrumented function name.  Fix alias chain if so.  */
  if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
    {
      tree iname = NULL_TREE;
      if (cnode->instrumentation_clone)
	{
	  /* If we want to privatize instrumentation clone
	     then we also need to privatize original function.  */
	  if (cnode->instrumented_version)
	    privatize_symbol_name (cnode->instrumented_version);
	  else
	    privatize_symbol_name_1 (cnode, cnode->orig_decl);
	  /* Re-link the alias chain from the clone's name to the
	     original decl's (possibly renamed) assembler name.  */
	  iname = DECL_ASSEMBLER_NAME (cnode->decl);
	  TREE_CHAIN (iname) = DECL_ASSEMBLER_NAME (cnode->orig_decl);
	}
      else if (cnode->instrumented_version
	       && cnode->instrumented_version->orig_decl == cnode->decl)
	{
	  iname = DECL_ASSEMBLER_NAME (cnode->instrumented_version->decl);
	  TREE_CHAIN (iname) = DECL_ASSEMBLER_NAME (cnode->decl);
	}
    }

  return true;
}
967
968 /* Promote variable VNODE to be static. */
969
970 static void
971 promote_symbol (symtab_node *node)
972 {
973 /* We already promoted ... */
974 if (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
975 && DECL_VISIBILITY_SPECIFIED (node->decl)
976 && TREE_PUBLIC (node->decl))
977 {
978 validize_symbol_for_target (node);
979 return;
980 }
981
982 gcc_checking_assert (!TREE_PUBLIC (node->decl)
983 && !DECL_EXTERNAL (node->decl));
984 /* Be sure that newly public symbol does not conflict with anything already
985 defined by the non-LTO part. */
986 privatize_symbol_name (node);
987 TREE_PUBLIC (node->decl) = 1;
988 DECL_VISIBILITY (node->decl) = VISIBILITY_HIDDEN;
989 DECL_VISIBILITY_SPECIFIED (node->decl) = true;
990 if (symtab->dump_file)
991 fprintf (symtab->dump_file,
992 "Promoting as hidden: %s (%s)\n", node->name (),
993 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
994
995 /* Promoting a symbol also promotes all transparent aliases with exception
996 of weakref where the visibility flags are always wrong and set to
997 !PUBLIC. */
998 ipa_ref *ref;
999 for (unsigned i = 0; node->iterate_direct_aliases (i, ref); i++)
1000 {
1001 struct symtab_node *alias = ref->referring;
1002 if (alias->transparent_alias && !alias->weakref)
1003 {
1004 TREE_PUBLIC (alias->decl) = 1;
1005 DECL_VISIBILITY (alias->decl) = VISIBILITY_HIDDEN;
1006 DECL_VISIBILITY_SPECIFIED (alias->decl) = true;
1007 if (symtab->dump_file)
1008 fprintf (symtab->dump_file,
1009 "Promoting alias as hidden: %s\n",
1010 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
1011 }
1012 gcc_assert (!alias->weakref || TREE_PUBLIC (alias->decl));
1013 }
1014 }
1015
1016 /* Return true if NODE needs named section even if it won't land in
1017 the partition symbol table.
1018
1019 FIXME: we should really not use named sections for inline clones
1020 and master clones. */
1021
1022 static bool
1023 may_need_named_section_p (lto_symtab_encoder_t encoder, symtab_node *node)
1024 {
1025 struct cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1026 if (!cnode)
1027 return false;
1028 if (node->real_symbol_p ())
1029 return false;
1030 return (!encoder
1031 || (lto_symtab_encoder_lookup (encoder, node) != LCC_NOT_FOUND
1032 && lto_symtab_encoder_encode_body_p (encoder,
1033 cnode)));
1034 }
1035
/* If NODE represents a static variable, see if there are other variables
   of the same name in partition ENCODER (or in the whole compilation unit
   if ENCODER is NULL) and if so, mangle the statics.  Always mangle all
   conflicting statics, so we reduce the chances of silently miscompiling
   asm statements that refer to them by symbol name.  */

static void
rename_statics (lto_symtab_encoder_t encoder, symtab_node *node)
{
  tree decl = node->decl;
  symtab_node *s;
  tree name = DECL_ASSEMBLER_NAME (decl);

  /* See if this is a static symbol; public and external symbols keep their
     names and need no renaming (unless they still may need a named
     section).  */
  if (((node->externally_visible && !node->weakref)
       /* FIXME: externally_visible is somewhat illogically not set for
	  external symbols (i.e. those not defined).  Remove this test
	  once this is fixed.  */
       || DECL_EXTERNAL (node->decl)
       || !node->real_symbol_p ())
      && !may_need_named_section_p (encoder, node))
    return;

  /* Now walk symbols sharing the same name and see if there are any
     conflicts.  (All types of symbols count here, since we can not have
     a static of the same name as an external or public symbol.)  */
  for (s = symtab_node::get_for_asmname (name);
       s; s = s->next_sharing_asm_name)
    if ((s->real_symbol_p () || may_need_named_section_p (encoder, s))
	&& s->decl != node->decl
	&& (!encoder
	    || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
      break;

  /* OK, no conflict, so we have nothing to do.  */
  if (!s)
    return;

  if (symtab->dump_file)
    fprintf (symtab->dump_file,
	    "Renaming statics with asm name: %s\n", node->name ());

  /* Assign every symbol in the set that shares the same ASM name a unique
     mangled name.  */
  for (s = symtab_node::get_for_asmname (name); s;)
    if ((!s->externally_visible || s->weakref)
	/* Transparent aliases having the same name as their target are
	   renamed at the time their target gets a new name.  Transparent
	   aliases that use a separate assembler name require the name to
	   be unique.  */
	&& (!s->transparent_alias || !s->definition || s->weakref
	    || !symbol_table::assembler_names_equal_p
		 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (s->decl)),
		  IDENTIFIER_POINTER
		    (DECL_ASSEMBLER_NAME (s->get_alias_target()->decl))))
	&& ((s->real_symbol_p ()
	     && !DECL_EXTERNAL (s->decl)
	     && !TREE_PUBLIC (s->decl))
	    || may_need_named_section_p (encoder, s))
	&& (!encoder
	    || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
      {
	if (privatize_symbol_name (s))
	  /* Re-start from the beginning since we do not know how many
	     symbols changed their names (the sharing chain may have been
	     rewritten under us).  */
	  s = symtab_node::get_for_asmname (name);
	else s = s->next_sharing_asm_name;
      }
    else s = s->next_sharing_asm_name;
}
1105
1106 /* Find out all static decls that need to be promoted to global because
1107 of cross file sharing. This function must be run in the WPA mode after
1108 all inlinees are added. */
1109
1110 void
1111 lto_promote_cross_file_statics (void)
1112 {
1113 unsigned i, n_sets;
1114
1115 gcc_assert (flag_wpa);
1116
1117 lto_stream_offload_p = false;
1118 select_what_to_stream ();
1119
1120 /* First compute boundaries. */
1121 n_sets = ltrans_partitions.length ();
1122 for (i = 0; i < n_sets; i++)
1123 {
1124 ltrans_partition part
1125 = ltrans_partitions[i];
1126 part->encoder = compute_ltrans_boundary (part->encoder);
1127 }
1128
1129 /* Look at boundaries and promote symbols as needed. */
1130 for (i = 0; i < n_sets; i++)
1131 {
1132 lto_symtab_encoder_iterator lsei;
1133 lto_symtab_encoder_t encoder = ltrans_partitions[i]->encoder;
1134
1135 for (lsei = lsei_start (encoder); !lsei_end_p (lsei);
1136 lsei_next (&lsei))
1137 {
1138 symtab_node *node = lsei_node (lsei);
1139
1140 /* If symbol is static, rename it if its assembler name
1141 clashes with anything else in this unit. */
1142 rename_statics (encoder, node);
1143
1144 /* No need to promote if symbol already is externally visible ... */
1145 if (node->externally_visible
1146 /* ... or if it is part of current partition ... */
1147 || lto_symtab_encoder_in_partition_p (encoder, node)
1148 /* ... or if we do not partition it. This mean that it will
1149 appear in every partition referencing it. */
1150 || node->get_partitioning_class () != SYMBOL_PARTITION)
1151 {
1152 validize_symbol_for_target (node);
1153 continue;
1154 }
1155
1156 promote_symbol (node);
1157 }
1158 }
1159 }
1160
1161 /* Rename statics in the whole unit in the case that
1162 we do -flto-partition=none. */
1163
1164 void
1165 lto_promote_statics_nonwpa (void)
1166 {
1167 symtab_node *node;
1168 FOR_EACH_SYMBOL (node)
1169 {
1170 rename_statics (NULL, node);
1171 validize_symbol_for_target (node);
1172 }
1173 }