gcc/lto/lto-partition.c
1 /* LTO partitioning logic routines.
2 Copyright (C) 2009-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "toplev.h"
24 #include "tree.h"
25 #include "tm.h"
26 #include "cgraph.h"
27 #include "lto-streamer.h"
28 #include "timevar.h"
29 #include "params.h"
30 #include "ipa-inline.h"
31 #include "ipa-utils.h"
32 #include "lto-partition.h"
33
34 /* Classification of symbols into classes with respect to partitioning. */
35 enum symbol_class
36 {
37 /* External declarations are ignored by partitioning algorithms and they are
38 added into the boundary later via compute_ltrans_boundary. */
39 SYMBOL_EXTERNAL,
40 /* Partitioned symbols are put into one of the partitions. */
41 SYMBOL_PARTITION,
42 /* Duplicated symbols (such as comdat or constant pool references) are
43 copied into every node needing them via add_symbol_to_partition. */
44 SYMBOL_DUPLICATE
45 };
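/* For illustration (indicative only; the authoritative classification is
   computed by get_symbol_class below): a function that is merely declared in
   this unit is typically SYMBOL_EXTERNAL, an ordinary defined function or
   variable is SYMBOL_PARTITION, and an inline clone or a constant pool entry
   is SYMBOL_DUPLICATE and is copied into every partition that needs it.  */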
46
47 vec<ltrans_partition> ltrans_partitions;
48
49 static void add_symbol_to_partition (ltrans_partition part, symtab_node node);
50
51 /* Classify symbol NODE. */
52
53 enum symbol_class
54 get_symbol_class (symtab_node node)
55 {
56 /* Inline clones are always duplicated.
57 This includes external declarations. */
58 cgraph_node *cnode = dyn_cast <cgraph_node> (node);
59 if (cnode && cnode->global.inlined_to)
60 return SYMBOL_DUPLICATE;
61
62 /* Weakref aliases are always duplicated. */
63 if (node->symbol.weakref)
64 return SYMBOL_DUPLICATE;
65
66 /* External declarations are external. */
67 if (DECL_EXTERNAL (node->symbol.decl))
68 return SYMBOL_EXTERNAL;
69
70 if (varpool_node *vnode = dyn_cast <varpool_node> (node))
71 {
72 /* Constant pool references use local symbol names that cannot
73 be promoted global. We should never put objects that cannot be
74 duplicated across partitions into a constant pool. */
75 if (DECL_IN_CONSTANT_POOL (node->symbol.decl))
76 return SYMBOL_DUPLICATE;
77 gcc_checking_assert (vnode->symbol.definition);
78 }
79 /* Functions that are cloned may stay in the callgraph even if they are unused.
80 Handle them as external; compute_ltrans_boundary takes care to make
81 the proper things happen (i.e. to make them appear in the boundary, but
82 with the body streamed, so the clone can be materialized). */
83 else if (!cgraph (node)->symbol.definition)
84 return SYMBOL_EXTERNAL;
85
86 /* Comdats are duplicated to every use unless they are keyed.
87 Keyed comdats do not need duplication. */
88 if (DECL_COMDAT (node->symbol.decl)
89 && !node->symbol.force_output
90 && !symtab_used_from_object_file_p ((symtab_node) node))
91 return SYMBOL_DUPLICATE;
92
93 return SYMBOL_PARTITION;
94 }
95
96 /* Create new partition with name NAME. */
97
98 static ltrans_partition
99 new_partition (const char *name)
100 {
101 ltrans_partition part = XCNEW (struct ltrans_partition_def);
102 part->encoder = lto_symtab_encoder_new (false);
103 part->name = name;
104 part->insns = 0;
105 ltrans_partitions.safe_push (part);
106 return part;
107 }
108
109 /* Free memory used by ltrans data structures. */
110
111 void
112 free_ltrans_partitions (void)
113 {
114 unsigned int idx;
115 ltrans_partition part;
116 for (idx = 0; ltrans_partitions.iterate (idx, &part); idx++)
117 {
118 if (part->initializers_visited)
119 pointer_set_destroy (part->initializers_visited);
120 /* Symtab encoder is freed after streaming. */
121 free (part);
122 }
123 ltrans_partitions.release ();
124 }
125
126 /* Return true if symbol is already in some partition. */
127
128 static inline bool
129 symbol_partitioned_p (symtab_node node)
130 {
131 return node->symbol.aux;
132 }
133
134 /* Add references into the partition. */
135 static void
136 add_references_to_partition (ltrans_partition part, symtab_node node)
137 {
138 int i;
139 struct ipa_ref *ref;
140
141 /* Add all duplicated references to the partition. */
142 for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list, i, ref); i++)
143 if (get_symbol_class (ref->referred) == SYMBOL_DUPLICATE)
144 add_symbol_to_partition (part, ref->referred);
145 /* References to a read-only variable may be constant folded into its value.
146 Recursively look into the initializers of the constant variable and add
147 its references, too. */
148 else if (is_a <varpool_node> (ref->referred)
149 && ctor_for_folding (ref->referred->symbol.decl) != error_mark_node
150 && !lto_symtab_encoder_in_partition_p (part->encoder, ref->referred))
151 {
152 if (!part->initializers_visited)
153 part->initializers_visited = pointer_set_create ();
154 if (!pointer_set_insert (part->initializers_visited, ref->referred))
155 add_references_to_partition (part, ref->referred);
156 }
157 }
158
159 /* Helper function for add_symbol_to_partition doing the actual dirty work
160 of adding NODE to PART. */
161
162 static bool
163 add_symbol_to_partition_1 (ltrans_partition part, symtab_node node)
164 {
165 enum symbol_class c = get_symbol_class (node);
166 int i;
167 struct ipa_ref *ref;
168 symtab_node node1;
169
170 /* If NODE is already there, we have nothing to do. */
171 if (lto_symtab_encoder_in_partition_p (part->encoder, (symtab_node) node))
172 return true;
173
174 /* Non-duplicated aliases or thunks of a duplicated symbol need to be output
175 just once.
176
177 Be lax about comdats; they may or may not be duplicated and we may
178 end up needing to duplicate a keyed comdat because it has an unkeyed alias. */
179 if (c == SYMBOL_PARTITION && !DECL_COMDAT (node->symbol.decl)
180 && symbol_partitioned_p (node))
181 return false;
182
183 /* Be sure that we never try to duplicate partitioned symbol
184 or add external symbol. */
185 gcc_assert (c != SYMBOL_EXTERNAL
186 && (c == SYMBOL_DUPLICATE || !symbol_partitioned_p (node)));
187
188 lto_set_symtab_encoder_in_partition (part->encoder, (symtab_node) node);
189
190 if (symbol_partitioned_p (node))
191 {
192 node->symbol.in_other_partition = 1;
193 if (cgraph_dump_file)
194 fprintf (cgraph_dump_file, "Symbol node %s now used in multiple partitions\n",
195 symtab_node_name (node));
196 }
197 node->symbol.aux = (void *)((size_t)node->symbol.aux + 1);
198
199 if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
200 {
201 struct cgraph_edge *e;
202 part->insns += inline_summary (cnode)->self_size;
203
204 /* Add all inline clones and callees that are duplicated. */
205 for (e = cnode->callees; e; e = e->next_callee)
206 if (!e->inline_failed)
207 add_symbol_to_partition_1 (part, (symtab_node) e->callee);
208 else if (get_symbol_class ((symtab_node) e->callee) == SYMBOL_DUPLICATE)
209 add_symbol_to_partition (part, (symtab_node) e->callee);
210
211 /* Add all thunks associated with the function. */
212 for (e = cnode->callers; e; e = e->next_caller)
213 if (e->caller->thunk.thunk_p)
214 add_symbol_to_partition_1 (part, (symtab_node) e->caller);
215 }
216
217 add_references_to_partition (part, node);
218
219 /* Add all aliases associated with the symbol. */
220 for (i = 0; ipa_ref_list_referring_iterate (&node->symbol.ref_list, i, ref); i++)
221 if (ref->use == IPA_REF_ALIAS && !node->symbol.weakref)
222 add_symbol_to_partition_1 (part, ref->referring);
223
224 /* Ensure that all members of the SAME_COMDAT_GROUP list are always added as a group. */
225 if (node->symbol.same_comdat_group)
226 for (node1 = node->symbol.same_comdat_group;
227 node1 != node; node1 = node1->symbol.same_comdat_group)
228 {
229 bool added = add_symbol_to_partition_1 (part, node1);
230 gcc_assert (added);
231 }
232 return true;
233 }
234
235 /* If symbol NODE is really part of another symbol's definition (i.e. it is
236 an internal label, thunk, alias or similar), return the outer symbol.
237 When add_symbol_to_partition_1 is called on the outer symbol, it must
238 eventually add NODE, too. */
239 static symtab_node
240 contained_in_symbol (symtab_node node)
241 {
242 /* Weakrefs are never contained in anything. */
243 if (node->symbol.weakref)
244 return node;
245 if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
246 {
247 cnode = cgraph_function_node (cnode, NULL);
248 if (cnode->global.inlined_to)
249 cnode = cnode->global.inlined_to;
250 return (symtab_node) cnode;
251 }
252 else if (varpool_node *vnode = dyn_cast <varpool_node> (node))
253 return (symtab_node) varpool_variable_node (vnode, NULL);
254 return node;
255 }
256
257 /* Add symbol NODE to partition. When the definition of NODE is part
258 of another symbol's definition, add the other symbol, too. */
259
260 static void
261 add_symbol_to_partition (ltrans_partition part, symtab_node node)
262 {
263 symtab_node node1;
264
265 /* Verify that we do not try to duplicate something that can not be. */
266 gcc_checking_assert (get_symbol_class (node) == SYMBOL_DUPLICATE
267 || !symbol_partitioned_p (node));
268
269 while ((node1 = contained_in_symbol (node)) != node)
270 node = node1;
271
272 /* If we have a duplicated symbol contained in something we cannot duplicate,
273 we are in serious trouble. The other way around is possible, so we do not
274 assert this in add_symbol_to_partition_1.
275
276 Be lax about comdats; they may or may not be duplicated and we may
277 end up needing to duplicate a keyed comdat because it has an unkeyed alias. */
278 gcc_assert (get_symbol_class (node) == SYMBOL_DUPLICATE
279 || DECL_COMDAT (node->symbol.decl)
280 || !symbol_partitioned_p (node));
281 add_symbol_to_partition_1 (part, node);
282 }
283
284 /* Undo all additions until the number of symbol nodes in PARTITION
285 is N_NODES. */
286
287 static void
288 undo_partition (ltrans_partition partition, unsigned int n_nodes)
289 {
290 while (lto_symtab_encoder_size (partition->encoder) > (int)n_nodes)
291 {
292 symtab_node node = lto_symtab_encoder_deref (partition->encoder,
293 n_nodes);
294
295 /* After UNDO we no longer know what was visited. */
296 if (partition->initializers_visited)
297 pointer_set_destroy (partition->initializers_visited);
298 partition->initializers_visited = NULL;
299
300 if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
301 partition->insns -= inline_summary (cnode)->self_size;
302 lto_symtab_encoder_delete_node (partition->encoder, node);
303 node->symbol.aux = (void *)((size_t)node->symbol.aux - 1);
304 }
305 }
306
307 /* Group cgraph nodes by input files. This is used mainly for testing
308 right now. */
309
310 void
311 lto_1_to_1_map (void)
312 {
313 symtab_node node;
314 struct lto_file_decl_data *file_data;
315 struct pointer_map_t *pmap;
316 ltrans_partition partition;
317 void **slot;
318 int npartitions = 0;
319
320 pmap = pointer_map_create ();
321
322 FOR_EACH_SYMBOL (node)
323 {
324 if (get_symbol_class (node) != SYMBOL_PARTITION
325 || symbol_partitioned_p (node))
326 continue;
327
328 file_data = node->symbol.lto_file_data;
329
330 if (file_data)
331 {
332 slot = pointer_map_contains (pmap, file_data);
333 if (slot)
334 partition = (ltrans_partition) *slot;
335 else
336 {
337 partition = new_partition (file_data->file_name);
338 slot = pointer_map_insert (pmap, file_data);
339 *slot = partition;
340 npartitions++;
341 }
342 }
343 else if (!file_data && ltrans_partitions.length ())
344 partition = ltrans_partitions[0];
345 else
346 {
347 partition = new_partition ("");
348 slot = pointer_map_insert (pmap, NULL);
349 *slot = partition;
350 npartitions++;
351 }
352
353 add_symbol_to_partition (partition, (symtab_node) node);
354 }
355
356 /* If the cgraph is empty, create one cgraph node set so that there is still
357 an output file for any variables that need to be exported in a DSO. */
358 if (!npartitions)
359 new_partition ("empty");
360
361 pointer_map_destroy (pmap);
362
363 }
364
365 /* Maximal partitioning. Put every new symbol into new partition if possible. */
366
367 void
368 lto_max_map (void)
369 {
370 symtab_node node;
371 ltrans_partition partition;
372 int npartitions = 0;
373
374 FOR_EACH_SYMBOL (node)
375 {
376 if (get_symbol_class (node) != SYMBOL_PARTITION
377 || symbol_partitioned_p (node))
378 continue;
379 partition = new_partition (symtab_node_asm_name (node));
380 add_symbol_to_partition (partition, (symtab_node) node);
381 npartitions++;
382 }
383 if (!npartitions)
384 new_partition ("empty");
385 }
386
387 /* Helper function for qsort; sort nodes by order. */
388 static int
389 node_cmp (const void *pa, const void *pb)
390 {
391 const struct cgraph_node *a = *(const struct cgraph_node * const *) pa;
392 const struct cgraph_node *b = *(const struct cgraph_node * const *) pb;
393 return b->symbol.order - a->symbol.order;
394 }
395
396 /* Helper function for qsort; sort nodes by order. */
397 static int
398 varpool_node_cmp (const void *pa, const void *pb)
399 {
400 const struct varpool_node *a = *(const struct varpool_node * const *) pa;
401 const struct varpool_node *b = *(const struct varpool_node * const *) pb;
402 return b->symbol.order - a->symbol.order;
403 }
404
405 /* Group cgraph nodes into equally-sized partitions.
406
407 The partitioning algorithm is simple: nodes are taken in predefined order.
408 The order corresponds to the order we want functions to have in the final
409 output. In the future this will be given by function reordering pass, but
410 at the moment we use the topological order, which is a good approximation.
411
412 The goal is to partition this linear order into intervals (partitions) so
413 that all the partitions have approximately the same size and the number of
414 callgraph or IPA reference edges crossing boundaries is minimal.
415
416 This is a lot faster (O(n) in size of callgraph) than algorithms doing
417 priority-based graph clustering that are generally O(n^2) and, since
418 WHOPR is designed to make things go well across partitions, it leads
419 to good results.
420
421 We compute the expected size of a partition as:
422
423 max (total_size / lto_partitions, min_partition_size)
424
425 We use dynamic expected size of partition so small programs are partitioned
426 into enough partitions to allow use of multiple CPUs, while large programs
427 are not partitioned too much. Creating too many partitions significantly
428 increases the streaming overhead.
429
430 In the future, we would like to bound the maximal size of partitions so as
431 to prevent the LTRANS stage from consuming too much memory. At the moment,
432 however, the WPA stage is the most memory intensive for large benchmarks,
433 since too many types and declarations are read into memory.
434
435 The function implements a simple greedy algorithm. Nodes are added
436 to the current partition until 3/4 of the expected partition size is
437 reached. Past this threshold, we keep track of the boundary size (number of
438 edges going to other partitions) and continue adding functions until
439 the current partition has grown to twice the expected partition size. Then
440 the process is undone back to the point where the minimal ratio of boundary
441 size to in-partition calls was reached. */
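/* A worked example with made-up numbers: if total_size == 200000,
   PARAM_LTO_PARTITIONS == 32 and the minimum partition size is 10000, the
   expected size is max (200000 / 32, 10000) == 10000, so the unit is split
   into roughly 20 partitions instead of 32 very small ones.  The numbers are
   purely illustrative; the real values come from the --param settings.  */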
442
443 void
444 lto_balanced_map (void)
445 {
446 int n_nodes = 0;
447 int n_varpool_nodes = 0, varpool_pos = 0;
448 struct cgraph_node **postorder =
449 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
450 struct cgraph_node **order = XNEWVEC (struct cgraph_node *, cgraph_max_uid);
451 struct varpool_node **varpool_order = NULL;
452 int i, postorder_len;
453 struct cgraph_node *node;
454 int total_size = 0, best_total_size = 0;
455 int partition_size;
456 ltrans_partition partition;
457 int last_visited_node = 0;
458 struct varpool_node *vnode;
459 int cost = 0, internal = 0;
460 int best_n_nodes = 0, best_i = 0, best_cost =
461 INT_MAX, best_internal = 0;
462 int npartitions;
463 int current_order = -1;
464
465 FOR_EACH_VARIABLE (vnode)
466 gcc_assert (!vnode->symbol.aux);
467 /* Until we have a better ordering facility, use topological order.
468 Include only nodes we will partition and compute an estimate of program
469 size. Note that since nodes that are not partitioned might be put into
470 multiple partitions, this is just an estimate of the real size. This is why
471 we keep partition_size updated after every partition is finalized. */
472 postorder_len = ipa_reverse_postorder (postorder);
473
474 for (i = 0; i < postorder_len; i++)
475 {
476 node = postorder[i];
477 if (get_symbol_class ((symtab_node) node) == SYMBOL_PARTITION)
478 {
479 order[n_nodes++] = node;
480 total_size += inline_summary (node)->size;
481 }
482 }
483 free (postorder);
484
485 if (!flag_toplevel_reorder)
486 {
487 qsort (order, n_nodes, sizeof (struct cgraph_node *), node_cmp);
488
489 FOR_EACH_VARIABLE (vnode)
490 if (get_symbol_class ((symtab_node) vnode) == SYMBOL_PARTITION)
491 n_varpool_nodes++;
492 varpool_order = XNEWVEC (struct varpool_node *, n_varpool_nodes);
493
494 n_varpool_nodes = 0;
495 FOR_EACH_VARIABLE (vnode)
496 if (get_symbol_class ((symtab_node) vnode) == SYMBOL_PARTITION)
497 varpool_order[n_varpool_nodes++] = vnode;
498 qsort (varpool_order, n_varpool_nodes, sizeof (struct varpool_node *),
499 varpool_node_cmp);
500 }
501
502 /* Compute partition size and create the first partition. */
503 partition_size = total_size / PARAM_VALUE (PARAM_LTO_PARTITIONS);
504 if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
505 partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
506 npartitions = 1;
507 partition = new_partition ("");
508 if (cgraph_dump_file)
509 fprintf (cgraph_dump_file, "Total unit size: %i, partition size: %i\n",
510 total_size, partition_size);
511
512 for (i = 0; i < n_nodes; i++)
513 {
514 if (symbol_partitioned_p ((symtab_node) order[i]))
515 continue;
516
517 current_order = order[i]->symbol.order;
518
519 if (!flag_toplevel_reorder)
520 while (varpool_pos < n_varpool_nodes
521 && varpool_order[varpool_pos]->symbol.order < current_order)
522 {
523 if (!symbol_partitioned_p ((symtab_node) varpool_order[varpool_pos]))
524 add_symbol_to_partition (partition, (symtab_node) varpool_order[varpool_pos]);
525 varpool_pos++;
526 }
527
528 add_symbol_to_partition (partition, (symtab_node) order[i]);
529 total_size -= inline_summary (order[i])->size;
530
531
532 /* Once we add a new node to the partition, we also want to add
533 all referenced variables unless they were already added to some
534 earlier partition.
535 add_symbol_to_partition possibly adds multiple nodes and
536 variables that are needed to satisfy the needs of ORDER[i].
537 We remember the last visited cgraph and varpool node from the last iteration
538 of the outer loop, which allows us to process every new addition.
539
540 At the same time we compute the size of the boundary into COST. Every
541 callgraph or IPA reference edge leaving the partition contributes to
542 COST. Every edge that is now internal to the partition was earlier counted
543 as leaving it, so we subtract it from COST again. */
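/* Illustrative bookkeeping example (hypothetical frequencies): when A is
   added and its call to B (frequency 5) still crosses the boundary, COST
   grows by 5; once B joins the same partition and is processed, the same
   edge is found to be internal, so the 5 is subtracted again and only edges
   that really leave the partition remain accounted in COST.  */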
544 while (last_visited_node < lto_symtab_encoder_size (partition->encoder))
545 {
546 struct ipa_ref_list *refs;
547 int j;
548 struct ipa_ref *ref;
549 symtab_node snode = lto_symtab_encoder_deref (partition->encoder,
550 last_visited_node);
551
552 if (cgraph_node *node = dyn_cast <cgraph_node> (snode))
553 {
554 struct cgraph_edge *edge;
555
556 refs = &node->symbol.ref_list;
557
558 last_visited_node++;
559
560 gcc_assert (node->symbol.definition || node->symbol.weakref);
561
562 /* Compute boundary cost of callgraph edges. */
563 for (edge = node->callees; edge; edge = edge->next_callee)
564 if (edge->callee->symbol.definition)
565 {
566 int edge_cost = edge->frequency;
567 int index;
568
569 if (!edge_cost)
570 edge_cost = 1;
571 gcc_assert (edge_cost > 0);
572 index = lto_symtab_encoder_lookup (partition->encoder,
573 (symtab_node)edge->callee);
574 if (index != LCC_NOT_FOUND
575 && index < last_visited_node - 1)
576 cost -= edge_cost, internal += edge_cost;
577 else
578 cost += edge_cost;
579 }
580 for (edge = node->callers; edge; edge = edge->next_caller)
581 {
582 int edge_cost = edge->frequency;
583 int index;
584
585 gcc_assert (edge->caller->symbol.definition);
586 if (!edge_cost)
587 edge_cost = 1;
588 gcc_assert (edge_cost > 0);
589 index = lto_symtab_encoder_lookup (partition->encoder,
590 (symtab_node)edge->caller);
591 if (index != LCC_NOT_FOUND
592 && index < last_visited_node - 1)
593 cost -= edge_cost;
594 else
595 cost += edge_cost;
596 }
597 }
598 else
599 {
600 refs = &snode->symbol.ref_list;
601 last_visited_node++;
602 }
603
604 /* Compute boundary cost of IPA REF edges and at the same time look into
605 variables referenced from current partition and try to add them. */
606 for (j = 0; ipa_ref_list_reference_iterate (refs, j, ref); j++)
607 if (is_a <varpool_node> (ref->referred))
608 {
609 int index;
610
611 vnode = ipa_ref_varpool_node (ref);
612 if (!vnode->symbol.definition)
613 continue;
614 if (!symbol_partitioned_p ((symtab_node) vnode) && flag_toplevel_reorder
615 && get_symbol_class ((symtab_node) vnode) == SYMBOL_PARTITION)
616 add_symbol_to_partition (partition, (symtab_node) vnode);
617 index = lto_symtab_encoder_lookup (partition->encoder,
618 (symtab_node)vnode);
619 if (index != LCC_NOT_FOUND
620 && index < last_visited_node - 1)
621 cost--, internal++;
622 else
623 cost++;
624 }
625 else
626 {
627 int index;
628
629 node = ipa_ref_node (ref);
630 if (!node->symbol.definition)
631 continue;
632 index = lto_symtab_encoder_lookup (partition->encoder,
633 (symtab_node)node);
634 if (index != LCC_NOT_FOUND
635 && index < last_visited_node - 1)
636 cost--, internal++;
637 else
638 cost++;
639 }
640 for (j = 0; ipa_ref_list_referring_iterate (refs, j, ref); j++)
641 if (is_a <varpool_node> (ref->referring))
642 {
643 int index;
644
645 vnode = ipa_ref_referring_varpool_node (ref);
646 gcc_assert (vnode->symbol.definition);
647 if (!symbol_partitioned_p ((symtab_node) vnode) && flag_toplevel_reorder
648 && get_symbol_class ((symtab_node) vnode) == SYMBOL_PARTITION)
649 add_symbol_to_partition (partition, (symtab_node) vnode);
650 index = lto_symtab_encoder_lookup (partition->encoder,
651 (symtab_node)vnode);
652 if (index != LCC_NOT_FOUND
653 && index < last_visited_node - 1)
654 cost--;
655 else
656 cost++;
657 }
658 else
659 {
660 int index;
661
662 node = ipa_ref_referring_node (ref);
663 gcc_assert (node->symbol.definition);
664 index = lto_symtab_encoder_lookup (partition->encoder,
665 (symtab_node)node);
666 if (index != LCC_NOT_FOUND
667 && index < last_visited_node - 1)
668 cost--;
669 else
670 cost++;
671 }
672 }
673
674 /* If the partition is large enough, start looking for smallest boundary cost. */
675 if (partition->insns < partition_size * 3 / 4
676 || best_cost == INT_MAX
677 || ((!cost
678 || (best_internal * (HOST_WIDE_INT) cost
679 > (internal * (HOST_WIDE_INT)best_cost)))
680 && partition->insns < partition_size * 5 / 4))
681 {
682 best_cost = cost;
683 best_internal = internal;
684 best_i = i;
685 best_n_nodes = lto_symtab_encoder_size (partition->encoder);
686 best_total_size = total_size;
687 }
688 if (cgraph_dump_file)
689 fprintf (cgraph_dump_file, "Step %i: added %s/%i, size %i, cost %i/%i "
690 "best %i/%i, step %i\n", i,
691 cgraph_node_name (order[i]), order[i]->symbol.order,
692 partition->insns, cost, internal,
693 best_cost, best_internal, best_i);
694 /* Partition is too large; unwind to the step where the best cost was reached
695 and start a new partition. */
696 if (partition->insns > 2 * partition_size)
697 {
698 if (best_i != i)
699 {
700 if (cgraph_dump_file)
701 fprintf (cgraph_dump_file, "Unwinding %i insertions to step %i\n",
702 i - best_i, best_i);
703 undo_partition (partition, best_n_nodes);
704 }
705 i = best_i;
706 /* When we are finished, avoid creating empty partition. */
707 while (i < n_nodes - 1 && symbol_partitioned_p ((symtab_node) order[i + 1]))
708 i++;
709 if (i == n_nodes - 1)
710 break;
711 partition = new_partition ("");
712 last_visited_node = 0;
713 total_size = best_total_size;
714 cost = 0;
715
716 if (cgraph_dump_file)
717 fprintf (cgraph_dump_file, "New partition\n");
718 best_n_nodes = 0;
719 best_cost = INT_MAX;
720
721 /* Since the size of partitions is just approximate, update the size after
722 we finish the current one. */
723 if (npartitions < PARAM_VALUE (PARAM_LTO_PARTITIONS))
724 partition_size = total_size
725 / (PARAM_VALUE (PARAM_LTO_PARTITIONS) - npartitions);
726 else
727 partition_size = INT_MAX;
728
729 if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
730 partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
731 npartitions ++;
732 }
733 }
734
735 /* Variables that are not reachable from the code go into the last partition. */
736 if (flag_toplevel_reorder)
737 {
738 FOR_EACH_VARIABLE (vnode)
739 if (get_symbol_class ((symtab_node) vnode) == SYMBOL_PARTITION
740 && !symbol_partitioned_p ((symtab_node) vnode))
741 add_symbol_to_partition (partition, (symtab_node) vnode);
742 }
743 else
744 {
745 while (varpool_pos < n_varpool_nodes)
746 {
747 if (!symbol_partitioned_p ((symtab_node) varpool_order[varpool_pos]))
748 add_symbol_to_partition (partition, (symtab_node) varpool_order[varpool_pos]);
749 varpool_pos++;
750 }
751 free (varpool_order);
752 }
753 free (order);
754 }
755
756 /* Mangle NODE's symbol name into a local name.
757 This is necessary
758 1) if two or more static vars with the same assembler name
759 are merged into a single ltrans unit, or
760 2) if a previously static var was promoted hidden to avoid a possible
761 conflict with symbols defined outside of the LTO world.
762 */
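/* A hypothetical example: if two unrelated static variables named "pool" end
   up merged into one ltrans unit, they cannot both keep the assembler name
   "pool"; one of them is renamed via clone_function_name, which yields a name
   of the form "pool.lto_priv.<N>".  The exact suffix is an implementation
   detail and is shown here only for illustration.  */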
763
764 static bool
765 privatize_symbol_name (symtab_node node)
766 {
767 tree decl = node->symbol.decl;
768 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
769
770 /* Our renaming machinery does not handle more than one change of assembler name.
771 We should not need more than one anyway. */
772 if (node->symbol.lto_file_data
773 && lto_get_decl_name_mapping (node->symbol.lto_file_data, name) != name)
774 {
775 if (cgraph_dump_file)
776 fprintf (cgraph_dump_file,
777 "Not privatizing symbol name: %s. It privatized already.\n",
778 name);
779 return false;
780 }
781 /* Avoid mangling of already mangled clones.
782 ??? We should have a flag telling whether a symbol already has a 'private'
783 name, since we produce some symbols like that (e.g. for global constructors)
784 that are not really clones. */
785 if (node->symbol.unique_name)
786 {
787 if (cgraph_dump_file)
788 fprintf (cgraph_dump_file,
789 "Not privatizing symbol name: %s. Has unique name.\n",
790 name);
791 return false;
792 }
793 change_decl_assembler_name (decl, clone_function_name (decl, "lto_priv"));
794 if (node->symbol.lto_file_data)
795 lto_record_renamed_decl (node->symbol.lto_file_data, name,
796 IDENTIFIER_POINTER
797 (DECL_ASSEMBLER_NAME (decl)));
798 if (cgraph_dump_file)
799 fprintf (cgraph_dump_file,
800 "Privatizing symbol name: %s -> %s\n",
801 name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
802 return true;
803 }
804
805 /* Promote symbol NODE to be public with hidden visibility. */
806
807 static void
808 promote_symbol (symtab_node node)
809 {
810 /* We already promoted ... */
811 if (DECL_VISIBILITY (node->symbol.decl) == VISIBILITY_HIDDEN
812 && DECL_VISIBILITY_SPECIFIED (node->symbol.decl)
813 && TREE_PUBLIC (node->symbol.decl))
814 return;
815
816 gcc_checking_assert (!TREE_PUBLIC (node->symbol.decl)
817 && !DECL_EXTERNAL (node->symbol.decl));
818 /* Be sure that newly public symbol does not conflict with anything already
819 defined by the non-LTO part. */
820 privatize_symbol_name (node);
821 TREE_PUBLIC (node->symbol.decl) = 1;
822 DECL_VISIBILITY (node->symbol.decl) = VISIBILITY_HIDDEN;
823 DECL_VISIBILITY_SPECIFIED (node->symbol.decl) = true;
824 if (cgraph_dump_file)
825 fprintf (cgraph_dump_file,
826 "Promoting as hidden: %s\n", symtab_node_name (node));
827 }
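/* Roughly speaking, for a C-level static the promotion above is analogous to
   rewriting
       static int counter;
   as
       int counter __attribute__ ((visibility ("hidden")));
   (with the name also privatized as described above), so other partitions can
   reference the symbol while it stays invisible outside the resulting DSO.
   The analogy is illustrative only.  */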
828
829 /* Return true if NODE needs named section even if it won't land in the partition
830 symbol table.
831 FIXME: we should really not use named sections for inline clones and master clones. */
832
833 static bool
834 may_need_named_section_p (lto_symtab_encoder_t encoder, symtab_node node)
835 {
836 struct cgraph_node *cnode = dyn_cast <cgraph_node> (node);
837 if (!cnode)
838 return false;
839 if (symtab_real_symbol_p (node))
840 return false;
841 if (!cnode->global.inlined_to && !cnode->clones)
842 return false;
843 return (!encoder
844 || (lto_symtab_encoder_lookup (encoder, node) != LCC_NOT_FOUND
845 && lto_symtab_encoder_encode_body_p (encoder,
846 cnode)));
847 }
848
849 /* If NODE represents a static variable, see if there are other variables
850 with the same name in partition ENCODER (or in the whole compilation unit
851 if ENCODER is NULL) and, if so, mangle the statics. Always mangle all
852 conflicting statics, so we reduce the chance of silently miscompiling
853 asm statements referring to them by symbol name. */
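/* A hypothetical scenario: two object files each define a static function
   "helper" and both land in the same partition.  If only one copy were
   renamed, an asm statement referring to "helper" by name could silently
   resolve to the wrong copy; renaming all conflicting statics makes such a
   reference fail visibly instead.  */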
854
855 static void
856 rename_statics (lto_symtab_encoder_t encoder, symtab_node node)
857 {
858 tree decl = node->symbol.decl;
859 symtab_node s;
860 tree name = DECL_ASSEMBLER_NAME (decl);
861
862 /* See if this is static symbol. */
863 if ((node->symbol.externally_visible
864 /* FIXME: externally_visible is somewhat illogically not set for
865 external symbols (i.e. those not defined). Remove this test
866 once this is fixed. */
867 || DECL_EXTERNAL (node->symbol.decl)
868 || !symtab_real_symbol_p (node))
869 && !may_need_named_section_p (encoder, node))
870 return;
871
872 /* Now walk symbols sharing the same name and see if there are any conflicts.
873 (All types of symbols count here, since we cannot have a static with the
874 same name as an external or public symbol.) */
875 for (s = symtab_node_for_asm (name);
876 s; s = s->symbol.next_sharing_asm_name)
877 if ((symtab_real_symbol_p (s) || may_need_named_section_p (encoder, s))
878 && s->symbol.decl != node->symbol.decl
879 && (!encoder
880 || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
881 break;
882
883 /* OK, no conflict, so we have nothing to do. */
884 if (!s)
885 return;
886
887 if (cgraph_dump_file)
888 fprintf (cgraph_dump_file,
889 "Renaming statics with asm name: %s\n", symtab_node_name (node));
890
891 /* Assign every symbol in the set that shares the same ASM name a unique
892 mangled name. */
893 for (s = symtab_node_for_asm (name); s;)
894 if (!s->symbol.externally_visible
895 && ((symtab_real_symbol_p (s)
896 && !DECL_EXTERNAL (node->symbol.decl)
897 && !TREE_PUBLIC (node->symbol.decl))
898 || may_need_named_section_p (encoder, s))
899 && (!encoder
900 || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
901 {
902 if (privatize_symbol_name (s))
903 /* Restart from the beginning since we do not know how many symbols changed names. */
904 s = symtab_node_for_asm (name);
905 else s = s->symbol.next_sharing_asm_name;
906 }
907 else s = s->symbol.next_sharing_asm_name;
908 }
909
910 /* Find all static decls that need to be promoted to global because
911 of cross-file sharing. This function must be run in WPA mode after
912 all inlinees are added. */
913
914 void
915 lto_promote_cross_file_statics (void)
916 {
917 unsigned i, n_sets;
918
919 gcc_assert (flag_wpa);
920
921 /* First compute boundaries. */
922 n_sets = ltrans_partitions.length ();
923 for (i = 0; i < n_sets; i++)
924 {
925 ltrans_partition part
926 = ltrans_partitions[i];
927 part->encoder = compute_ltrans_boundary (part->encoder);
928 }
929
930 /* Look at boundaries and promote symbols as needed. */
931 for (i = 0; i < n_sets; i++)
932 {
933 lto_symtab_encoder_iterator lsei;
934 lto_symtab_encoder_t encoder = ltrans_partitions[i]->encoder;
935
936 for (lsei = lsei_start (encoder); !lsei_end_p (lsei);
937 lsei_next (&lsei))
938 {
939 symtab_node node = lsei_node (lsei);
940
941 /* If the symbol is static, rename it if its assembler name clashes with
942 anything else in this unit. */
943 rename_statics (encoder, node);
944
945 /* No need to promote if symbol already is externally visible ... */
946 if (node->symbol.externally_visible
947 /* ... or if it is part of current partition ... */
948 || lto_symtab_encoder_in_partition_p (encoder, node)
949 /* ... or if we do not partition it. This means that it will
950 appear in every partition referencing it. */
951 || get_symbol_class ((symtab_node) node) != SYMBOL_PARTITION)
952 continue;
953
954 promote_symbol (node);
955 }
956 }
957 }
958
959 /* Rename statics in the whole unit in the case that
960 we do -flto-partition=none. */
961
962 void
963 lto_promote_statics_nonwpa (void)
964 {
965 symtab_node node;
966 FOR_EACH_SYMBOL (node)
967 rename_statics (NULL, node);
968 }