]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/lto/lto-partition.c
lto-cgraph.c (lto_symtab_encoder_new): New parameter FOR_INPUT.
[thirdparty/gcc.git] / gcc / lto / lto-partition.c
1 /* LTO partitioning logic routines.
2 Copyright 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "toplev.h"
24 #include "tree.h"
25 #include "tm.h"
26 #include "cgraph.h"
27 #include "lto-streamer.h"
28 #include "timevar.h"
29 #include "params.h"
30 #include "ipa-inline.h"
31 #include "ipa-utils.h"
32 #include "lto-partition.h"
33
/* Classification of symbols into classes WRT partitioning.  */
enum symbol_class
{
   /* External declarations are ignored by partitioning algorithms and they are
      added into the boundary later via compute_ltrans_boundary.  */
   SYMBOL_EXTERNAL,
   /* Partitioned symbols are put into one of partitions.  */
   SYMBOL_PARTITION,
   /* Duplicated symbols (such as comdat or constant pool references) are
      copied into every node needing them via add_symbol_to_partition.  */
   SYMBOL_DUPLICATE
};
46
/* All partitions built so far; filled by the lto_*_map entry points below
   and released by free_ltrans_partitions.  */
VEC(ltrans_partition, heap) *ltrans_partitions;

static void add_symbol_to_partition (ltrans_partition part, symtab_node node);
50
/* Classify symbol NODE into one of the symbol_class categories that drive
   the partitioning algorithms below.  */

enum symbol_class
get_symbol_class (symtab_node node)
{
  /* Inline clones are always duplicated.
     This includes external declarations.  */
  if (symtab_function_p (node)
      && cgraph (node)->global.inlined_to)
    return SYMBOL_DUPLICATE;

  /* External declarations are external.  */
  if (DECL_EXTERNAL (node->symbol.decl))
    return SYMBOL_EXTERNAL;

  if (symtab_variable_p (node))
    {
      /* Constant pool references use local symbol names that can not
	 be promoted global.  We should never put into a constant pool
	 objects that can not be duplicated across partitions.  */
      if (DECL_IN_CONSTANT_POOL (node->symbol.decl))
	return SYMBOL_DUPLICATE;
      gcc_checking_assert (varpool (node)->analyzed);
    }
  /* Functions that are cloned may stay in callgraph even if they are unused.
     Handle them as external; compute_ltrans_boundary takes care to make
     proper things happen (i.e. to make them appear in the boundary but
     with body streamed, so clone can be materialized).  */
  else if (!cgraph (node)->analyzed)
    return SYMBOL_EXTERNAL;

  /* Weakref aliases are always duplicated.  */
  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (node->symbol.decl)))
    return SYMBOL_DUPLICATE;

  /* Comdats are duplicated to every use unless they are keyed.
     Those do not need duplication.  */
  if (DECL_COMDAT (node->symbol.decl)
      && !node->symbol.force_output
      && !symtab_used_from_object_file_p ((symtab_node) node))
    return SYMBOL_DUPLICATE;

  return SYMBOL_PARTITION;
}
95
96 /* Create new partition with name NAME. */
97
98 static ltrans_partition
99 new_partition (const char *name)
100 {
101 ltrans_partition part = XCNEW (struct ltrans_partition_def);
102 part->encoder = lto_symtab_encoder_new (false);
103 part->name = name;
104 part->insns = 0;
105 VEC_safe_push (ltrans_partition, heap, ltrans_partitions, part);
106 return part;
107 }
108
109 /* Free memory used by ltrans datastructures. */
110
111 void
112 free_ltrans_partitions (void)
113 {
114 unsigned int idx;
115 ltrans_partition part;
116 for (idx = 0; VEC_iterate (ltrans_partition, ltrans_partitions, idx, part); idx++)
117 {
118 if (part->initializers_visited)
119 pointer_set_destroy (part->initializers_visited);
120 /* Symtab encoder is freed after streaming. */
121 free (part);
122 }
123 VEC_free (ltrans_partition, heap, ltrans_partitions);
124 }
125
126 /* Return true if symbol is already in some partition. */
127
128 static inline bool
129 symbol_partitioned_p (symtab_node node)
130 {
131 return node->symbol.aux;
132 }
133
/* Add into PART every reference of NODE that must be duplicated along
   with it.  */
static void
add_references_to_partition (ltrans_partition part, symtab_node node)
{
  int i;
  struct ipa_ref *ref;

  /* Add all duplicated references to the partition.  */
  for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list, i, ref); i++)
    if (get_symbol_class (ref->referred) == SYMBOL_DUPLICATE)
      add_symbol_to_partition (part, ref->referred);
    /* References to a readonly variable may be constant folded into its value.
       Recursively look into the initializers of the constant variable and add
       references, too.  */
    else if (symtab_variable_p (ref->referred)
	     && const_value_known_p (ref->referred->symbol.decl)
	     && !lto_symtab_encoder_in_partition_p (part->encoder, ref->referred))
      {
	if (!part->initializers_visited)
	  part->initializers_visited = pointer_set_create ();
	/* pointer_set_insert returns nonzero when the entry was already
	   present; recurse only on first visit so cyclic initializer
	   references cannot loop forever.  */
	if (!pointer_set_insert (part->initializers_visited, ref->referred))
	  add_references_to_partition (part, ref->referred);
      }
}
158
/* Helper function for add_symbol_to_partition doing the actual dirty work
   of adding NODE to PART.  Return false when NODE is a non-duplicated,
   non-comdat symbol that already lives in another partition; true
   otherwise.  */

static bool
add_symbol_to_partition_1 (ltrans_partition part, symtab_node node)
{
  enum symbol_class c = get_symbol_class (node);
  int i;
  struct ipa_ref *ref;
  symtab_node node1;

  /* If NODE is already there, we have nothing to do.  */
  if (lto_symtab_encoder_in_partition_p (part->encoder, (symtab_node) node))
    return true;

  /* Non-duplicated aliases or thunks of a duplicated symbol need to be output
     just once.

     Be lax about comdats; they may or may not be duplicated and we may
     end up in need to duplicate keyed comdat because it has unkeyed alias.  */
  if (c == SYMBOL_PARTITION && !DECL_COMDAT (node->symbol.decl)
      && symbol_partitioned_p (node))
    return false;

  /* Be sure that we never try to duplicate partitioned symbol
     or add external symbol.  */
  gcc_assert (c != SYMBOL_EXTERNAL
	      && (c == SYMBOL_DUPLICATE || !symbol_partitioned_p (node)));

  lto_set_symtab_encoder_in_partition (part->encoder, (symtab_node) node);

  if (symbol_partitioned_p (node))
    {
      node->symbol.in_other_partition = 1;
      if (cgraph_dump_file)
	fprintf (cgraph_dump_file, "Symbol node %s now used in multiple partitions\n",
		 symtab_node_name (node));
    }
  /* AUX doubles as a count of partitions containing NODE; undo_partition
     decrements it again.  */
  node->symbol.aux = (void *)((size_t)node->symbol.aux + 1);

  if (symtab_function_p (node))
    {
      struct cgraph_node *cnode = cgraph (node);
      struct cgraph_edge *e;
      part->insns += inline_summary (cnode)->self_size;

      /* Add all inline clones and callees that are duplicated.  */
      for (e = cnode->callees; e; e = e->next_callee)
	if (!e->inline_failed)
	  add_symbol_to_partition_1 (part, (symtab_node) e->callee);
	else if (get_symbol_class ((symtab_node) e->callee) == SYMBOL_DUPLICATE)
	  add_symbol_to_partition (part, (symtab_node) e->callee);

      /* Add all thunks associated with the function.  */
      for (e = cnode->callers; e; e = e->next_caller)
	if (e->caller->thunk.thunk_p)
	  add_symbol_to_partition_1 (part, (symtab_node) e->caller);
    }

  add_references_to_partition (part, node);

  /* Add all aliases associated with the symbol.  */
  for (i = 0; ipa_ref_list_referring_iterate (&node->symbol.ref_list, i, ref); i++)
    if (ref->use == IPA_REF_ALIAS
	&& !lookup_attribute ("weakref",
			      DECL_ATTRIBUTES
				(ref->referring->symbol.decl)))
      add_symbol_to_partition_1 (part, ref->referring);

  /* Ensure that SAME_COMDAT_GROUP lists all always added in a group.  */
  if (node->symbol.same_comdat_group)
    for (node1 = node->symbol.same_comdat_group;
	 node1 != node; node1 = node1->symbol.same_comdat_group)
      {
	bool added = add_symbol_to_partition_1 (part, node1);
	gcc_assert (added);
      }
  return true;
}
238
/* If symbol NODE is really part of other symbol's definition (i.e. it is
   internal label, thunk, alias or so), return the outer symbol.
   When add_symbol_to_partition_1 is called on the outer symbol it must
   eventually add NODE, too.  */
static symtab_node
contained_in_symbol (symtab_node node)
{
  /* Weakrefs are never contained in anything.  */
  if (lookup_attribute ("weakref",
			DECL_ATTRIBUTES (node->symbol.decl)))
    return node;
  if (symtab_function_p (node))
    {
      /* Walk alias chains, then hop from an inline clone to the function
	 it was inlined into — that is the symbol actually output.  */
      struct cgraph_node *cnode = cgraph_function_node (cgraph (node), NULL);
      if (cnode->global.inlined_to)
	cnode = cnode->global.inlined_to;
      return (symtab_node) cnode;
    }
  else if (symtab_variable_p (node))
    return (symtab_node) varpool_variable_node (varpool (node), NULL);
  return node;
}
261
/* Add symbol NODE to partition PART.  When definition of NODE is part
   of other symbol definition, add the other symbol, too.  */

static void
add_symbol_to_partition (ltrans_partition part, symtab_node node)
{
  symtab_node node1;

  /* Verify that we do not try to duplicate something that can not be.  */
  gcc_checking_assert (get_symbol_class (node) == SYMBOL_DUPLICATE
		       || !symbol_partitioned_p (node));

  /* Climb to the outermost symbol NODE is contained in; adding that one
     will eventually add NODE as well.  */
  while ((node1 = contained_in_symbol (node)) != node)
    node = node1;

  /* If we have duplicated symbol contained in something we can not duplicate,
     we are very badly screwed.  The other way is possible, so we do not
     assert this in add_symbol_to_partition_1.

     Be lax about comdats; they may or may not be duplicated and we may
     end up in need to duplicate keyed comdat because it has unkeyed alias.  */
  gcc_assert (get_symbol_class (node) == SYMBOL_DUPLICATE
	      || DECL_COMDAT (node->symbol.decl)
	      || !symbol_partitioned_p (node));
  add_symbol_to_partition_1 (part, node);
}
288
/* Undo additions to PARTITION until only the first N_NODES symbols of its
   encoder remain; roll back the insn count and the per-symbol aux
   reference counts accordingly.  */

static void
undo_partition (ltrans_partition partition, unsigned int n_nodes)
{
  while (lto_symtab_encoder_size (partition->encoder) > (int)n_nodes)
    {
      /* Deleting shrinks the encoder, so position N_NODES is always the
	 next symbol to remove.  */
      symtab_node node = lto_symtab_encoder_deref (partition->encoder,
						   n_nodes);

      /* After UNDO we no longer know what was visited.  */
      if (partition->initializers_visited)
	pointer_set_destroy (partition->initializers_visited);
      partition->initializers_visited = NULL;

      if (symtab_function_p (node))
	partition->insns -= inline_summary (cgraph (node))->self_size;
      lto_symtab_encoder_delete_node (partition->encoder, node);
      /* Drop our reference from the partition membership count.  */
      node->symbol.aux = (void *)((size_t)node->symbol.aux - 1);
    }
}
311
312 /* Group cgrah nodes by input files. This is used mainly for testing
313 right now. */
314
315 void
316 lto_1_to_1_map (void)
317 {
318 symtab_node node;
319 struct lto_file_decl_data *file_data;
320 struct pointer_map_t *pmap;
321 ltrans_partition partition;
322 void **slot;
323 int npartitions = 0;
324
325 pmap = pointer_map_create ();
326
327 FOR_EACH_SYMBOL (node)
328 {
329 if (get_symbol_class (node) != SYMBOL_PARTITION
330 || symbol_partitioned_p (node))
331 continue;
332
333 file_data = node->symbol.lto_file_data;
334
335 if (file_data)
336 {
337 slot = pointer_map_contains (pmap, file_data);
338 if (slot)
339 partition = (ltrans_partition) *slot;
340 else
341 {
342 partition = new_partition (file_data->file_name);
343 slot = pointer_map_insert (pmap, file_data);
344 *slot = partition;
345 npartitions++;
346 }
347 }
348 else if (!file_data
349 && VEC_length (ltrans_partition, ltrans_partitions))
350 partition = VEC_index (ltrans_partition, ltrans_partitions, 0);
351 else
352 {
353 partition = new_partition ("");
354 slot = pointer_map_insert (pmap, NULL);
355 *slot = partition;
356 npartitions++;
357 }
358
359 add_symbol_to_partition (partition, (symtab_node) node);
360 }
361
362 /* If the cgraph is empty, create one cgraph node set so that there is still
363 an output file for any variables that need to be exported in a DSO. */
364 if (!npartitions)
365 new_partition ("empty");
366
367 pointer_map_destroy (pmap);
368
369 }
370
371 /* Maximal partitioning. Put every new symbol into new partition if possible. */
372
373 void
374 lto_max_map (void)
375 {
376 symtab_node node;
377 ltrans_partition partition;
378 int npartitions = 0;
379
380 FOR_EACH_SYMBOL (node)
381 {
382 if (get_symbol_class (node) != SYMBOL_PARTITION
383 || symbol_partitioned_p (node))
384 continue;
385 partition = new_partition (symtab_node_asm_name (node));
386 add_symbol_to_partition (partition, (symtab_node) node);
387 npartitions++;
388 }
389 if (!npartitions)
390 new_partition ("empty");
391 }
392
393 /* Helper function for qsort; sort nodes by order. */
394 static int
395 node_cmp (const void *pa, const void *pb)
396 {
397 const struct cgraph_node *a = *(const struct cgraph_node * const *) pa;
398 const struct cgraph_node *b = *(const struct cgraph_node * const *) pb;
399 return b->symbol.order - a->symbol.order;
400 }
401
402 /* Helper function for qsort; sort nodes by order. */
403 static int
404 varpool_node_cmp (const void *pa, const void *pb)
405 {
406 const struct varpool_node *a = *(const struct varpool_node * const *) pa;
407 const struct varpool_node *b = *(const struct varpool_node * const *) pb;
408 return b->symbol.order - a->symbol.order;
409 }
410
411 /* Group cgraph nodes into equally-sized partitions.
412
413 The partitioning algorithm is simple: nodes are taken in predefined order.
414 The order corresponds to the order we want functions to have in the final
415 output. In the future this will be given by function reordering pass, but
416 at the moment we use the topological order, which is a good approximation.
417
418 The goal is to partition this linear order into intervals (partitions) so
419 that all the partitions have approximately the same size and the number of
420 callgraph or IPA reference edges crossing boundaries is minimal.
421
422 This is a lot faster (O(n) in size of callgraph) than algorithms doing
423 priority-based graph clustering that are generally O(n^2) and, since
424 WHOPR is designed to make things go well across partitions, it leads
425 to good results.
426
427 We compute the expected size of a partition as:
428
429 max (total_size / lto_partitions, min_partition_size)
430
431 We use dynamic expected size of partition so small programs are partitioned
432 into enough partitions to allow use of multiple CPUs, while large programs
433 are not partitioned too much. Creating too many partitions significantly
434 increases the streaming overhead.
435
436 In the future, we would like to bound the maximal size of partitions so as
437 to prevent the LTRANS stage from consuming too much memory. At the moment,
438 however, the WPA stage is the most memory intensive for large benchmarks,
439 since too many types and declarations are read into memory.
440
441 The function implements a simple greedy algorithm. Nodes are being added
442 to the current partition until after 3/4 of the expected partition size is
443 reached. Past this threshold, we keep track of boundary size (number of
444 edges going to other partitions) and continue adding functions until after
445 the current partition has grown to twice the expected partition size. Then
446 the process is undone to the point where the minimal ratio of boundary size
447 and in-partition calls was reached. */
448
void
lto_balanced_map (void)
{
  int n_nodes = 0;
  int n_varpool_nodes = 0, varpool_pos = 0;
  struct cgraph_node **postorder =
    XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  struct cgraph_node **order = XNEWVEC (struct cgraph_node *, cgraph_max_uid);
  struct varpool_node **varpool_order = NULL;
  int i, postorder_len;
  struct cgraph_node *node;
  int total_size = 0, best_total_size = 0;
  int partition_size;
  ltrans_partition partition;
  int last_visited_node = 0;
  struct varpool_node *vnode;
  /* COST is the number of edges crossing the current partition boundary;
     INTERNAL counts edges kept inside it.  */
  int cost = 0, internal = 0;
  int best_n_nodes = 0, best_i = 0, best_cost =
    INT_MAX, best_internal = 0;
  int npartitions;
  int current_order = -1;

  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->symbol.aux);
  /* Until we have better ordering facility, use topological order.
     Include only nodes we will partition and compute estimate of program
     size.  Note that since nodes that are not partitioned might be put into
     multiple partitions, this is just an estimate of real size.  This is why
     we keep partition_size updated after every partition is finalized.  */
  postorder_len = ipa_reverse_postorder (postorder);

  for (i = 0; i < postorder_len; i++)
    {
      node = postorder[i];
      if (get_symbol_class ((symtab_node) node) == SYMBOL_PARTITION)
	{
	  order[n_nodes++] = node;
	  total_size += inline_summary (node)->size;
	}
    }
  free (postorder);

  if (!flag_toplevel_reorder)
    {
      /* Without toplevel reorder, variables must be emitted in their
	 original textual order; sort both arrays by symbol order so the
	 main loop below can interleave them.  */
      qsort (order, n_nodes, sizeof (struct cgraph_node *), node_cmp);

      FOR_EACH_VARIABLE (vnode)
	if (get_symbol_class ((symtab_node) vnode) == SYMBOL_PARTITION)
	  n_varpool_nodes++;
      varpool_order = XNEWVEC (struct varpool_node *, n_varpool_nodes);

      n_varpool_nodes = 0;
      FOR_EACH_VARIABLE (vnode)
	if (get_symbol_class ((symtab_node) vnode) == SYMBOL_PARTITION)
	  varpool_order[n_varpool_nodes++] = vnode;
      qsort (varpool_order, n_varpool_nodes, sizeof (struct varpool_node *),
	     varpool_node_cmp);
    }

  /* Compute partition size and create the first partition.  */
  partition_size = total_size / PARAM_VALUE (PARAM_LTO_PARTITIONS);
  if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
    partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
  npartitions = 1;
  partition = new_partition ("");
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Total unit size: %i, partition size: %i\n",
	     total_size, partition_size);

  for (i = 0; i < n_nodes; i++)
    {
      if (symbol_partitioned_p ((symtab_node) order[i]))
	continue;

      current_order = order[i]->symbol.order;

      /* Flush out variables that textually precede ORDER[i] first, to
	 keep the original declaration order when not reordering.  */
      if (!flag_toplevel_reorder)
	while (varpool_pos < n_varpool_nodes
	       && varpool_order[varpool_pos]->symbol.order < current_order)
	  {
	    if (!symbol_partitioned_p ((symtab_node) varpool_order[varpool_pos]))
	      add_symbol_to_partition (partition, (symtab_node) varpool_order[varpool_pos]);
	    varpool_pos++;
	  }

      add_symbol_to_partition (partition, (symtab_node) order[i]);
      total_size -= inline_summary (order[i])->size;


      /* Once we added a new node to the partition, we also want to add
	 all referenced variables unless they were already added into some
	 earlier partition.
	 add_symbol_to_partition adds possibly multiple nodes and
	 variables that are needed to satisfy needs of ORDER[i].
	 We remember last visited cgraph and varpool node from last iteration
	 of outer loop that allows us to process every new addition.

	 At the same time we compute size of the boundary into COST.  Every
	 callgraph or IPA reference edge leaving the partition contributes into
	 COST.  Every edge inside partition was earlier computed as one leaving
	 it and thus we need to subtract it from COST.  */
      while (last_visited_node < lto_symtab_encoder_size (partition->encoder))
	{
	  struct ipa_ref_list *refs;
	  int j;
	  struct ipa_ref *ref;
	  symtab_node snode = lto_symtab_encoder_deref (partition->encoder,
							last_visited_node);

	  if (symtab_function_p (snode))
	    {
	      struct cgraph_edge *edge;

	      node = cgraph (snode);
	      refs = &node->symbol.ref_list;

	      last_visited_node++;

	      gcc_assert (node->analyzed);

	      /* Compute boundary cost of callgraph edges.  */
	      for (edge = node->callees; edge; edge = edge->next_callee)
		if (edge->callee->analyzed)
		  {
		    int edge_cost = edge->frequency;
		    int index;

		    if (!edge_cost)
		      edge_cost = 1;
		    gcc_assert (edge_cost > 0);
		    index = lto_symtab_encoder_lookup (partition->encoder,
						       (symtab_node)edge->callee);
		    /* INDEX < LAST_VISITED_NODE - 1 means the callee is
		       already inside this partition and was counted as
		       leaving when it was visited; undo that.  */
		    if (index != LCC_NOT_FOUND
			&& index < last_visited_node - 1)
		      cost -= edge_cost, internal += edge_cost;
		    else
		      cost += edge_cost;
		  }
	      for (edge = node->callers; edge; edge = edge->next_caller)
		{
		  int edge_cost = edge->frequency;
		  int index;

		  gcc_assert (edge->caller->analyzed);
		  if (!edge_cost)
		    edge_cost = 1;
		  gcc_assert (edge_cost > 0);
		  index = lto_symtab_encoder_lookup (partition->encoder,
						     (symtab_node)edge->caller);
		  if (index != LCC_NOT_FOUND
		      && index < last_visited_node - 1)
		    cost -= edge_cost;
		  else
		    cost += edge_cost;
		}
	    }
	  else
	    {
	      refs = &snode->symbol.ref_list;
	      last_visited_node++;
	    }

	  /* Compute boundary cost of IPA REF edges and at the same time look into
	     variables referenced from current partition and try to add them.  */
	  for (j = 0; ipa_ref_list_reference_iterate (refs, j, ref); j++)
	    if (symtab_variable_p (ref->referred))
	      {
		int index;

		vnode = ipa_ref_varpool_node (ref);
		if (!vnode->finalized)
		  continue;
		if (!symbol_partitioned_p ((symtab_node) vnode) && flag_toplevel_reorder
		    && get_symbol_class ((symtab_node) vnode) == SYMBOL_PARTITION)
		  add_symbol_to_partition (partition, (symtab_node) vnode);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   (symtab_node)vnode);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	    else
	      {
		int index;

		node = ipa_ref_node (ref);
		if (!node->analyzed)
		  continue;
		index = lto_symtab_encoder_lookup (partition->encoder,
						   (symtab_node)node);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	  for (j = 0; ipa_ref_list_referring_iterate (refs, j, ref); j++)
	    if (symtab_variable_p (ref->referring))
	      {
		int index;

		vnode = ipa_ref_referring_varpool_node (ref);
		gcc_assert (vnode->finalized);
		if (!symbol_partitioned_p ((symtab_node) vnode) && flag_toplevel_reorder
		    && get_symbol_class ((symtab_node) vnode) == SYMBOL_PARTITION)
		  add_symbol_to_partition (partition, (symtab_node) vnode);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   (symtab_node)vnode);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--;
		else
		  cost++;
	      }
	    else
	      {
		int index;

		node = ipa_ref_referring_node (ref);
		gcc_assert (node->analyzed);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   (symtab_node)node);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--;
		else
		  cost++;
	      }
	}

      /* If the partition is large enough, start looking for smallest boundary cost.
	 Best candidate maximizes INTERNAL/COST; the cross-multiplication below
	 compares the ratios without division (widened to HOST_WIDE_INT to
	 avoid overflow).  */
      if (partition->insns < partition_size * 3 / 4
	  || best_cost == INT_MAX
	  || ((!cost
	       || (best_internal * (HOST_WIDE_INT) cost
		   > (internal * (HOST_WIDE_INT)best_cost)))
	      && partition->insns < partition_size * 5 / 4))
	{
	  best_cost = cost;
	  best_internal = internal;
	  best_i = i;
	  best_n_nodes = lto_symtab_encoder_size (partition->encoder);
	  best_total_size = total_size;
	}
      if (cgraph_dump_file)
	fprintf (cgraph_dump_file, "Step %i: added %s/%i, size %i, cost %i/%i best %i/%i, step %i\n", i,
		 cgraph_node_name (order[i]), order[i]->uid, partition->insns, cost, internal,
		 best_cost, best_internal, best_i);
      /* Partition is too large, unwind into step when best cost was reached and
	 start new partition.  */
      if (partition->insns > 2 * partition_size)
	{
	  if (best_i != i)
	    {
	      if (cgraph_dump_file)
		fprintf (cgraph_dump_file, "Unwinding %i insertions to step %i\n",
			 i - best_i, best_i);
	      undo_partition (partition, best_n_nodes);
	    }
	  i = best_i;
	  /* When we are finished, avoid creating empty partition.  */
	  while (i < n_nodes - 1 && symbol_partitioned_p ((symtab_node) order[i + 1]))
	    i++;
	  if (i == n_nodes - 1)
	    break;
	  partition = new_partition ("");
	  last_visited_node = 0;
	  total_size = best_total_size;
	  cost = 0;

	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file, "New partition\n");
	  best_n_nodes = 0;
	  best_cost = INT_MAX;

	  /* Since the size of partitions is just approximate, update the size after
	     we finished current one.  */
	  if (npartitions < PARAM_VALUE (PARAM_LTO_PARTITIONS))
	    partition_size = total_size
	      / (PARAM_VALUE (PARAM_LTO_PARTITIONS) - npartitions);
	  else
	    partition_size = INT_MAX;

	  if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
	    partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
	  npartitions ++;
	}
    }

  /* Variables that are not reachable from the code go into last partition.  */
  if (flag_toplevel_reorder)
    {
      FOR_EACH_VARIABLE (vnode)
	if (get_symbol_class ((symtab_node) vnode) == SYMBOL_PARTITION
	    && !symbol_partitioned_p ((symtab_node) vnode))
	  add_symbol_to_partition (partition, (symtab_node) vnode);
    }
  else
    {
      while (varpool_pos < n_varpool_nodes)
	{
	  if (!symbol_partitioned_p ((symtab_node) varpool_order[varpool_pos]))
	    add_symbol_to_partition (partition, (symtab_node) varpool_order[varpool_pos]);
	  varpool_pos++;
	}
      free (varpool_order);
    }
  free (order);
}
760
/* Promote symbol NODE to be global with hidden visibility, so it can be
   referenced from other LTRANS partitions.  */

static void
promote_symbol (symtab_node node)
{
  /* We already promoted ... */
  if (DECL_VISIBILITY (node->symbol.decl) == VISIBILITY_HIDDEN
      && DECL_VISIBILITY_SPECIFIED (node->symbol.decl)
      && TREE_PUBLIC (node->symbol.decl))
    return;

  /* Only local, non-external symbols should ever need promotion here.  */
  gcc_checking_assert (!TREE_PUBLIC (node->symbol.decl)
		       && !DECL_EXTERNAL (node->symbol.decl));
  TREE_PUBLIC (node->symbol.decl) = 1;
  DECL_VISIBILITY (node->symbol.decl) = VISIBILITY_HIDDEN;
  DECL_VISIBILITY_SPECIFIED (node->symbol.decl) = true;
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file,
	    "Promoting as hidden: %s\n", symtab_node_name (node));
}
781
782
/* Find out all static decls that need to be promoted to global because
   of cross file sharing.  This function must be run in the WPA mode after
   all inlinees are added.  */

void
lto_promote_cross_file_statics (void)
{
  unsigned i, n_sets;

  gcc_assert (flag_wpa);

  /* First compute boundaries.  */
  n_sets = VEC_length (ltrans_partition, ltrans_partitions);
  for (i = 0; i < n_sets; i++)
    {
      ltrans_partition part
	= VEC_index (ltrans_partition, ltrans_partitions, i);
      part->encoder = compute_ltrans_boundary (part->encoder);
    }

  /* Look at boundaries and promote symbols as needed.  */
  for (i = 0; i < n_sets; i++)
    {
      lto_symtab_encoder_iterator lsei;
      lto_symtab_encoder_t encoder;
      ltrans_partition part
	= VEC_index (ltrans_partition, ltrans_partitions, i);

      encoder = part->encoder;
      for (lsei = lsei_start (encoder); !lsei_end_p (lsei);
	   lsei_next (&lsei))
	{
	  symtab_node node = lsei_node (lsei);

	  /* No need to promote if symbol already is externally visible ... */
	  if (node->symbol.externally_visible
	      /* ... or if it is part of current partition ... */
	      || lto_symtab_encoder_in_partition_p (encoder, node)
	      /* ... or if we do not partition it.  This means that it will
		 appear in every partition referencing it.  */
	      || get_symbol_class ((symtab_node) node) != SYMBOL_PARTITION)
	    continue;

	  promote_symbol (node);
	}
    }
}