]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/lto-cgraph.c
2015-06-25 Andrew MacLeod <amacleod@redhat.com>
[thirdparty/gcc.git] / gcc / lto-cgraph.c
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright (C) 2009-2015 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "alias.h"
28 #include "symtab.h"
29 #include "tree.h"
30 #include "fold-const.h"
31 #include "stringpool.h"
32 #include "predict.h"
33 #include "hard-reg-set.h"
34 #include "function.h"
35 #include "basic-block.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-expr.h"
39 #include "gimple.h"
40 #include "rtl.h"
41 #include "flags.h"
42 #include "insn-config.h"
43 #include "expmed.h"
44 #include "dojump.h"
45 #include "explow.h"
46 #include "calls.h"
47 #include "emit-rtl.h"
48 #include "varasm.h"
49 #include "stmt.h"
50 #include "expr.h"
51 #include "params.h"
52 #include "langhooks.h"
53 #include "bitmap.h"
54 #include "diagnostic-core.h"
55 #include "except.h"
56 #include "timevar.h"
57 #include "cgraph.h"
58 #include "lto-streamer.h"
59 #include "data-streamer.h"
60 #include "tree-streamer.h"
61 #include "gcov-io.h"
62 #include "tree-pass.h"
63 #include "profile.h"
64 #include "context.h"
65 #include "pass_manager.h"
66 #include "ipa-utils.h"
67 #include "omp-low.h"
68 #include "ipa-chkp.h"
69
/* True when asm nodes have been output.  */
71 bool asm_nodes_output = false;
72
73 static void output_cgraph_opt_summary (void);
74 static void input_cgraph_opt_summary (vec<symtab_node *> nodes);
75
76 /* Number of LDPR values known to GCC. */
77 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
78
/* All node orders are offset by ORDER_BASE.  */
80 static int order_base;
81
/* Cgraph streaming is organized as a set of records whose type
   is indicated by a tag.  */
enum LTO_symtab_tags
{
  /* Must leave 0 for the stopper.  */

  /* Cgraph node without body available.  */
  LTO_symtab_unavail_node = 1,
  /* Cgraph node with function body.  */
  LTO_symtab_analyzed_node,
  /* Cgraph edges.  */
  LTO_symtab_edge,
  /* Indirect call edge (no known callee).  */
  LTO_symtab_indirect_edge,
  /* Varpool node.  */
  LTO_symtab_variable,
  /* Sentinel; must stay last.  */
  LTO_symtab_last_tag
};
98
99 /* Create a new symtab encoder.
100 if FOR_INPUT, the encoder allocate only datastructures needed
101 to read the symtab. */
102
103 lto_symtab_encoder_t
104 lto_symtab_encoder_new (bool for_input)
105 {
106 lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
107
108 if (!for_input)
109 encoder->map = new hash_map<symtab_node *, size_t>;
110 encoder->nodes.create (0);
111 return encoder;
112 }
113
114
115 /* Delete ENCODER and its components. */
116
117 void
118 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
119 {
120 encoder->nodes.release ();
121 if (encoder->map)
122 delete encoder->map;
123 free (encoder);
124 }
125
126
/* Return the existing reference number of NODE in the symtab encoder in
   output block OB.  Assign a new reference if this is the first time
   NODE is encoded.  */

int
lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
			   symtab_node *node)
{
  int ref;

  /* Input encoders have no map (see lto_symtab_encoder_new); every call
     simply appends NODE and returns the new index.  */
  if (!encoder->map)
    {
      lto_encoder_entry entry = {node, false, false, false};

      ref = encoder->nodes.length ();
      encoder->nodes.safe_push (entry);
      return ref;
    }

  /* The map stores index + 1 so that a value of 0 means "not present".  */
  size_t *slot = encoder->map->get (node);
  if (!slot || !*slot)
    {
      lto_encoder_entry entry = {node, false, false, false};
      ref = encoder->nodes.length ();
      if (!slot)
	encoder->map->put (node, ref + 1);
      encoder->nodes.safe_push (entry);
    }
  else
    ref = *slot - 1;

  return ref;
}
160
161 /* Remove NODE from encoder. */
162
163 bool
164 lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
165 symtab_node *node)
166 {
167 int index;
168 lto_encoder_entry last_node;
169
170 size_t *slot = encoder->map->get (node);
171 if (slot == NULL || !*slot)
172 return false;
173
174 index = *slot - 1;
175 gcc_checking_assert (encoder->nodes[index].node == node);
176
177 /* Remove from vector. We do this by swapping node with the last element
178 of the vector. */
179 last_node = encoder->nodes.pop ();
180 if (last_node.node != node)
181 {
182 gcc_assert (encoder->map->put (last_node.node, index + 1));
183
184 /* Move the last element to the original spot of NODE. */
185 encoder->nodes[index] = last_node;
186 }
187
188 /* Remove element from hash table. */
189 encoder->map->remove (node);
190 return true;
191 }
192
193
/* Return TRUE if we should encode the body of NODE (if any).
   NODE must already be in ENCODER — unlike the initializer predicate
   below, no LCC_NOT_FOUND check is performed here.  */

bool
lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
				  struct cgraph_node *node)
{
  int index = lto_symtab_encoder_lookup (encoder, node);
  return encoder->nodes[index].body;
}
203
204 /* Specify that we encode the body of NODE in this partition. */
205
206 static void
207 lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
208 struct cgraph_node *node)
209 {
210 int index = lto_symtab_encoder_encode (encoder, node);
211 gcc_checking_assert (encoder->nodes[index].node == node);
212 encoder->nodes[index].body = true;
213 }
214
215 /* Return TRUE if we should encode initializer of NODE (if any). */
216
217 bool
218 lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
219 varpool_node *node)
220 {
221 int index = lto_symtab_encoder_lookup (encoder, node);
222 if (index == LCC_NOT_FOUND)
223 return false;
224 return encoder->nodes[index].initializer;
225 }
226
/* Specify that we should encode the initializer of NODE (if any).
   NODE must already be in ENCODER — no LCC_NOT_FOUND check is done
   here, unlike in the _p predicate above.  */

static void
lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
					   varpool_node *node)
{
  int index = lto_symtab_encoder_lookup (encoder, node);
  encoder->nodes[index].initializer = true;
}
236
237 /* Return TRUE if NODE is in this partition. */
238
239 bool
240 lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
241 symtab_node *node)
242 {
243 int index = lto_symtab_encoder_lookup (encoder, node);
244 if (index == LCC_NOT_FOUND)
245 return false;
246 return encoder->nodes[index].in_partition;
247 }
248
249 /* Specify that NODE is in this partition. */
250
251 void
252 lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
253 symtab_node *node)
254 {
255 int index = lto_symtab_encoder_encode (encoder, node);
256 encoder->nodes[index].in_partition = true;
257 }
258
/* Output the cgraph EDGE to OB using ENCODER.  The write order here
   must stay in sync with the corresponding reader.  */

static void
lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
		 lto_symtab_encoder_t encoder)
{
  unsigned int uid;
  intptr_t ref;
  struct bitpack_d bp;

  /* The tag tells the reader whether a callee reference follows.  */
  if (edge->indirect_unknown_callee)
    streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
			 LTO_symtab_indirect_edge);
  else
    streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
			 LTO_symtab_edge);

  ref = lto_symtab_encoder_lookup (encoder, edge->caller);
  gcc_assert (ref != LCC_NOT_FOUND);
  streamer_write_hwi_stream (ob->main_stream, ref);

  if (!edge->indirect_unknown_callee)
    {
      ref = lto_symtab_encoder_lookup (encoder, edge->callee);
      gcc_assert (ref != LCC_NOT_FOUND);
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  streamer_write_gcov_count_stream (ob->main_stream, edge->count);

  bp = bitpack_create (ob->main_stream);
  /* If the caller still has a gimple body, take the uid from the call
     statement (offset by 1 — presumably so 0 can mean "no statement";
     confirm against the matching reader); otherwise reuse the uid that
     was streamed in earlier.  */
  uid = (!gimple_has_body_p (edge->caller->decl)
	 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt) + 1);
  bp_pack_enum (&bp, cgraph_inline_failed_t,
		CIF_N_REASONS, edge->inline_failed);
  bp_pack_var_len_unsigned (&bp, uid);
  bp_pack_var_len_unsigned (&bp, edge->frequency);
  bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
  bp_pack_value (&bp, edge->speculative, 1);
  bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
  bp_pack_value (&bp, edge->can_throw_external, 1);
  bp_pack_value (&bp, edge->in_polymorphic_cdtor, 1);
  if (edge->indirect_unknown_callee)
    {
      /* For indirect calls there is no callee node to read the ECF
	 flags from, so the relevant ones are streamed explicitly.  */
      int flags = edge->indirect_info->ecf_flags;
      bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
      /* Flags that should not appear on indirect calls.  */
      gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
			     | ECF_MAY_BE_ALLOCA
			     | ECF_SIBCALL
			     | ECF_LEAF
			     | ECF_NOVOPS)));
    }
  streamer_write_bitpack (&bp);
  if (edge->indirect_unknown_callee)
    {
      streamer_write_hwi_stream (ob->main_stream,
				 edge->indirect_info->common_target_id);
      /* The probability is only meaningful (and only streamed) when a
	 common target was identified.  */
      if (edge->indirect_info->common_target_id)
	streamer_write_hwi_stream
	   (ob->main_stream, edge->indirect_info->common_target_probability);
    }
}
327
328 /* Return if NODE contain references from other partitions. */
329
330 bool
331 referenced_from_other_partition_p (symtab_node *node, lto_symtab_encoder_t encoder)
332 {
333 int i;
334 struct ipa_ref *ref = NULL;
335
336 for (i = 0; node->iterate_referring (i, ref); i++)
337 {
338 /* Ignore references from non-offloadable nodes while streaming NODE into
339 offload LTO section. */
340 if (!ref->referring->need_lto_streaming)
341 continue;
342
343 if (ref->referring->in_other_partition
344 || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
345 return true;
346 }
347 return false;
348 }
349
350 /* Return true when node is reachable from other partition. */
351
352 bool
353 reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
354 {
355 struct cgraph_edge *e;
356 if (!node->definition)
357 return false;
358 if (node->global.inlined_to)
359 return false;
360 for (e = node->callers; e; e = e->next_caller)
361 {
362 /* Ignore references from non-offloadable nodes while streaming NODE into
363 offload LTO section. */
364 if (!e->caller->need_lto_streaming)
365 continue;
366
367 if (e->caller->in_other_partition
368 || !lto_symtab_encoder_in_partition_p (encoder, e->caller))
369 return true;
370 }
371 return false;
372 }
373
374 /* Return if NODE contain references from other partitions. */
375
376 bool
377 referenced_from_this_partition_p (symtab_node *node,
378 lto_symtab_encoder_t encoder)
379 {
380 int i;
381 struct ipa_ref *ref = NULL;
382
383 for (i = 0; node->iterate_referring (i, ref); i++)
384 if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
385 return true;
386 return false;
387 }
388
389 /* Return true when node is reachable from other partition. */
390
391 bool
392 reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
393 {
394 struct cgraph_edge *e;
395 for (e = node->callers; e; e = e->next_caller)
396 if (lto_symtab_encoder_in_partition_p (encoder, e->caller))
397 return true;
398 return false;
399 }
400
/* Output the cgraph NODE to OB.  ENCODER is used to find the
   reference number of NODE->inlined_to.  SET is the set of nodes we
   are writing to the current file.  If NODE is not in SET, then NODE
   is a boundary of a cgraph_node_set and we pretend NODE just has a
   decl and no callees.  WRITTEN_DECLS is the set of FUNCTION_DECLs
   that have had their callgraph node written so far.  This is used to
   determine if NODE is a clone of a previously written node.  */

static void
lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
		 lto_symtab_encoder_t encoder)
{
  unsigned int tag;
  struct bitpack_d bp;
  bool boundary_p;
  intptr_t ref;
  bool in_other_partition = false;
  struct cgraph_node *clone_of, *ultimate_clone_of;
  ipa_opt_pass_d *pass;
  int i;
  const char *comdat;
  const char *section;
  tree group;

  boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);

  /* Analyzed aliases and thunks keep the analyzed tag even on the
     boundary; other boundary nodes are streamed as unavailable.  */
  if (node->analyzed && (!boundary_p || node->alias || node->thunk.thunk_p))
    tag = LTO_symtab_analyzed_node;
  else
    tag = LTO_symtab_unavail_node;

  streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
		       tag);
  streamer_write_hwi_stream (ob->main_stream, node->order);

  /* In WPA mode, we only output part of the call-graph.  Also, we
     fake cgraph node attributes.  There are two cases that we care.

     Boundary nodes: There are nodes that are not part of SET but are
     called from within SET.  We artificially make them look like
     externally visible nodes with no function body.

     Cherry-picked nodes: These are nodes we pulled from other
     translation units into SET during IPA-inlining.  We make them as
     local static nodes to prevent clashes with other local statics.  */
  if (boundary_p && node->analyzed
      && node->get_partitioning_class () == SYMBOL_PARTITION)
    {
      /* Inline clones can not be part of boundary.
	 gcc_assert (!node->global.inlined_to);

	 FIXME: At the moment they can be, when partition contains an inline
	 clone that is clone of inline clone from outside partition.  We can
	 reshape the clone tree and make other tree to be the root, but it
	 needs a bit extra work and will be promptly done by cgraph_remove_node
	 after reading back.  */
      in_other_partition = 1;
    }

  /* Find the nearest ancestor in the clone tree that is itself present
     in ENCODER; the walk follows prev_sibling_clone/clone_of links.  */
  clone_of = node->clone_of;
  while (clone_of
	 && (ref = lto_symtab_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
    if (clone_of->prev_sibling_clone)
      clone_of = clone_of->prev_sibling_clone;
    else
      clone_of = clone_of->clone_of;

  /* See if body of the master function is output.  If not, we are seeing only
     a declaration and we do not need to pass down clone tree.  */
  ultimate_clone_of = clone_of;
  while (ultimate_clone_of && ultimate_clone_of->clone_of)
    ultimate_clone_of = ultimate_clone_of->clone_of;

  if (clone_of && !lto_symtab_encoder_encode_body_p (encoder, ultimate_clone_of))
    clone_of = NULL;

  if (tag == LTO_symtab_analyzed_node)
    gcc_assert (clone_of || !node->clone_of);
  if (!clone_of)
    streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
  else
    streamer_write_hwi_stream (ob->main_stream, ref);


  lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
  streamer_write_gcov_count_stream (ob->main_stream, node->count);
  streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);

  /* Stream the list of IPA transform passes still to be applied.  */
  streamer_write_hwi_stream (ob->main_stream,
			     node->ipa_transforms_to_apply.length ());
  FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
    streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);

  if (tag == LTO_symtab_analyzed_node)
    {
      if (node->global.inlined_to)
	{
	  ref = lto_symtab_encoder_lookup (encoder, node->global.inlined_to);
	  gcc_assert (ref != LCC_NOT_FOUND);
	}
      else
	ref = LCC_NOT_FOUND;

      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  /* Comdat group is streamed as a NUL-terminated string ("" if none).  */
  group = node->get_comdat_group ();
  if (group)
    comdat = IDENTIFIER_POINTER (group);
  else
    comdat = "";
  streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);

  if (group)
    {
      if (node->same_comdat_group && !boundary_p)
	{
	  ref = lto_symtab_encoder_lookup (encoder,
					   node->same_comdat_group);
	  gcc_assert (ref != LCC_NOT_FOUND);
	}
      else
	ref = LCC_NOT_FOUND;
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  section = node->get_section ();
  if (!section)
    section = "";

  streamer_write_hwi_stream (ob->main_stream, node->tp_first_run);

  /* The flag bits below must stay in the exact order the reader
     unpacks them.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, node->local.local, 1);
  bp_pack_value (&bp, node->externally_visible, 1);
  bp_pack_value (&bp, node->no_reorder, 1);
  bp_pack_value (&bp, node->definition, 1);
  bp_pack_value (&bp, node->local.versionable, 1);
  bp_pack_value (&bp, node->local.can_change_signature, 1);
  bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
  bp_pack_value (&bp, node->force_output, 1);
  bp_pack_value (&bp, node->forced_by_abi, 1);
  bp_pack_value (&bp, node->unique_name, 1);
  bp_pack_value (&bp, node->body_removed, 1);
  bp_pack_value (&bp, node->implicit_section, 1);
  bp_pack_value (&bp, node->address_taken, 1);
  /* "Used from other partition" bit, computed from reachability.  */
  bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
		 && node->get_partitioning_class () == SYMBOL_PARTITION
		 && (reachable_from_other_partition_p (node, encoder)
		     || referenced_from_other_partition_p (node, encoder)), 1);
  bp_pack_value (&bp, node->lowered, 1);
  bp_pack_value (&bp, in_other_partition, 1);
  bp_pack_value (&bp, node->alias, 1);
  bp_pack_value (&bp, node->weakref, 1);
  bp_pack_value (&bp, node->frequency, 2);
  bp_pack_value (&bp, node->only_called_at_startup, 1);
  bp_pack_value (&bp, node->only_called_at_exit, 1);
  bp_pack_value (&bp, node->tm_clone, 1);
  bp_pack_value (&bp, node->calls_comdat_local, 1);
  bp_pack_value (&bp, node->icf_merged, 1);
  bp_pack_value (&bp, node->nonfreeing_fn, 1);
  bp_pack_value (&bp, node->thunk.thunk_p, 1);
  bp_pack_value (&bp, node->parallelized_function, 1);
  bp_pack_enum (&bp, ld_plugin_symbol_resolution,
		LDPR_NUM_KNOWN, node->resolution);
  bp_pack_value (&bp, node->instrumentation_clone, 1);
  bp_pack_value (&bp, node->split_part, 1);
  streamer_write_bitpack (&bp);
  streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);

  if (node->thunk.thunk_p)
    {
      /* Thunk flags are packed into one integer: bit 0 is always set,
	 bit 1 = this_adjusting, bit 2 = virtual_offset_p,
	 bit 3 = add_pointer_bounds_args.  */
      streamer_write_uhwi_stream
	 (ob->main_stream,
	  1 + (node->thunk.this_adjusting != 0) * 2
	  + (node->thunk.virtual_offset_p != 0) * 4
	  + (node->thunk.add_pointer_bounds_args != 0) * 8);
      streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
      streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
    }
  streamer_write_hwi_stream (ob->main_stream, node->profile_id);
  /* Ctor/dtor priorities are streamed conditionally; the reader must
     test the same DECL flags.  */
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    streamer_write_hwi_stream (ob->main_stream, node->get_init_priority ());
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    streamer_write_hwi_stream (ob->main_stream, node->get_fini_priority ());

  if (node->instrumentation_clone)
    lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->orig_decl);
}
590
/* Output the varpool NODE to OB.
   If NODE is not in SET, then NODE is a boundary.  */

static void
lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
			 lto_symtab_encoder_t encoder)
{
  bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
  bool encode_initializer_p
     = (node->definition
	&& lto_symtab_encoder_encode_initializer_p (encoder, node));
  struct bitpack_d bp;
  int ref;
  const char *comdat;
  const char *section;
  tree group;

  gcc_assert (!encode_initializer_p || node->definition);
  gcc_assert (boundary_p || encode_initializer_p);

  streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
		       LTO_symtab_variable);
  streamer_write_hwi_stream (ob->main_stream, node->order);
  lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
  /* The flag bits below must stay in the exact order the reader
     unpacks them.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, node->externally_visible, 1);
  bp_pack_value (&bp, node->no_reorder, 1);
  bp_pack_value (&bp, node->force_output, 1);
  bp_pack_value (&bp, node->forced_by_abi, 1);
  bp_pack_value (&bp, node->unique_name, 1);
  /* body_removed: also set when a defined non-alias variable is
     streamed without its initializer.  */
  bp_pack_value (&bp,
		 node->body_removed
		 || (!encode_initializer_p && !node->alias && node->definition),
		 1);
  bp_pack_value (&bp, node->implicit_section, 1);
  bp_pack_value (&bp, node->writeonly, 1);
  bp_pack_value (&bp, node->definition && (encode_initializer_p || node->alias),
		 1);
  bp_pack_value (&bp, node->alias, 1);
  bp_pack_value (&bp, node->weakref, 1);
  bp_pack_value (&bp, node->analyzed && !boundary_p, 1);
  gcc_assert (node->definition || !node->analyzed);
  /* Constant pool initializers can be de-unified into individual ltrans units.
     FIXME: Alternatively at -Os we may want to avoid generating for them the local
     labels and share them across LTRANS partitions.  */
  if (node->get_partitioning_class () != SYMBOL_PARTITION)
    {
      bp_pack_value (&bp, 0, 1);  /* used_from_other_partition.  */
      bp_pack_value (&bp, 0, 1);  /* in_other_partition.  */
    }
  else
    {
      bp_pack_value (&bp, node->definition
		     && referenced_from_other_partition_p (node, encoder), 1);
      bp_pack_value (&bp, node->analyzed
		     && boundary_p && !DECL_EXTERNAL (node->decl), 1);
	  /* in_other_partition.  */
    }
  bp_pack_value (&bp, node->tls_model, 3);
  bp_pack_value (&bp, node->used_by_single_function, 1);
  bp_pack_value (&bp, node->need_bounds_init, 1);
  streamer_write_bitpack (&bp);

  /* Comdat group is streamed as a NUL-terminated string ("" if none).  */
  group = node->get_comdat_group ();
  if (group)
    comdat = IDENTIFIER_POINTER (group);
  else
    comdat = "";
  streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);

  if (group)
    {
      if (node->same_comdat_group && !boundary_p)
	{
	  ref = lto_symtab_encoder_lookup (encoder,
					   node->same_comdat_group);
	  gcc_assert (ref != LCC_NOT_FOUND);
	}
      else
	ref = LCC_NOT_FOUND;
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  section = node->get_section ();
  if (!section)
    section = "";
  streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);

  streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
		       LDPR_NUM_KNOWN, node->resolution);
}
682
/* Output the reference REF to OB using ENCODER.  (The previous comment
   here was a stale copy of the varpool one.)  */

static void
lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
		lto_symtab_encoder_t encoder)
{
  struct bitpack_d bp;
  int nref;
  int uid = ref->lto_stmt_uid;
  struct cgraph_node *node;

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ref->use, 3);
  bp_pack_value (&bp, ref->speculative, 1);
  streamer_write_bitpack (&bp);
  nref = lto_symtab_encoder_lookup (encoder, ref->referred);
  gcc_assert (nref != LCC_NOT_FOUND);
  streamer_write_hwi_stream (ob->main_stream, nref);

  /* A statement uid is streamed only when the referring symbol is a
     function; the uid is offset by 1 when taken from a live stmt.  */
  node = dyn_cast <cgraph_node *> (ref->referring);
  if (node)
    {
      if (ref->stmt)
	uid = gimple_uid (ref->stmt) + 1;
      streamer_write_hwi_stream (ob->main_stream, uid);
    }
}
711
/* Stream out profile_summary to OB.  A leading zero run count signals
   the absence of profile data.  */

static void
output_profile_summary (struct lto_simple_output_block *ob)
{
  unsigned h_ix;
  struct bitpack_d bp;

  if (profile_info)
    {
      /* We do not output num and run_max, they are not used by
	 GCC profile feedback and they are difficult to merge from multiple
	 units.  */
      gcc_assert (profile_info->runs);
      streamer_write_uhwi_stream (ob->main_stream, profile_info->runs);
      streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_max);

      /* sum_all is needed for computing the working set with the
	 histogram.  */
      streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_all);

      /* Create and output a bitpack of non-zero histogram entries indices.  */
      bp = bitpack_create (ob->main_stream);
      for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
	bp_pack_value (&bp, profile_info->histogram[h_ix].num_counters > 0, 1);
      streamer_write_bitpack (&bp);
      /* Now stream out only those non-zero entries.  */
      for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
	{
	  if (!profile_info->histogram[h_ix].num_counters)
	    continue;
	  streamer_write_gcov_count_stream (ob->main_stream,
				      profile_info->histogram[h_ix].num_counters);
	  streamer_write_gcov_count_stream (ob->main_stream,
				      profile_info->histogram[h_ix].min_value);
	  streamer_write_gcov_count_stream (ob->main_stream,
				      profile_info->histogram[h_ix].cum_value);
	}
      /* IPA-profile computes hot bb threshold based on cumulated
	 whole program profile.  We need to stream it down to ltrans.  */
      if (flag_wpa)
	streamer_write_gcov_count_stream (ob->main_stream,
					  get_hot_bb_threshold ());
    }
  else
    /* Zero runs: the reader treats this as "no profile".  */
    streamer_write_uhwi_stream (ob->main_stream, 0);
}
759
760 /* Output all callees or indirect outgoing edges. EDGE must be the first such
761 edge. */
762
763 static void
764 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
765 struct lto_simple_output_block *ob,
766 lto_symtab_encoder_t encoder)
767 {
768 if (!edge)
769 return;
770
771 /* Output edges in backward direction, so the reconstructed callgraph match
772 and it is easy to associate call sites in the IPA pass summaries. */
773 while (edge->next_callee)
774 edge = edge->next_callee;
775 for (; edge; edge = edge->prev_callee)
776 lto_output_edge (ob, edge, encoder);
777 }
778
779 /* Output the part of the cgraph in SET. */
780
781 static void
782 output_refs (lto_symtab_encoder_t encoder)
783 {
784 struct lto_simple_output_block *ob;
785 int count;
786 struct ipa_ref *ref;
787
788 ob = lto_create_simple_output_block (LTO_section_refs);
789
790 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
791 {
792 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
793
794 /* IPA_REF_ALIAS and IPA_REF_CHKP references are always preserved
795 in the boundary. Alias node can't have other references and
796 can be always handled as if it's not in the boundary. */
797 if (!node->alias && !lto_symtab_encoder_in_partition_p (encoder, node))
798 {
799 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
800 /* Output IPA_REF_CHKP reference. */
801 if (cnode
802 && cnode->instrumented_version
803 && !cnode->instrumentation_clone)
804 {
805 for (int i = 0; node->iterate_reference (i, ref); i++)
806 if (ref->use == IPA_REF_CHKP)
807 {
808 if (lto_symtab_encoder_lookup (encoder, ref->referred)
809 != LCC_NOT_FOUND)
810 {
811 int nref = lto_symtab_encoder_lookup (encoder, node);
812 streamer_write_gcov_count_stream (ob->main_stream, 1);
813 streamer_write_uhwi_stream (ob->main_stream, nref);
814 lto_output_ref (ob, ref, encoder);
815 }
816 break;
817 }
818 }
819 continue;
820 }
821
822 count = node->ref_list.nreferences ();
823 if (count)
824 {
825 streamer_write_gcov_count_stream (ob->main_stream, count);
826 streamer_write_uhwi_stream (ob->main_stream,
827 lto_symtab_encoder_lookup (encoder, node));
828 for (int i = 0; node->iterate_reference (i, ref); i++)
829 lto_output_ref (ob, ref, encoder);
830 }
831 }
832
833 streamer_write_uhwi_stream (ob->main_stream, 0);
834
835 lto_destroy_simple_output_block (ob);
836 }
837
/* Add NODE into encoder as well as nodes it is cloned from.
   Do it in a way so clones appear first.  */

static void
add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
	     bool include_body)
{
  /* Recurse through the clone_of chain first, so the ultimate origin is
     encoded before its clones.  Note that INCLUDE_BODY only marks the
     body of the ultimate origin (the non-clone root) — presumably clone
     bodies are rematerialized from it; confirm against the reader.  */
  if (node->clone_of)
    add_node_to (encoder, node->clone_of, include_body);
  else if (include_body)
    lto_set_symtab_encoder_encode_body (encoder, node);
  lto_symtab_encoder_encode (encoder, node);
}
851
852 /* Add all references in NODE to encoders. */
853
854 static void
855 create_references (lto_symtab_encoder_t encoder, symtab_node *node)
856 {
857 int i;
858 struct ipa_ref *ref = NULL;
859 for (i = 0; node->iterate_reference (i, ref); i++)
860 if (is_a <cgraph_node *> (ref->referred))
861 add_node_to (encoder, dyn_cast <cgraph_node *> (ref->referred), false);
862 else
863 lto_symtab_encoder_encode (encoder, ref->referred);
864 }
865
866 /* Select what needs to be streamed out. In regular lto mode stream everything.
867 In offload lto mode stream only nodes marked as offloadable. */
868 void
869 select_what_to_stream (void)
870 {
871 struct symtab_node *snode;
872 FOR_EACH_SYMBOL (snode)
873 snode->need_lto_streaming = !lto_stream_offload_p || snode->offloadable;
874 }
875
/* Find all symbols we want to stream into given partition and insert them
   to encoders.

   The function actually replaces IN_ENCODER by new one.  The reason is that
   streaming code needs clone's origin to be streamed before clone.  This
   means that we need to insert the nodes in specific order.  This order is
   ignored by the partitioning logic earlier.  */

lto_symtab_encoder_t
compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
{
  struct cgraph_edge *edge;
  int i;
  lto_symtab_encoder_t encoder;
  lto_symtab_encoder_iterator lsei;
  hash_set<void *> reachable_call_targets;

  encoder = lto_symtab_encoder_new (false);

  /* Go over all entries in the IN_ENCODER and duplicate them to
     ENCODER.  At the same time insert masters of clones so
     every master appears before clone.  */
  for (lsei = lsei_start_function_in_partition (in_encoder);
       !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
    {
      struct cgraph_node *node = lsei_cgraph_node (lsei);
      if (!node->need_lto_streaming)
	continue;
      add_node_to (encoder, node, true);
      lto_set_symtab_encoder_in_partition (encoder, node);
      create_references (encoder, node);
      /* For proper debug info, we need to ship the origins, too.  */
      if (DECL_ABSTRACT_ORIGIN (node->decl))
	{
	  struct cgraph_node *origin_node
	  = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (node->decl));
	  origin_node->used_as_abstract_origin = true;
	  add_node_to (encoder, origin_node, true);
	}
    }
  for (lsei = lsei_start_variable_in_partition (in_encoder);
       !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
    {
      varpool_node *vnode = lsei_varpool_node (lsei);

      if (!vnode->need_lto_streaming)
	continue;
      lto_set_symtab_encoder_in_partition (encoder, vnode);
      lto_set_symtab_encoder_encode_initializer (encoder, vnode);
      create_references (encoder, vnode);
      /* For proper debug info, we need to ship the origins, too.  */
      if (DECL_ABSTRACT_ORIGIN (vnode->decl))
	{
	  varpool_node *origin_node
	  = varpool_node::get (DECL_ABSTRACT_ORIGIN (vnode->decl));
	  lto_set_symtab_encoder_in_partition (encoder, origin_node);
	}
    }
  /* Pickle in also the initializer of all referenced readonly variables
     to help folding.  Constant pool variables are not shared, so we must
     pickle those too.  */
  for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
	{
	  if (!lto_symtab_encoder_encode_initializer_p (encoder,
							vnode)
	      && (((vnode->ctor_useable_for_folding_p ()
		   && (!DECL_VIRTUAL_P (vnode->decl)
		       || !flag_wpa
		       || flag_ltrans_devirtualize))
		  || POINTER_BOUNDS_P (vnode->decl))))
	    {
	      lto_set_symtab_encoder_encode_initializer (encoder, vnode);
	      create_references (encoder, vnode);
	    }
	}
    }

  /* Go over all the nodes again to include callees that are not in
     SET.  */
  for (lsei = lsei_start_function_in_partition (encoder);
       !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
    {
      struct cgraph_node *node = lsei_cgraph_node (lsei);
      for (edge = node->callees; edge; edge = edge->next_callee)
	{
	  struct cgraph_node *callee = edge->callee;
	  if (!lto_symtab_encoder_in_partition_p (encoder, callee))
	    {
	      /* We should have moved all the inlines.  */
	      gcc_assert (!callee->global.inlined_to);
	      add_node_to (encoder, callee, false);
	    }
	}
      /* Add all possible targets for late devirtualization.  */
      if (flag_ltrans_devirtualize || !flag_wpa)
	for (edge = node->indirect_calls; edge; edge = edge->next_callee)
	  if (edge->indirect_info->polymorphic)
	    {
	      /* NOTE(review): this inner I shadows the function-scope I
		 above; harmless here, but worth renaming.  */
	      unsigned int i;
	      void *cache_token;
	      bool final;
	      vec <cgraph_node *>targets
		= possible_polymorphic_call_targets
		    (edge, &final, &cache_token);
	      /* CACHE_TOKEN identifies a target set already handled for
		 an earlier call site; skip duplicates.  */
	      if (!reachable_call_targets.add (cache_token))
		{
		  for (i = 0; i < targets.length (); i++)
		    {
		      struct cgraph_node *callee = targets[i];

		      /* Adding an external declarations into the unit serves
			 no purpose and just increases its boundary.  */
		      if (callee->definition
			  && !lto_symtab_encoder_in_partition_p
			       (encoder, callee))
			{
			  gcc_assert (!callee->global.inlined_to);
			  add_node_to (encoder, callee, false);
			}
		    }
		}
	    }
    }
  /* Be sure to also insert alias target and thunk callees.  These need
     to stay to aid local calling conventions.  */
  for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (node);

      if (node->alias && node->analyzed)
	create_references (encoder, node);
      if (cnode
	  && cnode->thunk.thunk_p)
	add_node_to (encoder, cnode->callees->callee, false);
    }
  lto_symtab_encoder_delete (in_encoder);
  return encoder;
}
1018
/* Output the part of the symtab encoded in the current decl state's
   symtab node encoder.  (The SET and VSET of the older interface are
   nowadays carried by the encoder itself.)  */

void
output_symtab (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob;
  int i, n_nodes;
  lto_symtab_encoder_t encoder;

  /* At WPA time also stream out the optimization summaries.  */
  if (flag_wpa)
    output_cgraph_opt_summary ();

  ob = lto_create_simple_output_block (LTO_section_symtab_nodes);

  output_profile_summary (ob);

  /* An encoder for cgraph nodes should have been created by
     ipa_write_summaries_1.  */
  gcc_assert (ob->decl_state->symtab_node_encoder);
  encoder = ob->decl_state->symtab_node_encoder;

  /* Write out the nodes.  We must first output a node and then its clones,
     otherwise at a time reading back the node there would be nothing to clone
     from.  */
  n_nodes = lto_symtab_encoder_size (encoder);
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
        lto_output_node (ob, cnode, encoder);
      else
        lto_output_varpool_node (ob, dyn_cast<varpool_node *> (node), encoder);
    }

  /* Go over the nodes in SET again to write edges.  Note that edges of
     thunks are streamed even when the thunk itself is not in the
     partition.  */
  for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder, i));
      if (node
	  && (node->thunk.thunk_p
	      || lto_symtab_encoder_in_partition_p (encoder, node)))
	{
	  output_outgoing_cgraph_edges (node->callees, ob, encoder);
	  output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
	}
    }

  /* Zero tag terminates the node/edge stream; input_cgraph_1 stops here.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);

  /* Emit toplevel asms.
     When doing WPA we must output every asm just once.  Since we do not partition asm
     nodes at all, output them to first output.  This is kind of hack, but should work
     well.  */
  if (!asm_nodes_output)
    {
      asm_nodes_output = true;
      lto_output_toplevel_asms ();
    }

  output_refs (encoder);
}
1083
/* Return identifier encoded in IB as a plain string.  Returns NULL for
   an empty string; in both cases IB's read position is advanced past the
   terminating NUL.  */

static tree
read_identifier (struct lto_input_block *ib)
{
  /* Scan for the NUL terminator but never past the end of the section;
     the last available byte is excluded so the check below can detect
     an unterminated string.  */
  unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
  tree id;

  if (ib->data[ib->p + len])
    lto_section_overrun (ib);
  /* An empty string encodes "no identifier".  */
  if (!len)
    {
      ib->p++;
      return NULL;
    }
  id = get_identifier (ib->data + ib->p);
  ib->p += len + 1;
  return id;
}
1103
1104 /* Return string encoded in IB, NULL if string is empty. */
1105
1106 static const char *
1107 read_string (struct lto_input_block *ib)
1108 {
1109 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1110 const char *str;
1111
1112 if (ib->data[ib->p + len])
1113 lto_section_overrun (ib);
1114 if (!len)
1115 {
1116 ib->p++;
1117 return NULL;
1118 }
1119 str = ib->data + ib->p;
1120 ib->p += len + 1;
1121 return str;
1122 }
1123
/* Output function/variable tables that will allow libgomp to look up offload
   target code.
   OFFLOAD_FUNCS is filled in expand_omp_target, OFFLOAD_VARS is filled in
   varpool_node::get_create.  In WHOPR (partitioned) mode during the WPA stage
   both OFFLOAD_FUNCS and OFFLOAD_VARS are filled by input_offload_tables.  */

void
output_offload_tables (void)
{
  /* Nothing to stream if neither table has entries.  */
  if (vec_safe_is_empty (offload_funcs) && vec_safe_is_empty (offload_vars))
    return;

  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_offload_table);

  /* Each function entry is a tag followed by its decl index; the reader in
     input_offload_tables expects exactly this shape.  */
  for (unsigned i = 0; i < vec_safe_length (offload_funcs); i++)
    {
      streamer_write_enum (ob->main_stream, LTO_symtab_tags,
			   LTO_symtab_last_tag, LTO_symtab_unavail_node);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
				(*offload_funcs)[i]);
    }

  /* Variables use the LTO_symtab_variable tag and var-decl indices.  */
  for (unsigned i = 0; i < vec_safe_length (offload_vars); i++)
    {
      streamer_write_enum (ob->main_stream, LTO_symtab_tags,
			   LTO_symtab_last_tag, LTO_symtab_variable);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream,
				 (*offload_vars)[i]);
    }

  /* Zero tag terminates the table.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);
  lto_destroy_simple_output_block (ob);

  /* In WHOPR mode during the WPA stage the joint offload tables need to be
     streamed to one partition only.  That's why we free offload_funcs and
     offload_vars after the first call of output_offload_tables.  */
  if (flag_wpa)
    {
      vec_free (offload_funcs);
      vec_free (offload_vars);
    }
}
1167
/* Overwrite the information in NODE based on FILE_DATA, TAG, FLAGS,
   STACK_SIZE, SELF_TIME and SELF_SIZE.  This is called either to initialize
   NODE or to replace the values in it, for instance because the first
   time we saw it, the function body was not available but now it
   is.  BP is a bitpack with all the bitflags for NODE read from the
   stream.  */

static void
input_overwrite_node (struct lto_file_decl_data *file_data,
		      struct cgraph_node *node,
		      enum LTO_symtab_tags tag,
		      struct bitpack_d *bp)
{
  /* Stash the tag in AUX; input_cgraph_1 later uses a non-NULL AUX to tell
     freshly streamed-in nodes apart and clears it during fixup.  */
  node->aux = (void *) tag;
  node->lto_file_data = file_data;

  /* The order and widths of the unpacks below must exactly mirror the
     corresponding bp_pack_value calls on the writer side.  */
  node->local.local = bp_unpack_value (bp, 1);
  node->externally_visible = bp_unpack_value (bp, 1);
  node->no_reorder = bp_unpack_value (bp, 1);
  node->definition = bp_unpack_value (bp, 1);
  node->local.versionable = bp_unpack_value (bp, 1);
  node->local.can_change_signature = bp_unpack_value (bp, 1);
  node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
  node->force_output = bp_unpack_value (bp, 1);
  node->forced_by_abi = bp_unpack_value (bp, 1);
  node->unique_name = bp_unpack_value (bp, 1);
  node->body_removed = bp_unpack_value (bp, 1);
  node->implicit_section = bp_unpack_value (bp, 1);
  node->address_taken = bp_unpack_value (bp, 1);
  node->used_from_other_partition = bp_unpack_value (bp, 1);
  node->lowered = bp_unpack_value (bp, 1);
  /* ANALYZED is implied by the tag rather than streamed as a bit.  */
  node->analyzed = tag == LTO_symtab_analyzed_node;
  node->in_other_partition = bp_unpack_value (bp, 1);
  if (node->in_other_partition
      /* Avoid updating decl when we are seeing just inline clone.
	 When inlining function that has functions already inlined into it,
	 we produce clones of inline clones.

	 WPA partitioning might put each clone into different unit and
	 we might end up streaming inline clone from other partition
	 to support clone we are interested in.  */
      && (!node->clone_of
	  || node->clone_of->decl != node->decl))
    {
      DECL_EXTERNAL (node->decl) = 1;
      TREE_STATIC (node->decl) = 0;
    }
  node->alias = bp_unpack_value (bp, 1);
  node->weakref = bp_unpack_value (bp, 1);
  node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
  node->only_called_at_startup = bp_unpack_value (bp, 1);
  node->only_called_at_exit = bp_unpack_value (bp, 1);
  node->tm_clone = bp_unpack_value (bp, 1);
  node->calls_comdat_local = bp_unpack_value (bp, 1);
  node->icf_merged = bp_unpack_value (bp, 1);
  node->nonfreeing_fn = bp_unpack_value (bp, 1);
  node->thunk.thunk_p = bp_unpack_value (bp, 1);
  node->parallelized_function = bp_unpack_value (bp, 1);
  node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
				     LDPR_NUM_KNOWN);
  node->instrumentation_clone = bp_unpack_value (bp, 1);
  node->split_part = bp_unpack_value (bp, 1);
  /* Cross-partition flags only make sense during LTRANS.  */
  gcc_assert (flag_ltrans
	      || (!node->in_other_partition
		  && !node->used_from_other_partition));
}
1234
1235 /* Return string alias is alias of. */
1236
1237 static tree
1238 get_alias_symbol (tree decl)
1239 {
1240 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1241 return get_identifier (TREE_STRING_POINTER
1242 (TREE_VALUE (TREE_VALUE (alias))));
1243 }
1244
/* Read a cgraph node from input_block IB.  TAG is the node's tag just read.
   NODES is the vector of nodes streamed so far (used to resolve clone
   references).  Return the node read or overwritten.  The read order below
   must exactly mirror the write order in lto_output_node.  */

static struct cgraph_node *
input_node (struct lto_file_decl_data *file_data,
	    struct lto_input_block *ib,
	    enum LTO_symtab_tags tag,
	    vec<symtab_node *> nodes)
{
  gcc::pass_manager *passes = g->get_passes ();
  tree fn_decl;
  struct cgraph_node *node;
  struct bitpack_d bp;
  unsigned decl_index;
  int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
  int clone_ref;
  int order;
  int i, count;
  tree group;
  const char *section;
  /* Orders are rebased by ORDER_BASE so nodes from multiple files do not
     collide.  */
  order = streamer_read_hwi (ib) + order_base;
  clone_ref = streamer_read_hwi (ib);

  decl_index = streamer_read_uhwi (ib);
  fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);

  if (clone_ref != LCC_NOT_FOUND)
    {
      /* This node is a clone of an earlier-streamed node; materialize it
	 as a clone so the cgraph machinery links them properly.  */
      node = dyn_cast<cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
	0, CGRAPH_FREQ_BASE, false,
	vNULL, false, NULL, NULL);
    }
  else
    {
      /* Declaration of functions can be already merged with a declaration
	 from other input file.  We keep cgraph unmerged until after streaming
	 of ipa passes is done.  Always forcibly create a fresh node.  */
      node = symtab->create_empty ();
      node->decl = fn_decl;
      node->register_symbol ();
    }

  node->order = order;
  if (order >= symtab->order)
    symtab->order = order + 1;

  node->count = streamer_read_gcov_count (ib);
  node->count_materialization_scale = streamer_read_hwi (ib);

  /* Read the list of IPA transform passes still to be applied to this
     node's body, encoded as pass ids.  */
  count = streamer_read_hwi (ib);
  node->ipa_transforms_to_apply = vNULL;
  for (i = 0; i < count; i++)
    {
      opt_pass *pass;
      int pid = streamer_read_hwi (ib);

      gcc_assert (pid < passes->passes_by_id_size);
      pass = passes->passes_by_id[pid];
      node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass_d *) pass);
    }

  /* Only analyzed nodes carry an inlined_to reference.  */
  if (tag == LTO_symtab_analyzed_node)
    ref = streamer_read_hwi (ib);

  group = read_identifier (ib);
  if (group)
    ref2 = streamer_read_hwi (ib);

  /* Make sure that we have not read this node before.  Nodes that
     have already been read will have their tag stored in the 'aux'
     field.  Since built-in functions can be referenced in multiple
     functions, they are expected to be read more than once.  */
  if (node->aux && !DECL_BUILT_IN (node->decl))
    internal_error ("bytecode stream: found multiple instances of cgraph "
		    "node with uid %d", node->uid);

  node->tp_first_run = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);

  input_overwrite_node (file_data, node, tag, &bp);

  /* Store a reference for now, and fix up later to be a pointer.  */
  node->global.inlined_to = (cgraph_node *) (intptr_t) ref;

  if (group)
    {
      node->set_comdat_group (group);
      /* Store a reference for now, and fix up later to be a pointer.  */
      node->same_comdat_group = (symtab_node *) (intptr_t) ref2;
    }
  else
    node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
  section = read_string (ib);
  if (section)
    node->set_section_for_node (section);

  if (node->thunk.thunk_p)
    {
      /* TYPE is a bitmask: bit 1 = this_adjusting, bit 2 = virtual_offset_p,
	 bit 3 = add_pointer_bounds_args (matching the writer's encoding).  */
      int type = streamer_read_uhwi (ib);
      HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
      HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);

      node->thunk.fixed_offset = fixed_offset;
      node->thunk.this_adjusting = (type & 2);
      node->thunk.virtual_value = virtual_value;
      node->thunk.virtual_offset_p = (type & 4);
      node->thunk.add_pointer_bounds_args = (type & 8);
    }
  /* Non-analyzed weakrefs carry their target as an "alias" attribute.  */
  if (node->alias && !node->analyzed && node->weakref)
    node->alias_target = get_alias_symbol (node->decl);
  node->profile_id = streamer_read_hwi (ib);
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    node->set_init_priority (streamer_read_hwi (ib));
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    node->set_fini_priority (streamer_read_hwi (ib));

  /* Pointer Bounds Checker instrumentation clones remember the original
     decl they were created from.  */
  if (node->instrumentation_clone)
    {
      decl_index = streamer_read_uhwi (ib);
      fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
      node->orig_decl = fn_decl;
    }

  return node;
}
1371
1372 /* Read a node from input_block IB. TAG is the node's tag just read.
1373 Return the node read or overwriten. */
1374
1375 static varpool_node *
1376 input_varpool_node (struct lto_file_decl_data *file_data,
1377 struct lto_input_block *ib)
1378 {
1379 int decl_index;
1380 tree var_decl;
1381 varpool_node *node;
1382 struct bitpack_d bp;
1383 int ref = LCC_NOT_FOUND;
1384 int order;
1385 tree group;
1386 const char *section;
1387
1388 order = streamer_read_hwi (ib) + order_base;
1389 decl_index = streamer_read_uhwi (ib);
1390 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1391
1392 /* Declaration of functions can be already merged with a declaration
1393 from other input file. We keep cgraph unmerged until after streaming
1394 of ipa passes is done. Alays forcingly create a fresh node. */
1395 node = varpool_node::create_empty ();
1396 node->decl = var_decl;
1397 node->register_symbol ();
1398
1399 node->order = order;
1400 if (order >= symtab->order)
1401 symtab->order = order + 1;
1402 node->lto_file_data = file_data;
1403
1404 bp = streamer_read_bitpack (ib);
1405 node->externally_visible = bp_unpack_value (&bp, 1);
1406 node->no_reorder = bp_unpack_value (&bp, 1);
1407 node->force_output = bp_unpack_value (&bp, 1);
1408 node->forced_by_abi = bp_unpack_value (&bp, 1);
1409 node->unique_name = bp_unpack_value (&bp, 1);
1410 node->body_removed = bp_unpack_value (&bp, 1);
1411 node->implicit_section = bp_unpack_value (&bp, 1);
1412 node->writeonly = bp_unpack_value (&bp, 1);
1413 node->definition = bp_unpack_value (&bp, 1);
1414 node->alias = bp_unpack_value (&bp, 1);
1415 node->weakref = bp_unpack_value (&bp, 1);
1416 node->analyzed = bp_unpack_value (&bp, 1);
1417 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1418 node->in_other_partition = bp_unpack_value (&bp, 1);
1419 if (node->in_other_partition)
1420 {
1421 DECL_EXTERNAL (node->decl) = 1;
1422 TREE_STATIC (node->decl) = 0;
1423 }
1424 if (node->alias && !node->analyzed && node->weakref)
1425 node->alias_target = get_alias_symbol (node->decl);
1426 node->tls_model = (enum tls_model)bp_unpack_value (&bp, 3);
1427 node->used_by_single_function = (enum tls_model)bp_unpack_value (&bp, 1);
1428 node->need_bounds_init = bp_unpack_value (&bp, 1);
1429 group = read_identifier (ib);
1430 if (group)
1431 {
1432 node->set_comdat_group (group);
1433 ref = streamer_read_hwi (ib);
1434 /* Store a reference for now, and fix up later to be a pointer. */
1435 node->same_comdat_group = (symtab_node *) (intptr_t) ref;
1436 }
1437 else
1438 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1439 section = read_string (ib);
1440 if (section)
1441 node->set_section_for_node (section);
1442 node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1443 LDPR_NUM_KNOWN);
1444 gcc_assert (flag_ltrans
1445 || (!node->in_other_partition
1446 && !node->used_from_other_partition));
1447
1448 return node;
1449 }
1450
1451 /* Read a node from input_block IB. TAG is the node's tag just read.
1452 Return the node read or overwriten. */
1453
1454 static void
1455 input_ref (struct lto_input_block *ib,
1456 symtab_node *referring_node,
1457 vec<symtab_node *> nodes)
1458 {
1459 symtab_node *node = NULL;
1460 struct bitpack_d bp;
1461 enum ipa_ref_use use;
1462 bool speculative;
1463 struct ipa_ref *ref;
1464
1465 bp = streamer_read_bitpack (ib);
1466 use = (enum ipa_ref_use) bp_unpack_value (&bp, 3);
1467 speculative = (enum ipa_ref_use) bp_unpack_value (&bp, 1);
1468 node = nodes[streamer_read_hwi (ib)];
1469 ref = referring_node->create_reference (node, use);
1470 ref->speculative = speculative;
1471 if (is_a <cgraph_node *> (referring_node))
1472 ref->lto_stmt_uid = streamer_read_hwi (ib);
1473 }
1474
/* Read an edge from IB.  NODES points to a vector of previously read nodes for
   decoding caller and callee of the edge to be read.  If INDIRECT is true, the
   edge being read is indirect (in the sense that it has
   indirect_unknown_callee set).  The read order below must exactly mirror
   the write order in lto_output_edge.  */

static void
input_edge (struct lto_input_block *ib, vec<symtab_node *> nodes,
	    bool indirect)
{
  struct cgraph_node *caller, *callee;
  struct cgraph_edge *edge;
  unsigned int stmt_id;
  gcov_type count;
  int freq;
  cgraph_inline_failed_t inline_failed;
  struct bitpack_d bp;
  int ecf_flags = 0;

  caller = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
  if (caller == NULL || caller->decl == NULL_TREE)
    internal_error ("bytecode stream: no caller found while reading edge");

  /* Direct edges also stream their callee; indirect ones have none.  */
  if (!indirect)
    {
      callee = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
      if (callee == NULL || callee->decl == NULL_TREE)
	internal_error ("bytecode stream: no callee found while reading edge");
    }
  else
    callee = NULL;

  count = streamer_read_gcov_count (ib);

  bp = streamer_read_bitpack (ib);
  inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
  stmt_id = bp_unpack_var_len_unsigned (&bp);
  freq = (int) bp_unpack_var_len_unsigned (&bp);

  if (indirect)
    edge = caller->create_indirect_edge (NULL, 0, count, freq);
  else
    edge = caller->create_edge (callee, NULL, count, freq);

  edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
  edge->speculative = bp_unpack_value (&bp, 1);
  edge->lto_stmt_uid = stmt_id;
  edge->inline_failed = inline_failed;
  edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
  edge->can_throw_external = bp_unpack_value (&bp, 1);
  edge->in_polymorphic_cdtor = bp_unpack_value (&bp, 1);
  /* Indirect edges additionally carry ECF flags of the call and the
     common-target profile hints used for speculative devirtualization.  */
  if (indirect)
    {
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_CONST;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_PURE;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NORETURN;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_MALLOC;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NOTHROW;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_RETURNS_TWICE;
      edge->indirect_info->ecf_flags = ecf_flags;
      edge->indirect_info->common_target_id = streamer_read_hwi (ib);
      if (edge->indirect_info->common_target_id)
	edge->indirect_info->common_target_probability = streamer_read_hwi (ib);
    }
}
1545
1546
1547 /* Read a cgraph from IB using the info in FILE_DATA. */
1548
1549 static vec<symtab_node *>
1550 input_cgraph_1 (struct lto_file_decl_data *file_data,
1551 struct lto_input_block *ib)
1552 {
1553 enum LTO_symtab_tags tag;
1554 vec<symtab_node *> nodes = vNULL;
1555 symtab_node *node;
1556 unsigned i;
1557
1558 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1559 order_base = symtab->order;
1560 while (tag)
1561 {
1562 if (tag == LTO_symtab_edge)
1563 input_edge (ib, nodes, false);
1564 else if (tag == LTO_symtab_indirect_edge)
1565 input_edge (ib, nodes, true);
1566 else if (tag == LTO_symtab_variable)
1567 {
1568 node = input_varpool_node (file_data, ib);
1569 nodes.safe_push (node);
1570 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1571 }
1572 else
1573 {
1574 node = input_node (file_data, ib, tag, nodes);
1575 if (node == NULL || node->decl == NULL_TREE)
1576 internal_error ("bytecode stream: found empty cgraph node");
1577 nodes.safe_push (node);
1578 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1579 }
1580
1581 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1582 }
1583
1584 lto_input_toplevel_asms (file_data, order_base);
1585
1586 /* AUX pointers should be all non-zero for function nodes read from the stream. */
1587 #ifdef ENABLE_CHECKING
1588 FOR_EACH_VEC_ELT (nodes, i, node)
1589 gcc_assert (node->aux || !is_a <cgraph_node *> (node));
1590 #endif
1591 FOR_EACH_VEC_ELT (nodes, i, node)
1592 {
1593 int ref;
1594 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1595 {
1596 ref = (int) (intptr_t) cnode->global.inlined_to;
1597
1598 /* We share declaration of builtins, so we may read same node twice. */
1599 if (!node->aux)
1600 continue;
1601 node->aux = NULL;
1602
1603 /* Fixup inlined_to from reference to pointer. */
1604 if (ref != LCC_NOT_FOUND)
1605 dyn_cast<cgraph_node *> (node)->global.inlined_to
1606 = dyn_cast<cgraph_node *> (nodes[ref]);
1607 else
1608 cnode->global.inlined_to = NULL;
1609
1610 /* Compute instrumented_version. */
1611 if (cnode->instrumentation_clone)
1612 {
1613 gcc_assert (cnode->orig_decl);
1614
1615 cnode->instrumented_version = cgraph_node::get (cnode->orig_decl);
1616 if (cnode->instrumented_version)
1617 {
1618 /* We may have multiple nodes for a single function which
1619 will be merged later. To have a proper merge we need
1620 to keep instrumentation_version reference between nodes
1621 consistent: each instrumented_version reference should
1622 have proper reverse reference. Thus don't break existing
1623 instrumented_version reference if it already exists. */
1624 if (cnode->instrumented_version->instrumented_version)
1625 cnode->instrumented_version = NULL;
1626 else
1627 cnode->instrumented_version->instrumented_version = cnode;
1628 }
1629
1630 /* Restore decl names reference except for wrapper functions. */
1631 if (!chkp_wrap_function (cnode->orig_decl))
1632 {
1633 tree name = DECL_ASSEMBLER_NAME (cnode->decl);
1634 IDENTIFIER_TRANSPARENT_ALIAS (name) = 1;
1635 TREE_CHAIN (name) = DECL_ASSEMBLER_NAME (cnode->orig_decl);
1636 }
1637 }
1638 }
1639
1640 ref = (int) (intptr_t) node->same_comdat_group;
1641
1642 /* Fixup same_comdat_group from reference to pointer. */
1643 if (ref != LCC_NOT_FOUND)
1644 node->same_comdat_group = nodes[ref];
1645 else
1646 node->same_comdat_group = NULL;
1647 }
1648 FOR_EACH_VEC_ELT (nodes, i, node)
1649 node->aux = is_a <cgraph_node *> (node) ? (void *)1 : NULL;
1650 return nodes;
1651 }
1652
1653 /* Input ipa_refs. */
1654
1655 static void
1656 input_refs (struct lto_input_block *ib,
1657 vec<symtab_node *> nodes)
1658 {
1659 int count;
1660 int idx;
1661 while (true)
1662 {
1663 symtab_node *node;
1664 count = streamer_read_uhwi (ib);
1665 if (!count)
1666 break;
1667 idx = streamer_read_uhwi (ib);
1668 node = nodes[idx];
1669 while (count)
1670 {
1671 input_ref (ib, node, nodes);
1672 count--;
1673 }
1674 }
1675 }
1676
1677
/* Accumulated whole-program profile summary; filled by
   merge_profile_summaries.  */
static struct gcov_ctr_summary lto_gcov_summary;

/* Input profile_info from IB.  */
static void
input_profile_summary (struct lto_input_block *ib,
		       struct lto_file_decl_data *file_data)
{
  unsigned h_ix;
  struct bitpack_d bp;
  unsigned int runs = streamer_read_uhwi (ib);
  /* A zero run count means the file carries no profile data.  */
  if (runs)
    {
      file_data->profile_info.runs = runs;
      file_data->profile_info.sum_max = streamer_read_gcov_count (ib);
      file_data->profile_info.sum_all = streamer_read_gcov_count (ib);

      memset (file_data->profile_info.histogram, 0,
	      sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
      /* Input the bitpack of non-zero histogram indices.  */
      bp = streamer_read_bitpack (ib);
      /* Read in and unpack the full bitpack, flagging non-zero
	 histogram entries by setting the num_counters non-zero.  */
      for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
	{
	  file_data->profile_info.histogram[h_ix].num_counters
	    = bp_unpack_value (&bp, 1);
	}
      /* Now read the actual counters for the buckets flagged above.  */
      for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
	{
	  if (!file_data->profile_info.histogram[h_ix].num_counters)
	    continue;

	  file_data->profile_info.histogram[h_ix].num_counters
	    = streamer_read_gcov_count (ib);
	  file_data->profile_info.histogram[h_ix].min_value
	    = streamer_read_gcov_count (ib);
	  file_data->profile_info.histogram[h_ix].cum_value
	    = streamer_read_gcov_count (ib);
	}
      /* IPA-profile computes hot bb threshold based on cumulated
	 whole program profile.  We need to stream it down to ltrans.  */
      if (flag_ltrans)
	set_hot_bb_threshold (streamer_read_gcov_count (ib));
    }

}
1724
1725 /* Rescale profile summaries to the same number of runs in the whole unit. */
1726
1727 static void
1728 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1729 {
1730 struct lto_file_decl_data *file_data;
1731 unsigned int j, h_ix;
1732 gcov_unsigned_t max_runs = 0;
1733 struct cgraph_node *node;
1734 struct cgraph_edge *edge;
1735 gcov_type saved_sum_all = 0;
1736 gcov_ctr_summary *saved_profile_info = 0;
1737 int saved_scale = 0;
1738
1739 /* Find unit with maximal number of runs. If we ever get serious about
1740 roundoff errors, we might also consider computing smallest common
1741 multiply. */
1742 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1743 if (max_runs < file_data->profile_info.runs)
1744 max_runs = file_data->profile_info.runs;
1745
1746 if (!max_runs)
1747 return;
1748
1749 /* Simple overflow check. We probably don't need to support that many train
1750 runs. Such a large value probably imply data corruption anyway. */
1751 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1752 {
1753 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1754 INT_MAX / REG_BR_PROB_BASE);
1755 return;
1756 }
1757
1758 profile_info = &lto_gcov_summary;
1759 lto_gcov_summary.runs = max_runs;
1760 lto_gcov_summary.sum_max = 0;
1761 memset (lto_gcov_summary.histogram, 0,
1762 sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
1763
1764 /* Rescale all units to the maximal number of runs.
1765 sum_max can not be easily merged, as we have no idea what files come from
1766 the same run. We do not use the info anyway, so leave it 0. */
1767 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1768 if (file_data->profile_info.runs)
1769 {
1770 int scale = GCOV_COMPUTE_SCALE (max_runs,
1771 file_data->profile_info.runs);
1772 lto_gcov_summary.sum_max
1773 = MAX (lto_gcov_summary.sum_max,
1774 apply_scale (file_data->profile_info.sum_max, scale));
1775 lto_gcov_summary.sum_all
1776 = MAX (lto_gcov_summary.sum_all,
1777 apply_scale (file_data->profile_info.sum_all, scale));
1778 /* Save a pointer to the profile_info with the largest
1779 scaled sum_all and the scale for use in merging the
1780 histogram. */
1781 if (!saved_profile_info
1782 || lto_gcov_summary.sum_all > saved_sum_all)
1783 {
1784 saved_profile_info = &file_data->profile_info;
1785 saved_sum_all = lto_gcov_summary.sum_all;
1786 saved_scale = scale;
1787 }
1788 }
1789
1790 gcc_assert (saved_profile_info);
1791
1792 /* Scale up the histogram from the profile that had the largest
1793 scaled sum_all above. */
1794 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1795 {
1796 /* Scale up the min value as we did the corresponding sum_all
1797 above. Use that to find the new histogram index. */
1798 gcov_type scaled_min
1799 = apply_scale (saved_profile_info->histogram[h_ix].min_value,
1800 saved_scale);
1801 /* The new index may be shared with another scaled histogram entry,
1802 so we need to account for a non-zero histogram entry at new_ix. */
1803 unsigned new_ix = gcov_histo_index (scaled_min);
1804 lto_gcov_summary.histogram[new_ix].min_value
1805 = (lto_gcov_summary.histogram[new_ix].num_counters
1806 ? MIN (lto_gcov_summary.histogram[new_ix].min_value, scaled_min)
1807 : scaled_min);
1808 /* Some of the scaled counter values would ostensibly need to be placed
1809 into different (larger) histogram buckets, but we keep things simple
1810 here and place the scaled cumulative counter value in the bucket
1811 corresponding to the scaled minimum counter value. */
1812 lto_gcov_summary.histogram[new_ix].cum_value
1813 += apply_scale (saved_profile_info->histogram[h_ix].cum_value,
1814 saved_scale);
1815 lto_gcov_summary.histogram[new_ix].num_counters
1816 += saved_profile_info->histogram[h_ix].num_counters;
1817 }
1818
1819 /* Watch roundoff errors. */
1820 if (lto_gcov_summary.sum_max < max_runs)
1821 lto_gcov_summary.sum_max = max_runs;
1822
1823 /* If merging already happent at WPA time, we are done. */
1824 if (flag_ltrans)
1825 return;
1826
1827 /* Now compute count_materialization_scale of each node.
1828 During LTRANS we already have values of count_materialization_scale
1829 computed, so just update them. */
1830 FOR_EACH_FUNCTION (node)
1831 if (node->lto_file_data
1832 && node->lto_file_data->profile_info.runs)
1833 {
1834 int scale;
1835
1836 scale = RDIV (node->count_materialization_scale * max_runs,
1837 node->lto_file_data->profile_info.runs);
1838 node->count_materialization_scale = scale;
1839 if (scale < 0)
1840 fatal_error (input_location, "Profile information in %s corrupted",
1841 file_data->file_name);
1842
1843 if (scale == REG_BR_PROB_BASE)
1844 continue;
1845 for (edge = node->callees; edge; edge = edge->next_callee)
1846 edge->count = apply_scale (edge->count, scale);
1847 node->count = apply_scale (node->count, scale);
1848 }
1849 }
1850
/* Input and merge the symtab from each of the .o files passed to
   lto1.  */

void
input_symtab (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;
  struct cgraph_node *node;

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib;
      vec<symtab_node *> nodes;

      /* Read the symtab nodes section: profile summary, then the node and
	 edge stream.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
					  &data, &len);
      if (!ib)
	fatal_error (input_location,
		     "cannot find LTO cgraph in %s", file_data->file_name);
      input_profile_summary (ib, file_data);
      file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
      nodes = input_cgraph_1 (file_data, ib);
      lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
				      ib, data, len);

      /* Read the ipa reference section against the nodes just streamed.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_refs,
					  &data, &len);
      if (!ib)
	fatal_error (input_location, "cannot find LTO section refs in %s",
		     file_data->file_name);
      input_refs (ib, nodes);
      lto_destroy_simple_input_block (file_data, LTO_section_refs,
				      ib, data, len);
      if (flag_ltrans)
	input_cgraph_opt_summary (nodes);
      nodes.release ();
    }

  merge_profile_summaries (file_data_vec);
  get_working_sets ();


  /* Clear out the aux field that was used to store enough state to
     tell which nodes should be overwritten.  */
  FOR_EACH_FUNCTION (node)
    {
      /* Some nodes may have been created by cgraph_node.  This
	 happens when the callgraph contains nested functions.  If the
	 node for the parent function was never emitted to the gimple
	 file, cgraph_node will create a node for it when setting the
	 context of the nested function.  */
      if (node->lto_file_data)
	node->aux = NULL;
    }
}
1910
1911 /* Input function/variable tables that will allow libgomp to look up offload
1912 target code, and store them into OFFLOAD_FUNCS and OFFLOAD_VARS. */
1913
1914 void
1915 input_offload_tables (void)
1916 {
1917 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1918 struct lto_file_decl_data *file_data;
1919 unsigned int j = 0;
1920
1921 while ((file_data = file_data_vec[j++]))
1922 {
1923 const char *data;
1924 size_t len;
1925 struct lto_input_block *ib
1926 = lto_create_simple_input_block (file_data, LTO_section_offload_table,
1927 &data, &len);
1928 if (!ib)
1929 continue;
1930
1931 enum LTO_symtab_tags tag
1932 = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1933 while (tag)
1934 {
1935 if (tag == LTO_symtab_unavail_node)
1936 {
1937 int decl_index = streamer_read_uhwi (ib);
1938 tree fn_decl
1939 = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1940 vec_safe_push (offload_funcs, fn_decl);
1941 }
1942 else if (tag == LTO_symtab_variable)
1943 {
1944 int decl_index = streamer_read_uhwi (ib);
1945 tree var_decl
1946 = lto_file_decl_data_get_var_decl (file_data, decl_index);
1947 vec_safe_push (offload_vars, var_decl);
1948 }
1949 else
1950 fatal_error (input_location,
1951 "invalid offload table in %s", file_data->file_name);
1952
1953 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1954 }
1955
1956 lto_destroy_simple_input_block (file_data, LTO_section_offload_table,
1957 ib, data, len);
1958 }
1959 }
1960
1961 /* True when we need optimization summary for NODE. */
1962
1963 static int
1964 output_cgraph_opt_summary_p (struct cgraph_node *node)
1965 {
1966 return (node->clone_of
1967 && (node->clone.tree_map
1968 || node->clone.args_to_skip
1969 || node->clone.combined_args_to_skip));
1970 }
1971
/* Output optimization summary for EDGE to OB.
   There is currently no per-edge optimization state to stream, so this
   is an intentional no-op kept as a placeholder; output_node_opt_summary
   still calls it for every callee and indirect-call edge so the stream
   layout stays symmetrical with input_edge_opt_summary.  */
static void
output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
			 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
{
}
1978
1979 /* Output optimization summary for NODE to OB. */
1980
1981 static void
1982 output_node_opt_summary (struct output_block *ob,
1983 struct cgraph_node *node,
1984 lto_symtab_encoder_t encoder)
1985 {
1986 unsigned int index;
1987 bitmap_iterator bi;
1988 struct ipa_replace_map *map;
1989 struct bitpack_d bp;
1990 int i;
1991 struct cgraph_edge *e;
1992
1993 if (node->clone.args_to_skip)
1994 {
1995 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
1996 EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1997 streamer_write_uhwi (ob, index);
1998 }
1999 else
2000 streamer_write_uhwi (ob, 0);
2001 if (node->clone.combined_args_to_skip)
2002 {
2003 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
2004 EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
2005 streamer_write_uhwi (ob, index);
2006 }
2007 else
2008 streamer_write_uhwi (ob, 0);
2009 streamer_write_uhwi (ob, vec_safe_length (node->clone.tree_map));
2010 FOR_EACH_VEC_SAFE_ELT (node->clone.tree_map, i, map)
2011 {
2012 /* At the moment we assume all old trees to be PARM_DECLs, because we have no
2013 mechanism to store function local declarations into summaries. */
2014 gcc_assert (!map->old_tree);
2015 streamer_write_uhwi (ob, map->parm_num);
2016 gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
2017 stream_write_tree (ob, map->new_tree, true);
2018 bp = bitpack_create (ob->main_stream);
2019 bp_pack_value (&bp, map->replace_p, 1);
2020 bp_pack_value (&bp, map->ref_p, 1);
2021 streamer_write_bitpack (&bp);
2022 }
2023
2024 if (lto_symtab_encoder_in_partition_p (encoder, node))
2025 {
2026 for (e = node->callees; e; e = e->next_callee)
2027 output_edge_opt_summary (ob, e);
2028 for (e = node->indirect_calls; e; e = e->next_callee)
2029 output_edge_opt_summary (ob, e);
2030 }
2031 }
2032
2033 /* Output optimization summaries stored in callgraph.
2034 At the moment it is the clone info structure. */
2035
2036 static void
2037 output_cgraph_opt_summary (void)
2038 {
2039 int i, n_nodes;
2040 lto_symtab_encoder_t encoder;
2041 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
2042 unsigned count = 0;
2043
2044 ob->symbol = NULL;
2045 encoder = ob->decl_state->symtab_node_encoder;
2046 n_nodes = lto_symtab_encoder_size (encoder);
2047 for (i = 0; i < n_nodes; i++)
2048 {
2049 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
2050 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
2051 if (cnode && output_cgraph_opt_summary_p (cnode))
2052 count++;
2053 }
2054 streamer_write_uhwi (ob, count);
2055 for (i = 0; i < n_nodes; i++)
2056 {
2057 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
2058 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
2059 if (cnode && output_cgraph_opt_summary_p (cnode))
2060 {
2061 streamer_write_uhwi (ob, i);
2062 output_node_opt_summary (ob, cnode, encoder);
2063 }
2064 }
2065 produce_asm (ob, NULL);
2066 destroy_output_block (ob);
2067 }
2068
/* Input optimisation summary of EDGE.
   The writer (output_edge_opt_summary) currently streams no per-edge
   data, so this is deliberately empty; it is kept so the reader stays
   structurally symmetrical with the writer and is still called for
   every callee and indirect-call edge.  */

static void
input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
			struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
{
}
2076
2077 /* Input optimisation summary of NODE. */
2078
2079 static void
2080 input_node_opt_summary (struct cgraph_node *node,
2081 struct lto_input_block *ib_main,
2082 struct data_in *data_in)
2083 {
2084 int i;
2085 int count;
2086 int bit;
2087 struct bitpack_d bp;
2088 struct cgraph_edge *e;
2089
2090 count = streamer_read_uhwi (ib_main);
2091 if (count)
2092 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
2093 for (i = 0; i < count; i++)
2094 {
2095 bit = streamer_read_uhwi (ib_main);
2096 bitmap_set_bit (node->clone.args_to_skip, bit);
2097 }
2098 count = streamer_read_uhwi (ib_main);
2099 if (count)
2100 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
2101 for (i = 0; i < count; i++)
2102 {
2103 bit = streamer_read_uhwi (ib_main);
2104 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
2105 }
2106 count = streamer_read_uhwi (ib_main);
2107 for (i = 0; i < count; i++)
2108 {
2109 struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
2110
2111 vec_safe_push (node->clone.tree_map, map);
2112 map->parm_num = streamer_read_uhwi (ib_main);
2113 map->old_tree = NULL;
2114 map->new_tree = stream_read_tree (ib_main, data_in);
2115 bp = streamer_read_bitpack (ib_main);
2116 map->replace_p = bp_unpack_value (&bp, 1);
2117 map->ref_p = bp_unpack_value (&bp, 1);
2118 }
2119 for (e = node->callees; e; e = e->next_callee)
2120 input_edge_opt_summary (e, ib_main);
2121 for (e = node->indirect_calls; e; e = e->next_callee)
2122 input_edge_opt_summary (e, ib_main);
2123 }
2124
2125 /* Read section in file FILE_DATA of length LEN with data DATA. */
2126
2127 static void
2128 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
2129 const char *data, size_t len,
2130 vec<symtab_node *> nodes)
2131 {
2132 const struct lto_function_header *header =
2133 (const struct lto_function_header *) data;
2134 const int cfg_offset = sizeof (struct lto_function_header);
2135 const int main_offset = cfg_offset + header->cfg_size;
2136 const int string_offset = main_offset + header->main_size;
2137 struct data_in *data_in;
2138 unsigned int i;
2139 unsigned int count;
2140
2141 lto_input_block ib_main ((const char *) data + main_offset,
2142 header->main_size, file_data->mode_table);
2143
2144 data_in =
2145 lto_data_in_create (file_data, (const char *) data + string_offset,
2146 header->string_size, vNULL);
2147 count = streamer_read_uhwi (&ib_main);
2148
2149 for (i = 0; i < count; i++)
2150 {
2151 int ref = streamer_read_uhwi (&ib_main);
2152 input_node_opt_summary (dyn_cast<cgraph_node *> (nodes[ref]),
2153 &ib_main, data_in);
2154 }
2155 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
2156 len);
2157 lto_data_in_delete (data_in);
2158 }
2159
2160 /* Input optimization summary of cgraph. */
2161
2162 static void
2163 input_cgraph_opt_summary (vec<symtab_node *> nodes)
2164 {
2165 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2166 struct lto_file_decl_data *file_data;
2167 unsigned int j = 0;
2168
2169 while ((file_data = file_data_vec[j++]))
2170 {
2171 size_t len;
2172 const char *data =
2173 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
2174 &len);
2175
2176 if (data)
2177 input_cgraph_opt_section (file_data, data, len, nodes);
2178 }
2179 }