/* NOTE: web-scrape residue removed.  This file is a copy of
   gcc/lto-cgraph.c taken from a gitweb mirror (git.ipfire.org,
   thirdparty/gcc.git); the page header lines were not part of the
   original source.  */
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright (C) 2009-2015 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "predict.h"
31 #include "timevar.h"
32 #include "stringpool.h"
33 #include "expmed.h"
34 #include "insn-config.h"
35 #include "emit-rtl.h"
36 #include "tree-streamer.h"
37 #include "cgraph.h"
38 #include "gcov-io.h"
39 #include "diagnostic-core.h"
40 #include "alias.h"
41 #include "fold-const.h"
42 #include "internal-fn.h"
43 #include "flags.h"
44 #include "dojump.h"
45 #include "explow.h"
46 #include "calls.h"
47 #include "varasm.h"
48 #include "stmt.h"
49 #include "expr.h"
50 #include "params.h"
51 #include "langhooks.h"
52 #include "except.h"
53 #include "tree-pass.h"
54 #include "profile.h"
55 #include "context.h"
56 #include "pass_manager.h"
57 #include "ipa-utils.h"
58 #include "omp-low.h"
59 #include "ipa-chkp.h"
60
61 /* True when asm nodes has been output. */
62 bool asm_nodes_output = false;
63
64 static void output_cgraph_opt_summary (void);
65 static void input_cgraph_opt_summary (vec<symtab_node *> nodes);
66
67 /* Number of LDPR values known to GCC. */
68 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
69
70 /* All node orders are ofsetted by ORDER_BASE. */
71 static int order_base;
72
/* Cgraph streaming is organized as a set of records whose type
   is indicated by a tag.  */
enum LTO_symtab_tags
{
  /* Must leave 0 for the stopper.  */

  /* Cgraph node without body available.  */
  LTO_symtab_unavail_node = 1,
  /* Cgraph node with function body.  */
  LTO_symtab_analyzed_node,
  /* Cgraph edges.  */
  LTO_symtab_edge,
  LTO_symtab_indirect_edge,
  LTO_symtab_variable,
  LTO_symtab_last_tag
};
89
90 /* Create a new symtab encoder.
91 if FOR_INPUT, the encoder allocate only datastructures needed
92 to read the symtab. */
93
94 lto_symtab_encoder_t
95 lto_symtab_encoder_new (bool for_input)
96 {
97 lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
98
99 if (!for_input)
100 encoder->map = new hash_map<symtab_node *, size_t>;
101 encoder->nodes.create (0);
102 return encoder;
103 }
104
105
106 /* Delete ENCODER and its components. */
107
108 void
109 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
110 {
111 encoder->nodes.release ();
112 if (encoder->map)
113 delete encoder->map;
114 free (encoder);
115 }
116
117
118 /* Return the existing reference number of NODE in the symtab encoder in
119 output block OB. Assign a new reference if this is the first time
120 NODE is encoded. */
121
122 int
123 lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
124 symtab_node *node)
125 {
126 int ref;
127
128 if (!encoder->map)
129 {
130 lto_encoder_entry entry = {node, false, false, false};
131
132 ref = encoder->nodes.length ();
133 encoder->nodes.safe_push (entry);
134 return ref;
135 }
136
137 size_t *slot = encoder->map->get (node);
138 if (!slot || !*slot)
139 {
140 lto_encoder_entry entry = {node, false, false, false};
141 ref = encoder->nodes.length ();
142 if (!slot)
143 encoder->map->put (node, ref + 1);
144 encoder->nodes.safe_push (entry);
145 }
146 else
147 ref = *slot - 1;
148
149 return ref;
150 }
151
152 /* Remove NODE from encoder. */
153
154 bool
155 lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
156 symtab_node *node)
157 {
158 int index;
159 lto_encoder_entry last_node;
160
161 size_t *slot = encoder->map->get (node);
162 if (slot == NULL || !*slot)
163 return false;
164
165 index = *slot - 1;
166 gcc_checking_assert (encoder->nodes[index].node == node);
167
168 /* Remove from vector. We do this by swapping node with the last element
169 of the vector. */
170 last_node = encoder->nodes.pop ();
171 if (last_node.node != node)
172 {
173 gcc_assert (encoder->map->put (last_node.node, index + 1));
174
175 /* Move the last element to the original spot of NODE. */
176 encoder->nodes[index] = last_node;
177 }
178
179 /* Remove element from hash table. */
180 encoder->map->remove (node);
181 return true;
182 }
183
184
185 /* Return TRUE if we should encode the body of NODE (if any). */
186
187 bool
188 lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
189 struct cgraph_node *node)
190 {
191 int index = lto_symtab_encoder_lookup (encoder, node);
192 return encoder->nodes[index].body;
193 }
194
195 /* Specify that we encode the body of NODE in this partition. */
196
197 static void
198 lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
199 struct cgraph_node *node)
200 {
201 int index = lto_symtab_encoder_encode (encoder, node);
202 gcc_checking_assert (encoder->nodes[index].node == node);
203 encoder->nodes[index].body = true;
204 }
205
206 /* Return TRUE if we should encode initializer of NODE (if any). */
207
208 bool
209 lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
210 varpool_node *node)
211 {
212 int index = lto_symtab_encoder_lookup (encoder, node);
213 if (index == LCC_NOT_FOUND)
214 return false;
215 return encoder->nodes[index].initializer;
216 }
217
218 /* Specify that we should encode initializer of NODE (if any). */
219
220 static void
221 lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
222 varpool_node *node)
223 {
224 int index = lto_symtab_encoder_lookup (encoder, node);
225 encoder->nodes[index].initializer = true;
226 }
227
228 /* Return TRUE if NODE is in this partition. */
229
230 bool
231 lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
232 symtab_node *node)
233 {
234 int index = lto_symtab_encoder_lookup (encoder, node);
235 if (index == LCC_NOT_FOUND)
236 return false;
237 return encoder->nodes[index].in_partition;
238 }
239
240 /* Specify that NODE is in this partition. */
241
242 void
243 lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
244 symtab_node *node)
245 {
246 int index = lto_symtab_encoder_encode (encoder, node);
247 encoder->nodes[index].in_partition = true;
248 }
249
250 /* Output the cgraph EDGE to OB using ENCODER. */
251
252 static void
253 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
254 lto_symtab_encoder_t encoder)
255 {
256 unsigned int uid;
257 intptr_t ref;
258 struct bitpack_d bp;
259
260 if (edge->indirect_unknown_callee)
261 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
262 LTO_symtab_indirect_edge);
263 else
264 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
265 LTO_symtab_edge);
266
267 ref = lto_symtab_encoder_lookup (encoder, edge->caller);
268 gcc_assert (ref != LCC_NOT_FOUND);
269 streamer_write_hwi_stream (ob->main_stream, ref);
270
271 if (!edge->indirect_unknown_callee)
272 {
273 ref = lto_symtab_encoder_lookup (encoder, edge->callee);
274 gcc_assert (ref != LCC_NOT_FOUND);
275 streamer_write_hwi_stream (ob->main_stream, ref);
276 }
277
278 streamer_write_gcov_count_stream (ob->main_stream, edge->count);
279
280 bp = bitpack_create (ob->main_stream);
281 uid = (!gimple_has_body_p (edge->caller->decl)
282 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt) + 1);
283 bp_pack_enum (&bp, cgraph_inline_failed_t,
284 CIF_N_REASONS, edge->inline_failed);
285 bp_pack_var_len_unsigned (&bp, uid);
286 bp_pack_var_len_unsigned (&bp, edge->frequency);
287 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
288 bp_pack_value (&bp, edge->speculative, 1);
289 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
290 bp_pack_value (&bp, edge->can_throw_external, 1);
291 bp_pack_value (&bp, edge->in_polymorphic_cdtor, 1);
292 if (edge->indirect_unknown_callee)
293 {
294 int flags = edge->indirect_info->ecf_flags;
295 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
296 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
297 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
298 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
299 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
300 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
301 /* Flags that should not appear on indirect calls. */
302 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
303 | ECF_MAY_BE_ALLOCA
304 | ECF_SIBCALL
305 | ECF_LEAF
306 | ECF_NOVOPS)));
307 }
308 streamer_write_bitpack (&bp);
309 if (edge->indirect_unknown_callee)
310 {
311 streamer_write_hwi_stream (ob->main_stream,
312 edge->indirect_info->common_target_id);
313 if (edge->indirect_info->common_target_id)
314 streamer_write_hwi_stream
315 (ob->main_stream, edge->indirect_info->common_target_probability);
316 }
317 }
318
319 /* Return if NODE contain references from other partitions. */
320
321 bool
322 referenced_from_other_partition_p (symtab_node *node, lto_symtab_encoder_t encoder)
323 {
324 int i;
325 struct ipa_ref *ref = NULL;
326
327 for (i = 0; node->iterate_referring (i, ref); i++)
328 {
329 /* Ignore references from non-offloadable nodes while streaming NODE into
330 offload LTO section. */
331 if (!ref->referring->need_lto_streaming)
332 continue;
333
334 if (ref->referring->in_other_partition
335 || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
336 return true;
337 }
338 return false;
339 }
340
341 /* Return true when node is reachable from other partition. */
342
343 bool
344 reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
345 {
346 struct cgraph_edge *e;
347 if (!node->definition)
348 return false;
349 if (node->global.inlined_to)
350 return false;
351 for (e = node->callers; e; e = e->next_caller)
352 {
353 /* Ignore references from non-offloadable nodes while streaming NODE into
354 offload LTO section. */
355 if (!e->caller->need_lto_streaming)
356 continue;
357
358 if (e->caller->in_other_partition
359 || !lto_symtab_encoder_in_partition_p (encoder, e->caller))
360 return true;
361 }
362 return false;
363 }
364
365 /* Return if NODE contain references from other partitions. */
366
367 bool
368 referenced_from_this_partition_p (symtab_node *node,
369 lto_symtab_encoder_t encoder)
370 {
371 int i;
372 struct ipa_ref *ref = NULL;
373
374 for (i = 0; node->iterate_referring (i, ref); i++)
375 if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
376 return true;
377 return false;
378 }
379
380 /* Return true when node is reachable from other partition. */
381
382 bool
383 reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
384 {
385 struct cgraph_edge *e;
386 for (e = node->callers; e; e = e->next_caller)
387 if (lto_symtab_encoder_in_partition_p (encoder, e->caller))
388 return true;
389 return false;
390 }
391
392 /* Output the cgraph NODE to OB. ENCODER is used to find the
393 reference number of NODE->inlined_to. SET is the set of nodes we
394 are writing to the current file. If NODE is not in SET, then NODE
395 is a boundary of a cgraph_node_set and we pretend NODE just has a
396 decl and no callees. WRITTEN_DECLS is the set of FUNCTION_DECLs
397 that have had their callgraph node written so far. This is used to
398 determine if NODE is a clone of a previously written node. */
399
400 static void
401 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
402 lto_symtab_encoder_t encoder)
403 {
404 unsigned int tag;
405 struct bitpack_d bp;
406 bool boundary_p;
407 intptr_t ref;
408 bool in_other_partition = false;
409 struct cgraph_node *clone_of, *ultimate_clone_of;
410 ipa_opt_pass_d *pass;
411 int i;
412 const char *comdat;
413 const char *section;
414 tree group;
415
416 boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
417
418 if (node->analyzed && (!boundary_p || node->alias || node->thunk.thunk_p))
419 tag = LTO_symtab_analyzed_node;
420 else
421 tag = LTO_symtab_unavail_node;
422
423 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
424 tag);
425 streamer_write_hwi_stream (ob->main_stream, node->order);
426
427 /* In WPA mode, we only output part of the call-graph. Also, we
428 fake cgraph node attributes. There are two cases that we care.
429
430 Boundary nodes: There are nodes that are not part of SET but are
431 called from within SET. We artificially make them look like
432 externally visible nodes with no function body.
433
434 Cherry-picked nodes: These are nodes we pulled from other
435 translation units into SET during IPA-inlining. We make them as
436 local static nodes to prevent clashes with other local statics. */
437 if (boundary_p && node->analyzed
438 && node->get_partitioning_class () == SYMBOL_PARTITION)
439 {
440 /* Inline clones can not be part of boundary.
441 gcc_assert (!node->global.inlined_to);
442
443 FIXME: At the moment they can be, when partition contains an inline
444 clone that is clone of inline clone from outside partition. We can
445 reshape the clone tree and make other tree to be the root, but it
446 needs a bit extra work and will be promplty done by cgraph_remove_node
447 after reading back. */
448 in_other_partition = 1;
449 }
450
451 clone_of = node->clone_of;
452 while (clone_of
453 && (ref = lto_symtab_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
454 if (clone_of->prev_sibling_clone)
455 clone_of = clone_of->prev_sibling_clone;
456 else
457 clone_of = clone_of->clone_of;
458
459 /* See if body of the master function is output. If not, we are seeing only
460 an declaration and we do not need to pass down clone tree. */
461 ultimate_clone_of = clone_of;
462 while (ultimate_clone_of && ultimate_clone_of->clone_of)
463 ultimate_clone_of = ultimate_clone_of->clone_of;
464
465 if (clone_of && !lto_symtab_encoder_encode_body_p (encoder, ultimate_clone_of))
466 clone_of = NULL;
467
468 if (tag == LTO_symtab_analyzed_node)
469 gcc_assert (clone_of || !node->clone_of);
470 if (!clone_of)
471 streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
472 else
473 streamer_write_hwi_stream (ob->main_stream, ref);
474
475
476 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
477 streamer_write_gcov_count_stream (ob->main_stream, node->count);
478 streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
479
480 streamer_write_hwi_stream (ob->main_stream,
481 node->ipa_transforms_to_apply.length ());
482 FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
483 streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);
484
485 if (tag == LTO_symtab_analyzed_node)
486 {
487 if (node->global.inlined_to)
488 {
489 ref = lto_symtab_encoder_lookup (encoder, node->global.inlined_to);
490 gcc_assert (ref != LCC_NOT_FOUND);
491 }
492 else
493 ref = LCC_NOT_FOUND;
494
495 streamer_write_hwi_stream (ob->main_stream, ref);
496 }
497
498 group = node->get_comdat_group ();
499 if (group)
500 comdat = IDENTIFIER_POINTER (group);
501 else
502 comdat = "";
503 streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
504
505 if (group)
506 {
507 if (node->same_comdat_group && !boundary_p)
508 {
509 ref = lto_symtab_encoder_lookup (encoder,
510 node->same_comdat_group);
511 gcc_assert (ref != LCC_NOT_FOUND);
512 }
513 else
514 ref = LCC_NOT_FOUND;
515 streamer_write_hwi_stream (ob->main_stream, ref);
516 }
517
518 section = node->get_section ();
519 if (!section)
520 section = "";
521
522 streamer_write_hwi_stream (ob->main_stream, node->tp_first_run);
523
524 bp = bitpack_create (ob->main_stream);
525 bp_pack_value (&bp, node->local.local, 1);
526 bp_pack_value (&bp, node->externally_visible, 1);
527 bp_pack_value (&bp, node->no_reorder, 1);
528 bp_pack_value (&bp, node->definition, 1);
529 bp_pack_value (&bp, node->local.versionable, 1);
530 bp_pack_value (&bp, node->local.can_change_signature, 1);
531 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
532 bp_pack_value (&bp, node->force_output, 1);
533 bp_pack_value (&bp, node->forced_by_abi, 1);
534 bp_pack_value (&bp, node->unique_name, 1);
535 bp_pack_value (&bp, node->body_removed, 1);
536 bp_pack_value (&bp, node->implicit_section, 1);
537 bp_pack_value (&bp, node->address_taken, 1);
538 bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
539 && node->get_partitioning_class () == SYMBOL_PARTITION
540 && (reachable_from_other_partition_p (node, encoder)
541 || referenced_from_other_partition_p (node, encoder)), 1);
542 bp_pack_value (&bp, node->lowered, 1);
543 bp_pack_value (&bp, in_other_partition, 1);
544 bp_pack_value (&bp, node->alias, 1);
545 bp_pack_value (&bp, node->weakref, 1);
546 bp_pack_value (&bp, node->frequency, 2);
547 bp_pack_value (&bp, node->only_called_at_startup, 1);
548 bp_pack_value (&bp, node->only_called_at_exit, 1);
549 bp_pack_value (&bp, node->tm_clone, 1);
550 bp_pack_value (&bp, node->calls_comdat_local, 1);
551 bp_pack_value (&bp, node->icf_merged, 1);
552 bp_pack_value (&bp, node->nonfreeing_fn, 1);
553 bp_pack_value (&bp, node->thunk.thunk_p, 1);
554 bp_pack_value (&bp, node->parallelized_function, 1);
555 bp_pack_enum (&bp, ld_plugin_symbol_resolution,
556 LDPR_NUM_KNOWN, node->resolution);
557 bp_pack_value (&bp, node->instrumentation_clone, 1);
558 bp_pack_value (&bp, node->split_part, 1);
559 streamer_write_bitpack (&bp);
560 streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
561
562 if (node->thunk.thunk_p)
563 {
564 streamer_write_uhwi_stream
565 (ob->main_stream,
566 1 + (node->thunk.this_adjusting != 0) * 2
567 + (node->thunk.virtual_offset_p != 0) * 4
568 + (node->thunk.add_pointer_bounds_args != 0) * 8);
569 streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
570 streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
571 }
572 streamer_write_hwi_stream (ob->main_stream, node->profile_id);
573 if (DECL_STATIC_CONSTRUCTOR (node->decl))
574 streamer_write_hwi_stream (ob->main_stream, node->get_init_priority ());
575 if (DECL_STATIC_DESTRUCTOR (node->decl))
576 streamer_write_hwi_stream (ob->main_stream, node->get_fini_priority ());
577
578 if (node->instrumentation_clone)
579 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->orig_decl);
580 }
581
582 /* Output the varpool NODE to OB.
583 If NODE is not in SET, then NODE is a boundary. */
584
585 static void
586 lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
587 lto_symtab_encoder_t encoder)
588 {
589 bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
590 bool encode_initializer_p
591 = (node->definition
592 && lto_symtab_encoder_encode_initializer_p (encoder, node));
593 struct bitpack_d bp;
594 int ref;
595 const char *comdat;
596 const char *section;
597 tree group;
598
599 gcc_assert (!encode_initializer_p || node->definition);
600 gcc_assert (boundary_p || encode_initializer_p);
601
602 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
603 LTO_symtab_variable);
604 streamer_write_hwi_stream (ob->main_stream, node->order);
605 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
606 bp = bitpack_create (ob->main_stream);
607 bp_pack_value (&bp, node->externally_visible, 1);
608 bp_pack_value (&bp, node->no_reorder, 1);
609 bp_pack_value (&bp, node->force_output, 1);
610 bp_pack_value (&bp, node->forced_by_abi, 1);
611 bp_pack_value (&bp, node->unique_name, 1);
612 bp_pack_value (&bp,
613 node->body_removed
614 || (!encode_initializer_p && !node->alias && node->definition),
615 1);
616 bp_pack_value (&bp, node->implicit_section, 1);
617 bp_pack_value (&bp, node->writeonly, 1);
618 bp_pack_value (&bp, node->definition && (encode_initializer_p || node->alias),
619 1);
620 bp_pack_value (&bp, node->alias, 1);
621 bp_pack_value (&bp, node->weakref, 1);
622 bp_pack_value (&bp, node->analyzed && !boundary_p, 1);
623 gcc_assert (node->definition || !node->analyzed);
624 /* Constant pool initializers can be de-unified into individual ltrans units.
625 FIXME: Alternatively at -Os we may want to avoid generating for them the local
626 labels and share them across LTRANS partitions. */
627 if (node->get_partitioning_class () != SYMBOL_PARTITION)
628 {
629 bp_pack_value (&bp, 0, 1); /* used_from_other_parition. */
630 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
631 }
632 else
633 {
634 bp_pack_value (&bp, node->definition
635 && referenced_from_other_partition_p (node, encoder), 1);
636 bp_pack_value (&bp, node->analyzed
637 && boundary_p && !DECL_EXTERNAL (node->decl), 1);
638 /* in_other_partition. */
639 }
640 bp_pack_value (&bp, node->tls_model, 3);
641 bp_pack_value (&bp, node->used_by_single_function, 1);
642 bp_pack_value (&bp, node->need_bounds_init, 1);
643 streamer_write_bitpack (&bp);
644
645 group = node->get_comdat_group ();
646 if (group)
647 comdat = IDENTIFIER_POINTER (group);
648 else
649 comdat = "";
650 streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
651
652 if (group)
653 {
654 if (node->same_comdat_group && !boundary_p)
655 {
656 ref = lto_symtab_encoder_lookup (encoder,
657 node->same_comdat_group);
658 gcc_assert (ref != LCC_NOT_FOUND);
659 }
660 else
661 ref = LCC_NOT_FOUND;
662 streamer_write_hwi_stream (ob->main_stream, ref);
663 }
664
665 section = node->get_section ();
666 if (!section)
667 section = "";
668 streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
669
670 streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
671 LDPR_NUM_KNOWN, node->resolution);
672 }
673
674 /* Output the varpool NODE to OB.
675 If NODE is not in SET, then NODE is a boundary. */
676
677 static void
678 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
679 lto_symtab_encoder_t encoder)
680 {
681 struct bitpack_d bp;
682 int nref;
683 int uid = ref->lto_stmt_uid;
684 struct cgraph_node *node;
685
686 bp = bitpack_create (ob->main_stream);
687 bp_pack_value (&bp, ref->use, 3);
688 bp_pack_value (&bp, ref->speculative, 1);
689 streamer_write_bitpack (&bp);
690 nref = lto_symtab_encoder_lookup (encoder, ref->referred);
691 gcc_assert (nref != LCC_NOT_FOUND);
692 streamer_write_hwi_stream (ob->main_stream, nref);
693
694 node = dyn_cast <cgraph_node *> (ref->referring);
695 if (node)
696 {
697 if (ref->stmt)
698 uid = gimple_uid (ref->stmt) + 1;
699 streamer_write_hwi_stream (ob->main_stream, uid);
700 }
701 }
702
703 /* Stream out profile_summary to OB. */
704
705 static void
706 output_profile_summary (struct lto_simple_output_block *ob)
707 {
708 unsigned h_ix;
709 struct bitpack_d bp;
710
711 if (profile_info)
712 {
713 /* We do not output num and run_max, they are not used by
714 GCC profile feedback and they are difficult to merge from multiple
715 units. */
716 gcc_assert (profile_info->runs);
717 streamer_write_uhwi_stream (ob->main_stream, profile_info->runs);
718 streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_max);
719
720 /* sum_all is needed for computing the working set with the
721 histogram. */
722 streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_all);
723
724 /* Create and output a bitpack of non-zero histogram entries indices. */
725 bp = bitpack_create (ob->main_stream);
726 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
727 bp_pack_value (&bp, profile_info->histogram[h_ix].num_counters > 0, 1);
728 streamer_write_bitpack (&bp);
729 /* Now stream out only those non-zero entries. */
730 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
731 {
732 if (!profile_info->histogram[h_ix].num_counters)
733 continue;
734 streamer_write_gcov_count_stream (ob->main_stream,
735 profile_info->histogram[h_ix].num_counters);
736 streamer_write_gcov_count_stream (ob->main_stream,
737 profile_info->histogram[h_ix].min_value);
738 streamer_write_gcov_count_stream (ob->main_stream,
739 profile_info->histogram[h_ix].cum_value);
740 }
741 /* IPA-profile computes hot bb threshold based on cumulated
742 whole program profile. We need to stream it down to ltrans. */
743 if (flag_wpa)
744 streamer_write_gcov_count_stream (ob->main_stream,
745 get_hot_bb_threshold ());
746 }
747 else
748 streamer_write_uhwi_stream (ob->main_stream, 0);
749 }
750
751 /* Output all callees or indirect outgoing edges. EDGE must be the first such
752 edge. */
753
754 static void
755 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
756 struct lto_simple_output_block *ob,
757 lto_symtab_encoder_t encoder)
758 {
759 if (!edge)
760 return;
761
762 /* Output edges in backward direction, so the reconstructed callgraph match
763 and it is easy to associate call sites in the IPA pass summaries. */
764 while (edge->next_callee)
765 edge = edge->next_callee;
766 for (; edge; edge = edge->prev_callee)
767 lto_output_edge (ob, edge, encoder);
768 }
769
770 /* Output the part of the cgraph in SET. */
771
772 static void
773 output_refs (lto_symtab_encoder_t encoder)
774 {
775 struct lto_simple_output_block *ob;
776 int count;
777 struct ipa_ref *ref;
778
779 ob = lto_create_simple_output_block (LTO_section_refs);
780
781 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
782 {
783 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
784
785 /* IPA_REF_ALIAS and IPA_REF_CHKP references are always preserved
786 in the boundary. Alias node can't have other references and
787 can be always handled as if it's not in the boundary. */
788 if (!node->alias && !lto_symtab_encoder_in_partition_p (encoder, node))
789 {
790 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
791 /* Output IPA_REF_CHKP reference. */
792 if (cnode
793 && cnode->instrumented_version
794 && !cnode->instrumentation_clone)
795 {
796 for (int i = 0; node->iterate_reference (i, ref); i++)
797 if (ref->use == IPA_REF_CHKP)
798 {
799 if (lto_symtab_encoder_lookup (encoder, ref->referred)
800 != LCC_NOT_FOUND)
801 {
802 int nref = lto_symtab_encoder_lookup (encoder, node);
803 streamer_write_gcov_count_stream (ob->main_stream, 1);
804 streamer_write_uhwi_stream (ob->main_stream, nref);
805 lto_output_ref (ob, ref, encoder);
806 }
807 break;
808 }
809 }
810 continue;
811 }
812
813 count = node->ref_list.nreferences ();
814 if (count)
815 {
816 streamer_write_gcov_count_stream (ob->main_stream, count);
817 streamer_write_uhwi_stream (ob->main_stream,
818 lto_symtab_encoder_lookup (encoder, node));
819 for (int i = 0; node->iterate_reference (i, ref); i++)
820 lto_output_ref (ob, ref, encoder);
821 }
822 }
823
824 streamer_write_uhwi_stream (ob->main_stream, 0);
825
826 lto_destroy_simple_output_block (ob);
827 }
828
829 /* Add NODE into encoder as well as nodes it is cloned from.
830 Do it in a way so clones appear first. */
831
832 static void
833 add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
834 bool include_body)
835 {
836 if (node->clone_of)
837 add_node_to (encoder, node->clone_of, include_body);
838 else if (include_body)
839 lto_set_symtab_encoder_encode_body (encoder, node);
840 lto_symtab_encoder_encode (encoder, node);
841 }
842
843 /* Add all references in NODE to encoders. */
844
845 static void
846 create_references (lto_symtab_encoder_t encoder, symtab_node *node)
847 {
848 int i;
849 struct ipa_ref *ref = NULL;
850 for (i = 0; node->iterate_reference (i, ref); i++)
851 if (is_a <cgraph_node *> (ref->referred))
852 add_node_to (encoder, dyn_cast <cgraph_node *> (ref->referred), false);
853 else
854 lto_symtab_encoder_encode (encoder, ref->referred);
855 }
856
857 /* Select what needs to be streamed out. In regular lto mode stream everything.
858 In offload lto mode stream only nodes marked as offloadable. */
859 void
860 select_what_to_stream (void)
861 {
862 struct symtab_node *snode;
863 FOR_EACH_SYMBOL (snode)
864 snode->need_lto_streaming = !lto_stream_offload_p || snode->offloadable;
865 }
866
867 /* Find all symbols we want to stream into given partition and insert them
868 to encoders.
869
870 The function actually replaces IN_ENCODER by new one. The reason is that
871 streaming code needs clone's origin to be streamed before clone. This
872 means that we need to insert the nodes in specific order. This order is
873 ignored by the partitioning logic earlier. */
874
875 lto_symtab_encoder_t
876 compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
877 {
878 struct cgraph_edge *edge;
879 int i;
880 lto_symtab_encoder_t encoder;
881 lto_symtab_encoder_iterator lsei;
882 hash_set<void *> reachable_call_targets;
883
884 encoder = lto_symtab_encoder_new (false);
885
886 /* Go over all entries in the IN_ENCODER and duplicate them to
887 ENCODER. At the same time insert masters of clones so
888 every master appears before clone. */
889 for (lsei = lsei_start_function_in_partition (in_encoder);
890 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
891 {
892 struct cgraph_node *node = lsei_cgraph_node (lsei);
893 if (!node->need_lto_streaming)
894 continue;
895 add_node_to (encoder, node, true);
896 lto_set_symtab_encoder_in_partition (encoder, node);
897 create_references (encoder, node);
898 }
899 for (lsei = lsei_start_variable_in_partition (in_encoder);
900 !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
901 {
902 varpool_node *vnode = lsei_varpool_node (lsei);
903
904 if (!vnode->need_lto_streaming)
905 continue;
906 lto_set_symtab_encoder_in_partition (encoder, vnode);
907 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
908 create_references (encoder, vnode);
909 }
910 /* Pickle in also the initializer of all referenced readonly variables
911 to help folding. Constant pool variables are not shared, so we must
912 pickle those too. */
913 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
914 {
915 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
916 if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
917 {
918 if (!lto_symtab_encoder_encode_initializer_p (encoder,
919 vnode)
920 && (((vnode->ctor_useable_for_folding_p ()
921 && (!DECL_VIRTUAL_P (vnode->decl)
922 || !flag_wpa
923 || flag_ltrans_devirtualize))
924 || POINTER_BOUNDS_P (vnode->decl))))
925 {
926 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
927 create_references (encoder, vnode);
928 }
929 }
930 }
931
932 /* Go over all the nodes again to include callees that are not in
933 SET. */
934 for (lsei = lsei_start_function_in_partition (encoder);
935 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
936 {
937 struct cgraph_node *node = lsei_cgraph_node (lsei);
938 for (edge = node->callees; edge; edge = edge->next_callee)
939 {
940 struct cgraph_node *callee = edge->callee;
941 if (!lto_symtab_encoder_in_partition_p (encoder, callee))
942 {
943 /* We should have moved all the inlines. */
944 gcc_assert (!callee->global.inlined_to);
945 add_node_to (encoder, callee, false);
946 }
947 }
948 /* Add all possible targets for late devirtualization. */
949 if (flag_ltrans_devirtualize || !flag_wpa)
950 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
951 if (edge->indirect_info->polymorphic)
952 {
953 unsigned int i;
954 void *cache_token;
955 bool final;
956 vec <cgraph_node *>targets
957 = possible_polymorphic_call_targets
958 (edge, &final, &cache_token);
959 if (!reachable_call_targets.add (cache_token))
960 {
961 for (i = 0; i < targets.length (); i++)
962 {
963 struct cgraph_node *callee = targets[i];
964
965 /* Adding an external declarations into the unit serves
966 no purpose and just increases its boundary. */
967 if (callee->definition
968 && !lto_symtab_encoder_in_partition_p
969 (encoder, callee))
970 {
971 gcc_assert (!callee->global.inlined_to);
972 add_node_to (encoder, callee, false);
973 }
974 }
975 }
976 }
977 }
978 /* Be sure to also insert alias targert and thunk callees. These needs
979 to stay to aid local calling conventions. */
980 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
981 {
982 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
983 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
984
985 if (node->alias && node->analyzed)
986 create_references (encoder, node);
987 if (cnode
988 && cnode->thunk.thunk_p)
989 add_node_to (encoder, cnode->callees->callee, false);
990 }
991 lto_symtab_encoder_delete (in_encoder);
992 return encoder;
993 }
994
995 /* Output the part of the symtab in SET and VSET. */
996
997 void
998 output_symtab (void)
999 {
1000 struct cgraph_node *node;
1001 struct lto_simple_output_block *ob;
1002 int i, n_nodes;
1003 lto_symtab_encoder_t encoder;
1004
1005 if (flag_wpa)
1006 output_cgraph_opt_summary ();
1007
1008 ob = lto_create_simple_output_block (LTO_section_symtab_nodes);
1009
1010 output_profile_summary (ob);
1011
1012 /* An encoder for cgraph nodes should have been created by
1013 ipa_write_summaries_1. */
1014 gcc_assert (ob->decl_state->symtab_node_encoder);
1015 encoder = ob->decl_state->symtab_node_encoder;
1016
1017 /* Write out the nodes. We must first output a node and then its clones,
1018 otherwise at a time reading back the node there would be nothing to clone
1019 from. */
1020 n_nodes = lto_symtab_encoder_size (encoder);
1021 for (i = 0; i < n_nodes; i++)
1022 {
1023 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
1024 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1025 lto_output_node (ob, cnode, encoder);
1026 else
1027 lto_output_varpool_node (ob, dyn_cast<varpool_node *> (node), encoder);
1028 }
1029
1030 /* Go over the nodes in SET again to write edges. */
1031 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
1032 {
1033 node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder, i));
1034 if (node
1035 && (node->thunk.thunk_p
1036 || lto_symtab_encoder_in_partition_p (encoder, node)))
1037 {
1038 output_outgoing_cgraph_edges (node->callees, ob, encoder);
1039 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
1040 }
1041 }
1042
1043 streamer_write_uhwi_stream (ob->main_stream, 0);
1044
1045 lto_destroy_simple_output_block (ob);
1046
1047 /* Emit toplevel asms.
1048 When doing WPA we must output every asm just once. Since we do not partition asm
1049 nodes at all, output them to first output. This is kind of hack, but should work
1050 well. */
1051 if (!asm_nodes_output)
1052 {
1053 asm_nodes_output = true;
1054 lto_output_toplevel_asms ();
1055 }
1056
1057 output_refs (encoder);
1058 }
1059
1060 /* Return identifier encoded in IB as a plain string. */
1061
1062 static tree
1063 read_identifier (struct lto_input_block *ib)
1064 {
1065 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1066 tree id;
1067
1068 if (ib->data[ib->p + len])
1069 lto_section_overrun (ib);
1070 if (!len)
1071 {
1072 ib->p++;
1073 return NULL;
1074 }
1075 id = get_identifier (ib->data + ib->p);
1076 ib->p += len + 1;
1077 return id;
1078 }
1079
1080 /* Return string encoded in IB, NULL if string is empty. */
1081
1082 static const char *
1083 read_string (struct lto_input_block *ib)
1084 {
1085 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1086 const char *str;
1087
1088 if (ib->data[ib->p + len])
1089 lto_section_overrun (ib);
1090 if (!len)
1091 {
1092 ib->p++;
1093 return NULL;
1094 }
1095 str = ib->data + ib->p;
1096 ib->p += len + 1;
1097 return str;
1098 }
1099
1100 /* Output function/variable tables that will allow libgomp to look up offload
1101 target code.
1102 OFFLOAD_FUNCS is filled in expand_omp_target, OFFLOAD_VARS is filled in
1103 varpool_node::get_create. In WHOPR (partitioned) mode during the WPA stage
1104 both OFFLOAD_FUNCS and OFFLOAD_VARS are filled by input_offload_tables. */
1105
1106 void
1107 output_offload_tables (void)
1108 {
1109 if (vec_safe_is_empty (offload_funcs) && vec_safe_is_empty (offload_vars))
1110 return;
1111
1112 struct lto_simple_output_block *ob
1113 = lto_create_simple_output_block (LTO_section_offload_table);
1114
1115 for (unsigned i = 0; i < vec_safe_length (offload_funcs); i++)
1116 {
1117 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1118 LTO_symtab_last_tag, LTO_symtab_unavail_node);
1119 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
1120 (*offload_funcs)[i]);
1121 }
1122
1123 for (unsigned i = 0; i < vec_safe_length (offload_vars); i++)
1124 {
1125 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1126 LTO_symtab_last_tag, LTO_symtab_variable);
1127 lto_output_var_decl_index (ob->decl_state, ob->main_stream,
1128 (*offload_vars)[i]);
1129 }
1130
1131 streamer_write_uhwi_stream (ob->main_stream, 0);
1132 lto_destroy_simple_output_block (ob);
1133
1134 /* In WHOPR mode during the WPA stage the joint offload tables need to be
1135 streamed to one partition only. That's why we free offload_funcs and
1136 offload_vars after the first call of output_offload_tables. */
1137 if (flag_wpa)
1138 {
1139 vec_free (offload_funcs);
1140 vec_free (offload_vars);
1141 }
1142 }
1143
/* Overwrite the information in NODE based on FILE_DATA, TAG, FLAGS,
   STACK_SIZE, SELF_TIME and SELF_SIZE.  This is called either to initialize
   NODE or to replace the values in it, for instance because the first
   time we saw it, the function body was not available but now it
   is.  BP is a bitpack with all the bitflags for NODE read from the
   stream.

   NOTE(review): the sequence of bp_unpack_value calls below must mirror
   the writer's pack order bit for bit; do not reorder them.  */

static void
input_overwrite_node (struct lto_file_decl_data *file_data,
		      struct cgraph_node *node,
		      enum LTO_symtab_tags tag,
		      struct bitpack_d *bp)
{
  /* Stash the tag in AUX; the fixup loop in input_cgraph_1 uses it to
     tell freshly streamed-in nodes apart.  */
  node->aux = (void *) tag;
  node->lto_file_data = file_data;

  node->local.local = bp_unpack_value (bp, 1);
  node->externally_visible = bp_unpack_value (bp, 1);
  node->no_reorder = bp_unpack_value (bp, 1);
  node->definition = bp_unpack_value (bp, 1);
  node->local.versionable = bp_unpack_value (bp, 1);
  node->local.can_change_signature = bp_unpack_value (bp, 1);
  node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
  node->force_output = bp_unpack_value (bp, 1);
  node->forced_by_abi = bp_unpack_value (bp, 1);
  node->unique_name = bp_unpack_value (bp, 1);
  node->body_removed = bp_unpack_value (bp, 1);
  node->implicit_section = bp_unpack_value (bp, 1);
  node->address_taken = bp_unpack_value (bp, 1);
  node->used_from_other_partition = bp_unpack_value (bp, 1);
  node->lowered = bp_unpack_value (bp, 1);
  /* ANALYZED is not carried in the bitpack; it is implied by the node's
     tag.  */
  node->analyzed = tag == LTO_symtab_analyzed_node;
  node->in_other_partition = bp_unpack_value (bp, 1);
  if (node->in_other_partition
      /* Avoid updating decl when we are seeing just inline clone.
	 When inlining function that has functions already inlined into it,
	 we produce clones of inline clones.

	 WPA partitioning might put each clone into different unit and
	 we might end up streaming inline clone from other partition
	 to support clone we are interested in.  */
      && (!node->clone_of
	  || node->clone_of->decl != node->decl))
    {
      /* The body lives in another partition; mark the decl external so
	 this unit does not try to emit it.  */
      DECL_EXTERNAL (node->decl) = 1;
      TREE_STATIC (node->decl) = 0;
    }
  node->alias = bp_unpack_value (bp, 1);
  node->weakref = bp_unpack_value (bp, 1);
  node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
  node->only_called_at_startup = bp_unpack_value (bp, 1);
  node->only_called_at_exit = bp_unpack_value (bp, 1);
  node->tm_clone = bp_unpack_value (bp, 1);
  node->calls_comdat_local = bp_unpack_value (bp, 1);
  node->icf_merged = bp_unpack_value (bp, 1);
  node->nonfreeing_fn = bp_unpack_value (bp, 1);
  node->thunk.thunk_p = bp_unpack_value (bp, 1);
  node->parallelized_function = bp_unpack_value (bp, 1);
  node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
				     LDPR_NUM_KNOWN);
  node->instrumentation_clone = bp_unpack_value (bp, 1);
  node->split_part = bp_unpack_value (bp, 1);
  /* Cross-partition flags may only appear during LTRANS.  */
  gcc_assert (flag_ltrans
	      || (!node->in_other_partition
		  && !node->used_from_other_partition));
}
1210
1211 /* Return string alias is alias of. */
1212
1213 static tree
1214 get_alias_symbol (tree decl)
1215 {
1216 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1217 return get_identifier (TREE_STRING_POINTER
1218 (TREE_VALUE (TREE_VALUE (alias))));
1219 }
1220
/* Read a cgraph node from input_block IB.  TAG is the node's tag just read.
   NODES is the vector of previously read nodes, used to resolve the clone
   reference.  Return the node read or overwritten.

   NOTE(review): the read sequence below must mirror lto_output_node's
   write sequence exactly; do not reorder the streamer_read calls.  */

static struct cgraph_node *
input_node (struct lto_file_decl_data *file_data,
	    struct lto_input_block *ib,
	    enum LTO_symtab_tags tag,
	    vec<symtab_node *> nodes)
{
  gcc::pass_manager *passes = g->get_passes ();
  tree fn_decl;
  struct cgraph_node *node;
  struct bitpack_d bp;
  unsigned decl_index;
  int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
  int clone_ref;
  int order;
  int i, count;
  tree group;
  const char *section;
  /* Orders are streamed relative to ORDER_BASE so that multiple input
     files get disjoint order ranges.  */
  order = streamer_read_hwi (ib) + order_base;
  clone_ref = streamer_read_hwi (ib);

  decl_index = streamer_read_uhwi (ib);
  fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);

  if (clone_ref != LCC_NOT_FOUND)
    {
      /* This node is a clone of an earlier node in NODES; materialize it
	 as a clone rather than a fresh symbol.  */
      node = dyn_cast<cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
	0, CGRAPH_FREQ_BASE, false,
	vNULL, false, NULL, NULL);
    }
  else
    {
      /* Declaration of functions can be already merged with a declaration
	 from other input file.  We keep cgraph unmerged until after streaming
	 of ipa passes is done.  Always forcibly create a fresh node.  */
      node = symtab->create_empty ();
      node->decl = fn_decl;
      node->register_symbol ();
    }

  node->order = order;
  if (order >= symtab->order)
    symtab->order = order + 1;

  node->count = streamer_read_gcov_count (ib);
  node->count_materialization_scale = streamer_read_hwi (ib);

  /* Read the list of IPA transform passes still to be applied to this
     node's body, encoded as pass ids.  */
  count = streamer_read_hwi (ib);
  node->ipa_transforms_to_apply = vNULL;
  for (i = 0; i < count; i++)
    {
      opt_pass *pass;
      int pid = streamer_read_hwi (ib);

      gcc_assert (pid < passes->passes_by_id_size);
      pass = passes->passes_by_id[pid];
      node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass_d *) pass);
    }

  /* Only analyzed nodes carry an inlined_to reference.  */
  if (tag == LTO_symtab_analyzed_node)
    ref = streamer_read_hwi (ib);

  group = read_identifier (ib);
  if (group)
    ref2 = streamer_read_hwi (ib);

  /* Make sure that we have not read this node before.  Nodes that
     have already been read will have their tag stored in the 'aux'
     field.  Since built-in functions can be referenced in multiple
     functions, they are expected to be read more than once.  */
  if (node->aux && !DECL_BUILT_IN (node->decl))
    internal_error ("bytecode stream: found multiple instances of cgraph "
		    "node with uid %d", node->uid);

  node->tp_first_run = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);

  input_overwrite_node (file_data, node, tag, &bp);

  /* Store a reference for now, and fix up later to be a pointer.  */
  node->global.inlined_to = (cgraph_node *) (intptr_t) ref;

  if (group)
    {
      node->set_comdat_group (group);
      /* Store a reference for now, and fix up later to be a pointer.  */
      node->same_comdat_group = (symtab_node *) (intptr_t) ref2;
    }
  else
    node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
  section = read_string (ib);
  if (section)
    node->set_section_for_node (section);

  if (node->thunk.thunk_p)
    {
      /* TYPE is a small bitmask describing the thunk's shape.  */
      int type = streamer_read_uhwi (ib);
      HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
      HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);

      node->thunk.fixed_offset = fixed_offset;
      node->thunk.this_adjusting = (type & 2);
      node->thunk.virtual_value = virtual_value;
      node->thunk.virtual_offset_p = (type & 4);
      node->thunk.add_pointer_bounds_args = (type & 8);
    }
  /* Unanalyzed weakrefs keep their target as an identifier taken from the
     "alias" attribute until the target symbol is known.  */
  if (node->alias && !node->analyzed && node->weakref)
    node->alias_target = get_alias_symbol (node->decl);
  node->profile_id = streamer_read_hwi (ib);
  /* Constructor/destructor priorities are only streamed when present.  */
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    node->set_init_priority (streamer_read_hwi (ib));
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    node->set_fini_priority (streamer_read_hwi (ib));

  /* Pointer Bounds Checker instrumentation clones remember the decl they
     were cloned from.  */
  if (node->instrumentation_clone)
    {
      decl_index = streamer_read_uhwi (ib);
      fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
      node->orig_decl = fn_decl;
    }

  return node;
}
1347
1348 /* Read a node from input_block IB. TAG is the node's tag just read.
1349 Return the node read or overwriten. */
1350
1351 static varpool_node *
1352 input_varpool_node (struct lto_file_decl_data *file_data,
1353 struct lto_input_block *ib)
1354 {
1355 int decl_index;
1356 tree var_decl;
1357 varpool_node *node;
1358 struct bitpack_d bp;
1359 int ref = LCC_NOT_FOUND;
1360 int order;
1361 tree group;
1362 const char *section;
1363
1364 order = streamer_read_hwi (ib) + order_base;
1365 decl_index = streamer_read_uhwi (ib);
1366 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1367
1368 /* Declaration of functions can be already merged with a declaration
1369 from other input file. We keep cgraph unmerged until after streaming
1370 of ipa passes is done. Alays forcingly create a fresh node. */
1371 node = varpool_node::create_empty ();
1372 node->decl = var_decl;
1373 node->register_symbol ();
1374
1375 node->order = order;
1376 if (order >= symtab->order)
1377 symtab->order = order + 1;
1378 node->lto_file_data = file_data;
1379
1380 bp = streamer_read_bitpack (ib);
1381 node->externally_visible = bp_unpack_value (&bp, 1);
1382 node->no_reorder = bp_unpack_value (&bp, 1);
1383 node->force_output = bp_unpack_value (&bp, 1);
1384 node->forced_by_abi = bp_unpack_value (&bp, 1);
1385 node->unique_name = bp_unpack_value (&bp, 1);
1386 node->body_removed = bp_unpack_value (&bp, 1);
1387 node->implicit_section = bp_unpack_value (&bp, 1);
1388 node->writeonly = bp_unpack_value (&bp, 1);
1389 node->definition = bp_unpack_value (&bp, 1);
1390 node->alias = bp_unpack_value (&bp, 1);
1391 node->weakref = bp_unpack_value (&bp, 1);
1392 node->analyzed = bp_unpack_value (&bp, 1);
1393 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1394 node->in_other_partition = bp_unpack_value (&bp, 1);
1395 if (node->in_other_partition)
1396 {
1397 DECL_EXTERNAL (node->decl) = 1;
1398 TREE_STATIC (node->decl) = 0;
1399 }
1400 if (node->alias && !node->analyzed && node->weakref)
1401 node->alias_target = get_alias_symbol (node->decl);
1402 node->tls_model = (enum tls_model)bp_unpack_value (&bp, 3);
1403 node->used_by_single_function = (enum tls_model)bp_unpack_value (&bp, 1);
1404 node->need_bounds_init = bp_unpack_value (&bp, 1);
1405 group = read_identifier (ib);
1406 if (group)
1407 {
1408 node->set_comdat_group (group);
1409 ref = streamer_read_hwi (ib);
1410 /* Store a reference for now, and fix up later to be a pointer. */
1411 node->same_comdat_group = (symtab_node *) (intptr_t) ref;
1412 }
1413 else
1414 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1415 section = read_string (ib);
1416 if (section)
1417 node->set_section_for_node (section);
1418 node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1419 LDPR_NUM_KNOWN);
1420 gcc_assert (flag_ltrans
1421 || (!node->in_other_partition
1422 && !node->used_from_other_partition));
1423
1424 return node;
1425 }
1426
1427 /* Read a node from input_block IB. TAG is the node's tag just read.
1428 Return the node read or overwriten. */
1429
1430 static void
1431 input_ref (struct lto_input_block *ib,
1432 symtab_node *referring_node,
1433 vec<symtab_node *> nodes)
1434 {
1435 symtab_node *node = NULL;
1436 struct bitpack_d bp;
1437 enum ipa_ref_use use;
1438 bool speculative;
1439 struct ipa_ref *ref;
1440
1441 bp = streamer_read_bitpack (ib);
1442 use = (enum ipa_ref_use) bp_unpack_value (&bp, 3);
1443 speculative = (enum ipa_ref_use) bp_unpack_value (&bp, 1);
1444 node = nodes[streamer_read_hwi (ib)];
1445 ref = referring_node->create_reference (node, use);
1446 ref->speculative = speculative;
1447 if (is_a <cgraph_node *> (referring_node))
1448 ref->lto_stmt_uid = streamer_read_hwi (ib);
1449 }
1450
/* Read an edge from IB.  NODES points to a vector of previously read nodes for
   decoding caller and callee of the edge to be read.  If INDIRECT is true, the
   edge being read is indirect (in the sense that it has
   indirect_unknown_callee set).

   NOTE(review): the read/unpack order below must mirror
   lto_output_edge's write order exactly; do not reorder.  */

static void
input_edge (struct lto_input_block *ib, vec<symtab_node *> nodes,
	    bool indirect)
{
  struct cgraph_node *caller, *callee;
  struct cgraph_edge *edge;
  unsigned int stmt_id;
  gcov_type count;
  int freq;
  cgraph_inline_failed_t inline_failed;
  struct bitpack_d bp;
  int ecf_flags = 0;

  /* The caller is streamed as an index into NODES.  */
  caller = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
  if (caller == NULL || caller->decl == NULL_TREE)
    internal_error ("bytecode stream: no caller found while reading edge");

  if (!indirect)
    {
      /* Direct edges also stream a callee index.  */
      callee = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
      if (callee == NULL || callee->decl == NULL_TREE)
	internal_error ("bytecode stream: no callee found while reading edge");
    }
  else
    callee = NULL;

  count = streamer_read_gcov_count (ib);

  bp = streamer_read_bitpack (ib);
  inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
  stmt_id = bp_unpack_var_len_unsigned (&bp);
  freq = (int) bp_unpack_var_len_unsigned (&bp);

  if (indirect)
    edge = caller->create_indirect_edge (NULL, 0, count, freq);
  else
    edge = caller->create_edge (callee, NULL, count, freq);

  /* Continue unpacking the remaining flag bits from the same bitpack.  */
  edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
  edge->speculative = bp_unpack_value (&bp, 1);
  edge->lto_stmt_uid = stmt_id;
  edge->inline_failed = inline_failed;
  edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
  edge->can_throw_external = bp_unpack_value (&bp, 1);
  edge->in_polymorphic_cdtor = bp_unpack_value (&bp, 1);
  if (indirect)
    {
      /* Indirect edges additionally carry the ECF flags of the call and
	 an optional profile-feedback common target.  */
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_CONST;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_PURE;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NORETURN;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_MALLOC;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NOTHROW;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_RETURNS_TWICE;
      edge->indirect_info->ecf_flags = ecf_flags;
      edge->indirect_info->common_target_id = streamer_read_hwi (ib);
      /* The probability is only streamed when a common target exists.  */
      if (edge->indirect_info->common_target_id)
	edge->indirect_info->common_target_probability = streamer_read_hwi (ib);
    }
}
1521
1522
/* Read a cgraph from IB using the info in FILE_DATA.  Returns the vector
   of symtab nodes read, in stream order; inlined_to and same_comdat_group
   references are fixed up from indices to pointers before returning.  */

static vec<symtab_node *>
input_cgraph_1 (struct lto_file_decl_data *file_data,
		struct lto_input_block *ib)
{
  enum LTO_symtab_tags tag;
  vec<symtab_node *> nodes = vNULL;
  symtab_node *node;
  unsigned i;

  tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
  order_base = symtab->order;
  /* The section is a sequence of tagged records terminated by a zero
     tag.  Edge records refer back into NODES, so nodes always precede
     the edges that mention them.  */
  while (tag)
    {
      if (tag == LTO_symtab_edge)
	input_edge (ib, nodes, false);
      else if (tag == LTO_symtab_indirect_edge)
	input_edge (ib, nodes, true);
      else if (tag == LTO_symtab_variable)
	{
	  node = input_varpool_node (file_data, ib);
	  nodes.safe_push (node);
	  lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
	}
      else
	{
	  /* Any other tag denotes a function node.  */
	  node = input_node (file_data, ib, tag, nodes);
	  if (node == NULL || node->decl == NULL_TREE)
	    internal_error ("bytecode stream: found empty cgraph node");
	  nodes.safe_push (node);
	  lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
	}

      tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
    }

  lto_input_toplevel_asms (file_data, order_base);

  /* AUX pointers should be all non-zero for function nodes read from the stream.  */
  if (flag_checking)
    {
      FOR_EACH_VEC_ELT (nodes, i, node)
	gcc_assert (node->aux || !is_a <cgraph_node *> (node));
    }
  /* Second pass: turn the index-valued references stored by input_node /
     input_varpool_node into real pointers, and clear AUX.  */
  FOR_EACH_VEC_ELT (nodes, i, node)
    {
      int ref;
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  ref = (int) (intptr_t) cnode->global.inlined_to;

	  /* We share declaration of builtins, so we may read same node twice.  */
	  if (!node->aux)
	    continue;
	  node->aux = NULL;

	  /* Fixup inlined_to from reference to pointer.  */
	  if (ref != LCC_NOT_FOUND)
	    dyn_cast<cgraph_node *> (node)->global.inlined_to
	      = dyn_cast<cgraph_node *> (nodes[ref]);
	  else
	    cnode->global.inlined_to = NULL;

	  /* Compute instrumented_version.  */
	  if (cnode->instrumentation_clone)
	    {
	      gcc_assert (cnode->orig_decl);

	      cnode->instrumented_version = cgraph_node::get (cnode->orig_decl);
	      if (cnode->instrumented_version)
		{
		  /* We may have multiple nodes for a single function which
		     will be merged later.  To have a proper merge we need
		     to keep instrumentation_version reference between nodes
		     consistent: each instrumented_version reference should
		     have proper reverse reference.  Thus don't break existing
		     instrumented_version reference if it already exists.  */
		  if (cnode->instrumented_version->instrumented_version)
		    cnode->instrumented_version = NULL;
		  else
		    cnode->instrumented_version->instrumented_version = cnode;
		}

	      /* Restore decl names reference except for wrapper functions.  */
	      if (!chkp_wrap_function (cnode->orig_decl))
		{
		  tree name = DECL_ASSEMBLER_NAME (cnode->decl);
		  IDENTIFIER_TRANSPARENT_ALIAS (name) = 1;
		  TREE_CHAIN (name) = DECL_ASSEMBLER_NAME (cnode->orig_decl);
		}
	    }
	}

      ref = (int) (intptr_t) node->same_comdat_group;

      /* Fixup same_comdat_group from reference to pointer.  */
      if (ref != LCC_NOT_FOUND)
	node->same_comdat_group = nodes[ref];
      else
	node->same_comdat_group = NULL;
    }
  /* Re-seed AUX: function nodes get a non-NULL marker consumed by the
     caller (see input_symtab).  */
  FOR_EACH_VEC_ELT (nodes, i, node)
    node->aux = is_a <cgraph_node *> (node) ? (void *)1 : NULL;
  return nodes;
}
1629
1630 /* Input ipa_refs. */
1631
1632 static void
1633 input_refs (struct lto_input_block *ib,
1634 vec<symtab_node *> nodes)
1635 {
1636 int count;
1637 int idx;
1638 while (true)
1639 {
1640 symtab_node *node;
1641 count = streamer_read_uhwi (ib);
1642 if (!count)
1643 break;
1644 idx = streamer_read_uhwi (ib);
1645 node = nodes[idx];
1646 while (count)
1647 {
1648 input_ref (ib, node, nodes);
1649 count--;
1650 }
1651 }
1652 }
1653
1654
1655 static struct gcov_ctr_summary lto_gcov_summary;
1656
/* Input profile_info from IB into FILE_DATA's profile summary.  The read
   order must mirror output_profile_summary's write order.  */
static void
input_profile_summary (struct lto_input_block *ib,
		       struct lto_file_decl_data *file_data)
{
  unsigned h_ix;
  struct bitpack_d bp;
  unsigned int runs = streamer_read_uhwi (ib);
  /* A zero run count means this file carries no profile data; nothing
     else was streamed.  */
  if (runs)
    {
      file_data->profile_info.runs = runs;
      file_data->profile_info.sum_max = streamer_read_gcov_count (ib);
      file_data->profile_info.sum_all = streamer_read_gcov_count (ib);

      memset (file_data->profile_info.histogram, 0,
	      sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
      /* Input the bitpack of non-zero histogram indices.  */
      bp = streamer_read_bitpack (ib);
      /* Read in and unpack the full bitpack, flagging non-zero
	 histogram entries by setting the num_counters non-zero.  */
      for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
	{
	  file_data->profile_info.histogram[h_ix].num_counters
	    = bp_unpack_value (&bp, 1);
	}
      /* Counter values are only streamed for the buckets flagged
	 non-zero above.  */
      for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
	{
	  if (!file_data->profile_info.histogram[h_ix].num_counters)
	    continue;

	  file_data->profile_info.histogram[h_ix].num_counters
	    = streamer_read_gcov_count (ib);
	  file_data->profile_info.histogram[h_ix].min_value
	    = streamer_read_gcov_count (ib);
	  file_data->profile_info.histogram[h_ix].cum_value
	    = streamer_read_gcov_count (ib);
	}
      /* IPA-profile computes hot bb threshold based on cumulated
	 whole program profile.  We need to stream it down to ltrans.  */
      if (flag_ltrans)
	set_hot_bb_threshold (streamer_read_gcov_count (ib));
    }

}
1701
1702 /* Rescale profile summaries to the same number of runs in the whole unit. */
1703
1704 static void
1705 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1706 {
1707 struct lto_file_decl_data *file_data;
1708 unsigned int j, h_ix;
1709 gcov_unsigned_t max_runs = 0;
1710 struct cgraph_node *node;
1711 struct cgraph_edge *edge;
1712 gcov_type saved_sum_all = 0;
1713 gcov_ctr_summary *saved_profile_info = 0;
1714 int saved_scale = 0;
1715
1716 /* Find unit with maximal number of runs. If we ever get serious about
1717 roundoff errors, we might also consider computing smallest common
1718 multiply. */
1719 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1720 if (max_runs < file_data->profile_info.runs)
1721 max_runs = file_data->profile_info.runs;
1722
1723 if (!max_runs)
1724 return;
1725
1726 /* Simple overflow check. We probably don't need to support that many train
1727 runs. Such a large value probably imply data corruption anyway. */
1728 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1729 {
1730 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1731 INT_MAX / REG_BR_PROB_BASE);
1732 return;
1733 }
1734
1735 profile_info = &lto_gcov_summary;
1736 lto_gcov_summary.runs = max_runs;
1737 lto_gcov_summary.sum_max = 0;
1738 memset (lto_gcov_summary.histogram, 0,
1739 sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
1740
1741 /* Rescale all units to the maximal number of runs.
1742 sum_max can not be easily merged, as we have no idea what files come from
1743 the same run. We do not use the info anyway, so leave it 0. */
1744 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1745 if (file_data->profile_info.runs)
1746 {
1747 int scale = GCOV_COMPUTE_SCALE (max_runs,
1748 file_data->profile_info.runs);
1749 lto_gcov_summary.sum_max
1750 = MAX (lto_gcov_summary.sum_max,
1751 apply_scale (file_data->profile_info.sum_max, scale));
1752 lto_gcov_summary.sum_all
1753 = MAX (lto_gcov_summary.sum_all,
1754 apply_scale (file_data->profile_info.sum_all, scale));
1755 /* Save a pointer to the profile_info with the largest
1756 scaled sum_all and the scale for use in merging the
1757 histogram. */
1758 if (!saved_profile_info
1759 || lto_gcov_summary.sum_all > saved_sum_all)
1760 {
1761 saved_profile_info = &file_data->profile_info;
1762 saved_sum_all = lto_gcov_summary.sum_all;
1763 saved_scale = scale;
1764 }
1765 }
1766
1767 gcc_assert (saved_profile_info);
1768
1769 /* Scale up the histogram from the profile that had the largest
1770 scaled sum_all above. */
1771 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1772 {
1773 /* Scale up the min value as we did the corresponding sum_all
1774 above. Use that to find the new histogram index. */
1775 gcov_type scaled_min
1776 = apply_scale (saved_profile_info->histogram[h_ix].min_value,
1777 saved_scale);
1778 /* The new index may be shared with another scaled histogram entry,
1779 so we need to account for a non-zero histogram entry at new_ix. */
1780 unsigned new_ix = gcov_histo_index (scaled_min);
1781 lto_gcov_summary.histogram[new_ix].min_value
1782 = (lto_gcov_summary.histogram[new_ix].num_counters
1783 ? MIN (lto_gcov_summary.histogram[new_ix].min_value, scaled_min)
1784 : scaled_min);
1785 /* Some of the scaled counter values would ostensibly need to be placed
1786 into different (larger) histogram buckets, but we keep things simple
1787 here and place the scaled cumulative counter value in the bucket
1788 corresponding to the scaled minimum counter value. */
1789 lto_gcov_summary.histogram[new_ix].cum_value
1790 += apply_scale (saved_profile_info->histogram[h_ix].cum_value,
1791 saved_scale);
1792 lto_gcov_summary.histogram[new_ix].num_counters
1793 += saved_profile_info->histogram[h_ix].num_counters;
1794 }
1795
1796 /* Watch roundoff errors. */
1797 if (lto_gcov_summary.sum_max < max_runs)
1798 lto_gcov_summary.sum_max = max_runs;
1799
1800 /* If merging already happent at WPA time, we are done. */
1801 if (flag_ltrans)
1802 return;
1803
1804 /* Now compute count_materialization_scale of each node.
1805 During LTRANS we already have values of count_materialization_scale
1806 computed, so just update them. */
1807 FOR_EACH_FUNCTION (node)
1808 if (node->lto_file_data
1809 && node->lto_file_data->profile_info.runs)
1810 {
1811 int scale;
1812
1813 scale = RDIV (node->count_materialization_scale * max_runs,
1814 node->lto_file_data->profile_info.runs);
1815 node->count_materialization_scale = scale;
1816 if (scale < 0)
1817 fatal_error (input_location, "Profile information in %s corrupted",
1818 file_data->file_name);
1819
1820 if (scale == REG_BR_PROB_BASE)
1821 continue;
1822 for (edge = node->callees; edge; edge = edge->next_callee)
1823 edge->count = apply_scale (edge->count, scale);
1824 node->count = apply_scale (node->count, scale);
1825 }
1826 }
1827
/* Input and merge the symtab from each of the .o files passed to
   lto1.  */

void
input_symtab (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;
  struct cgraph_node *node;

  /* Read the symbol table and reference lists of every input file.  */
  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib;
      vec<symtab_node *> nodes;

      /* The symtab section is mandatory; its absence means a broken
	 object file.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
					  &data, &len);
      if (!ib)
	fatal_error (input_location,
		     "cannot find LTO cgraph in %s", file_data->file_name);
      input_profile_summary (ib, file_data);
      file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
      nodes = input_cgraph_1 (file_data, ib);
      lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
				      ib, data, len);

      /* The IPA reference lists live in a separate section and refer to
	 the node indices just read.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_refs,
					  &data, &len);
      if (!ib)
	fatal_error (input_location, "cannot find LTO section refs in %s",
		     file_data->file_name);
      input_refs (ib, nodes);
      lto_destroy_simple_input_block (file_data, LTO_section_refs,
				      ib, data, len);
      if (flag_ltrans)
	input_cgraph_opt_summary (nodes);
      nodes.release ();
    }

  merge_profile_summaries (file_data_vec);
  get_working_sets ();


  /* Clear out the aux field that was used to store enough state to
     tell which nodes should be overwritten.  */
  FOR_EACH_FUNCTION (node)
    {
      /* Some nodes may have been created by cgraph_node.  This
	 happens when the callgraph contains nested functions.  If the
	 node for the parent function was never emitted to the gimple
	 file, cgraph_node will create a node for it when setting the
	 context of the nested function.  */
      if (node->lto_file_data)
	node->aux = NULL;
    }
}
1887
1888 /* Input function/variable tables that will allow libgomp to look up offload
1889 target code, and store them into OFFLOAD_FUNCS and OFFLOAD_VARS. */
1890
1891 void
1892 input_offload_tables (void)
1893 {
1894 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1895 struct lto_file_decl_data *file_data;
1896 unsigned int j = 0;
1897
1898 while ((file_data = file_data_vec[j++]))
1899 {
1900 const char *data;
1901 size_t len;
1902 struct lto_input_block *ib
1903 = lto_create_simple_input_block (file_data, LTO_section_offload_table,
1904 &data, &len);
1905 if (!ib)
1906 continue;
1907
1908 enum LTO_symtab_tags tag
1909 = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1910 while (tag)
1911 {
1912 if (tag == LTO_symtab_unavail_node)
1913 {
1914 int decl_index = streamer_read_uhwi (ib);
1915 tree fn_decl
1916 = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1917 vec_safe_push (offload_funcs, fn_decl);
1918 }
1919 else if (tag == LTO_symtab_variable)
1920 {
1921 int decl_index = streamer_read_uhwi (ib);
1922 tree var_decl
1923 = lto_file_decl_data_get_var_decl (file_data, decl_index);
1924 vec_safe_push (offload_vars, var_decl);
1925 }
1926 else
1927 fatal_error (input_location,
1928 "invalid offload table in %s", file_data->file_name);
1929
1930 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1931 }
1932
1933 lto_destroy_simple_input_block (file_data, LTO_section_offload_table,
1934 ib, data, len);
1935 }
1936 }
1937
1938 /* True when we need optimization summary for NODE. */
1939
1940 static int
1941 output_cgraph_opt_summary_p (struct cgraph_node *node)
1942 {
1943 return (node->clone_of
1944 && (node->clone.tree_map
1945 || node->clone.args_to_skip
1946 || node->clone.combined_args_to_skip));
1947 }
1948
/* Output optimization summary for EDGE to OB.  Currently nothing is
   streamed per edge; the function is kept as a placeholder so the
   output walk in output_node_opt_summary mirrors the input walk in
   input_node_opt_summary.  */
static void
output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
			 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
{
}
1955
1956 /* Output optimization summary for NODE to OB. */
1957
1958 static void
1959 output_node_opt_summary (struct output_block *ob,
1960 struct cgraph_node *node,
1961 lto_symtab_encoder_t encoder)
1962 {
1963 unsigned int index;
1964 bitmap_iterator bi;
1965 struct ipa_replace_map *map;
1966 struct bitpack_d bp;
1967 int i;
1968 struct cgraph_edge *e;
1969
1970 if (node->clone.args_to_skip)
1971 {
1972 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
1973 EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1974 streamer_write_uhwi (ob, index);
1975 }
1976 else
1977 streamer_write_uhwi (ob, 0);
1978 if (node->clone.combined_args_to_skip)
1979 {
1980 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
1981 EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
1982 streamer_write_uhwi (ob, index);
1983 }
1984 else
1985 streamer_write_uhwi (ob, 0);
1986 streamer_write_uhwi (ob, vec_safe_length (node->clone.tree_map));
1987 FOR_EACH_VEC_SAFE_ELT (node->clone.tree_map, i, map)
1988 {
1989 /* At the moment we assume all old trees to be PARM_DECLs, because we have no
1990 mechanism to store function local declarations into summaries. */
1991 gcc_assert (!map->old_tree);
1992 streamer_write_uhwi (ob, map->parm_num);
1993 gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
1994 stream_write_tree (ob, map->new_tree, true);
1995 bp = bitpack_create (ob->main_stream);
1996 bp_pack_value (&bp, map->replace_p, 1);
1997 bp_pack_value (&bp, map->ref_p, 1);
1998 streamer_write_bitpack (&bp);
1999 }
2000
2001 if (lto_symtab_encoder_in_partition_p (encoder, node))
2002 {
2003 for (e = node->callees; e; e = e->next_callee)
2004 output_edge_opt_summary (ob, e);
2005 for (e = node->indirect_calls; e; e = e->next_callee)
2006 output_edge_opt_summary (ob, e);
2007 }
2008 }
2009
2010 /* Output optimization summaries stored in callgraph.
2011 At the moment it is the clone info structure. */
2012
2013 static void
2014 output_cgraph_opt_summary (void)
2015 {
2016 int i, n_nodes;
2017 lto_symtab_encoder_t encoder;
2018 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
2019 unsigned count = 0;
2020
2021 ob->symbol = NULL;
2022 encoder = ob->decl_state->symtab_node_encoder;
2023 n_nodes = lto_symtab_encoder_size (encoder);
2024 for (i = 0; i < n_nodes; i++)
2025 {
2026 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
2027 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
2028 if (cnode && output_cgraph_opt_summary_p (cnode))
2029 count++;
2030 }
2031 streamer_write_uhwi (ob, count);
2032 for (i = 0; i < n_nodes; i++)
2033 {
2034 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
2035 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
2036 if (cnode && output_cgraph_opt_summary_p (cnode))
2037 {
2038 streamer_write_uhwi (ob, i);
2039 output_node_opt_summary (ob, cnode, encoder);
2040 }
2041 }
2042 produce_asm (ob, NULL);
2043 destroy_output_block (ob);
2044 }
2045
/* Input optimisation summary of EDGE.  Nothing is currently streamed
   per edge (see output_edge_opt_summary); this placeholder keeps the
   read walk symmetric with the write walk.  */

static void
input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
			struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
{
}
2053
2054 /* Input optimisation summary of NODE. */
2055
2056 static void
2057 input_node_opt_summary (struct cgraph_node *node,
2058 struct lto_input_block *ib_main,
2059 struct data_in *data_in)
2060 {
2061 int i;
2062 int count;
2063 int bit;
2064 struct bitpack_d bp;
2065 struct cgraph_edge *e;
2066
2067 count = streamer_read_uhwi (ib_main);
2068 if (count)
2069 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
2070 for (i = 0; i < count; i++)
2071 {
2072 bit = streamer_read_uhwi (ib_main);
2073 bitmap_set_bit (node->clone.args_to_skip, bit);
2074 }
2075 count = streamer_read_uhwi (ib_main);
2076 if (count)
2077 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
2078 for (i = 0; i < count; i++)
2079 {
2080 bit = streamer_read_uhwi (ib_main);
2081 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
2082 }
2083 count = streamer_read_uhwi (ib_main);
2084 for (i = 0; i < count; i++)
2085 {
2086 struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
2087
2088 vec_safe_push (node->clone.tree_map, map);
2089 map->parm_num = streamer_read_uhwi (ib_main);
2090 map->old_tree = NULL;
2091 map->new_tree = stream_read_tree (ib_main, data_in);
2092 bp = streamer_read_bitpack (ib_main);
2093 map->replace_p = bp_unpack_value (&bp, 1);
2094 map->ref_p = bp_unpack_value (&bp, 1);
2095 }
2096 for (e = node->callees; e; e = e->next_callee)
2097 input_edge_opt_summary (e, ib_main);
2098 for (e = node->indirect_calls; e; e = e->next_callee)
2099 input_edge_opt_summary (e, ib_main);
2100 }
2101
2102 /* Read section in file FILE_DATA of length LEN with data DATA. */
2103
2104 static void
2105 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
2106 const char *data, size_t len,
2107 vec<symtab_node *> nodes)
2108 {
2109 const struct lto_function_header *header =
2110 (const struct lto_function_header *) data;
2111 const int cfg_offset = sizeof (struct lto_function_header);
2112 const int main_offset = cfg_offset + header->cfg_size;
2113 const int string_offset = main_offset + header->main_size;
2114 struct data_in *data_in;
2115 unsigned int i;
2116 unsigned int count;
2117
2118 lto_input_block ib_main ((const char *) data + main_offset,
2119 header->main_size, file_data->mode_table);
2120
2121 data_in =
2122 lto_data_in_create (file_data, (const char *) data + string_offset,
2123 header->string_size, vNULL);
2124 count = streamer_read_uhwi (&ib_main);
2125
2126 for (i = 0; i < count; i++)
2127 {
2128 int ref = streamer_read_uhwi (&ib_main);
2129 input_node_opt_summary (dyn_cast<cgraph_node *> (nodes[ref]),
2130 &ib_main, data_in);
2131 }
2132 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
2133 len);
2134 lto_data_in_delete (data_in);
2135 }
2136
2137 /* Input optimization summary of cgraph. */
2138
2139 static void
2140 input_cgraph_opt_summary (vec<symtab_node *> nodes)
2141 {
2142 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2143 struct lto_file_decl_data *file_data;
2144 unsigned int j = 0;
2145
2146 while ((file_data = file_data_vec[j++]))
2147 {
2148 size_t len;
2149 const char *data =
2150 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
2151 &len);
2152
2153 if (data)
2154 input_cgraph_opt_section (file_data, data, len, nodes);
2155 }
2156 }