]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/lto-cgraph.c
sh.c: Do not include algorithm.
[thirdparty/gcc.git] / gcc / lto-cgraph.c
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright (C) 2009-2014 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "stringpool.h"
29 #include "predict.h"
30 #include "vec.h"
31 #include "hashtab.h"
32 #include "hash-set.h"
33 #include "machmode.h"
34 #include "hard-reg-set.h"
35 #include "input.h"
36 #include "function.h"
37 #include "basic-block.h"
38 #include "tree-ssa-alias.h"
39 #include "internal-fn.h"
40 #include "gimple-expr.h"
41 #include "is-a.h"
42 #include "gimple.h"
43 #include "expr.h"
44 #include "flags.h"
45 #include "params.h"
46 #include "langhooks.h"
47 #include "bitmap.h"
48 #include "diagnostic-core.h"
49 #include "except.h"
50 #include "timevar.h"
51 #include "hash-map.h"
52 #include "plugin-api.h"
53 #include "ipa-ref.h"
54 #include "cgraph.h"
55 #include "lto-streamer.h"
56 #include "data-streamer.h"
57 #include "tree-streamer.h"
58 #include "gcov-io.h"
59 #include "tree-pass.h"
60 #include "profile.h"
61 #include "context.h"
62 #include "pass_manager.h"
63 #include "ipa-utils.h"
64 #include "omp-low.h"
65
/* True when asm nodes have been output.  */
67 bool asm_nodes_output = false;
68
69 static void output_cgraph_opt_summary (void);
70 static void input_cgraph_opt_summary (vec<symtab_node *> nodes);
71
72 /* Number of LDPR values known to GCC. */
73 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
74
/* All node orders are offset by ORDER_BASE.  */
76 static int order_base;
77
/* Cgraph streaming is organized as a set of records whose type
   is indicated by a tag.  */
enum LTO_symtab_tags
{
  /* Must leave 0 for the stopper.  */

  /* Cgraph node without body available.  */
  LTO_symtab_unavail_node = 1,
  /* Cgraph node with function body.  */
  LTO_symtab_analyzed_node,
  /* Cgraph edge with a known callee.  */
  LTO_symtab_edge,
  /* Cgraph edge whose callee is unknown (indirect call).  */
  LTO_symtab_indirect_edge,
  /* Varpool (variable) node.  */
  LTO_symtab_variable,
  /* One past the last valid tag; used for enum range checks when
     streaming.  */
  LTO_symtab_last_tag
};
94
95 /* Create a new symtab encoder.
96 if FOR_INPUT, the encoder allocate only datastructures needed
97 to read the symtab. */
98
99 lto_symtab_encoder_t
100 lto_symtab_encoder_new (bool for_input)
101 {
102 lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
103
104 if (!for_input)
105 encoder->map = new hash_map<symtab_node *, size_t>;
106 encoder->nodes.create (0);
107 return encoder;
108 }
109
110
111 /* Delete ENCODER and its components. */
112
113 void
114 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
115 {
116 encoder->nodes.release ();
117 if (encoder->map)
118 delete encoder->map;
119 free (encoder);
120 }
121
122
/* Return the existing reference number of NODE in the symtab encoder in
   output block OB.  Assign a new reference if this is the first time
   NODE is encoded.  */

int
lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
			   symtab_node *node)
{
  int ref;

  /* Input-only encoders carry no map; every call simply appends a new
     entry to the node vector.  */
  if (!encoder->map)
    {
      lto_encoder_entry entry = {node, false, false, false};

      ref = encoder->nodes.length ();
      encoder->nodes.safe_push (entry);
      return ref;
    }

  size_t *slot = encoder->map->get (node);
  /* Map values are stored biased by 1 so that the value 0 means
     "not present".  */
  if (!slot || !*slot)
    {
      lto_encoder_entry entry = {node, false, false, false};
      ref = encoder->nodes.length ();
      if (!slot)
	encoder->map->put (node, ref + 1);
      encoder->nodes.safe_push (entry);
    }
  else
    /* Undo the +1 bias to recover the vector index.  */
    ref = *slot - 1;

  return ref;
}
156
/* Remove NODE from encoder.  Return true if NODE was present.  */

bool
lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
				symtab_node *node)
{
  int index;
  lto_encoder_entry last_node;

  size_t *slot = encoder->map->get (node);
  if (slot == NULL || !*slot)
    return false;

  /* Map values are biased by 1; see lto_symtab_encoder_encode.  */
  index = *slot - 1;
  gcc_checking_assert (encoder->nodes[index].node == node);

  /* Remove from vector.  We do this by swapping node with the last element
     of the vector.  */
  last_node = encoder->nodes.pop ();
  if (last_node.node != node)
    {
      /* hash_map::put returns true when the key already existed, which
	 must hold for a node that is in the vector.  Note the call is
	 inside gcc_assert, which always evaluates its argument.  */
      gcc_assert (encoder->map->put (last_node.node, index + 1));

      /* Move the last element to the original spot of NODE.  */
      encoder->nodes[index] = last_node;
    }

  /* Remove element from hash table.  */
  encoder->map->remove (node);
  return true;
}
188
189
190 /* Return TRUE if we should encode initializer of NODE (if any). */
191
192 bool
193 lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
194 struct cgraph_node *node)
195 {
196 int index = lto_symtab_encoder_lookup (encoder, node);
197 return encoder->nodes[index].body;
198 }
199
200 /* Return TRUE if we should encode body of NODE (if any). */
201
202 static void
203 lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
204 struct cgraph_node *node)
205 {
206 int index = lto_symtab_encoder_encode (encoder, node);
207 gcc_checking_assert (encoder->nodes[index].node == node);
208 encoder->nodes[index].body = true;
209 }
210
211 /* Return TRUE if we should encode initializer of NODE (if any). */
212
213 bool
214 lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
215 varpool_node *node)
216 {
217 int index = lto_symtab_encoder_lookup (encoder, node);
218 if (index == LCC_NOT_FOUND)
219 return false;
220 return encoder->nodes[index].initializer;
221 }
222
223 /* Return TRUE if we should encode initializer of NODE (if any). */
224
225 static void
226 lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
227 varpool_node *node)
228 {
229 int index = lto_symtab_encoder_lookup (encoder, node);
230 encoder->nodes[index].initializer = true;
231 }
232
233 /* Return TRUE if we should encode initializer of NODE (if any). */
234
235 bool
236 lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
237 symtab_node *node)
238 {
239 int index = lto_symtab_encoder_lookup (encoder, node);
240 if (index == LCC_NOT_FOUND)
241 return false;
242 return encoder->nodes[index].in_partition;
243 }
244
245 /* Return TRUE if we should encode body of NODE (if any). */
246
247 void
248 lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
249 symtab_node *node)
250 {
251 int index = lto_symtab_encoder_encode (encoder, node);
252 encoder->nodes[index].in_partition = true;
253 }
254
/* Output the cgraph EDGE to OB using ENCODER.  The record consists of
   a tag, the caller (and for direct edges the callee) reference, the
   execution count, a bitpack of edge flags, and for indirect edges the
   common target info.  The reader must consume fields in exactly this
   order.  */

static void
lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
		 lto_symtab_encoder_t encoder)
{
  unsigned int uid;
  intptr_t ref;
  struct bitpack_d bp;

  /* Tag distinguishes direct from indirect edges.  */
  if (edge->indirect_unknown_callee)
    streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
			 LTO_symtab_indirect_edge);
  else
    streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
			 LTO_symtab_edge);

  ref = lto_symtab_encoder_lookup (encoder, edge->caller);
  gcc_assert (ref != LCC_NOT_FOUND);
  streamer_write_hwi_stream (ob->main_stream, ref);

  /* Only direct edges have a known callee to reference.  */
  if (!edge->indirect_unknown_callee)
    {
      ref = lto_symtab_encoder_lookup (encoder, edge->callee);
      gcc_assert (ref != LCC_NOT_FOUND);
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  streamer_write_gcov_count_stream (ob->main_stream, edge->count);

  bp = bitpack_create (ob->main_stream);
  /* When the caller has no gimple body, the stored lto_stmt_uid is
     used; otherwise the uid is derived from the call statement,
     biased by 1.  */
  uid = (!gimple_has_body_p (edge->caller->decl)
	 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt) + 1);
  bp_pack_enum (&bp, cgraph_inline_failed_t,
		CIF_N_REASONS, edge->inline_failed);
  bp_pack_var_len_unsigned (&bp, uid);
  bp_pack_var_len_unsigned (&bp, edge->frequency);
  bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
  bp_pack_value (&bp, edge->speculative, 1);
  bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
  bp_pack_value (&bp, edge->can_throw_external, 1);
  bp_pack_value (&bp, edge->in_polymorphic_cdtor, 1);
  if (edge->indirect_unknown_callee)
    {
      /* Pack the ECF flags that make sense for an indirect call as
	 individual bits.  */
      int flags = edge->indirect_info->ecf_flags;
      bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
      /* Flags that should not appear on indirect calls.  */
      gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
			     | ECF_MAY_BE_ALLOCA
			     | ECF_SIBCALL
			     | ECF_LEAF
			     | ECF_NOVOPS)));
    }
  streamer_write_bitpack (&bp);
  if (edge->indirect_unknown_callee)
    {
      streamer_write_hwi_stream (ob->main_stream,
				 edge->indirect_info->common_target_id);
      /* The probability is only meaningful (and streamed) when a
	 common target was identified.  */
      if (edge->indirect_info->common_target_id)
	streamer_write_hwi_stream
	  (ob->main_stream, edge->indirect_info->common_target_probability);
    }
}
323
324 /* Return if NODE contain references from other partitions. */
325
326 bool
327 referenced_from_other_partition_p (symtab_node *node, lto_symtab_encoder_t encoder)
328 {
329 int i;
330 struct ipa_ref *ref = NULL;
331
332 for (i = 0; node->iterate_referring (i, ref); i++)
333 {
334 /* Ignore references from non-offloadable nodes while streaming NODE into
335 offload LTO section. */
336 if (!ref->referring->need_lto_streaming)
337 continue;
338
339 if (ref->referring->in_other_partition
340 || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
341 return true;
342 }
343 return false;
344 }
345
346 /* Return true when node is reachable from other partition. */
347
348 bool
349 reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
350 {
351 struct cgraph_edge *e;
352 if (!node->definition)
353 return false;
354 if (node->global.inlined_to)
355 return false;
356 for (e = node->callers; e; e = e->next_caller)
357 {
358 /* Ignore references from non-offloadable nodes while streaming NODE into
359 offload LTO section. */
360 if (!e->caller->need_lto_streaming)
361 continue;
362
363 if (e->caller->in_other_partition
364 || !lto_symtab_encoder_in_partition_p (encoder, e->caller))
365 return true;
366 }
367 return false;
368 }
369
370 /* Return if NODE contain references from other partitions. */
371
372 bool
373 referenced_from_this_partition_p (symtab_node *node,
374 lto_symtab_encoder_t encoder)
375 {
376 int i;
377 struct ipa_ref *ref = NULL;
378
379 for (i = 0; node->iterate_referring (i, ref); i++)
380 if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
381 return true;
382 return false;
383 }
384
385 /* Return true when node is reachable from other partition. */
386
387 bool
388 reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
389 {
390 struct cgraph_edge *e;
391 for (e = node->callers; e; e = e->next_caller)
392 if (lto_symtab_encoder_in_partition_p (encoder, e->caller))
393 return true;
394 return false;
395 }
396
/* Output the cgraph NODE to OB.  ENCODER is used to look up the
   reference numbers of NODE->inlined_to, the clone origin and the
   comdat group.  If NODE is not in the partition described by ENCODER,
   it is a boundary node: we pretend it just has a decl and no body.
   The reader must consume the record fields in exactly the order they
   are written here.  */

static void
lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
		 lto_symtab_encoder_t encoder)
{
  unsigned int tag;
  struct bitpack_d bp;
  bool boundary_p;
  intptr_t ref;
  bool in_other_partition = false;
  struct cgraph_node *clone_of, *ultimate_clone_of;
  ipa_opt_pass_d *pass;
  int i;
  bool alias_p;
  const char *comdat;
  const char *section;
  tree group;

  boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);

  /* Boundary nodes are streamed without a body even when analyzed.  */
  if (node->analyzed && !boundary_p)
    tag = LTO_symtab_analyzed_node;
  else
    tag = LTO_symtab_unavail_node;

  streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
		       tag);
  streamer_write_hwi_stream (ob->main_stream, node->order);

  /* In WPA mode, we only output part of the call-graph.  Also, we
     fake cgraph node attributes.  There are two cases that we care.

     Boundary nodes: There are nodes that are not part of SET but are
     called from within SET.  We artificially make them look like
     externally visible nodes with no function body.

     Cherry-picked nodes: These are nodes we pulled from other
     translation units into SET during IPA-inlining.  We make them as
     local static nodes to prevent clashes with other local statics.  */
  if (boundary_p && node->analyzed
      && node->get_partitioning_class () == SYMBOL_PARTITION)
    {
      /* Inline clones can not be part of boundary.
	 gcc_assert (!node->global.inlined_to);

	 FIXME: At the moment they can be, when partition contains an inline
	 clone that is clone of inline clone from outside partition.  We can
	 reshape the clone tree and make other tree to be the root, but it
	 needs a bit extra work and will be promptly done by cgraph_remove_node
	 after reading back.  */
      in_other_partition = 1;
    }

  /* Walk up the clone tree until we find an origin that is in the
     encoder (REF holds its reference when the loop exits).  */
  clone_of = node->clone_of;
  while (clone_of
	 && (ref = lto_symtab_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
    if (clone_of->prev_sibling_clone)
      clone_of = clone_of->prev_sibling_clone;
    else
      clone_of = clone_of->clone_of;

  /* See if body of the master function is output.  If not, we are seeing only
     a declaration and we do not need to pass down the clone tree.  */
  ultimate_clone_of = clone_of;
  while (ultimate_clone_of && ultimate_clone_of->clone_of)
    ultimate_clone_of = ultimate_clone_of->clone_of;

  if (clone_of && !lto_symtab_encoder_encode_body_p (encoder, ultimate_clone_of))
    clone_of = NULL;

  if (tag == LTO_symtab_analyzed_node)
    gcc_assert (clone_of || !node->clone_of);
  if (!clone_of)
    streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
  else
    streamer_write_hwi_stream (ob->main_stream, ref);


  lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
  streamer_write_gcov_count_stream (ob->main_stream, node->count);
  streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);

  /* Stream the list of IPA transform passes still to be applied.  */
  streamer_write_hwi_stream (ob->main_stream,
			     node->ipa_transforms_to_apply.length ());
  FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
    streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);

  if (tag == LTO_symtab_analyzed_node)
    {
      /* Reference of the function this node is inlined into, or
	 LCC_NOT_FOUND when it is not inlined.  */
      if (node->global.inlined_to)
	{
	  ref = lto_symtab_encoder_lookup (encoder, node->global.inlined_to);
	  gcc_assert (ref != LCC_NOT_FOUND);
	}
      else
	ref = LCC_NOT_FOUND;

      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  /* Comdat group name is streamed as a NUL-terminated string; empty
     string means no group.  */
  group = node->get_comdat_group ();
  if (group)
    comdat = IDENTIFIER_POINTER (group);
  else
    comdat = "";
  streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);

  if (group)
    {
      /* For boundary nodes the same_comdat_group link is dropped.  */
      if (node->same_comdat_group && !boundary_p)
	{
	  ref = lto_symtab_encoder_lookup (encoder,
					   node->same_comdat_group);
	  gcc_assert (ref != LCC_NOT_FOUND);
	}
      else
	ref = LCC_NOT_FOUND;
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  /* SECTION is computed here but written only after the bitpack
     below.  */
  section = node->get_section ();
  if (!section)
    section = "";

  streamer_write_hwi_stream (ob->main_stream, node->tp_first_run);

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, node->local.local, 1);
  bp_pack_value (&bp, node->externally_visible, 1);
  bp_pack_value (&bp, node->no_reorder, 1);
  bp_pack_value (&bp, node->definition, 1);
  bp_pack_value (&bp, node->local.versionable, 1);
  bp_pack_value (&bp, node->local.can_change_signature, 1);
  bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
  bp_pack_value (&bp, node->force_output, 1);
  bp_pack_value (&bp, node->forced_by_abi, 1);
  bp_pack_value (&bp, node->unique_name, 1);
  bp_pack_value (&bp, node->body_removed, 1);
  bp_pack_value (&bp, node->implicit_section, 1);
  bp_pack_value (&bp, node->address_taken, 1);
  /* used_from_other_partition bit: only meaningful for analyzed
     partition symbols that are reachable or referenced from outside.  */
  bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
		 && node->get_partitioning_class () == SYMBOL_PARTITION
		 && (reachable_from_other_partition_p (node, encoder)
		     || referenced_from_other_partition_p (node, encoder)), 1);
  bp_pack_value (&bp, node->lowered, 1);
  bp_pack_value (&bp, in_other_partition, 1);
  /* Real aliases in a boundary become non-aliases.  However we still stream
     alias info on weakrefs.
     TODO: We lose a bit of information here - when we know that variable is
     defined in other unit, we may use the info on aliases to resolve
     symbol1 != symbol2 type tests that we can do only for locally defined objects
     otherwise.  */
  alias_p = node->alias && (!boundary_p || node->weakref);
  bp_pack_value (&bp, alias_p, 1);
  bp_pack_value (&bp, node->weakref, 1);
  bp_pack_value (&bp, node->frequency, 2);
  bp_pack_value (&bp, node->only_called_at_startup, 1);
  bp_pack_value (&bp, node->only_called_at_exit, 1);
  bp_pack_value (&bp, node->tm_clone, 1);
  bp_pack_value (&bp, node->calls_comdat_local, 1);
  bp_pack_value (&bp, node->icf_merged, 1);
  bp_pack_value (&bp, node->nonfreeing_fn, 1);
  /* Thunk info is streamed below only when this bit is set.  */
  bp_pack_value (&bp, node->thunk.thunk_p && !boundary_p, 1);
  bp_pack_enum (&bp, ld_plugin_symbol_resolution,
		LDPR_NUM_KNOWN, node->resolution);
  bp_pack_value (&bp, node->instrumentation_clone, 1);
  streamer_write_bitpack (&bp);
  streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);

  if (node->thunk.thunk_p && !boundary_p)
    {
      /* Encode the three thunk flags as bits 1, 2 and 3 of a single
	 integer (bit 0 is always set).  */
      streamer_write_uhwi_stream
	 (ob->main_stream,
	  1 + (node->thunk.this_adjusting != 0) * 2
	  + (node->thunk.virtual_offset_p != 0) * 4
	  + (node->thunk.add_pointer_bounds_args != 0) * 8);
      streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
      streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
    }
  streamer_write_hwi_stream (ob->main_stream, node->profile_id);
  /* Constructor/destructor priorities are streamed only when the decl
     actually is one; the reader checks the same predicates.  */
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    streamer_write_hwi_stream (ob->main_stream, node->get_init_priority ());
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    streamer_write_hwi_stream (ob->main_stream, node->get_fini_priority ());

  if (node->instrumentation_clone)
    lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->orig_decl);
}
592
/* Output the varpool NODE to OB.  If NODE is not in the partition
   described by ENCODER, then NODE is a boundary symbol.  The reader
   must consume fields in exactly the order written here.  */

static void
lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
			 lto_symtab_encoder_t encoder)
{
  bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
  struct bitpack_d bp;
  int ref;
  bool alias_p;
  const char *comdat;
  const char *section;
  tree group;

  streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
		       LTO_symtab_variable);
  streamer_write_hwi_stream (ob->main_stream, node->order);
  lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, node->externally_visible, 1);
  bp_pack_value (&bp, node->no_reorder, 1);
  bp_pack_value (&bp, node->force_output, 1);
  bp_pack_value (&bp, node->forced_by_abi, 1);
  bp_pack_value (&bp, node->unique_name, 1);
  bp_pack_value (&bp, node->body_removed, 1);
  bp_pack_value (&bp, node->implicit_section, 1);
  bp_pack_value (&bp, node->writeonly, 1);
  bp_pack_value (&bp, node->definition, 1);
  /* Real aliases in a boundary become non-aliases; alias info is still
     streamed for weakrefs.  */
  alias_p = node->alias && (!boundary_p || node->weakref);
  bp_pack_value (&bp, alias_p, 1);
  bp_pack_value (&bp, node->weakref, 1);
  bp_pack_value (&bp, node->analyzed && !boundary_p, 1);
  gcc_assert (node->definition || !node->analyzed);
  /* Constant pool initializers can be de-unified into individual ltrans units.
     FIXME: Alternatively at -Os we may want to avoid generating for them the local
     labels and share them across LTRANS partitions.  */
  if (node->get_partitioning_class () != SYMBOL_PARTITION)
    {
      bp_pack_value (&bp, 0, 1); /* used_from_other_partition.  */
      bp_pack_value (&bp, 0, 1); /* in_other_partition.  */
    }
  else
    {
      /* used_from_other_partition.  */
      bp_pack_value (&bp, node->definition
		     && referenced_from_other_partition_p (node, encoder), 1);
      bp_pack_value (&bp, node->analyzed
		     && boundary_p && !DECL_EXTERNAL (node->decl), 1);
      /* in_other_partition.  */
    }
  bp_pack_value (&bp, node->tls_model, 3);
  bp_pack_value (&bp, node->used_by_single_function, 1);
  bp_pack_value (&bp, node->need_bounds_init, 1);
  streamer_write_bitpack (&bp);

  /* Comdat group name is a NUL-terminated string; empty means no
     group.  */
  group = node->get_comdat_group ();
  if (group)
    comdat = IDENTIFIER_POINTER (group);
  else
    comdat = "";
  streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);

  if (group)
    {
      /* For boundary nodes the same_comdat_group link is dropped.  */
      if (node->same_comdat_group && !boundary_p)
	{
	  ref = lto_symtab_encoder_lookup (encoder,
					   node->same_comdat_group);
	  gcc_assert (ref != LCC_NOT_FOUND);
	}
      else
	ref = LCC_NOT_FOUND;
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  section = node->get_section ();
  if (!section)
    section = "";
  streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);

  streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
		       LDPR_NUM_KNOWN, node->resolution);
}
676
677 /* Output the varpool NODE to OB.
678 If NODE is not in SET, then NODE is a boundary. */
679
680 static void
681 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
682 lto_symtab_encoder_t encoder)
683 {
684 struct bitpack_d bp;
685 int nref;
686 int uid = ref->lto_stmt_uid;
687 struct cgraph_node *node;
688
689 bp = bitpack_create (ob->main_stream);
690 bp_pack_value (&bp, ref->use, 3);
691 bp_pack_value (&bp, ref->speculative, 1);
692 streamer_write_bitpack (&bp);
693 nref = lto_symtab_encoder_lookup (encoder, ref->referred);
694 gcc_assert (nref != LCC_NOT_FOUND);
695 streamer_write_hwi_stream (ob->main_stream, nref);
696
697 node = dyn_cast <cgraph_node *> (ref->referring);
698 if (node)
699 {
700 if (ref->stmt)
701 uid = gimple_uid (ref->stmt) + 1;
702 streamer_write_hwi_stream (ob->main_stream, uid);
703 }
704 }
705
/* Stream out profile_summary to OB.  A single 0 is written when no
   profile info is available; otherwise runs, sum_max, sum_all, the
   non-empty histogram entries and (in WPA mode) the hot BB threshold
   are streamed, in that order.  */

static void
output_profile_summary (struct lto_simple_output_block *ob)
{
  unsigned h_ix;
  struct bitpack_d bp;

  if (profile_info)
    {
      /* We do not output num and run_max, they are not used by
	 GCC profile feedback and they are difficult to merge from multiple
	 units.  */
      gcc_assert (profile_info->runs);
      streamer_write_uhwi_stream (ob->main_stream, profile_info->runs);
      streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_max);

      /* sum_all is needed for computing the working set with the
	 histogram.  */
      streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_all);

      /* Create and output a bitpack of non-zero histogram entries indices.  */
      bp = bitpack_create (ob->main_stream);
      for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
	bp_pack_value (&bp, profile_info->histogram[h_ix].num_counters > 0, 1);
      streamer_write_bitpack (&bp);
      /* Now stream out only those non-zero entries.  */
      for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
	{
	  if (!profile_info->histogram[h_ix].num_counters)
	    continue;
	  streamer_write_gcov_count_stream (ob->main_stream,
				profile_info->histogram[h_ix].num_counters);
	  streamer_write_gcov_count_stream (ob->main_stream,
				profile_info->histogram[h_ix].min_value);
	  streamer_write_gcov_count_stream (ob->main_stream,
				profile_info->histogram[h_ix].cum_value);
	}
      /* IPA-profile computes hot bb threshold based on cumulated
	 whole program profile.  We need to stream it down to ltrans.  */
      if (flag_wpa)
	streamer_write_gcov_count_stream (ob->main_stream,
					  get_hot_bb_threshold ());
    }
  else
    /* No profile info: stream a zero run count.  */
    streamer_write_uhwi_stream (ob->main_stream, 0);
}
753
754 /* Output all callees or indirect outgoing edges. EDGE must be the first such
755 edge. */
756
757 static void
758 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
759 struct lto_simple_output_block *ob,
760 lto_symtab_encoder_t encoder)
761 {
762 if (!edge)
763 return;
764
765 /* Output edges in backward direction, so the reconstructed callgraph match
766 and it is easy to associate call sites in the IPA pass summaries. */
767 while (edge->next_callee)
768 edge = edge->next_callee;
769 for (; edge; edge = edge->prev_callee)
770 lto_output_edge (ob, edge, encoder);
771 }
772
773 /* Output the part of the cgraph in SET. */
774
775 static void
776 output_refs (lto_symtab_encoder_t encoder)
777 {
778 lto_symtab_encoder_iterator lsei;
779 struct lto_simple_output_block *ob;
780 int count;
781 struct ipa_ref *ref;
782 int i;
783
784 ob = lto_create_simple_output_block (LTO_section_refs);
785
786 for (lsei = lsei_start_in_partition (encoder); !lsei_end_p (lsei);
787 lsei_next_in_partition (&lsei))
788 {
789 symtab_node *node = lsei_node (lsei);
790
791 count = node->ref_list.nreferences ();
792 if (count)
793 {
794 streamer_write_gcov_count_stream (ob->main_stream, count);
795 streamer_write_uhwi_stream (ob->main_stream,
796 lto_symtab_encoder_lookup (encoder, node));
797 for (i = 0; node->iterate_reference (i, ref); i++)
798 lto_output_ref (ob, ref, encoder);
799 }
800 }
801
802 streamer_write_uhwi_stream (ob->main_stream, 0);
803
804 lto_destroy_simple_output_block (ob);
805 }
806
/* Add NODE into ENCODER as well as the nodes it is cloned from, so
   that each clone origin (master) is encoded before its clones.  When
   INCLUDE_BODY is set, the outermost master is additionally marked to
   have its body streamed.  */

static void
add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
	     bool include_body)
{
  /* Recurse to the clone origin first so it gets encoded before NODE;
     only the root of the clone tree has its body marked (clones are
     materialized from it).  */
  if (node->clone_of)
    add_node_to (encoder, node->clone_of, include_body);
  else if (include_body)
    lto_set_symtab_encoder_encode_body (encoder, node);
  lto_symtab_encoder_encode (encoder, node);
}
820
821 /* Add all references in NODE to encoders. */
822
823 static void
824 create_references (lto_symtab_encoder_t encoder, symtab_node *node)
825 {
826 int i;
827 struct ipa_ref *ref = NULL;
828 for (i = 0; node->iterate_reference (i, ref); i++)
829 if (is_a <cgraph_node *> (ref->referred))
830 add_node_to (encoder, dyn_cast <cgraph_node *> (ref->referred), false);
831 else
832 lto_symtab_encoder_encode (encoder, ref->referred);
833 }
834
835 /* Select what needs to be streamed out. In regular lto mode stream everything.
836 In offload lto mode stream only nodes marked as offloadable. */
837 void
838 select_what_to_stream (bool offload_lto_mode)
839 {
840 struct symtab_node *snode;
841 FOR_EACH_SYMBOL (snode)
842 snode->need_lto_streaming = !offload_lto_mode || snode->offloadable;
843 }
844
/* Find all symbols we want to stream into given partition and insert them
   to encoders.

   The function actually replaces IN_ENCODER by new one.  The reason is that
   streaming code needs clone's origin to be streamed before clone.  This
   means that we need to insert the nodes in specific order.  This order is
   ignored by the partitioning logic earlier.  */

lto_symtab_encoder_t
compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
{
  struct cgraph_edge *edge;
  int i;
  lto_symtab_encoder_t encoder;
  lto_symtab_encoder_iterator lsei;
  /* Cache tokens of polymorphic call sites already expanded, so the
     same target set is not added twice.  */
  hash_set<void *> reachable_call_targets;

  encoder = lto_symtab_encoder_new (false);

  /* Go over all entries in the IN_ENCODER and duplicate them to
     ENCODER.  At the same time insert masters of clones so
     every master appears before clone.  */
  for (lsei = lsei_start_function_in_partition (in_encoder);
       !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
    {
      struct cgraph_node *node = lsei_cgraph_node (lsei);
      if (!node->need_lto_streaming)
	continue;
      add_node_to (encoder, node, true);
      lto_set_symtab_encoder_in_partition (encoder, node);
      create_references (encoder, node);
      /* For proper debug info, we need to ship the origins, too.  */
      if (DECL_ABSTRACT_ORIGIN (node->decl))
	{
	  struct cgraph_node *origin_node
	  = cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
	  add_node_to (encoder, origin_node, true);
	}
    }
  /* Same duplication for variables.  */
  for (lsei = lsei_start_variable_in_partition (in_encoder);
       !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
    {
      varpool_node *vnode = lsei_varpool_node (lsei);

      if (!vnode->need_lto_streaming)
	continue;
      lto_set_symtab_encoder_in_partition (encoder, vnode);
      lto_set_symtab_encoder_encode_initializer (encoder, vnode);
      create_references (encoder, vnode);
      /* For proper debug info, we need to ship the origins, too.  */
      if (DECL_ABSTRACT_ORIGIN (vnode->decl))
	{
	  varpool_node *origin_node
	  = varpool_node::get (DECL_ABSTRACT_ORIGIN (vnode->decl));
	  lto_set_symtab_encoder_in_partition (encoder, origin_node);
	}
    }
  /* Pickle in also the initializer of all referenced readonly variables
     to help folding.  Constant pool variables are not shared, so we must
     pickle those too.  Note that lto_symtab_encoder_size may grow while
     create_references adds entries; the loop re-reads it each
     iteration.  */
  for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
	{
	  if (!lto_symtab_encoder_encode_initializer_p (encoder,
							vnode)
	      && (vnode->ctor_useable_for_folding_p ()
		  || POINTER_BOUNDS_P (vnode->decl)))
	    {
	      lto_set_symtab_encoder_encode_initializer (encoder, vnode);
	      create_references (encoder, vnode);
	    }
	}
    }

  /* Go over all the nodes again to include callees that are not in
     SET.  */
  for (lsei = lsei_start_function_in_partition (encoder);
       !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
    {
      struct cgraph_node *node = lsei_cgraph_node (lsei);
      for (edge = node->callees; edge; edge = edge->next_callee)
	{
	  struct cgraph_node *callee = edge->callee;
	  if (!lto_symtab_encoder_in_partition_p (encoder, callee))
	    {
	      /* We should have moved all the inlines.  */
	      gcc_assert (!callee->global.inlined_to);
	      add_node_to (encoder, callee, false);
	    }
	}
      /* Add all possible targets for late devirtualization.  */
      if (flag_devirtualize)
	for (edge = node->indirect_calls; edge; edge = edge->next_callee)
	  if (edge->indirect_info->polymorphic)
	    {
	      unsigned int i;
	      void *cache_token;
	      bool final;
	      vec <cgraph_node *>targets
		= possible_polymorphic_call_targets
		    (edge, &final, &cache_token);
	      /* Expand each distinct target set only once.  */
	      if (!reachable_call_targets.add (cache_token))
		{
		  for (i = 0; i < targets.length (); i++)
		    {
		      struct cgraph_node *callee = targets[i];

		      /* Adding an external declarations into the unit serves
			 no purpose and just increases its boundary.  */
		      if (callee->definition
			  && !lto_symtab_encoder_in_partition_p
			       (encoder, callee))
			{
			  gcc_assert (!callee->global.inlined_to);
			  add_node_to (encoder, callee, false);
			}
		    }
		}
	    }
    }
  /* The caller's encoder is replaced by the freshly ordered one.  */
  lto_symtab_encoder_delete (in_encoder);
  return encoder;
}
970
/* Output the symbol table (all encoded cgraph/varpool nodes, followed by
   the call-graph edges of the partition) to the LTO_section_symtab_nodes
   section.  The node encoder describing the current partition must
   already exist.  */

void
output_symtab (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob;
  lto_symtab_encoder_iterator lsei;
  int i, n_nodes;
  lto_symtab_encoder_t encoder;

  /* At WPA time also stream the optimization summaries for clones.  */
  if (flag_wpa)
    output_cgraph_opt_summary ();

  ob = lto_create_simple_output_block (LTO_section_symtab_nodes);

  output_profile_summary (ob);

  /* An encoder for cgraph nodes should have been created by
     ipa_write_summaries_1.  */
  gcc_assert (ob->decl_state->symtab_node_encoder);
  encoder = ob->decl_state->symtab_node_encoder;

  /* Write out the nodes.  We must first output a node and then its clones,
     otherwise at a time reading back the node there would be nothing to clone
     from.  */
  n_nodes = lto_symtab_encoder_size (encoder);
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
        lto_output_node (ob, cnode, encoder);
      else
        lto_output_varpool_node (ob, dyn_cast<varpool_node *> (node), encoder);
    }

  /* Go over the functions in the partition again to write edges; edges are
     streamed after all nodes so readers can resolve node references.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      output_outgoing_cgraph_edges (node->callees, ob, encoder);
      output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
    }

  /* Terminating zero tag; input_cgraph_1 stops reading on it.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);

  /* Emit toplevel asms.
     When doing WPA we must output every asm just once.  Since we do not
     partition asm nodes at all, output them to first output.  This is kind of
     hack, but should work well.  */
  if (!asm_nodes_output)
    {
      asm_nodes_output = true;
      lto_output_toplevel_asms ();
    }

  output_refs (encoder);
}
1032
1033 /* Return identifier encoded in IB as a plain string. */
1034
1035 static tree
1036 read_identifier (struct lto_input_block *ib)
1037 {
1038 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1039 tree id;
1040
1041 if (ib->data[ib->p + len])
1042 lto_section_overrun (ib);
1043 if (!len)
1044 {
1045 ib->p++;
1046 return NULL;
1047 }
1048 id = get_identifier (ib->data + ib->p);
1049 ib->p += len + 1;
1050 return id;
1051 }
1052
1053 /* Return string encoded in IB, NULL if string is empty. */
1054
1055 static const char *
1056 read_string (struct lto_input_block *ib)
1057 {
1058 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1059 const char *str;
1060
1061 if (ib->data[ib->p + len])
1062 lto_section_overrun (ib);
1063 if (!len)
1064 {
1065 ib->p++;
1066 return NULL;
1067 }
1068 str = ib->data + ib->p;
1069 ib->p += len + 1;
1070 return str;
1071 }
1072
/* Output function/variable tables that will allow libgomp to look up offload
   target code.
   OFFLOAD_FUNCS is filled in expand_omp_target, OFFLOAD_VARS is filled in
   varpool_node::get_create.  In WHOPR (partitioned) mode during the WPA stage
   both OFFLOAD_FUNCS and OFFLOAD_VARS are filled by input_offload_tables.
   The section is a sequence of (tag, decl-index) pairs terminated by a
   zero tag; see input_offload_tables for the reader.  */

void
output_offload_tables (void)
{
  /* Nothing to stream when no offload entities were recorded.  */
  if (vec_safe_is_empty (offload_funcs) && vec_safe_is_empty (offload_vars))
    return;

  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_offload_table);

  /* Functions are tagged LTO_symtab_unavail_node.  */
  for (unsigned i = 0; i < vec_safe_length (offload_funcs); i++)
    {
      streamer_write_enum (ob->main_stream, LTO_symtab_tags,
			   LTO_symtab_last_tag, LTO_symtab_unavail_node);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
				(*offload_funcs)[i]);
    }

  /* Variables are tagged LTO_symtab_variable.  */
  for (unsigned i = 0; i < vec_safe_length (offload_vars); i++)
    {
      streamer_write_enum (ob->main_stream, LTO_symtab_tags,
			   LTO_symtab_last_tag, LTO_symtab_variable);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream,
				 (*offload_vars)[i]);
    }

  /* Zero tag terminates the table.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);
  lto_destroy_simple_output_block (ob);

  /* In WHOPR mode during the WPA stage the joint offload tables need to be
     streamed to one partition only.  That's why we free offload_funcs and
     offload_vars after the first call of output_offload_tables.  */
  if (flag_wpa)
    {
      vec_free (offload_funcs);
      vec_free (offload_vars);
    }
}
1116
/* Overwrite the information in NODE based on FILE_DATA, TAG and the bitpack
   BP read from the stream.  This is called either to initialize NODE or to
   replace the values in it, for instance because the first time we saw it,
   the function body was not available but now it is.
   NOTE: the unpack order below must exactly mirror the pack order used by
   the writer (lto_output_node).  */

static void
input_overwrite_node (struct lto_file_decl_data *file_data,
		      struct cgraph_node *node,
		      enum LTO_symtab_tags tag,
		      struct bitpack_d *bp)
{
  /* Stash the tag in aux; input_cgraph_1 later uses it to tell which
     nodes came from this stream.  */
  node->aux = (void *) tag;
  node->lto_file_data = file_data;

  node->local.local = bp_unpack_value (bp, 1);
  node->externally_visible = bp_unpack_value (bp, 1);
  node->no_reorder = bp_unpack_value (bp, 1);
  node->definition = bp_unpack_value (bp, 1);
  node->local.versionable = bp_unpack_value (bp, 1);
  node->local.can_change_signature = bp_unpack_value (bp, 1);
  node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
  node->force_output = bp_unpack_value (bp, 1);
  node->forced_by_abi = bp_unpack_value (bp, 1);
  node->unique_name = bp_unpack_value (bp, 1);
  node->body_removed = bp_unpack_value (bp, 1);
  node->implicit_section = bp_unpack_value (bp, 1);
  node->address_taken = bp_unpack_value (bp, 1);
  node->used_from_other_partition = bp_unpack_value (bp, 1);
  node->lowered = bp_unpack_value (bp, 1);
  /* Analyzed-ness is implied by the tag, not a streamed bit.  */
  node->analyzed = tag == LTO_symtab_analyzed_node;
  node->in_other_partition = bp_unpack_value (bp, 1);
  if (node->in_other_partition
      /* Avoid updating decl when we are seeing just inline clone.
	 When inlining function that has functions already inlined into it,
	 we produce clones of inline clones.

	 WPA partitioning might put each clone into different unit and
	 we might end up streaming inline clone from other partition
	 to support clone we are interested in. */
      && (!node->clone_of
	  || node->clone_of->decl != node->decl))
    {
      /* The body lives elsewhere; treat the decl as external here.  */
      DECL_EXTERNAL (node->decl) = 1;
      TREE_STATIC (node->decl) = 0;
    }
  node->alias = bp_unpack_value (bp, 1);
  node->weakref = bp_unpack_value (bp, 1);
  node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
  node->only_called_at_startup = bp_unpack_value (bp, 1);
  node->only_called_at_exit = bp_unpack_value (bp, 1);
  node->tm_clone = bp_unpack_value (bp, 1);
  node->calls_comdat_local = bp_unpack_value (bp, 1);
  node->icf_merged = bp_unpack_value (bp, 1);
  node->nonfreeing_fn = bp_unpack_value (bp, 1);
  node->thunk.thunk_p = bp_unpack_value (bp, 1);
  node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
				     LDPR_NUM_KNOWN);
  node->instrumentation_clone = bp_unpack_value (bp, 1);
  /* Cross-partition markers only make sense during LTRANS.  */
  gcc_assert (flag_ltrans
	      || (!node->in_other_partition
		  && !node->used_from_other_partition));
}
1181
1182 /* Return string alias is alias of. */
1183
1184 static tree
1185 get_alias_symbol (tree decl)
1186 {
1187 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1188 return get_identifier (TREE_STRING_POINTER
1189 (TREE_VALUE (TREE_VALUE (alias))));
1190 }
1191
/* Read a cgraph node record from input_block IB.  TAG is the node's tag
   just read.  NODES holds all previously read nodes of this stream, used
   to resolve the clone reference.  Return the node read or overwritten.
   NOTE: the read order below must exactly mirror lto_output_node.  */

static struct cgraph_node *
input_node (struct lto_file_decl_data *file_data,
	    struct lto_input_block *ib,
	    enum LTO_symtab_tags tag,
	    vec<symtab_node *> nodes)
{
  gcc::pass_manager *passes = g->get_passes ();
  tree fn_decl;
  struct cgraph_node *node;
  struct bitpack_d bp;
  unsigned decl_index;
  int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
  int clone_ref;
  int order;
  int i, count;
  tree group;
  const char *section;
  /* Orders are rebased so nodes from different files do not collide.  */
  order = streamer_read_hwi (ib) + order_base;
  clone_ref = streamer_read_hwi (ib);

  decl_index = streamer_read_uhwi (ib);
  fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);

  if (clone_ref != LCC_NOT_FOUND)
    {
      /* The node is a clone of an earlier node in this stream; materialize
	 it by cloning that node.  */
      node = dyn_cast<cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
	0, CGRAPH_FREQ_BASE, false,
	vNULL, false, NULL, NULL);
    }
  else
    {
      /* Declaration of functions can be already merged with a declaration
	 from other input file.  We keep cgraph unmerged until after streaming
	 of ipa passes is done.  Always forcibly create a fresh node.  */
      node = symtab->create_empty ();
      node->decl = fn_decl;
      node->register_symbol ();
    }

  node->order = order;
  if (order >= symtab->order)
    symtab->order = order + 1;

  node->count = streamer_read_gcov_count (ib);
  node->count_materialization_scale = streamer_read_hwi (ib);

  /* Read the list of IPA transform passes still to apply to this body.  */
  count = streamer_read_hwi (ib);
  node->ipa_transforms_to_apply = vNULL;
  for (i = 0; i < count; i++)
    {
      opt_pass *pass;
      int pid = streamer_read_hwi (ib);

      gcc_assert (pid < passes->passes_by_id_size);
      pass = passes->passes_by_id[pid];
      node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass_d *) pass);
    }

  /* Only analyzed nodes stream an inlined_to reference.  */
  if (tag == LTO_symtab_analyzed_node)
    ref = streamer_read_hwi (ib);

  group = read_identifier (ib);
  if (group)
    ref2 = streamer_read_hwi (ib);

  /* Make sure that we have not read this node before.  Nodes that
     have already been read will have their tag stored in the 'aux'
     field.  Since built-in functions can be referenced in multiple
     functions, they are expected to be read more than once.  */
  if (node->aux && !DECL_BUILT_IN (node->decl))
    internal_error ("bytecode stream: found multiple instances of cgraph "
		    "node with uid %d", node->uid);

  node->tp_first_run = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);

  input_overwrite_node (file_data, node, tag, &bp);

  /* Store a reference for now, and fix up later to be a pointer.  */
  node->global.inlined_to = (cgraph_node *) (intptr_t) ref;

  if (group)
    {
      node->set_comdat_group (group);
      /* Store a reference for now, and fix up later to be a pointer.  */
      node->same_comdat_group = (symtab_node *) (intptr_t) ref2;
    }
  else
    node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
  section = read_string (ib);
  if (section)
    node->set_section_for_node (section);

  if (node->thunk.thunk_p)
    {
      /* TYPE packs the thunk flag bits; see the writer for the layout.  */
      int type = streamer_read_uhwi (ib);
      HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
      HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);

      node->thunk.fixed_offset = fixed_offset;
      node->thunk.this_adjusting = (type & 2);
      node->thunk.virtual_value = virtual_value;
      node->thunk.virtual_offset_p = (type & 4);
      node->thunk.add_pointer_bounds_args = (type & 8);
    }
  /* For weakrefs without a body, recover the alias target from the decl's
     "alias" attribute.  */
  if (node->alias && !node->analyzed && node->weakref)
    node->alias_target = get_alias_symbol (node->decl);
  node->profile_id = streamer_read_hwi (ib);
  /* Ctor/dtor priorities are streamed only when the decl is one.  */
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    node->set_init_priority (streamer_read_hwi (ib));
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    node->set_fini_priority (streamer_read_hwi (ib));

  if (node->instrumentation_clone)
    {
      /* Instrumentation clones also record their original decl.  */
      decl_index = streamer_read_uhwi (ib);
      fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
      node->orig_decl = fn_decl;
    }

  return node;
}
1318
1319 /* Read a node from input_block IB. TAG is the node's tag just read.
1320 Return the node read or overwriten. */
1321
1322 static varpool_node *
1323 input_varpool_node (struct lto_file_decl_data *file_data,
1324 struct lto_input_block *ib)
1325 {
1326 int decl_index;
1327 tree var_decl;
1328 varpool_node *node;
1329 struct bitpack_d bp;
1330 int ref = LCC_NOT_FOUND;
1331 int order;
1332 tree group;
1333 const char *section;
1334
1335 order = streamer_read_hwi (ib) + order_base;
1336 decl_index = streamer_read_uhwi (ib);
1337 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1338
1339 /* Declaration of functions can be already merged with a declaration
1340 from other input file. We keep cgraph unmerged until after streaming
1341 of ipa passes is done. Alays forcingly create a fresh node. */
1342 node = varpool_node::create_empty ();
1343 node->decl = var_decl;
1344 node->register_symbol ();
1345
1346 node->order = order;
1347 if (order >= symtab->order)
1348 symtab->order = order + 1;
1349 node->lto_file_data = file_data;
1350
1351 bp = streamer_read_bitpack (ib);
1352 node->externally_visible = bp_unpack_value (&bp, 1);
1353 node->no_reorder = bp_unpack_value (&bp, 1);
1354 node->force_output = bp_unpack_value (&bp, 1);
1355 node->forced_by_abi = bp_unpack_value (&bp, 1);
1356 node->unique_name = bp_unpack_value (&bp, 1);
1357 node->body_removed = bp_unpack_value (&bp, 1);
1358 node->implicit_section = bp_unpack_value (&bp, 1);
1359 node->writeonly = bp_unpack_value (&bp, 1);
1360 node->definition = bp_unpack_value (&bp, 1);
1361 node->alias = bp_unpack_value (&bp, 1);
1362 node->weakref = bp_unpack_value (&bp, 1);
1363 node->analyzed = bp_unpack_value (&bp, 1);
1364 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1365 node->in_other_partition = bp_unpack_value (&bp, 1);
1366 if (node->in_other_partition)
1367 {
1368 DECL_EXTERNAL (node->decl) = 1;
1369 TREE_STATIC (node->decl) = 0;
1370 }
1371 if (node->alias && !node->analyzed && node->weakref)
1372 node->alias_target = get_alias_symbol (node->decl);
1373 node->tls_model = (enum tls_model)bp_unpack_value (&bp, 3);
1374 node->used_by_single_function = (enum tls_model)bp_unpack_value (&bp, 1);
1375 node->need_bounds_init = bp_unpack_value (&bp, 1);
1376 group = read_identifier (ib);
1377 if (group)
1378 {
1379 node->set_comdat_group (group);
1380 ref = streamer_read_hwi (ib);
1381 /* Store a reference for now, and fix up later to be a pointer. */
1382 node->same_comdat_group = (symtab_node *) (intptr_t) ref;
1383 }
1384 else
1385 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1386 section = read_string (ib);
1387 if (section)
1388 node->set_section_for_node (section);
1389 node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1390 LDPR_NUM_KNOWN);
1391 gcc_assert (flag_ltrans
1392 || (!node->in_other_partition
1393 && !node->used_from_other_partition));
1394
1395 return node;
1396 }
1397
1398 /* Read a node from input_block IB. TAG is the node's tag just read.
1399 Return the node read or overwriten. */
1400
1401 static void
1402 input_ref (struct lto_input_block *ib,
1403 symtab_node *referring_node,
1404 vec<symtab_node *> nodes)
1405 {
1406 symtab_node *node = NULL;
1407 struct bitpack_d bp;
1408 enum ipa_ref_use use;
1409 bool speculative;
1410 struct ipa_ref *ref;
1411
1412 bp = streamer_read_bitpack (ib);
1413 use = (enum ipa_ref_use) bp_unpack_value (&bp, 3);
1414 speculative = (enum ipa_ref_use) bp_unpack_value (&bp, 1);
1415 node = nodes[streamer_read_hwi (ib)];
1416 ref = referring_node->create_reference (node, use);
1417 ref->speculative = speculative;
1418 if (is_a <cgraph_node *> (referring_node))
1419 ref->lto_stmt_uid = streamer_read_hwi (ib);
1420 }
1421
/* Read an edge from IB.  NODES points to a vector of previously read nodes for
   decoding caller and callee of the edge to be read.  If INDIRECT is true, the
   edge being read is indirect (in the sense that it has
   indirect_unknown_callee set).
   NOTE: the read order below must exactly mirror lto_output_edge.  */

static void
input_edge (struct lto_input_block *ib, vec<symtab_node *> nodes,
	    bool indirect)
{
  struct cgraph_node *caller, *callee;
  struct cgraph_edge *edge;
  unsigned int stmt_id;
  gcov_type count;
  int freq;
  cgraph_inline_failed_t inline_failed;
  struct bitpack_d bp;
  int ecf_flags = 0;

  caller = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
  if (caller == NULL || caller->decl == NULL_TREE)
    internal_error ("bytecode stream: no caller found while reading edge");

  if (!indirect)
    {
      callee = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
      if (callee == NULL || callee->decl == NULL_TREE)
	internal_error ("bytecode stream: no callee found while reading edge");
    }
  else
    callee = NULL;

  count = streamer_read_gcov_count (ib);

  bp = streamer_read_bitpack (ib);
  inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
  stmt_id = bp_unpack_var_len_unsigned (&bp);
  freq = (int) bp_unpack_var_len_unsigned (&bp);

  if (indirect)
    edge = caller->create_indirect_edge (NULL, 0, count, freq);
  else
    edge = caller->create_edge (callee, NULL, count, freq);

  edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
  edge->speculative = bp_unpack_value (&bp, 1);
  edge->lto_stmt_uid = stmt_id;
  edge->inline_failed = inline_failed;
  edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
  edge->can_throw_external = bp_unpack_value (&bp, 1);
  edge->in_polymorphic_cdtor = bp_unpack_value (&bp, 1);
  if (indirect)
    {
      /* Indirect edges additionally carry the callee's ECF flags, one
	 bit per flag in the order they were packed by the writer.  */
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_CONST;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_PURE;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NORETURN;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_MALLOC;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NOTHROW;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_RETURNS_TWICE;
      edge->indirect_info->ecf_flags = ecf_flags;
      /* A nonzero common target id means a profiled likely target
	 follows, together with its probability.  */
      edge->indirect_info->common_target_id = streamer_read_hwi (ib);
      if (edge->indirect_info->common_target_id)
	edge->indirect_info->common_target_probability = streamer_read_hwi (ib);
    }
}
1492
1493
1494 /* Read a cgraph from IB using the info in FILE_DATA. */
1495
1496 static vec<symtab_node *>
1497 input_cgraph_1 (struct lto_file_decl_data *file_data,
1498 struct lto_input_block *ib)
1499 {
1500 enum LTO_symtab_tags tag;
1501 vec<symtab_node *> nodes = vNULL;
1502 symtab_node *node;
1503 unsigned i;
1504
1505 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1506 order_base = symtab->order;
1507 while (tag)
1508 {
1509 if (tag == LTO_symtab_edge)
1510 input_edge (ib, nodes, false);
1511 else if (tag == LTO_symtab_indirect_edge)
1512 input_edge (ib, nodes, true);
1513 else if (tag == LTO_symtab_variable)
1514 {
1515 node = input_varpool_node (file_data, ib);
1516 nodes.safe_push (node);
1517 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1518 }
1519 else
1520 {
1521 node = input_node (file_data, ib, tag, nodes);
1522 if (node == NULL || node->decl == NULL_TREE)
1523 internal_error ("bytecode stream: found empty cgraph node");
1524 nodes.safe_push (node);
1525 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1526 }
1527
1528 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1529 }
1530
1531 lto_input_toplevel_asms (file_data, order_base);
1532
1533 /* AUX pointers should be all non-zero for function nodes read from the stream. */
1534 #ifdef ENABLE_CHECKING
1535 FOR_EACH_VEC_ELT (nodes, i, node)
1536 gcc_assert (node->aux || !is_a <cgraph_node *> (node));
1537 #endif
1538 FOR_EACH_VEC_ELT (nodes, i, node)
1539 {
1540 int ref;
1541 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1542 {
1543 ref = (int) (intptr_t) cnode->global.inlined_to;
1544
1545 /* We share declaration of builtins, so we may read same node twice. */
1546 if (!node->aux)
1547 continue;
1548 node->aux = NULL;
1549
1550 /* Fixup inlined_to from reference to pointer. */
1551 if (ref != LCC_NOT_FOUND)
1552 dyn_cast<cgraph_node *> (node)->global.inlined_to
1553 = dyn_cast<cgraph_node *> (nodes[ref]);
1554 else
1555 cnode->global.inlined_to = NULL;
1556
1557 /* Compute instrumented_version. */
1558 if (cnode->instrumentation_clone)
1559 {
1560 gcc_assert (cnode->orig_decl);
1561
1562 cnode->instrumented_version = cgraph_node::get (cnode->orig_decl);
1563 if (cnode->instrumented_version)
1564 cnode->instrumented_version->instrumented_version = cnode;
1565
1566 /* Restore decl names reference. */
1567 if (IDENTIFIER_TRANSPARENT_ALIAS (DECL_ASSEMBLER_NAME (cnode->decl))
1568 && !TREE_CHAIN (DECL_ASSEMBLER_NAME (cnode->decl)))
1569 TREE_CHAIN (DECL_ASSEMBLER_NAME (cnode->decl))
1570 = DECL_ASSEMBLER_NAME (cnode->orig_decl);
1571 }
1572 }
1573
1574 ref = (int) (intptr_t) node->same_comdat_group;
1575
1576 /* Fixup same_comdat_group from reference to pointer. */
1577 if (ref != LCC_NOT_FOUND)
1578 node->same_comdat_group = nodes[ref];
1579 else
1580 node->same_comdat_group = NULL;
1581 }
1582 FOR_EACH_VEC_ELT (nodes, i, node)
1583 node->aux = is_a <cgraph_node *> (node) ? (void *)1 : NULL;
1584 return nodes;
1585 }
1586
1587 /* Input ipa_refs. */
1588
1589 static void
1590 input_refs (struct lto_input_block *ib,
1591 vec<symtab_node *> nodes)
1592 {
1593 int count;
1594 int idx;
1595 while (true)
1596 {
1597 symtab_node *node;
1598 count = streamer_read_uhwi (ib);
1599 if (!count)
1600 break;
1601 idx = streamer_read_uhwi (ib);
1602 node = nodes[idx];
1603 while (count)
1604 {
1605 input_ref (ib, node, nodes);
1606 count--;
1607 }
1608 }
1609 }
1610
1611
/* Merged whole-program profile summary; filled by merge_profile_summaries
   and published via the global profile_info pointer.  */
static struct gcov_ctr_summary lto_gcov_summary;

/* Input profile_info from IB into FILE_DATA->profile_info.  A leading
   run count of zero means the file carried no profile.  */
static void
input_profile_summary (struct lto_input_block *ib,
		       struct lto_file_decl_data *file_data)
{
  unsigned h_ix;
  struct bitpack_d bp;
  unsigned int runs = streamer_read_uhwi (ib);
  if (runs)
    {
      file_data->profile_info.runs = runs;
      file_data->profile_info.sum_max = streamer_read_gcov_count (ib);
      file_data->profile_info.sum_all = streamer_read_gcov_count (ib);

      memset (file_data->profile_info.histogram, 0,
              sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
      /* Input the bitpack of non-zero histogram indices.  */
      bp = streamer_read_bitpack (ib);
      /* Read in and unpack the full bitpack, flagging non-zero
         histogram entries by setting the num_counters non-zero.  */
      for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
        {
          file_data->profile_info.histogram[h_ix].num_counters
              = bp_unpack_value (&bp, 1);
        }
      /* Counter values follow only for the buckets flagged above.  */
      for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
        {
          if (!file_data->profile_info.histogram[h_ix].num_counters)
            continue;

          file_data->profile_info.histogram[h_ix].num_counters
              = streamer_read_gcov_count (ib);
          file_data->profile_info.histogram[h_ix].min_value
              = streamer_read_gcov_count (ib);
          file_data->profile_info.histogram[h_ix].cum_value
              = streamer_read_gcov_count (ib);
        }
      /* IPA-profile computes hot bb threshold based on cumulated
	 whole program profile.  We need to stream it down to ltrans.  */
      if (flag_ltrans)
	set_hot_bb_threshold (streamer_read_gcov_count (ib));
    }

}
1658
1659 /* Rescale profile summaries to the same number of runs in the whole unit. */
1660
1661 static void
1662 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1663 {
1664 struct lto_file_decl_data *file_data;
1665 unsigned int j, h_ix;
1666 gcov_unsigned_t max_runs = 0;
1667 struct cgraph_node *node;
1668 struct cgraph_edge *edge;
1669 gcov_type saved_sum_all = 0;
1670 gcov_ctr_summary *saved_profile_info = 0;
1671 int saved_scale = 0;
1672
1673 /* Find unit with maximal number of runs. If we ever get serious about
1674 roundoff errors, we might also consider computing smallest common
1675 multiply. */
1676 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1677 if (max_runs < file_data->profile_info.runs)
1678 max_runs = file_data->profile_info.runs;
1679
1680 if (!max_runs)
1681 return;
1682
1683 /* Simple overflow check. We probably don't need to support that many train
1684 runs. Such a large value probably imply data corruption anyway. */
1685 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1686 {
1687 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1688 INT_MAX / REG_BR_PROB_BASE);
1689 return;
1690 }
1691
1692 profile_info = &lto_gcov_summary;
1693 lto_gcov_summary.runs = max_runs;
1694 lto_gcov_summary.sum_max = 0;
1695 memset (lto_gcov_summary.histogram, 0,
1696 sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
1697
1698 /* Rescale all units to the maximal number of runs.
1699 sum_max can not be easily merged, as we have no idea what files come from
1700 the same run. We do not use the info anyway, so leave it 0. */
1701 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1702 if (file_data->profile_info.runs)
1703 {
1704 int scale = GCOV_COMPUTE_SCALE (max_runs,
1705 file_data->profile_info.runs);
1706 lto_gcov_summary.sum_max
1707 = MAX (lto_gcov_summary.sum_max,
1708 apply_scale (file_data->profile_info.sum_max, scale));
1709 lto_gcov_summary.sum_all
1710 = MAX (lto_gcov_summary.sum_all,
1711 apply_scale (file_data->profile_info.sum_all, scale));
1712 /* Save a pointer to the profile_info with the largest
1713 scaled sum_all and the scale for use in merging the
1714 histogram. */
1715 if (!saved_profile_info
1716 || lto_gcov_summary.sum_all > saved_sum_all)
1717 {
1718 saved_profile_info = &file_data->profile_info;
1719 saved_sum_all = lto_gcov_summary.sum_all;
1720 saved_scale = scale;
1721 }
1722 }
1723
1724 gcc_assert (saved_profile_info);
1725
1726 /* Scale up the histogram from the profile that had the largest
1727 scaled sum_all above. */
1728 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1729 {
1730 /* Scale up the min value as we did the corresponding sum_all
1731 above. Use that to find the new histogram index. */
1732 gcov_type scaled_min
1733 = apply_scale (saved_profile_info->histogram[h_ix].min_value,
1734 saved_scale);
1735 /* The new index may be shared with another scaled histogram entry,
1736 so we need to account for a non-zero histogram entry at new_ix. */
1737 unsigned new_ix = gcov_histo_index (scaled_min);
1738 lto_gcov_summary.histogram[new_ix].min_value
1739 = (lto_gcov_summary.histogram[new_ix].num_counters
1740 ? MIN (lto_gcov_summary.histogram[new_ix].min_value, scaled_min)
1741 : scaled_min);
1742 /* Some of the scaled counter values would ostensibly need to be placed
1743 into different (larger) histogram buckets, but we keep things simple
1744 here and place the scaled cumulative counter value in the bucket
1745 corresponding to the scaled minimum counter value. */
1746 lto_gcov_summary.histogram[new_ix].cum_value
1747 += apply_scale (saved_profile_info->histogram[h_ix].cum_value,
1748 saved_scale);
1749 lto_gcov_summary.histogram[new_ix].num_counters
1750 += saved_profile_info->histogram[h_ix].num_counters;
1751 }
1752
1753 /* Watch roundoff errors. */
1754 if (lto_gcov_summary.sum_max < max_runs)
1755 lto_gcov_summary.sum_max = max_runs;
1756
1757 /* If merging already happent at WPA time, we are done. */
1758 if (flag_ltrans)
1759 return;
1760
1761 /* Now compute count_materialization_scale of each node.
1762 During LTRANS we already have values of count_materialization_scale
1763 computed, so just update them. */
1764 FOR_EACH_FUNCTION (node)
1765 if (node->lto_file_data
1766 && node->lto_file_data->profile_info.runs)
1767 {
1768 int scale;
1769
1770 scale = RDIV (node->count_materialization_scale * max_runs,
1771 node->lto_file_data->profile_info.runs);
1772 node->count_materialization_scale = scale;
1773 if (scale < 0)
1774 fatal_error ("Profile information in %s corrupted",
1775 file_data->file_name);
1776
1777 if (scale == REG_BR_PROB_BASE)
1778 continue;
1779 for (edge = node->callees; edge; edge = edge->next_callee)
1780 edge->count = apply_scale (edge->count, scale);
1781 node->count = apply_scale (node->count, scale);
1782 }
1783 }
1784
/* Input and merge the symtab from each of the .o files passed to
   lto1: for every input file read the profile summary, the symtab-nodes
   section and the refs section, then merge the per-file profile
   summaries into one whole-program summary.  */

void
input_symtab (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;
  struct cgraph_node *node;

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib;
      vec<symtab_node *> nodes;

      /* The symtab-nodes section is mandatory for every input file.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
					  &data, &len);
      if (!ib)
	fatal_error ("cannot find LTO cgraph in %s", file_data->file_name);
      input_profile_summary (ib, file_data);
      file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
      nodes = input_cgraph_1 (file_data, ib);
      lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
				      ib, data, len);

      /* The refs section resolves node indices against NODES read above.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_refs,
					  &data, &len);
      if (!ib)
	fatal_error ("cannot find LTO section refs in %s",
		     file_data->file_name);
      input_refs (ib, nodes);
      lto_destroy_simple_input_block (file_data, LTO_section_refs,
				      ib, data, len);
      if (flag_ltrans)
	input_cgraph_opt_summary (nodes);
      nodes.release ();
    }

  merge_profile_summaries (file_data_vec);
  get_working_sets ();


  /* Clear out the aux field that was used to store enough state to
     tell which nodes should be overwritten.  */
  FOR_EACH_FUNCTION (node)
    {
      /* Some nodes may have been created by cgraph_node.  This
	 happens when the callgraph contains nested functions.  If the
	 node for the parent function was never emitted to the gimple
	 file, cgraph_node will create a node for it when setting the
	 context of the nested function.  */
      if (node->lto_file_data)
	node->aux = NULL;
    }
}
1843
1844 /* Input function/variable tables that will allow libgomp to look up offload
1845 target code, and store them into OFFLOAD_FUNCS and OFFLOAD_VARS. */
1846
1847 void
1848 input_offload_tables (void)
1849 {
1850 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1851 struct lto_file_decl_data *file_data;
1852 unsigned int j = 0;
1853
1854 while ((file_data = file_data_vec[j++]))
1855 {
1856 const char *data;
1857 size_t len;
1858 struct lto_input_block *ib
1859 = lto_create_simple_input_block (file_data, LTO_section_offload_table,
1860 &data, &len);
1861 if (!ib)
1862 continue;
1863
1864 enum LTO_symtab_tags tag
1865 = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1866 while (tag)
1867 {
1868 if (tag == LTO_symtab_unavail_node)
1869 {
1870 int decl_index = streamer_read_uhwi (ib);
1871 tree fn_decl
1872 = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1873 vec_safe_push (offload_funcs, fn_decl);
1874 }
1875 else if (tag == LTO_symtab_variable)
1876 {
1877 int decl_index = streamer_read_uhwi (ib);
1878 tree var_decl
1879 = lto_file_decl_data_get_var_decl (file_data, decl_index);
1880 vec_safe_push (offload_vars, var_decl);
1881 }
1882 else
1883 fatal_error ("invalid offload table in %s", file_data->file_name);
1884
1885 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1886 }
1887
1888 lto_destroy_simple_input_block (file_data, LTO_section_offload_table,
1889 ib, data, len);
1890 }
1891 }
1892
1893 /* True when we need optimization summary for NODE. */
1894
1895 static int
1896 output_cgraph_opt_summary_p (struct cgraph_node *node)
1897 {
1898 return (node->clone_of
1899 && (node->clone.tree_map
1900 || node->clone.args_to_skip
1901 || node->clone.combined_args_to_skip));
1902 }
1903
/* Output optimization summary for EDGE to OB.

   There is currently no per-edge optimization data to stream; this
   placeholder is kept so the edge walk in output_node_opt_summary stays
   symmetric with input_edge_opt_summary on the reader side.  */
static void
output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
			 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
{
}
1910
1911 /* Output optimization summary for NODE to OB. */
1912
1913 static void
1914 output_node_opt_summary (struct output_block *ob,
1915 struct cgraph_node *node,
1916 lto_symtab_encoder_t encoder)
1917 {
1918 unsigned int index;
1919 bitmap_iterator bi;
1920 struct ipa_replace_map *map;
1921 struct bitpack_d bp;
1922 int i;
1923 struct cgraph_edge *e;
1924
1925 if (node->clone.args_to_skip)
1926 {
1927 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
1928 EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1929 streamer_write_uhwi (ob, index);
1930 }
1931 else
1932 streamer_write_uhwi (ob, 0);
1933 if (node->clone.combined_args_to_skip)
1934 {
1935 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
1936 EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
1937 streamer_write_uhwi (ob, index);
1938 }
1939 else
1940 streamer_write_uhwi (ob, 0);
1941 streamer_write_uhwi (ob, vec_safe_length (node->clone.tree_map));
1942 FOR_EACH_VEC_SAFE_ELT (node->clone.tree_map, i, map)
1943 {
1944 /* At the moment we assume all old trees to be PARM_DECLs, because we have no
1945 mechanism to store function local declarations into summaries. */
1946 gcc_assert (!map->old_tree);
1947 streamer_write_uhwi (ob, map->parm_num);
1948 gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
1949 stream_write_tree (ob, map->new_tree, true);
1950 bp = bitpack_create (ob->main_stream);
1951 bp_pack_value (&bp, map->replace_p, 1);
1952 bp_pack_value (&bp, map->ref_p, 1);
1953 streamer_write_bitpack (&bp);
1954 }
1955
1956 if (lto_symtab_encoder_in_partition_p (encoder, node))
1957 {
1958 for (e = node->callees; e; e = e->next_callee)
1959 output_edge_opt_summary (ob, e);
1960 for (e = node->indirect_calls; e; e = e->next_callee)
1961 output_edge_opt_summary (ob, e);
1962 }
1963 }
1964
1965 /* Output optimization summaries stored in callgraph.
1966 At the moment it is the clone info structure. */
1967
1968 static void
1969 output_cgraph_opt_summary (void)
1970 {
1971 int i, n_nodes;
1972 lto_symtab_encoder_t encoder;
1973 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1974 unsigned count = 0;
1975
1976 ob->symbol = NULL;
1977 encoder = ob->decl_state->symtab_node_encoder;
1978 n_nodes = lto_symtab_encoder_size (encoder);
1979 for (i = 0; i < n_nodes; i++)
1980 {
1981 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
1982 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1983 if (cnode && output_cgraph_opt_summary_p (cnode))
1984 count++;
1985 }
1986 streamer_write_uhwi (ob, count);
1987 for (i = 0; i < n_nodes; i++)
1988 {
1989 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
1990 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1991 if (cnode && output_cgraph_opt_summary_p (cnode))
1992 {
1993 streamer_write_uhwi (ob, i);
1994 output_node_opt_summary (ob, cnode, encoder);
1995 }
1996 }
1997 produce_asm (ob, NULL);
1998 destroy_output_block (ob);
1999 }
2000
/* Input optimisation summary of EDGE.

   Nothing is currently streamed per edge (see output_edge_opt_summary);
   this placeholder keeps the reader's edge walk in input_node_opt_summary
   in lock-step with the writer.  */

static void
input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
			struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
{
}
2008
2009 /* Input optimisation summary of NODE. */
2010
2011 static void
2012 input_node_opt_summary (struct cgraph_node *node,
2013 struct lto_input_block *ib_main,
2014 struct data_in *data_in)
2015 {
2016 int i;
2017 int count;
2018 int bit;
2019 struct bitpack_d bp;
2020 struct cgraph_edge *e;
2021
2022 count = streamer_read_uhwi (ib_main);
2023 if (count)
2024 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
2025 for (i = 0; i < count; i++)
2026 {
2027 bit = streamer_read_uhwi (ib_main);
2028 bitmap_set_bit (node->clone.args_to_skip, bit);
2029 }
2030 count = streamer_read_uhwi (ib_main);
2031 if (count)
2032 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
2033 for (i = 0; i < count; i++)
2034 {
2035 bit = streamer_read_uhwi (ib_main);
2036 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
2037 }
2038 count = streamer_read_uhwi (ib_main);
2039 for (i = 0; i < count; i++)
2040 {
2041 struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
2042
2043 vec_safe_push (node->clone.tree_map, map);
2044 map->parm_num = streamer_read_uhwi (ib_main);
2045 map->old_tree = NULL;
2046 map->new_tree = stream_read_tree (ib_main, data_in);
2047 bp = streamer_read_bitpack (ib_main);
2048 map->replace_p = bp_unpack_value (&bp, 1);
2049 map->ref_p = bp_unpack_value (&bp, 1);
2050 }
2051 for (e = node->callees; e; e = e->next_callee)
2052 input_edge_opt_summary (e, ib_main);
2053 for (e = node->indirect_calls; e; e = e->next_callee)
2054 input_edge_opt_summary (e, ib_main);
2055 }
2056
2057 /* Read section in file FILE_DATA of length LEN with data DATA. */
2058
2059 static void
2060 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
2061 const char *data, size_t len,
2062 vec<symtab_node *> nodes)
2063 {
2064 const struct lto_function_header *header =
2065 (const struct lto_function_header *) data;
2066 const int cfg_offset = sizeof (struct lto_function_header);
2067 const int main_offset = cfg_offset + header->cfg_size;
2068 const int string_offset = main_offset + header->main_size;
2069 struct data_in *data_in;
2070 unsigned int i;
2071 unsigned int count;
2072
2073 lto_input_block ib_main ((const char *) data + main_offset,
2074 header->main_size);
2075
2076 data_in =
2077 lto_data_in_create (file_data, (const char *) data + string_offset,
2078 header->string_size, vNULL);
2079 count = streamer_read_uhwi (&ib_main);
2080
2081 for (i = 0; i < count; i++)
2082 {
2083 int ref = streamer_read_uhwi (&ib_main);
2084 input_node_opt_summary (dyn_cast<cgraph_node *> (nodes[ref]),
2085 &ib_main, data_in);
2086 }
2087 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
2088 len);
2089 lto_data_in_delete (data_in);
2090 }
2091
2092 /* Input optimization summary of cgraph. */
2093
2094 static void
2095 input_cgraph_opt_summary (vec<symtab_node *> nodes)
2096 {
2097 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2098 struct lto_file_decl_data *file_data;
2099 unsigned int j = 0;
2100
2101 while ((file_data = file_data_vec[j++]))
2102 {
2103 size_t len;
2104 const char *data =
2105 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
2106 &len);
2107
2108 if (data)
2109 input_cgraph_opt_section (file_data, data, len, nodes);
2110 }
2111 }