]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/lto-cgraph.c
c++: Handle multiple aggregate overloads [PR95319].
[thirdparty/gcc.git] / gcc / lto-cgraph.c
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright (C) 2009-2020 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "predict.h"
31 #include "stringpool.h"
32 #include "tree-streamer.h"
33 #include "cgraph.h"
34 #include "tree-pass.h"
35 #include "profile.h"
36 #include "context.h"
37 #include "pass_manager.h"
38 #include "ipa-utils.h"
39 #include "omp-offload.h"
40 #include "stringpool.h"
41 #include "attribs.h"
42
/* True when asm nodes have been output.  */
44 bool asm_nodes_output = false;
45
46 static void output_cgraph_opt_summary (void);
47 static void input_cgraph_opt_summary (vec<symtab_node *> nodes);
48
49 /* Number of LDPR values known to GCC. */
50 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
51
/* Cgraph streaming is organized as a set of records whose type
   is indicated by a tag.  */
enum LTO_symtab_tags
{
  /* Must leave 0 for the stopper.  */

  /* Cgraph node without body available.  */
  LTO_symtab_unavail_node = 1,
  /* Cgraph node with function body.  */
  LTO_symtab_analyzed_node,
  /* Cgraph edges.  */
  LTO_symtab_edge,
  /* Indirect edge (callee unknown at compile time).  */
  LTO_symtab_indirect_edge,
  /* Varpool (variable) node.  */
  LTO_symtab_variable,
  /* Sentinel value; must stay last.  */
  LTO_symtab_last_tag
};
68
69 /* Create a new symtab encoder.
70 if FOR_INPUT, the encoder allocate only datastructures needed
71 to read the symtab. */
72
73 lto_symtab_encoder_t
74 lto_symtab_encoder_new (bool for_input)
75 {
76 lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
77
78 if (!for_input)
79 encoder->map = new hash_map<symtab_node *, size_t>;
80 encoder->nodes.create (0);
81 return encoder;
82 }
83
84
85 /* Delete ENCODER and its components. */
86
87 void
88 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
89 {
90 encoder->nodes.release ();
91 if (encoder->map)
92 delete encoder->map;
93 free (encoder);
94 }
95
96
97 /* Return the existing reference number of NODE in the symtab encoder in
98 output block OB. Assign a new reference if this is the first time
99 NODE is encoded. */
100
101 int
102 lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
103 symtab_node *node)
104 {
105 int ref;
106
107 if (!encoder->map)
108 {
109 lto_encoder_entry entry = {node, false, false, false};
110
111 ref = encoder->nodes.length ();
112 encoder->nodes.safe_push (entry);
113 return ref;
114 }
115
116 size_t *slot = encoder->map->get (node);
117 if (!slot || !*slot)
118 {
119 lto_encoder_entry entry = {node, false, false, false};
120 ref = encoder->nodes.length ();
121 if (!slot)
122 encoder->map->put (node, ref + 1);
123 encoder->nodes.safe_push (entry);
124 }
125 else
126 ref = *slot - 1;
127
128 return ref;
129 }
130
/* Remove NODE from encoder.  Return true if NODE was present, false if it
   was not encoded.  Only valid on output encoders (map is non-NULL).  */

bool
lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
				symtab_node *node)
{
  int index;
  lto_encoder_entry last_node;

  /* The map stores reference number + 1; 0 or a missing slot means NODE
     was never encoded.  */
  size_t *slot = encoder->map->get (node);
  if (slot == NULL || !*slot)
    return false;

  index = *slot - 1;
  gcc_checking_assert (encoder->nodes[index].node == node);

  /* Remove from vector.  We do this by swapping node with the last element
     of the vector.  */
  last_node = encoder->nodes.pop ();
  if (last_node.node != node)
    {
      /* Re-point the map entry of the moved node to NODE's old slot.
	 NOTE(review): the put call lives inside gcc_assert's argument and
	 its side effect is required; confirm gcc_assert always evaluates
	 its operand in all build configurations.  */
      gcc_assert (encoder->map->put (last_node.node, index + 1));

      /* Move the last element to the original spot of NODE.  */
      encoder->nodes[index] = last_node;
    }

  /* Remove element from hash table.  */
  encoder->map->remove (node);
  return true;
}
162
163
164 /* Return TRUE if we should encode the body of NODE (if any). */
165
166 bool
167 lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
168 struct cgraph_node *node)
169 {
170 int index = lto_symtab_encoder_lookup (encoder, node);
171 return encoder->nodes[index].body;
172 }
173
174 /* Specify that we encode the body of NODE in this partition. */
175
176 static void
177 lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
178 struct cgraph_node *node)
179 {
180 int index = lto_symtab_encoder_encode (encoder, node);
181 gcc_checking_assert (encoder->nodes[index].node == node);
182 encoder->nodes[index].body = true;
183 }
184
185 /* Return TRUE if we should encode initializer of NODE (if any). */
186
187 bool
188 lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
189 varpool_node *node)
190 {
191 int index = lto_symtab_encoder_lookup (encoder, node);
192 if (index == LCC_NOT_FOUND)
193 return false;
194 return encoder->nodes[index].initializer;
195 }
196
197 /* Specify that we should encode initializer of NODE (if any). */
198
199 static void
200 lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
201 varpool_node *node)
202 {
203 int index = lto_symtab_encoder_lookup (encoder, node);
204 encoder->nodes[index].initializer = true;
205 }
206
207 /* Return TRUE if NODE is in this partition. */
208
209 bool
210 lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
211 symtab_node *node)
212 {
213 int index = lto_symtab_encoder_lookup (encoder, node);
214 if (index == LCC_NOT_FOUND)
215 return false;
216 return encoder->nodes[index].in_partition;
217 }
218
219 /* Specify that NODE is in this partition. */
220
221 void
222 lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
223 symtab_node *node)
224 {
225 int index = lto_symtab_encoder_encode (encoder, node);
226 encoder->nodes[index].in_partition = true;
227 }
228
/* Output the cgraph EDGE to OB using ENCODER.

   Stream layout (order matters; the reader must consume fields in the
   same order): tag, caller ref, [callee ref unless indirect], profile
   count, then a bitpack of edge flags.  */

static void
lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
		 lto_symtab_encoder_t encoder)
{
  unsigned int uid;
  intptr_t ref;
  struct bitpack_d bp;

  /* The tag tells the reader whether a callee reference follows.  */
  if (edge->indirect_unknown_callee)
    streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
			 LTO_symtab_indirect_edge);
  else
    streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
			 LTO_symtab_edge);

  ref = lto_symtab_encoder_lookup (encoder, edge->caller);
  gcc_assert (ref != LCC_NOT_FOUND);
  streamer_write_hwi_stream (ob->main_stream, ref);

  if (!edge->indirect_unknown_callee)
    {
      ref = lto_symtab_encoder_lookup (encoder, edge->callee);
      gcc_assert (ref != LCC_NOT_FOUND);
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  edge->count.stream_out (ob->main_stream);

  bp = bitpack_create (ob->main_stream);
  /* Gimple uids are shifted by one so that 0 can mean "no statement";
     when there is no call statement the pre-assigned LTO uid is used.  */
  uid = !edge->call_stmt ? edge->lto_stmt_uid
	: gimple_uid (edge->call_stmt) + 1;
  bp_pack_enum (&bp, cgraph_inline_failed_t,
		CIF_N_REASONS, edge->inline_failed);
  /* Only thunk edges may legitimately lack a statement uid.  */
  gcc_checking_assert (uid || edge->caller->thunk.thunk_p);
  bp_pack_var_len_unsigned (&bp, uid);
  bp_pack_value (&bp, edge->speculative_id, 16);
  bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
  bp_pack_value (&bp, edge->speculative, 1);
  bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
  gcc_assert (!edge->call_stmt_cannot_inline_p
	      || edge->inline_failed != CIF_BODY_NOT_AVAILABLE);
  bp_pack_value (&bp, edge->can_throw_external, 1);
  bp_pack_value (&bp, edge->in_polymorphic_cdtor, 1);
  if (edge->indirect_unknown_callee)
    {
      /* For indirect edges the interesting ECF flags are streamed as
	 individual bits.  */
      int flags = edge->indirect_info->ecf_flags;
      bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
      /* Flags that should not appear on indirect calls.  */
      gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
			     | ECF_MAY_BE_ALLOCA
			     | ECF_SIBCALL
			     | ECF_LEAF
			     | ECF_NOVOPS)));

      bp_pack_value (&bp, edge->indirect_info->num_speculative_call_targets,
		     16);
    }
  streamer_write_bitpack (&bp);
}
295
296 /* Return if NODE contain references from other partitions. */
297
298 bool
299 referenced_from_other_partition_p (symtab_node *node, lto_symtab_encoder_t encoder)
300 {
301 int i;
302 struct ipa_ref *ref = NULL;
303
304 for (i = 0; node->iterate_referring (i, ref); i++)
305 {
306 /* Ignore references from non-offloadable nodes while streaming NODE into
307 offload LTO section. */
308 if (!ref->referring->need_lto_streaming)
309 continue;
310
311 if (ref->referring->in_other_partition
312 || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
313 return true;
314 }
315 return false;
316 }
317
318 /* Return true when node is reachable from other partition. */
319
320 bool
321 reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
322 {
323 struct cgraph_edge *e;
324 if (!node->definition)
325 return false;
326 if (node->inlined_to)
327 return false;
328 for (e = node->callers; e; e = e->next_caller)
329 {
330 /* Ignore references from non-offloadable nodes while streaming NODE into
331 offload LTO section. */
332 if (!e->caller->need_lto_streaming)
333 continue;
334
335 if (e->caller->in_other_partition
336 || !lto_symtab_encoder_in_partition_p (encoder, e->caller))
337 return true;
338 }
339 return false;
340 }
341
342 /* Return if NODE contain references from other partitions. */
343
344 bool
345 referenced_from_this_partition_p (symtab_node *node,
346 lto_symtab_encoder_t encoder)
347 {
348 int i;
349 struct ipa_ref *ref = NULL;
350
351 for (i = 0; node->iterate_referring (i, ref); i++)
352 if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
353 return true;
354 return false;
355 }
356
357 /* Return true when node is reachable from other partition. */
358
359 bool
360 reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
361 {
362 struct cgraph_edge *e;
363 for (e = node->callers; e; e = e->next_caller)
364 if (lto_symtab_encoder_in_partition_p (encoder, e->caller))
365 return true;
366 return false;
367 }
368
/* Output the cgraph NODE to OB.  ENCODER is used to find the
   reference number of NODE->inlined_to.  SET is the set of nodes we
   are writing to the current file.  If NODE is not in SET, then NODE
   is a boundary of a cgraph_node_set and we pretend NODE just has a
   decl and no callees.  WRITTEN_DECLS is the set of FUNCTION_DECLs
   that have had their callgraph node written so far.  This is used to
   determine if NODE is a clone of a previously written node.

   The field order written here must match the reader exactly.  */

static void
lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
		 lto_symtab_encoder_t encoder)
{
  unsigned int tag;
  struct bitpack_d bp;
  bool boundary_p;
  intptr_t ref;
  bool in_other_partition = false;
  struct cgraph_node *clone_of, *ultimate_clone_of;
  ipa_opt_pass_d *pass;
  int i;
  const char *comdat;
  const char *section;
  tree group;

  boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);

  /* Analyzed nodes in the partition (and aliases/thunks even at the
     boundary) keep their body; everything else is streamed as a mere
     declaration.  */
  if (node->analyzed && (!boundary_p || node->alias
			 || (node->thunk.thunk_p && !node->inlined_to)))
    tag = LTO_symtab_analyzed_node;
  else
    tag = LTO_symtab_unavail_node;

  streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
		       tag);
  streamer_write_hwi_stream (ob->main_stream, node->order);

  /* In WPA mode, we only output part of the call-graph.  Also, we
     fake cgraph node attributes.  There are two cases that we care.

     Boundary nodes: There are nodes that are not part of SET but are
     called from within SET.  We artificially make them look like
     externally visible nodes with no function body.

     Cherry-picked nodes: These are nodes we pulled from other
     translation units into SET during IPA-inlining.  We make them as
     local static nodes to prevent clashes with other local statics.  */
  if (boundary_p && node->analyzed
      && node->get_partitioning_class () == SYMBOL_PARTITION)
    {
      /* Inline clones cannot be part of boundary.
	 gcc_assert (!node->inlined_to);

	 FIXME: At the moment they can be, when partition contains an inline
	 clone that is clone of inline clone from outside partition.  We can
	 reshape the clone tree and make other tree to be the root, but it
	 needs a bit extra work and will be promptly done by cgraph_remove_node
	 after reading back.  */
      in_other_partition = 1;
    }

  /* Find the closest clone ancestor that is itself encoded; walk
     sideways through siblings before going up.  */
  clone_of = node->clone_of;
  while (clone_of
	 && (ref = lto_symtab_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
    if (clone_of->prev_sibling_clone)
      clone_of = clone_of->prev_sibling_clone;
    else
      clone_of = clone_of->clone_of;

  /* See if body of the master function is output.  If not, we are seeing
     only a declaration and we do not need to pass down clone tree.  */
  ultimate_clone_of = clone_of;
  while (ultimate_clone_of && ultimate_clone_of->clone_of)
    ultimate_clone_of = ultimate_clone_of->clone_of;

  if (clone_of && !lto_symtab_encoder_encode_body_p (encoder, ultimate_clone_of))
    clone_of = NULL;

  if (tag == LTO_symtab_analyzed_node)
    gcc_assert (clone_of || !node->clone_of);
  if (!clone_of)
    streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
  else
    streamer_write_hwi_stream (ob->main_stream, ref);


  lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
  node->count.stream_out (ob->main_stream);
  streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);

  /* Pending IPA transforms: length followed by pass numbers.  */
  streamer_write_hwi_stream (ob->main_stream,
			     node->ipa_transforms_to_apply.length ());
  FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
    streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);

  if (tag == LTO_symtab_analyzed_node)
    {
      /* Reference of the function this one is inlined into, or
	 LCC_NOT_FOUND for an offline copy.  */
      if (node->inlined_to)
	{
	  ref = lto_symtab_encoder_lookup (encoder, node->inlined_to);
	  gcc_assert (ref != LCC_NOT_FOUND);
	}
      else
	ref = LCC_NOT_FOUND;

      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  /* Comdat group is streamed as a NUL-terminated string; empty string
     means no group.  */
  group = node->get_comdat_group ();
  if (group)
    comdat = IDENTIFIER_POINTER (group);
  else
    comdat = "";
  streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);

  if (group)
    {
      /* Reference of the next encoded member of the same comdat group,
	 walking the circular same_comdat_group list.  */
      if (node->same_comdat_group)
	{
	  ref = LCC_NOT_FOUND;
	  for (struct symtab_node *n = node->same_comdat_group;
	       ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
	    ref = lto_symtab_encoder_lookup (encoder, n);
	}
      else
	ref = LCC_NOT_FOUND;
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  /* Section name is computed here but streamed only after the flag
     bitpack below.  */
  section = node->get_section ();
  if (!section)
    section = "";

  streamer_write_hwi_stream (ob->main_stream, node->tp_first_run);

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, node->local, 1);
  bp_pack_value (&bp, node->externally_visible, 1);
  bp_pack_value (&bp, node->no_reorder, 1);
  bp_pack_value (&bp, node->definition, 1);
  bp_pack_value (&bp, node->versionable, 1);
  bp_pack_value (&bp, node->can_change_signature, 1);
  bp_pack_value (&bp, node->redefined_extern_inline, 1);
  bp_pack_value (&bp, node->force_output, 1);
  bp_pack_value (&bp, node->forced_by_abi, 1);
  bp_pack_value (&bp, node->unique_name, 1);
  bp_pack_value (&bp, node->body_removed, 1);
  bp_pack_value (&bp, node->implicit_section, 1);
  bp_pack_value (&bp, node->address_taken, 1);
  /* "used from other partition" bit, computed on the fly.  */
  bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
		 && node->get_partitioning_class () == SYMBOL_PARTITION
		 && (reachable_from_other_partition_p (node, encoder)
		     || referenced_from_other_partition_p (node, encoder)), 1);
  bp_pack_value (&bp, node->lowered, 1);
  bp_pack_value (&bp, in_other_partition, 1);
  bp_pack_value (&bp, node->alias, 1);
  bp_pack_value (&bp, node->transparent_alias, 1);
  bp_pack_value (&bp, node->weakref, 1);
  bp_pack_value (&bp, node->symver, 1);
  bp_pack_value (&bp, node->frequency, 2);
  bp_pack_value (&bp, node->only_called_at_startup, 1);
  bp_pack_value (&bp, node->only_called_at_exit, 1);
  bp_pack_value (&bp, node->tm_clone, 1);
  bp_pack_value (&bp, node->calls_comdat_local, 1);
  bp_pack_value (&bp, node->icf_merged, 1);
  bp_pack_value (&bp, node->nonfreeing_fn, 1);
  bp_pack_value (&bp, node->merged_comdat, 1);
  bp_pack_value (&bp, node->merged_extern_inline, 1);
  bp_pack_value (&bp, node->thunk.thunk_p, 1);
  bp_pack_value (&bp, node->parallelized_function, 1);
  bp_pack_value (&bp, node->declare_variant_alt, 1);
  bp_pack_value (&bp, node->calls_declare_variant_alt, 1);
  bp_pack_enum (&bp, ld_plugin_symbol_resolution,
	        LDPR_NUM_KNOWN,
		/* When doing incremental link, we will get new resolution
		   info next time we process the file.  */
		flag_incremental_link ? LDPR_UNKNOWN : node->resolution);
  bp_pack_value (&bp, node->split_part, 1);
  streamer_write_bitpack (&bp);
  streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);

  /* Stream thunk info always because we use it in
     ipa_polymorphic_call_context::ipa_polymorphic_call_context
     to properly interpret THIS pointers for thunks that has been converted
     to Gimple.  */
  if (node->definition)
    {
      /* Thunk flags are packed into one uhwi: bit 0 always set, bit 1 is
	 this_adjusting, bit 2 is virtual_offset_p.  */
      streamer_write_uhwi_stream
	 (ob->main_stream,
	  1 + (node->thunk.this_adjusting != 0) * 2
	  + (node->thunk.virtual_offset_p != 0) * 4);
      streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
      streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
      streamer_write_uhwi_stream (ob->main_stream, node->thunk.indirect_offset);
    }
  streamer_write_hwi_stream (ob->main_stream, node->profile_id);
  streamer_write_hwi_stream (ob->main_stream, node->unit_id);
  /* Ctor/dtor priorities are streamed only when the corresponding decl
     flag is set; the reader checks the same flags.  */
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    streamer_write_hwi_stream (ob->main_stream, node->get_init_priority ());
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    streamer_write_hwi_stream (ob->main_stream, node->get_fini_priority ());
}
570
/* Output the varpool NODE to OB.
   If NODE is not in SET, then NODE is a boundary.

   The field order written here must match the reader exactly.  */

static void
lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
			 lto_symtab_encoder_t encoder)
{
  bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
  bool encode_initializer_p
    = (node->definition
       && lto_symtab_encoder_encode_initializer_p (encoder, node));
  struct bitpack_d bp;
  int ref;
  const char *comdat;
  const char *section;
  tree group;

  gcc_assert (!encode_initializer_p || node->definition);
  gcc_assert (boundary_p || encode_initializer_p);

  streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
		       LTO_symtab_variable);
  streamer_write_hwi_stream (ob->main_stream, node->order);
  lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, node->externally_visible, 1);
  bp_pack_value (&bp, node->no_reorder, 1);
  bp_pack_value (&bp, node->force_output, 1);
  bp_pack_value (&bp, node->forced_by_abi, 1);
  bp_pack_value (&bp, node->unique_name, 1);
  /* body_removed: also true when the initializer is deliberately not
     streamed for this (non-alias) definition.  */
  bp_pack_value (&bp,
		 node->body_removed
		 || (!encode_initializer_p && !node->alias && node->definition),
		 1);
  bp_pack_value (&bp, node->implicit_section, 1);
  bp_pack_value (&bp, node->writeonly, 1);
  /* definition: only kept when the initializer travels along (or the
     node is an alias).  */
  bp_pack_value (&bp, node->definition && (encode_initializer_p || node->alias),
		 1);
  bp_pack_value (&bp, node->alias, 1);
  bp_pack_value (&bp, node->transparent_alias, 1);
  bp_pack_value (&bp, node->weakref, 1);
  bp_pack_value (&bp, node->symver, 1);
  /* analyzed: boundary nodes (except aliases) are demoted.  */
  bp_pack_value (&bp, node->analyzed && (!boundary_p || node->alias), 1);
  gcc_assert (node->definition || !node->analyzed);
  /* Constant pool initializers can be de-unified into individual ltrans units.
     FIXME: Alternatively at -Os we may want to avoid generating for them the local
     labels and share them across LTRANS partitions.  */
  if (node->get_partitioning_class () != SYMBOL_PARTITION)
    {
      bp_pack_value (&bp, 0, 1);  /* used_from_other_partition.  */
      bp_pack_value (&bp, 0, 1);  /* in_other_partition.  */
    }
  else
    {
      /* used_from_other_partition.  */
      bp_pack_value (&bp, node->definition
		     && referenced_from_other_partition_p (node, encoder), 1);
      bp_pack_value (&bp, node->analyzed
		     && boundary_p && !DECL_EXTERNAL (node->decl), 1);
	  /* in_other_partition.  */
    }
  bp_pack_value (&bp, node->tls_model, 3);
  bp_pack_value (&bp, node->used_by_single_function, 1);
  bp_pack_value (&bp, node->dynamically_initialized, 1);
  streamer_write_bitpack (&bp);

  /* Comdat group as NUL-terminated string; empty means no group.  */
  group = node->get_comdat_group ();
  if (group)
    comdat = IDENTIFIER_POINTER (group);
  else
    comdat = "";
  streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);

  if (group)
    {
      /* Reference of the next encoded member of the same comdat group,
	 walking the circular same_comdat_group list.  */
      if (node->same_comdat_group)
	{
	  ref = LCC_NOT_FOUND;
	  for (struct symtab_node *n = node->same_comdat_group;
	       ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
	    ref = lto_symtab_encoder_lookup (encoder, n);
	}
      else
	ref = LCC_NOT_FOUND;
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  section = node->get_section ();
  if (!section)
    section = "";
  streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);

  streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
		       LDPR_NUM_KNOWN, node->resolution);
}
665
/* Output the IPA reference REF to OB using ENCODER.
   (The previous comment here was a copy-paste of the varpool one.)  */

static void
lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
		lto_symtab_encoder_t encoder)
{
  struct bitpack_d bp;
  int nref;
  /* Gimple uids are shifted by one so 0 can mean "no statement".  */
  int uid = !ref->stmt ? ref->lto_stmt_uid : gimple_uid (ref->stmt) + 1;
  struct cgraph_node *node;

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ref->use, 3);
  bp_pack_value (&bp, ref->speculative, 1);
  streamer_write_bitpack (&bp);
  nref = lto_symtab_encoder_lookup (encoder, ref->referred);
  gcc_assert (nref != LCC_NOT_FOUND);
  streamer_write_hwi_stream (ob->main_stream, nref);

  /* Only references coming from functions carry a statement uid and a
     speculative id.  */
  node = dyn_cast <cgraph_node *> (ref->referring);
  if (node)
    {
      if (ref->stmt)
	uid = gimple_uid (ref->stmt) + 1;
      streamer_write_hwi_stream (ob->main_stream, uid);
      /* BP was flushed above; streamer_write_bitpack resets it, so it can
	 be reused here for a second bitpack.  */
      bp_pack_value (&bp, ref->speculative_id, 16);
      streamer_write_bitpack (&bp);
    }
}
696
697 /* Stream out profile_summary to OB. */
698
699 static void
700 output_profile_summary (struct lto_simple_output_block *ob)
701 {
702 if (profile_info)
703 {
704 /* We do not output num and run_max, they are not used by
705 GCC profile feedback and they are difficult to merge from multiple
706 units. */
707 unsigned runs = (profile_info->runs);
708 streamer_write_uhwi_stream (ob->main_stream, runs);
709
710 /* IPA-profile computes hot bb threshold based on cumulated
711 whole program profile. We need to stream it down to ltrans. */
712 if (flag_wpa)
713 streamer_write_gcov_count_stream (ob->main_stream,
714 get_hot_bb_threshold ());
715 }
716 else
717 streamer_write_uhwi_stream (ob->main_stream, 0);
718 }
719
720 /* Output all callees or indirect outgoing edges. EDGE must be the first such
721 edge. */
722
723 static void
724 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
725 struct lto_simple_output_block *ob,
726 lto_symtab_encoder_t encoder)
727 {
728 if (!edge)
729 return;
730
731 /* Output edges in backward direction, so the reconstructed callgraph match
732 and it is easy to associate call sites in the IPA pass summaries. */
733 while (edge->next_callee)
734 edge = edge->next_callee;
735 for (; edge; edge = edge->prev_callee)
736 lto_output_edge (ob, edge, encoder);
737 }
738
739 /* Output the part of the cgraph in SET. */
740
741 static void
742 output_refs (lto_symtab_encoder_t encoder)
743 {
744 struct lto_simple_output_block *ob;
745 int count;
746 struct ipa_ref *ref;
747
748 ob = lto_create_simple_output_block (LTO_section_refs);
749
750 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
751 {
752 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
753
754 /* IPA_REF_ALIAS references are always preserved
755 in the boundary. Alias node can't have other references and
756 can be always handled as if it's not in the boundary. */
757 if (!node->alias && !lto_symtab_encoder_in_partition_p (encoder, node))
758 continue;
759
760 count = node->ref_list.nreferences ();
761 if (count)
762 {
763 streamer_write_gcov_count_stream (ob->main_stream, count);
764 streamer_write_uhwi_stream (ob->main_stream,
765 lto_symtab_encoder_lookup (encoder, node));
766 for (int i = 0; node->iterate_reference (i, ref); i++)
767 lto_output_ref (ob, ref, encoder);
768 }
769 }
770
771 streamer_write_uhwi_stream (ob->main_stream, 0);
772
773 lto_destroy_simple_output_block (ob);
774 }
775
776 /* Add NODE into encoder as well as nodes it is cloned from.
777 Do it in a way so clones appear first. */
778
779 static void
780 add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
781 bool include_body)
782 {
783 if (node->clone_of)
784 add_node_to (encoder, node->clone_of, include_body);
785 else if (include_body)
786 lto_set_symtab_encoder_encode_body (encoder, node);
787 lto_symtab_encoder_encode (encoder, node);
788 }
789
790 /* Add all references in NODE to encoders. */
791
792 static void
793 create_references (lto_symtab_encoder_t encoder, symtab_node *node)
794 {
795 int i;
796 struct ipa_ref *ref = NULL;
797 for (i = 0; node->iterate_reference (i, ref); i++)
798 if (is_a <cgraph_node *> (ref->referred))
799 add_node_to (encoder, dyn_cast <cgraph_node *> (ref->referred), false);
800 else
801 lto_symtab_encoder_encode (encoder, ref->referred);
802 }
803
804 /* Select what needs to be streamed out. In regular lto mode stream everything.
805 In offload lto mode stream only nodes marked as offloadable. */
806 void
807 select_what_to_stream (void)
808 {
809 struct symtab_node *snode;
810 FOR_EACH_SYMBOL (snode)
811 snode->need_lto_streaming = !lto_stream_offload_p || snode->offloadable;
812 }
813
/* Find all symbols we want to stream into given partition and insert them
   to encoders.

   The function actually replaces IN_ENCODER by new one.  The reason is that
   streaming code needs clone's origin to be streamed before clone.  This
   means that we need to insert the nodes in specific order.  This order is
   ignored by the partitioning logic earlier.  */

lto_symtab_encoder_t
compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
{
  struct cgraph_edge *edge;
  int i;
  lto_symtab_encoder_t encoder;
  lto_symtab_encoder_iterator lsei;
  hash_set<void *> reachable_call_targets;

  encoder = lto_symtab_encoder_new (false);

  /* Go over all entries in the IN_ENCODER and duplicate them to
     ENCODER.  At the same time insert masters of clones so
     every master appears before clone.  */
  for (lsei = lsei_start_function_in_partition (in_encoder);
       !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
    {
      struct cgraph_node *node = lsei_cgraph_node (lsei);
      if (!node->need_lto_streaming)
	continue;
      add_node_to (encoder, node, true);
      lto_set_symtab_encoder_in_partition (encoder, node);
      create_references (encoder, node);
    }
  for (lsei = lsei_start_variable_in_partition (in_encoder);
       !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
    {
      varpool_node *vnode = lsei_varpool_node (lsei);

      if (!vnode->need_lto_streaming)
	continue;
      lto_set_symtab_encoder_in_partition (encoder, vnode);
      lto_set_symtab_encoder_encode_initializer (encoder, vnode);
      create_references (encoder, vnode);
    }
  /* Pickle in also the initializer of all referenced readonly variables
     to help folding.  Constant pool variables are not shared, so we must
     pickle those too.

     Note: the loop bound is re-evaluated each iteration on purpose;
     create_references may grow the encoder and the new entries must be
     visited as well.  */
  for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
	{
	  if (!lto_symtab_encoder_encode_initializer_p (encoder,
							vnode)
	      && (((vnode->ctor_useable_for_folding_p ()
		   && (!DECL_VIRTUAL_P (vnode->decl)
		       || !flag_wpa
		       || flag_ltrans_devirtualize)))))
	    {
	      lto_set_symtab_encoder_encode_initializer (encoder, vnode);
	      create_references (encoder, vnode);
	    }
	}
    }

  /* Go over all the nodes again to include callees that are not in
     SET.  */
  for (lsei = lsei_start_function_in_partition (encoder);
       !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
    {
      struct cgraph_node *node = lsei_cgraph_node (lsei);
      for (edge = node->callees; edge; edge = edge->next_callee)
	{
	  struct cgraph_node *callee = edge->callee;
	  if (!lto_symtab_encoder_in_partition_p (encoder, callee))
	    {
	      /* We should have moved all the inlines.  */
	      gcc_assert (!callee->inlined_to);
	      add_node_to (encoder, callee, false);
	    }
	}
      /* Add all possible targets for late devirtualization.  */
      if (flag_ltrans_devirtualize || !flag_wpa)
	for (edge = node->indirect_calls; edge; edge = edge->next_callee)
	  if (edge->indirect_info->polymorphic)
	    {
	      unsigned int i;
	      void *cache_token;
	      bool final;
	      vec <cgraph_node *>targets
		= possible_polymorphic_call_targets
		    (edge, &final, &cache_token);
	      /* The cache token dedups identical polymorphic call sites,
		 so each target set is processed only once.  */
	      if (!reachable_call_targets.add (cache_token))
		{
		  for (i = 0; i < targets.length (); i++)
		    {
		      struct cgraph_node *callee = targets[i];

		      /* Adding an external declarations into the unit serves
			 no purpose and just increases its boundary.  */
		      if (callee->definition
			  && !lto_symtab_encoder_in_partition_p
				(encoder, callee))
			{
			  gcc_assert (!callee->inlined_to);
			  add_node_to (encoder, callee, false);
			}
		    }
		}
	    }
    }
  /* Be sure to also insert alias target and thunk callees.  These need
     to stay to aid local calling conventions.  */
  for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (node);

      if (node->alias && node->analyzed)
	create_references (encoder, node);
      if (cnode
	  && cnode->thunk.thunk_p && !cnode->inlined_to)
	add_node_to (encoder, cnode->callees->callee, false);
      /* Follow transparent-alias chains all the way down.  */
      while (node->transparent_alias && node->analyzed)
	{
	  node = node->get_alias_target ();
	  if (is_a <cgraph_node *> (node))
	    add_node_to (encoder, dyn_cast <cgraph_node *> (node),
			 false);
	  else
	    lto_symtab_encoder_encode (encoder, node);
	}
    }
  lto_symtab_encoder_delete (in_encoder);
  return encoder;
}
949
/* Output the part of the symtab in SET and VSET.

   Section layout: profile summary, nodes (masters before clones), edges,
   a zero terminator, then toplevel asms (first partition only) and the
   refs section.  */

void
output_symtab (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob;
  int i, n_nodes;
  lto_symtab_encoder_t encoder;

  if (flag_wpa)
    output_cgraph_opt_summary ();

  ob = lto_create_simple_output_block (LTO_section_symtab_nodes);

  output_profile_summary (ob);

  /* An encoder for cgraph nodes should have been created by
     ipa_write_summaries_1.  */
  gcc_assert (ob->decl_state->symtab_node_encoder);
  encoder = ob->decl_state->symtab_node_encoder;

  /* Write out the nodes.  We must first output a node and then its clones,
     otherwise at a time reading back the node there would be nothing to clone
     from.  */
  n_nodes = lto_symtab_encoder_size (encoder);
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	lto_output_node (ob, cnode, encoder);
      else
	lto_output_varpool_node (ob, dyn_cast<varpool_node *> (node), encoder);
    }

  /* Go over the nodes in SET again to write edges.  Only function nodes
     in the partition (or standalone thunks) have their edges written.  */
  for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder, i));
      if (node
	  && ((node->thunk.thunk_p && !node->inlined_to)
	      || lto_symtab_encoder_in_partition_p (encoder, node)))
	{
	  output_outgoing_cgraph_edges (node->callees, ob, encoder);
	  output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
	}
    }

  /* Zero terminates the node/edge records.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);

  /* Emit toplevel asms.
     When doing WPA we must output every asm just once.  Since we do not partition asm
     nodes at all, output them to first output.  This is kind of hack, but should work
     well.  */
  if (!asm_nodes_output)
    {
      asm_nodes_output = true;
      lto_output_toplevel_asms ();
    }

  output_refs (encoder);
}
1014
1015 /* Return identifier encoded in IB as a plain string. */
1016
1017 static tree
1018 read_identifier (class lto_input_block *ib)
1019 {
1020 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1021 tree id;
1022
1023 if (ib->data[ib->p + len])
1024 lto_section_overrun (ib);
1025 if (!len)
1026 {
1027 ib->p++;
1028 return NULL;
1029 }
1030 id = get_identifier (ib->data + ib->p);
1031 ib->p += len + 1;
1032 return id;
1033 }
1034
1035 /* Return string encoded in IB, NULL if string is empty. */
1036
1037 static const char *
1038 read_string (class lto_input_block *ib)
1039 {
1040 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1041 const char *str;
1042
1043 if (ib->data[ib->p + len])
1044 lto_section_overrun (ib);
1045 if (!len)
1046 {
1047 ib->p++;
1048 return NULL;
1049 }
1050 str = ib->data + ib->p;
1051 ib->p += len + 1;
1052 return str;
1053 }
1054
1055 /* Output function/variable tables that will allow libgomp to look up offload
1056 target code.
1057 OFFLOAD_FUNCS is filled in expand_omp_target, OFFLOAD_VARS is filled in
1058 varpool_node::get_create. In WHOPR (partitioned) mode during the WPA stage
1059 both OFFLOAD_FUNCS and OFFLOAD_VARS are filled by input_offload_tables. */
1060
void
output_offload_tables (void)
{
  if (vec_safe_is_empty (offload_funcs) && vec_safe_is_empty (offload_vars))
    return;

  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_offload_table);

  /* Functions are tagged LTO_symtab_unavail_node and variables
     LTO_symtab_variable; input_offload_tables dispatches on these tags.  */
  for (unsigned i = 0; i < vec_safe_length (offload_funcs); i++)
    {
      streamer_write_enum (ob->main_stream, LTO_symtab_tags,
			   LTO_symtab_last_tag, LTO_symtab_unavail_node);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
				(*offload_funcs)[i]);
    }

  for (unsigned i = 0; i < vec_safe_length (offload_vars); i++)
    {
      streamer_write_enum (ob->main_stream, LTO_symtab_tags,
			   LTO_symtab_last_tag, LTO_symtab_variable);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream,
				 (*offload_vars)[i]);
    }

  /* A zero tag terminates the table.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);
  lto_destroy_simple_output_block (ob);

  /* In WHOPR mode during the WPA stage the joint offload tables need to be
     streamed to one partition only.  That's why we free offload_funcs and
     offload_vars after the first call of output_offload_tables.  */
  if (flag_wpa)
    {
      vec_free (offload_funcs);
      vec_free (offload_vars);
    }
}
1098
1099 /* Verify the partitioning of NODE. */
1100
static inline void
verify_node_partition (symtab_node *node)
{
  /* During LTRANS cross-partition references are expected.  */
  if (flag_ltrans)
    return;

#ifdef ACCEL_COMPILER
  /* In the offload (accel) compiler a node placed in another partition
     means the symbol was referenced from offloaded code without having
     been marked for offloading: report it as a user error.  */
  if (node->in_other_partition)
    {
      if (TREE_CODE (node->decl) == FUNCTION_DECL)
	error_at (DECL_SOURCE_LOCATION (node->decl),
		  "function %qs has been referenced in offloaded code but"
		  " hasn%'t been marked to be included in the offloaded code",
		  node->name ());
      else if (VAR_P (node->decl))
	error_at (DECL_SOURCE_LOCATION (node->decl),
		  "variable %qs has been referenced in offloaded code but"
		  " hasn%'t been marked to be included in the offloaded code",
		  node->name ());
      else
	gcc_unreachable ();
    }
#else
  /* The host compiler outside LTRANS should never see partitioned
     nodes.  */
  gcc_assert (!node->in_other_partition
	      && !node->used_from_other_partition);
#endif
}
1128
1129 /* Overwrite the information in NODE based on FILE_DATA, TAG, FLAGS,
1130 STACK_SIZE, SELF_TIME and SELF_SIZE. This is called either to initialize
1131 NODE or to replace the values in it, for instance because the first
1132 time we saw it, the function body was not available but now it
1133 is. BP is a bitpack with all the bitflags for NODE read from the
1134 stream. */
1135
static void
input_overwrite_node (struct lto_file_decl_data *file_data,
		      struct cgraph_node *node,
		      enum LTO_symtab_tags tag,
		      struct bitpack_d *bp)
{
  /* TAG is stashed in AUX so input_cgraph_1 can later tell which nodes
     came from the stream; it is cleared during fixup.  */
  node->aux = (void *) tag;
  node->lto_file_data = file_data;

  /* NOTE: the unpack sequence below must mirror exactly the pack sequence
     of the writer (lto_output_node); reordering corrupts every following
     flag.  */
  node->local = bp_unpack_value (bp, 1);
  node->externally_visible = bp_unpack_value (bp, 1);
  node->no_reorder = bp_unpack_value (bp, 1);
  node->definition = bp_unpack_value (bp, 1);
  node->versionable = bp_unpack_value (bp, 1);
  node->can_change_signature = bp_unpack_value (bp, 1);
  node->redefined_extern_inline = bp_unpack_value (bp, 1);
  node->force_output = bp_unpack_value (bp, 1);
  node->forced_by_abi = bp_unpack_value (bp, 1);
  node->unique_name = bp_unpack_value (bp, 1);
  node->body_removed = bp_unpack_value (bp, 1);
  node->implicit_section = bp_unpack_value (bp, 1);
  node->address_taken = bp_unpack_value (bp, 1);
  node->used_from_other_partition = bp_unpack_value (bp, 1);
  node->lowered = bp_unpack_value (bp, 1);
  /* Analyzed-ness is implied by the record tag rather than a bit.  */
  node->analyzed = tag == LTO_symtab_analyzed_node;
  node->in_other_partition = bp_unpack_value (bp, 1);
  if (node->in_other_partition
      /* Avoid updating decl when we are seeing just inline clone.
	 When inlining function that has functions already inlined into it,
	 we produce clones of inline clones.

	 WPA partitioning might put each clone into different unit and
	 we might end up streaming inline clone from other partition
	 to support clone we are interested in.  */
      && (!node->clone_of
	  || node->clone_of->decl != node->decl))
    {
      /* The body lives in another partition; treat the decl as external
	 here.  */
      DECL_EXTERNAL (node->decl) = 1;
      TREE_STATIC (node->decl) = 0;
    }
  node->alias = bp_unpack_value (bp, 1);
  node->transparent_alias = bp_unpack_value (bp, 1);
  node->weakref = bp_unpack_value (bp, 1);
  node->symver = bp_unpack_value (bp, 1);
  node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
  node->only_called_at_startup = bp_unpack_value (bp, 1);
  node->only_called_at_exit = bp_unpack_value (bp, 1);
  node->tm_clone = bp_unpack_value (bp, 1);
  node->calls_comdat_local = bp_unpack_value (bp, 1);
  node->icf_merged = bp_unpack_value (bp, 1);
  node->nonfreeing_fn = bp_unpack_value (bp, 1);
  node->merged_comdat = bp_unpack_value (bp, 1);
  node->merged_extern_inline = bp_unpack_value (bp, 1);
  node->thunk.thunk_p = bp_unpack_value (bp, 1);
  node->parallelized_function = bp_unpack_value (bp, 1);
  node->declare_variant_alt = bp_unpack_value (bp, 1);
  node->calls_declare_variant_alt = bp_unpack_value (bp, 1);
  node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
				     LDPR_NUM_KNOWN);
  node->split_part = bp_unpack_value (bp, 1);
  verify_node_partition (node);
}
1198
/* Return the identifier naming the target that DECL's "alias" attribute
   refers to.  */
1200
1201 static tree
1202 get_alias_symbol (tree decl)
1203 {
1204 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1205 return get_identifier (TREE_STRING_POINTER
1206 (TREE_VALUE (TREE_VALUE (alias))));
1207 }
1208
/* Read a cgraph node from input_block IB.  TAG is the node's tag just read.
   Return the node read or overwritten.  */
1211
static struct cgraph_node *
input_node (struct lto_file_decl_data *file_data,
	    class lto_input_block *ib,
	    enum LTO_symtab_tags tag,
	    vec<symtab_node *> nodes)
{
  gcc::pass_manager *passes = g->get_passes ();
  tree fn_decl;
  struct cgraph_node *node;
  struct bitpack_d bp;
  unsigned decl_index;
  int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
  int clone_ref;
  int order;
  int i, count;
  tree group;
  const char *section;
  /* NOTE: the reads below must mirror exactly the writes in
     lto_output_node; any reordering corrupts the stream.  */
  order = streamer_read_hwi (ib) + file_data->order_base;
  clone_ref = streamer_read_hwi (ib);

  decl_index = streamer_read_uhwi (ib);
  fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);

  if (clone_ref != LCC_NOT_FOUND)
    {
      /* The node is a clone of an earlier node in this stream; recreate
	 it by cloning the original (NODES is indexed by stream order).  */
      node = dyn_cast<cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
	profile_count::uninitialized (), false,
	vNULL, false, NULL, NULL);
    }
  else
    {
      /* Declaration of functions can be already merged with a declaration
	 from other input file.  We keep cgraph unmerged until after streaming
	 of ipa passes is done.  Always forcibly create a fresh node.  */
      node = symtab->create_empty ();
      node->decl = fn_decl;
      if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (fn_decl)))
	node->ifunc_resolver = 1;
      node->register_symbol ();
    }

  /* Renumber ORDER into this compilation's global order space.  */
  node->order = order;
  if (order >= symtab->order)
    symtab->order = order + 1;

  node->count = profile_count::stream_in (ib);
  node->count_materialization_scale = streamer_read_hwi (ib);

  /* Read the list of IPA transform passes still to be applied when the
     body is materialized, encoded as pass ids.  */
  count = streamer_read_hwi (ib);
  node->ipa_transforms_to_apply = vNULL;
  for (i = 0; i < count; i++)
    {
      opt_pass *pass;
      int pid = streamer_read_hwi (ib);

      gcc_assert (pid < passes->passes_by_id_size);
      pass = passes->passes_by_id[pid];
      node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass_d *) pass);
    }

  /* REF is the inlined_to reference (stream index), only present for
     analyzed nodes.  */
  if (tag == LTO_symtab_analyzed_node)
    ref = streamer_read_hwi (ib);

  group = read_identifier (ib);
  if (group)
    ref2 = streamer_read_hwi (ib);

  /* Make sure that we have not read this node before.  Nodes that
     have already been read will have their tag stored in the 'aux'
     field.  Since built-in functions can be referenced in multiple
     functions, they are expected to be read more than once.  */
  if (node->aux && !fndecl_built_in_p (node->decl))
    internal_error ("bytecode stream: found multiple instances of cgraph "
		    "node with uid %d", node->get_uid ());

  node->tp_first_run = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);

  input_overwrite_node (file_data, node, tag, &bp);

  /* Store a reference for now, and fix up later to be a pointer.  */
  node->inlined_to = (cgraph_node *) (intptr_t) ref;

  if (group)
    {
      node->set_comdat_group (group);
      /* Store a reference for now, and fix up later to be a pointer.  */
      node->same_comdat_group = (symtab_node *) (intptr_t) ref2;
    }
  else
    node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
  section = read_string (ib);
  if (section)
    node->set_section_for_node (section);

  if (node->definition)
    {
      /* Thunk data is streamed for every definition; TYPE packs the
	 this_adjusting (bit 1) and virtual_offset_p (bit 2) flags.  */
      int type = streamer_read_uhwi (ib);
      HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
      HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);
      HOST_WIDE_INT indirect_offset = streamer_read_uhwi (ib);

      node->thunk.fixed_offset = fixed_offset;
      node->thunk.virtual_value = virtual_value;
      node->thunk.indirect_offset = indirect_offset;
      node->thunk.this_adjusting = (type & 2);
      node->thunk.virtual_offset_p = (type & 4);
    }
  /* For unanalyzed weakrefs the alias target must be recovered from the
     "alias" attribute on the decl.  */
  if (node->alias && !node->analyzed && node->weakref)
    node->alias_target = get_alias_symbol (node->decl);
  node->profile_id = streamer_read_hwi (ib);
  /* Renumber the unit id into this compilation's unit space.  */
  node->unit_id = streamer_read_hwi (ib) + file_data->unit_base;
  if (symtab->max_unit < node->unit_id)
    symtab->max_unit = node->unit_id;
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    node->set_init_priority (streamer_read_hwi (ib));
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    node->set_fini_priority (streamer_read_hwi (ib));

  return node;
}
1334
/* Read a varpool node from input_block IB.
   Return the node read or overwritten.  */
1337
1338 static varpool_node *
1339 input_varpool_node (struct lto_file_decl_data *file_data,
1340 class lto_input_block *ib)
1341 {
1342 int decl_index;
1343 tree var_decl;
1344 varpool_node *node;
1345 struct bitpack_d bp;
1346 int ref = LCC_NOT_FOUND;
1347 int order;
1348 tree group;
1349 const char *section;
1350
1351 order = streamer_read_hwi (ib) + file_data->order_base;
1352 decl_index = streamer_read_uhwi (ib);
1353 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1354
1355 /* Declaration of functions can be already merged with a declaration
1356 from other input file. We keep cgraph unmerged until after streaming
1357 of ipa passes is done. Alays forcingly create a fresh node. */
1358 node = varpool_node::create_empty ();
1359 node->decl = var_decl;
1360 node->register_symbol ();
1361
1362 node->order = order;
1363 if (order >= symtab->order)
1364 symtab->order = order + 1;
1365 node->lto_file_data = file_data;
1366
1367 bp = streamer_read_bitpack (ib);
1368 node->externally_visible = bp_unpack_value (&bp, 1);
1369 node->no_reorder = bp_unpack_value (&bp, 1);
1370 node->force_output = bp_unpack_value (&bp, 1);
1371 node->forced_by_abi = bp_unpack_value (&bp, 1);
1372 node->unique_name = bp_unpack_value (&bp, 1);
1373 node->body_removed = bp_unpack_value (&bp, 1);
1374 node->implicit_section = bp_unpack_value (&bp, 1);
1375 node->writeonly = bp_unpack_value (&bp, 1);
1376 node->definition = bp_unpack_value (&bp, 1);
1377 node->alias = bp_unpack_value (&bp, 1);
1378 node->transparent_alias = bp_unpack_value (&bp, 1);
1379 node->weakref = bp_unpack_value (&bp, 1);
1380 node->symver = bp_unpack_value (&bp, 1);
1381 node->analyzed = bp_unpack_value (&bp, 1);
1382 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1383 node->in_other_partition = bp_unpack_value (&bp, 1);
1384 if (node->in_other_partition)
1385 {
1386 DECL_EXTERNAL (node->decl) = 1;
1387 TREE_STATIC (node->decl) = 0;
1388 }
1389 if (node->alias && !node->analyzed && node->weakref)
1390 node->alias_target = get_alias_symbol (node->decl);
1391 node->tls_model = (enum tls_model)bp_unpack_value (&bp, 3);
1392 node->used_by_single_function = (enum tls_model)bp_unpack_value (&bp, 1);
1393 node->dynamically_initialized = bp_unpack_value (&bp, 1);
1394 group = read_identifier (ib);
1395 if (group)
1396 {
1397 node->set_comdat_group (group);
1398 ref = streamer_read_hwi (ib);
1399 /* Store a reference for now, and fix up later to be a pointer. */
1400 node->same_comdat_group = (symtab_node *) (intptr_t) ref;
1401 }
1402 else
1403 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1404 section = read_string (ib);
1405 if (section)
1406 node->set_section_for_node (section);
1407 node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1408 LDPR_NUM_KNOWN);
1409 verify_node_partition (node);
1410 return node;
1411 }
1412
/* Read an IPA reference from input_block IB.  REFERRING_NODE is the node
   the reference originates from; NODES maps stream indices to previously
   read symtab nodes.  */
1415
1416 static void
1417 input_ref (class lto_input_block *ib,
1418 symtab_node *referring_node,
1419 vec<symtab_node *> nodes)
1420 {
1421 symtab_node *node = NULL;
1422 struct bitpack_d bp;
1423 enum ipa_ref_use use;
1424 bool speculative;
1425 struct ipa_ref *ref;
1426
1427 bp = streamer_read_bitpack (ib);
1428 use = (enum ipa_ref_use) bp_unpack_value (&bp, 3);
1429 speculative = (enum ipa_ref_use) bp_unpack_value (&bp, 1);
1430 node = nodes[streamer_read_hwi (ib)];
1431 ref = referring_node->create_reference (node, use);
1432 ref->speculative = speculative;
1433 if (is_a <cgraph_node *> (referring_node))
1434 {
1435 ref->lto_stmt_uid = streamer_read_hwi (ib);
1436 bp = streamer_read_bitpack (ib);
1437 ref->speculative_id = bp_unpack_value (&bp, 16);
1438 }
1439 }
1440
1441 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1442 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1443 edge being read is indirect (in the sense that it has
1444 indirect_unknown_callee set). */
1445
static void
input_edge (class lto_input_block *ib, vec<symtab_node *> nodes,
	    bool indirect)
{
  struct cgraph_node *caller, *callee;
  struct cgraph_edge *edge;
  unsigned int stmt_id, speculative_id;
  profile_count count;
  cgraph_inline_failed_t inline_failed;
  struct bitpack_d bp;
  int ecf_flags = 0;

  /* NOTE: the reads below must mirror exactly the writes in
     lto_output_edge.  Caller/callee are stream indices into NODES.  */
  caller = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
  if (caller == NULL || caller->decl == NULL_TREE)
    internal_error ("bytecode stream: no caller found while reading edge");

  if (!indirect)
    {
      callee = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
      if (callee == NULL || callee->decl == NULL_TREE)
	internal_error ("bytecode stream: no callee found while reading edge");
    }
  else
    callee = NULL;

  count = profile_count::stream_in (ib);

  bp = streamer_read_bitpack (ib);
  inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
  stmt_id = bp_unpack_var_len_unsigned (&bp);
  speculative_id = bp_unpack_value (&bp, 16);

  if (indirect)
    edge = caller->create_indirect_edge (NULL, 0, count);
  else
    edge = caller->create_edge (callee, NULL, count);

  edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
  edge->speculative = bp_unpack_value (&bp, 1);
  /* The statement uid is resolved to a real gimple statement only when
     function bodies are materialized.  */
  edge->lto_stmt_uid = stmt_id;
  edge->speculative_id = speculative_id;
  edge->inline_failed = inline_failed;
  edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
  edge->can_throw_external = bp_unpack_value (&bp, 1);
  edge->in_polymorphic_cdtor = bp_unpack_value (&bp, 1);
  if (indirect)
    {
      /* For indirect edges the ECF flags of the (unknown) callee are
	 streamed as individual bits.  */
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_CONST;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_PURE;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NORETURN;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_MALLOC;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NOTHROW;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_RETURNS_TWICE;
      edge->indirect_info->ecf_flags = ecf_flags;

      edge->indirect_info->num_speculative_call_targets
	= bp_unpack_value (&bp, 16);
    }
}
1511
1512
1513 /* Read a cgraph from IB using the info in FILE_DATA. */
1514
1515 static vec<symtab_node *>
1516 input_cgraph_1 (struct lto_file_decl_data *file_data,
1517 class lto_input_block *ib)
1518 {
1519 enum LTO_symtab_tags tag;
1520 vec<symtab_node *> nodes = vNULL;
1521 symtab_node *node;
1522 unsigned i;
1523
1524 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1525 file_data->order_base = symtab->order;
1526 file_data->unit_base = symtab->max_unit + 1;
1527 while (tag)
1528 {
1529 if (tag == LTO_symtab_edge)
1530 input_edge (ib, nodes, false);
1531 else if (tag == LTO_symtab_indirect_edge)
1532 input_edge (ib, nodes, true);
1533 else if (tag == LTO_symtab_variable)
1534 {
1535 node = input_varpool_node (file_data, ib);
1536 nodes.safe_push (node);
1537 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1538 }
1539 else
1540 {
1541 node = input_node (file_data, ib, tag, nodes);
1542 if (node == NULL || node->decl == NULL_TREE)
1543 internal_error ("bytecode stream: found empty cgraph node");
1544 nodes.safe_push (node);
1545 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1546 }
1547
1548 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1549 }
1550
1551 lto_input_toplevel_asms (file_data, file_data->order_base);
1552
1553 /* AUX pointers should be all non-zero for function nodes read from the stream. */
1554 if (flag_checking)
1555 {
1556 FOR_EACH_VEC_ELT (nodes, i, node)
1557 gcc_assert (node->aux || !is_a <cgraph_node *> (node));
1558 }
1559 FOR_EACH_VEC_ELT (nodes, i, node)
1560 {
1561 int ref;
1562 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1563 {
1564 ref = (int) (intptr_t) cnode->inlined_to;
1565
1566 /* We share declaration of builtins, so we may read same node twice. */
1567 if (!node->aux)
1568 continue;
1569 node->aux = NULL;
1570
1571 /* Fixup inlined_to from reference to pointer. */
1572 if (ref != LCC_NOT_FOUND)
1573 dyn_cast<cgraph_node *> (node)->inlined_to
1574 = dyn_cast<cgraph_node *> (nodes[ref]);
1575 else
1576 cnode->inlined_to = NULL;
1577 }
1578
1579 ref = (int) (intptr_t) node->same_comdat_group;
1580
1581 /* Fixup same_comdat_group from reference to pointer. */
1582 if (ref != LCC_NOT_FOUND)
1583 node->same_comdat_group = nodes[ref];
1584 else
1585 node->same_comdat_group = NULL;
1586 }
1587 FOR_EACH_VEC_ELT (nodes, i, node)
1588 node->aux = is_a <cgraph_node *> (node) ? (void *)1 : NULL;
1589 return nodes;
1590 }
1591
1592 /* Input ipa_refs. */
1593
1594 static void
1595 input_refs (class lto_input_block *ib,
1596 vec<symtab_node *> nodes)
1597 {
1598 int count;
1599 int idx;
1600 while (true)
1601 {
1602 symtab_node *node;
1603 count = streamer_read_uhwi (ib);
1604 if (!count)
1605 break;
1606 idx = streamer_read_uhwi (ib);
1607 node = nodes[idx];
1608 while (count)
1609 {
1610 input_ref (ib, node, nodes);
1611 count--;
1612 }
1613 }
1614 }
1615
1616 /* Input profile_info from IB. */
1617 static void
1618 input_profile_summary (class lto_input_block *ib,
1619 struct lto_file_decl_data *file_data)
1620 {
1621 unsigned int runs = streamer_read_uhwi (ib);
1622 if (runs)
1623 {
1624 file_data->profile_info.runs = runs;
1625
1626 /* IPA-profile computes hot bb threshold based on cumulated
1627 whole program profile. We need to stream it down to ltrans. */
1628 if (flag_ltrans)
1629 set_hot_bb_threshold (streamer_read_gcov_count (ib));
1630 }
1631
1632 }
1633
1634 /* Rescale profile summaries to the same number of runs in the whole unit. */
1635
1636 static void
1637 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1638 {
1639 struct lto_file_decl_data *file_data;
1640 unsigned int j;
1641 gcov_unsigned_t max_runs = 0;
1642 struct cgraph_node *node;
1643 struct cgraph_edge *edge;
1644
1645 /* Find unit with maximal number of runs. If we ever get serious about
1646 roundoff errors, we might also consider computing smallest common
1647 multiply. */
1648 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1649 if (max_runs < file_data->profile_info.runs)
1650 max_runs = file_data->profile_info.runs;
1651
1652 if (!max_runs)
1653 return;
1654
1655 /* Simple overflow check. We probably don't need to support that many train
1656 runs. Such a large value probably imply data corruption anyway. */
1657 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1658 {
1659 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1660 INT_MAX / REG_BR_PROB_BASE);
1661 return;
1662 }
1663
1664 profile_info = XCNEW (gcov_summary);
1665 profile_info->runs = max_runs;
1666
1667 /* If merging already happent at WPA time, we are done. */
1668 if (flag_ltrans)
1669 return;
1670
1671 /* Now compute count_materialization_scale of each node.
1672 During LTRANS we already have values of count_materialization_scale
1673 computed, so just update them. */
1674 FOR_EACH_FUNCTION (node)
1675 if (node->lto_file_data
1676 && node->lto_file_data->profile_info.runs)
1677 {
1678 int scale;
1679
1680 scale = RDIV (node->count_materialization_scale * max_runs,
1681 node->lto_file_data->profile_info.runs);
1682 node->count_materialization_scale = scale;
1683 if (scale < 0)
1684 fatal_error (input_location, "Profile information in %s corrupted",
1685 file_data->file_name);
1686
1687 if (scale == REG_BR_PROB_BASE)
1688 continue;
1689 for (edge = node->callees; edge; edge = edge->next_callee)
1690 if (edge->count.ipa ().nonzero_p ())
1691 edge->count = edge->count.apply_scale (scale, REG_BR_PROB_BASE);
1692 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
1693 if (edge->count.ipa ().nonzero_p ())
1694 edge->count = edge->count.apply_scale (scale, REG_BR_PROB_BASE);
1695 if (node->count.ipa ().nonzero_p ())
1696 node->count = node->count.apply_scale (scale, REG_BR_PROB_BASE);
1697 }
1698 }
1699
1700 /* Input and merge the symtab from each of the .o files passed to
1701 lto1. */
1702
void
input_symtab (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;
  struct cgraph_node *node;

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      class lto_input_block *ib;
      vec<symtab_node *> nodes;

      /* Read the symtab-node section: profile summary first, then the
	 node/edge records (see output_symtab for the write side).  */
      ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
					  &data, &len);
      if (!ib)
	fatal_error (input_location,
		     "cannot find LTO cgraph in %s", file_data->file_name);
      input_profile_summary (ib, file_data);
      file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
      nodes = input_cgraph_1 (file_data, ib);
      lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
				      ib, data, len);

      /* IPA references live in a separate section; they are resolved
	 against the NODES vector just read.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_refs,
					  &data, &len);
      if (!ib)
	fatal_error (input_location, "cannot find LTO section refs in %s",
		     file_data->file_name);
      input_refs (ib, nodes);
      lto_destroy_simple_input_block (file_data, LTO_section_refs,
				      ib, data, len);
      if (flag_ltrans)
	input_cgraph_opt_summary (nodes);
      nodes.release ();
    }

  merge_profile_summaries (file_data_vec);

  /* Clear out the aux field that was used to store enough state to
     tell which nodes should be overwritten.  */
  FOR_EACH_FUNCTION (node)
    {
      /* Some nodes may have been created by cgraph_node.  This
	 happens when the callgraph contains nested functions.  If the
	 node for the parent function was never emitted to the gimple
	 file, cgraph_node will create a node for it when setting the
	 context of the nested function.  */
      if (node->lto_file_data)
	node->aux = NULL;
    }
}
1757
1758 /* Input function/variable tables that will allow libgomp to look up offload
1759 target code, and store them into OFFLOAD_FUNCS and OFFLOAD_VARS. */
1760
void
input_offload_tables (bool do_force_output)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      class lto_input_block *ib
	= lto_create_simple_input_block (file_data, LTO_section_offload_table,
					 &data, &len);
      /* The offload table section is optional; units without offloaded
	 code simply do not have one.  */
      if (!ib)
	continue;

      /* Tags mirror output_offload_tables: LTO_symtab_unavail_node for
	 functions, LTO_symtab_variable for variables, zero terminates.  */
      enum LTO_symtab_tags tag
	= streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
      while (tag)
	{
	  if (tag == LTO_symtab_unavail_node)
	    {
	      int decl_index = streamer_read_uhwi (ib);
	      tree fn_decl
		= lto_file_decl_data_get_fn_decl (file_data, decl_index);
	      vec_safe_push (offload_funcs, fn_decl);

	      /* Prevent IPA from removing fn_decl as unreachable, since there
		 may be no refs from the parent function to child_fn in offload
		 LTO mode.  */
	      if (do_force_output)
		cgraph_node::get (fn_decl)->mark_force_output ();
	    }
	  else if (tag == LTO_symtab_variable)
	    {
	      int decl_index = streamer_read_uhwi (ib);
	      tree var_decl
		= lto_file_decl_data_get_var_decl (file_data, decl_index);
	      vec_safe_push (offload_vars, var_decl);

	      /* Prevent IPA from removing var_decl as unused, since there
		 may be no refs to var_decl in offload LTO mode.  */
	      if (do_force_output)
		varpool_node::get (var_decl)->force_output = 1;
	    }
	  else
	    fatal_error (input_location,
			 "invalid offload table in %s", file_data->file_name);

	  tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
	}

      lto_destroy_simple_input_block (file_data, LTO_section_offload_table,
				      ib, data, len);
    }
}
1818
1819 /* True when we need optimization summary for NODE. */
1820
1821 static int
1822 output_cgraph_opt_summary_p (struct cgraph_node *node)
1823 {
1824 return ((node->clone_of || node->former_clone_of)
1825 && (node->clone.tree_map
1826 || node->clone.param_adjustments));
1827 }
1828
1829 /* Output optimization summary for EDGE to OB. */
static void
output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
			 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
{
  /* Intentionally empty: edges currently carry no optimization summary.
     The hook is kept so the per-node walk in output_node_opt_summary
     stays symmetric with input_edge_opt_summary.  */
}
1835
1836 /* Output optimization summary for NODE to OB. */
1837
1838 static void
1839 output_node_opt_summary (struct output_block *ob,
1840 struct cgraph_node *node,
1841 lto_symtab_encoder_t encoder)
1842 {
1843 struct ipa_replace_map *map;
1844 int i;
1845 struct cgraph_edge *e;
1846
1847 /* TODO: Should this code be moved to ipa-param-manipulation? */
1848 struct bitpack_d bp;
1849 bp = bitpack_create (ob->main_stream);
1850 bp_pack_value (&bp, (node->clone.param_adjustments != NULL), 1);
1851 streamer_write_bitpack (&bp);
1852 if (ipa_param_adjustments *adjustments = node->clone.param_adjustments)
1853 {
1854 streamer_write_uhwi (ob, vec_safe_length (adjustments->m_adj_params));
1855 ipa_adjusted_param *adj;
1856 FOR_EACH_VEC_SAFE_ELT (adjustments->m_adj_params, i, adj)
1857 {
1858 bp = bitpack_create (ob->main_stream);
1859 bp_pack_value (&bp, adj->base_index, IPA_PARAM_MAX_INDEX_BITS);
1860 bp_pack_value (&bp, adj->prev_clone_index, IPA_PARAM_MAX_INDEX_BITS);
1861 bp_pack_value (&bp, adj->op, 2);
1862 bp_pack_value (&bp, adj->param_prefix_index, 2);
1863 bp_pack_value (&bp, adj->prev_clone_adjustment, 1);
1864 bp_pack_value (&bp, adj->reverse, 1);
1865 bp_pack_value (&bp, adj->user_flag, 1);
1866 streamer_write_bitpack (&bp);
1867 if (adj->op == IPA_PARAM_OP_SPLIT
1868 || adj->op == IPA_PARAM_OP_NEW)
1869 {
1870 stream_write_tree (ob, adj->type, true);
1871 if (adj->op == IPA_PARAM_OP_SPLIT)
1872 {
1873 stream_write_tree (ob, adj->alias_ptr_type, true);
1874 streamer_write_uhwi (ob, adj->unit_offset);
1875 }
1876 }
1877 }
1878 streamer_write_hwi (ob, adjustments->m_always_copy_start);
1879 bp = bitpack_create (ob->main_stream);
1880 bp_pack_value (&bp, node->clone.param_adjustments->m_skip_return, 1);
1881 streamer_write_bitpack (&bp);
1882 }
1883
1884 streamer_write_uhwi (ob, vec_safe_length (node->clone.tree_map));
1885 FOR_EACH_VEC_SAFE_ELT (node->clone.tree_map, i, map)
1886 {
1887 streamer_write_uhwi (ob, map->parm_num);
1888 gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
1889 stream_write_tree (ob, map->new_tree, true);
1890 }
1891
1892 if (lto_symtab_encoder_in_partition_p (encoder, node))
1893 {
1894 for (e = node->callees; e; e = e->next_callee)
1895 output_edge_opt_summary (ob, e);
1896 for (e = node->indirect_calls; e; e = e->next_callee)
1897 output_edge_opt_summary (ob, e);
1898 }
1899 }
1900
1901 /* Output optimization summaries stored in callgraph.
1902 At the moment it is the clone info structure. */
1903
1904 static void
1905 output_cgraph_opt_summary (void)
1906 {
1907 int i, n_nodes;
1908 lto_symtab_encoder_t encoder;
1909 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1910 unsigned count = 0;
1911
1912 ob->symbol = NULL;
1913 encoder = ob->decl_state->symtab_node_encoder;
1914 n_nodes = lto_symtab_encoder_size (encoder);
1915 for (i = 0; i < n_nodes; i++)
1916 {
1917 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
1918 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1919 if (cnode && output_cgraph_opt_summary_p (cnode))
1920 count++;
1921 }
1922 streamer_write_uhwi (ob, count);
1923 for (i = 0; i < n_nodes; i++)
1924 {
1925 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
1926 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1927 if (cnode && output_cgraph_opt_summary_p (cnode))
1928 {
1929 streamer_write_uhwi (ob, i);
1930 output_node_opt_summary (ob, cnode, encoder);
1931 }
1932 }
1933 produce_asm (ob, NULL);
1934 destroy_output_block (ob);
1935 }
1936
/* Input optimization summary of EDGE.  */
static void
input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
			class lto_input_block *ib_main ATTRIBUTE_UNUSED)
{
  /* Intentionally empty: the corresponding writer streams no per-edge
     data at the moment.  Kept as a stub so the input path mirrors the
     output path and gains a natural place for future edge summaries.  */
}
1944
/* Input optimization summary of NODE.  */
static void
input_node_opt_summary (struct cgraph_node *node,
			class lto_input_block *ib_main,
			class data_in *data_in)
{
  int i;
  int count;
  struct cgraph_edge *e;

  /* NOTE: the read sequence below must mirror output_node_opt_summary
     exactly; any field added or reordered on the writer side has to be
     matched here or the stream desynchronizes.  */
  /* TODO: Should this code be moved to ipa-param-manipulation? */
  struct bitpack_d bp;
  bp = streamer_read_bitpack (ib_main);
  /* One flag bit: did the writer stream a param_adjustments record?  */
  bool have_adjustments = bp_unpack_value (&bp, 1);
  if (have_adjustments)
    {
      /* Number of ipa_adjusted_param entries that follow.  */
      count = streamer_read_uhwi (ib_main);
      vec<ipa_adjusted_param, va_gc> *new_params = NULL;
      for (i = 0; i < count; i++)
	{
	  ipa_adjusted_param adj;
	  /* Zero first so fields not streamed for this op stay cleared.  */
	  memset (&adj, 0, sizeof (adj));
	  bp = streamer_read_bitpack (ib_main);
	  adj.base_index = bp_unpack_value (&bp, IPA_PARAM_MAX_INDEX_BITS);
	  adj.prev_clone_index
	    = bp_unpack_value (&bp, IPA_PARAM_MAX_INDEX_BITS);
	  adj.op = (enum ipa_parm_op) bp_unpack_value (&bp, 2);
	  adj.param_prefix_index = bp_unpack_value (&bp, 2);
	  adj.prev_clone_adjustment = bp_unpack_value (&bp, 1);
	  adj.reverse = bp_unpack_value (&bp, 1);
	  adj.user_flag = bp_unpack_value (&bp, 1);
	  /* SPLIT and NEW ops carry a type; SPLIT additionally carries
	     the alias pointer type and unit offset (matching writer).  */
	  if (adj.op == IPA_PARAM_OP_SPLIT
	      || adj.op == IPA_PARAM_OP_NEW)
	    {
	      adj.type = stream_read_tree (ib_main, data_in);
	      if (adj.op == IPA_PARAM_OP_SPLIT)
		{
		  adj.alias_ptr_type = stream_read_tree (ib_main, data_in);
		  adj.unit_offset = streamer_read_uhwi (ib_main);
		}
	    }
	  vec_safe_push (new_params, adj);
	}
      int always_copy_start = streamer_read_hwi (ib_main);
      bp = streamer_read_bitpack (ib_main);
      bool skip_return = bp_unpack_value (&bp, 1);
      /* Rebuild the adjustments object in GC memory; it takes over the
	 new_params vector.  */
      node->clone.param_adjustments
	= (new (ggc_alloc <ipa_param_adjustments> ())
	   ipa_param_adjustments (new_params, always_copy_start, skip_return));
    }

  /* Read the parameter replacement map: (parm_num, new_tree) pairs.  */
  count = streamer_read_uhwi (ib_main);
  for (i = 0; i < count; i++)
    {
      struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();

      vec_safe_push (node->clone.tree_map, map);
      map->parm_num = streamer_read_uhwi (ib_main);
      map->new_tree = stream_read_tree (ib_main, data_in);
    }
  /* Edge summaries are currently empty stubs, but walk the edges in the
     same order as the writer so any future per-edge data lines up.  */
  for (e = node->callees; e; e = e->next_callee)
    input_edge_opt_summary (e, ib_main);
  for (e = node->indirect_calls; e; e = e->next_callee)
    input_edge_opt_summary (e, ib_main);
}
2011
2012 /* Read section in file FILE_DATA of length LEN with data DATA. */
2013
2014 static void
2015 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
2016 const char *data, size_t len,
2017 vec<symtab_node *> nodes)
2018 {
2019 const struct lto_function_header *header =
2020 (const struct lto_function_header *) data;
2021 const int cfg_offset = sizeof (struct lto_function_header);
2022 const int main_offset = cfg_offset + header->cfg_size;
2023 const int string_offset = main_offset + header->main_size;
2024 class data_in *data_in;
2025 unsigned int i;
2026 unsigned int count;
2027
2028 lto_input_block ib_main ((const char *) data + main_offset,
2029 header->main_size, file_data->mode_table);
2030
2031 data_in =
2032 lto_data_in_create (file_data, (const char *) data + string_offset,
2033 header->string_size, vNULL);
2034 count = streamer_read_uhwi (&ib_main);
2035
2036 for (i = 0; i < count; i++)
2037 {
2038 int ref = streamer_read_uhwi (&ib_main);
2039 input_node_opt_summary (dyn_cast<cgraph_node *> (nodes[ref]),
2040 &ib_main, data_in);
2041 }
2042 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
2043 len);
2044 lto_data_in_delete (data_in);
2045 }
2046
2047 /* Input optimization summary of cgraph. */
2048
2049 static void
2050 input_cgraph_opt_summary (vec<symtab_node *> nodes)
2051 {
2052 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2053 struct lto_file_decl_data *file_data;
2054 unsigned int j = 0;
2055
2056 while ((file_data = file_data_vec[j++]))
2057 {
2058 size_t len;
2059 const char *data
2060 = lto_get_summary_section_data (file_data, LTO_section_cgraph_opt_sum,
2061 &len);
2062 if (data)
2063 input_cgraph_opt_section (file_data, data, len, nodes);
2064 }
2065 }