gcc/lto-cgraph.cc
1/* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright (C) 2009-2022 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7This file is part of GCC.
8
9GCC is free software; you can redistribute it and/or modify it under
10the terms of the GNU General Public License as published by the Free
11Software Foundation; either version 3, or (at your option) any later
12version.
13
14GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17for more details.
18
19You should have received a copy of the GNU General Public License
20along with GCC; see the file COPYING3. If not see
21<http://www.gnu.org/licenses/>. */
22
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
26#include "backend.h"
27#include "rtl.h"
28#include "tree.h"
29#include "gimple.h"
30#include "predict.h"
31#include "stringpool.h"
32#include "tree-streamer.h"
33#include "cgraph.h"
34#include "tree-pass.h"
35#include "profile.h"
36#include "context.h"
37#include "pass_manager.h"
38#include "ipa-utils.h"
39#include "omp-offload.h"
40#include "omp-general.h"
41#include "stringpool.h"
42#include "attribs.h"
43#include "alloc-pool.h"
44#include "symbol-summary.h"
45#include "symtab-thunks.h"
46#include "symtab-clones.h"
47
48/* True when asm nodes have been output. */
49bool asm_nodes_output = false;
50
51static void output_cgraph_opt_summary (void);
52static void input_cgraph_opt_summary (vec<symtab_node *> nodes);
53
54/* Number of LDPR values known to GCC. */
55#define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
56
57/* Cgraph streaming is organized as a set of records whose type
58 is indicated by a tag. */
59enum LTO_symtab_tags
60{
61 /* Must leave 0 for the stopper. */
62
63 /* Cgraph node without body available. */
64 LTO_symtab_unavail_node = 1,
65 /* Cgraph node with function body. */
66 LTO_symtab_analyzed_node,
67 /* Cgraph edges. */
68 LTO_symtab_edge,
69 LTO_symtab_indirect_edge,
70 LTO_symtab_variable,
71 LTO_symtab_last_tag
72};
73
74/* Create a new symtab encoder.
75 If FOR_INPUT, the encoder allocates only the data structures needed
76 to read the symtab. */
77
78lto_symtab_encoder_t
79lto_symtab_encoder_new (bool for_input)
80{
81 lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
82
83 if (!for_input)
84 encoder->map = new hash_map<symtab_node *, size_t>;
85 encoder->nodes.create (0);
86 return encoder;
87}
88
89
90/* Delete ENCODER and its components. */
91
92void
93lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
94{
95 encoder->nodes.release ();
96 if (encoder->map)
97 delete encoder->map;
98 free (encoder);
99}
100
101
102/* Return the existing reference number of NODE in the symtab
103 encoder ENCODER. Assign a new reference if this is the first time
104 NODE is encoded. */
105
106int
107lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
108 symtab_node *node)
109{
110 int ref;
111
112 if (!encoder->map)
113 {
114 lto_encoder_entry entry = {node, false, false, false};
115
116 ref = encoder->nodes.length ();
117 encoder->nodes.safe_push (entry);
118 return ref;
119 }
120
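/* The map stores the reference number biased by 1, so that a value of 0 can stand for "not encoded yet". */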
121 size_t *slot = encoder->map->get (node);
122 if (!slot || !*slot)
123 {
124 lto_encoder_entry entry = {node, false, false, false};
125 ref = encoder->nodes.length ();
126 if (!slot)
127 encoder->map->put (node, ref + 1);
128 encoder->nodes.safe_push (entry);
129 }
130 else
131 ref = *slot - 1;
132
133 return ref;
134}
135
136/* Remove NODE from encoder. */
137
138bool
139lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
140 symtab_node *node)
141{
142 int index;
143 lto_encoder_entry last_node;
144
145 size_t *slot = encoder->map->get (node);
146 if (slot == NULL || !*slot)
147 return false;
148
149 index = *slot - 1;
150 gcc_checking_assert (encoder->nodes[index].node == node);
151
152 /* Remove from vector. We do this by swapping node with the last element
153 of the vector. */
154 last_node = encoder->nodes.pop ();
155 if (last_node.node != node)
156 {
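/* hash_map::put returns true when an existing entry was overwritten; last_node is already in the encoder, so this must hold. */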
157 gcc_assert (encoder->map->put (last_node.node, index + 1));
158
159 /* Move the last element to the original spot of NODE. */
160 encoder->nodes[index] = last_node;
161 }
162
163 /* Remove element from hash table. */
164 encoder->map->remove (node);
165 return true;
166}
167
168
169/* Return TRUE if we should encode the body of NODE (if any). */
170
171bool
172lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
173 struct cgraph_node *node)
174{
175 int index = lto_symtab_encoder_lookup (encoder, node);
176 return encoder->nodes[index].body;
177}
178
179/* Specify that we encode the body of NODE in this partition. */
180
181static void
182lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
183 struct cgraph_node *node)
184{
185 int index = lto_symtab_encoder_encode (encoder, node);
186 gcc_checking_assert (encoder->nodes[index].node == node);
187 encoder->nodes[index].body = true;
188}
189
190/* Return TRUE if we should encode initializer of NODE (if any). */
191
192bool
193lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
194 varpool_node *node)
195{
196 int index = lto_symtab_encoder_lookup (encoder, node);
197 if (index == LCC_NOT_FOUND)
198 return false;
199 return encoder->nodes[index].initializer;
200}
201
202/* Specify that we should encode initializer of NODE (if any). */
203
204static void
205lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
206 varpool_node *node)
207{
208 int index = lto_symtab_encoder_lookup (encoder, node);
209 encoder->nodes[index].initializer = true;
210}
211
212/* Return TRUE if NODE is in this partition. */
213
214bool
215lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
216 symtab_node *node)
217{
218 int index = lto_symtab_encoder_lookup (encoder, node);
219 if (index == LCC_NOT_FOUND)
220 return false;
221 return encoder->nodes[index].in_partition;
222}
223
224/* Specify that NODE is in this partition. */
225
226void
227lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
228 symtab_node *node)
229{
230 int index = lto_symtab_encoder_encode (encoder, node);
231 encoder->nodes[index].in_partition = true;
232}
233
234/* Output the cgraph EDGE to OB using ENCODER. */
235
236static void
237lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
238 lto_symtab_encoder_t encoder)
239{
240 unsigned int uid;
241 intptr_t ref;
242 struct bitpack_d bp;
243
244 if (edge->indirect_unknown_callee)
245 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
246 LTO_symtab_indirect_edge);
247 else
248 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
249 LTO_symtab_edge);
250
251 ref = lto_symtab_encoder_lookup (encoder, edge->caller);
252 gcc_assert (ref != LCC_NOT_FOUND);
253 streamer_write_hwi_stream (ob->main_stream, ref);
254
255 if (!edge->indirect_unknown_callee)
256 {
257 ref = lto_symtab_encoder_lookup (encoder, edge->callee);
258 gcc_assert (ref != LCC_NOT_FOUND);
259 streamer_write_hwi_stream (ob->main_stream, ref);
260 }
261
262 edge->count.stream_out (ob->main_stream);
263
264 bp = bitpack_create (ob->main_stream);
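/* Statement UIDs are streamed with a +1 bias so that 0 can mean "no call statement"; per the assert below, that only happens for thunk edges. */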
265 uid = !edge->call_stmt ? edge->lto_stmt_uid
266 : gimple_uid (edge->call_stmt) + 1;
267 bp_pack_enum (&bp, cgraph_inline_failed_t,
268 CIF_N_REASONS, edge->inline_failed);
269 gcc_checking_assert (uid || edge->caller->thunk);
270 bp_pack_var_len_unsigned (&bp, uid);
271 bp_pack_value (&bp, edge->speculative_id, 16);
272 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
273 bp_pack_value (&bp, edge->speculative, 1);
274 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
275 gcc_assert (!edge->call_stmt_cannot_inline_p
276 || edge->inline_failed != CIF_BODY_NOT_AVAILABLE);
277 bp_pack_value (&bp, edge->can_throw_external, 1);
278 bp_pack_value (&bp, edge->in_polymorphic_cdtor, 1);
279 if (edge->indirect_unknown_callee)
280 {
281 int flags = edge->indirect_info->ecf_flags;
282 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
283 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
284 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
285 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
286 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
287 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
288 /* Flags that should not appear on indirect calls. */
289 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
290 | ECF_MAY_BE_ALLOCA
291 | ECF_SIBCALL
292 | ECF_LEAF
293 | ECF_NOVOPS)));
294
295 bp_pack_value (&bp, edge->indirect_info->num_speculative_call_targets,
296 16);
297 }
298 streamer_write_bitpack (&bp);
299}
300
301/* Return true if NODE is referenced from other partitions. */
302
303bool
304referenced_from_other_partition_p (symtab_node *node, lto_symtab_encoder_t encoder)
305{
306 int i;
307 struct ipa_ref *ref = NULL;
308
309 for (i = 0; node->iterate_referring (i, ref); i++)
310 {
311 /* Ignore references from non-offloadable nodes while streaming NODE into
312 offload LTO section. */
313 if (!ref->referring->need_lto_streaming)
314 continue;
315
316 if (ref->referring->in_other_partition
317 || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
318 return true;
319 }
320 return false;
321}
322
323/* Return true when NODE is reachable from another partition. */
324
325bool
326reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
327{
328 struct cgraph_edge *e;
329 if (!node->definition)
330 return false;
331 if (node->inlined_to)
332 return false;
333 for (e = node->callers; e; e = e->next_caller)
334 {
335 /* Ignore references from non-offloadable nodes while streaming NODE into
336 offload LTO section. */
337 if (!e->caller->need_lto_streaming)
338 continue;
339
340 if (e->caller->in_other_partition
341 || !lto_symtab_encoder_in_partition_p (encoder, e->caller))
342 return true;
343 }
344 return false;
345}
346
347/* Return true if NODE is referenced from this partition. */
348
349bool
350referenced_from_this_partition_p (symtab_node *node,
351 lto_symtab_encoder_t encoder)
352{
353 int i;
354 struct ipa_ref *ref = NULL;
355
356 for (i = 0; node->iterate_referring (i, ref); i++)
357 if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
358 return true;
359 return false;
360}
361
362/* Return true when NODE is reachable from this partition. */
363
364bool
365reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
366{
367 struct cgraph_edge *e;
368 for (e = node->callers; e; e = e->next_caller)
369 if (lto_symtab_encoder_in_partition_p (encoder, e->caller))
370 return true;
371 return false;
372}
373
374/* Output the cgraph NODE to OB. ENCODER is used to find the
375 reference number of NODE->inlined_to and defines the set of nodes we
376 are writing to the current file. If NODE is not in that set, then NODE
377 is a boundary node and we pretend NODE just has a
378 decl and no callees. ENCODER is also used to
379 determine whether NODE is a clone of a previously written node so that
380 the clone tree can be streamed correctly. */
381
382static void
383lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
384 lto_symtab_encoder_t encoder)
385{
386 unsigned int tag;
387 struct bitpack_d bp;
388 bool boundary_p;
389 intptr_t ref;
390 bool in_other_partition = false;
391 struct cgraph_node *clone_of, *ultimate_clone_of;
392 ipa_opt_pass_d *pass;
393 int i;
394 const char *comdat;
395 const char *section;
396 tree group;
397
398 boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
399
400 if (node->analyzed && (!boundary_p || node->alias
401 || (node->thunk && !node->inlined_to)))
402 tag = LTO_symtab_analyzed_node;
403 else
404 tag = LTO_symtab_unavail_node;
405
406 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
407 tag);
408 streamer_write_hwi_stream (ob->main_stream, node->order);
409
410 /* In WPA mode, we only output part of the call-graph. Also, we
411 fake cgraph node attributes. There are two cases that we care about.
412
413 Boundary nodes: There are nodes that are not part of SET but are
414 called from within SET. We artificially make them look like
415 externally visible nodes with no function body.
416
417 Cherry-picked nodes: These are nodes we pulled from other
418 translation units into SET during IPA-inlining. We make them look
419 like local static nodes to prevent clashes with other local statics. */
420 if (boundary_p && node->analyzed
421 && node->get_partitioning_class () == SYMBOL_PARTITION)
422 {
423 /* Inline clones cannot be part of the boundary.
424 gcc_assert (!node->inlined_to);
425
426 FIXME: At the moment they can be, when the partition contains an inline
427 clone that is a clone of an inline clone from outside the partition. We
428 can reshape the clone tree and make another node the root, but it needs
429 a bit of extra work and will be promptly done by cgraph_remove_node
430 after reading back. */
431 in_other_partition = 1;
432 }
433
434 clone_of = node->clone_of;
435 while (clone_of
436 && (ref = lto_symtab_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
437 if (clone_of->prev_sibling_clone)
438 clone_of = clone_of->prev_sibling_clone;
439 else
440 clone_of = clone_of->clone_of;
441
442 /* See if the body of the master function is output. If not, we are seeing
443 only a declaration and do not need to pass down the clone tree. */
444 ultimate_clone_of = clone_of;
445 while (ultimate_clone_of && ultimate_clone_of->clone_of)
446 ultimate_clone_of = ultimate_clone_of->clone_of;
447
448 if (clone_of && !lto_symtab_encoder_encode_body_p (encoder, ultimate_clone_of))
449 clone_of = NULL;
450
451 if (tag == LTO_symtab_analyzed_node)
452 gcc_assert (clone_of || !node->clone_of);
453 if (!clone_of)
454 streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
455 else
456 streamer_write_hwi_stream (ob->main_stream, ref);
457
458
459 lto_output_fn_decl_ref (ob->decl_state, ob->main_stream, node->decl);
460 node->count.stream_out (ob->main_stream);
461 streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
462
463 streamer_write_hwi_stream (ob->main_stream,
464 node->ipa_transforms_to_apply.length ());
465 FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
466 streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);
467
468 if (tag == LTO_symtab_analyzed_node)
469 {
470 if (node->inlined_to)
471 {
472 ref = lto_symtab_encoder_lookup (encoder, node->inlined_to);
473 gcc_assert (ref != LCC_NOT_FOUND);
474 }
475 else
476 ref = LCC_NOT_FOUND;
477
478 streamer_write_hwi_stream (ob->main_stream, ref);
479 }
480
481 group = node->get_comdat_group ();
482 if (group)
483 comdat = IDENTIFIER_POINTER (group);
484 else
485 comdat = "";
486 streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
487
488 if (group)
489 {
490 if (node->same_comdat_group)
491 {
492 ref = LCC_NOT_FOUND;
493 for (struct symtab_node *n = node->same_comdat_group;
494 ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
495 ref = lto_symtab_encoder_lookup (encoder, n);
496 }
497 else
498 ref = LCC_NOT_FOUND;
499 streamer_write_hwi_stream (ob->main_stream, ref);
500 }
501
502 section = node->get_section ();
503 if (!section)
504 section = "";
505
506 streamer_write_hwi_stream (ob->main_stream, node->tp_first_run);
507
508 bp = bitpack_create (ob->main_stream);
509 bp_pack_value (&bp, node->local, 1);
510 bp_pack_value (&bp, node->externally_visible, 1);
511 bp_pack_value (&bp, node->no_reorder, 1);
512 bp_pack_value (&bp, node->definition, 1);
513 bp_pack_value (&bp, node->versionable, 1);
514 bp_pack_value (&bp, node->can_change_signature, 1);
515 bp_pack_value (&bp, node->redefined_extern_inline, 1);
516 bp_pack_value (&bp, node->force_output, 1);
517 bp_pack_value (&bp, node->forced_by_abi, 1);
518 bp_pack_value (&bp, node->unique_name, 1);
519 bp_pack_value (&bp, node->body_removed, 1);
520 bp_pack_value (&bp, node->semantic_interposition, 1);
521 bp_pack_value (&bp, node->implicit_section, 1);
522 bp_pack_value (&bp, node->address_taken, 1);
523 bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
524 && node->get_partitioning_class () == SYMBOL_PARTITION
525 && (reachable_from_other_partition_p (node, encoder)
526 || referenced_from_other_partition_p (node, encoder)), 1);
527 bp_pack_value (&bp, node->lowered, 1);
528 bp_pack_value (&bp, in_other_partition, 1);
529 bp_pack_value (&bp, node->alias, 1);
530 bp_pack_value (&bp, node->transparent_alias, 1);
531 bp_pack_value (&bp, node->weakref, 1);
532 bp_pack_value (&bp, node->symver, 1);
533 bp_pack_value (&bp, node->frequency, 2);
534 bp_pack_value (&bp, node->only_called_at_startup, 1);
535 bp_pack_value (&bp, node->only_called_at_exit, 1);
536 bp_pack_value (&bp, node->tm_clone, 1);
537 bp_pack_value (&bp, node->calls_comdat_local, 1);
538 bp_pack_value (&bp, node->icf_merged, 1);
539 bp_pack_value (&bp, node->nonfreeing_fn, 1);
540 bp_pack_value (&bp, node->merged_comdat, 1);
541 bp_pack_value (&bp, node->merged_extern_inline, 1);
542 bp_pack_value (&bp, node->thunk, 1);
543 bp_pack_value (&bp, node->parallelized_function, 1);
544 bp_pack_value (&bp, node->declare_variant_alt, 1);
545 bp_pack_value (&bp, node->calls_declare_variant_alt, 1);
546
547 /* Always stream thunk info because we use it in
548 ipa_polymorphic_call_context::ipa_polymorphic_call_context
549 to properly interpret THIS pointers for thunks that have been converted
550 to GIMPLE. */
551 struct thunk_info *thunk = node->definition ? thunk_info::get (node) : NULL;
552
553 bp_pack_value (&bp, thunk != NULL, 1);
554
555 bp_pack_enum (&bp, ld_plugin_symbol_resolution,
556 LDPR_NUM_KNOWN,
557 /* When doing incremental link, we will get new resolution
558 info next time we process the file. */
559 flag_incremental_link ? LDPR_UNKNOWN : node->resolution);
560 bp_pack_value (&bp, node->split_part, 1);
561 streamer_write_bitpack (&bp);
562 streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
563
564 streamer_write_hwi_stream (ob->main_stream, node->profile_id);
565 streamer_write_hwi_stream (ob->main_stream, node->unit_id);
566 if (DECL_STATIC_CONSTRUCTOR (node->decl))
567 streamer_write_hwi_stream (ob->main_stream, node->get_init_priority ());
568 if (DECL_STATIC_DESTRUCTOR (node->decl))
569 streamer_write_hwi_stream (ob->main_stream, node->get_fini_priority ());
570
571 if (thunk)
572 thunk_info::get (node)->stream_out (ob);
573}
574
575/* Output the varpool NODE to OB.
576 If NODE is not in SET, then NODE is a boundary. */
577
578static void
579lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
580 lto_symtab_encoder_t encoder)
581{
582 bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
583 bool encode_initializer_p
584 = (node->definition
585 && lto_symtab_encoder_encode_initializer_p (encoder, node));
586 struct bitpack_d bp;
587 int ref;
588 const char *comdat;
589 const char *section;
590 tree group;
591
592 gcc_assert (!encode_initializer_p || node->definition);
593 gcc_assert (boundary_p || encode_initializer_p);
594
595 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
596 LTO_symtab_variable);
597 streamer_write_hwi_stream (ob->main_stream, node->order);
598 lto_output_var_decl_ref (ob->decl_state, ob->main_stream, node->decl);
599 bp = bitpack_create (ob->main_stream);
600 bp_pack_value (&bp, node->externally_visible, 1);
601 bp_pack_value (&bp, node->no_reorder, 1);
602 bp_pack_value (&bp, node->force_output, 1);
603 bp_pack_value (&bp, node->forced_by_abi, 1);
604 bp_pack_value (&bp, node->unique_name, 1);
605 bp_pack_value (&bp,
606 node->body_removed
607 || (!encode_initializer_p && !node->alias && node->definition),
608 1);
609 bp_pack_value (&bp, node->semantic_interposition, 1);
610 bp_pack_value (&bp, node->implicit_section, 1);
611 bp_pack_value (&bp, node->writeonly, 1);
612 bp_pack_value (&bp, node->definition && (encode_initializer_p || node->alias),
613 1);
614 bp_pack_value (&bp, node->alias, 1);
615 bp_pack_value (&bp, node->transparent_alias, 1);
616 bp_pack_value (&bp, node->weakref, 1);
617 bp_pack_value (&bp, node->symver, 1);
618 bp_pack_value (&bp, node->analyzed && (!boundary_p || node->alias), 1);
619 gcc_assert (node->definition || !node->analyzed);
620 /* Constant pool initializers can be de-unified into individual ltrans units.
621 FIXME: Alternatively, at -Os we may want to avoid generating the local
622 labels for them and share them across LTRANS partitions. */
623 if (node->get_partitioning_class () != SYMBOL_PARTITION)
624 {
625 bp_pack_value (&bp, 0, 1); /* used_from_other_partition. */
626 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
627 }
628 else
629 {
630 bp_pack_value (&bp, node->definition
631 && referenced_from_other_partition_p (node, encoder), 1);
632 bp_pack_value (&bp, node->analyzed
633 && boundary_p && !DECL_EXTERNAL (node->decl), 1);
634 /* in_other_partition. */
635 }
636 bp_pack_value (&bp, node->tls_model, 3);
637 bp_pack_value (&bp, node->used_by_single_function, 1);
638 bp_pack_value (&bp, node->dynamically_initialized, 1);
639 streamer_write_bitpack (&bp);
640
641 group = node->get_comdat_group ();
642 if (group)
643 comdat = IDENTIFIER_POINTER (group);
644 else
645 comdat = "";
646 streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
647
648 if (group)
649 {
650 if (node->same_comdat_group)
651 {
652 ref = LCC_NOT_FOUND;
653 for (struct symtab_node *n = node->same_comdat_group;
654 ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
655 ref = lto_symtab_encoder_lookup (encoder, n);
656 }
657 else
658 ref = LCC_NOT_FOUND;
659 streamer_write_hwi_stream (ob->main_stream, ref);
660 }
661
662 section = node->get_section ();
663 if (!section)
664 section = "";
665 streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
666
667 streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
668 LDPR_NUM_KNOWN, node->resolution);
669}
670
671/* Output the reference REF to OB, using ENCODER to look up the
672 referred symbol. */
673
674static void
675lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
676 lto_symtab_encoder_t encoder)
677{
678 struct bitpack_d bp;
679 int nref;
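/* As with call-graph edges, statement UIDs use a +1 bias; 0 means the reference has no statement. */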
680 int uid = !ref->stmt ? ref->lto_stmt_uid : gimple_uid (ref->stmt) + 1;
681 struct cgraph_node *node;
682
683 bp = bitpack_create (ob->main_stream);
684 bp_pack_value (&bp, ref->use, 3);
685 bp_pack_value (&bp, ref->speculative, 1);
686 streamer_write_bitpack (&bp);
687 nref = lto_symtab_encoder_lookup (encoder, ref->referred);
688 gcc_assert (nref != LCC_NOT_FOUND);
689 streamer_write_hwi_stream (ob->main_stream, nref);
690
691 node = dyn_cast <cgraph_node *> (ref->referring);
692 if (node)
693 {
694 if (ref->stmt)
695 uid = gimple_uid (ref->stmt) + 1;
696 streamer_write_hwi_stream (ob->main_stream, uid);
697 bp_pack_value (&bp, ref->speculative_id, 16);
698 streamer_write_bitpack (&bp);
699 }
700}
701
702/* Stream out profile_summary to OB. */
703
704static void
705output_profile_summary (struct lto_simple_output_block *ob)
706{
707 if (profile_info)
708 {
709 /* We do not output num and run_max, they are not used by
710 GCC profile feedback and they are difficult to merge from multiple
711 units. */
712 unsigned runs = (profile_info->runs);
713 streamer_write_uhwi_stream (ob->main_stream, runs);
714
715 /* IPA-profile computes hot bb threshold based on cumulated
716 whole program profile. We need to stream it down to ltrans. */
717 if (flag_wpa)
718 streamer_write_gcov_count_stream (ob->main_stream,
719 get_hot_bb_threshold ());
720 }
721 else
722 streamer_write_uhwi_stream (ob->main_stream, 0);
723}
724
725/* Output all callees or indirect outgoing edges. EDGE must be the first such
726 edge. */
727
728static void
729output_outgoing_cgraph_edges (struct cgraph_edge *edge,
730 struct lto_simple_output_block *ob,
731 lto_symtab_encoder_t encoder)
732{
733 if (!edge)
734 return;
735
736 /* Output edges in backward direction, so that the reconstructed callgraph
737 matches and it is easy to associate call sites in the IPA pass summaries. */
738 while (edge->next_callee)
739 edge = edge->next_callee;
740 for (; edge; edge = edge->prev_callee)
741 lto_output_edge (ob, edge, encoder);
742}
743
744/* Output the references for the part of the symtab in ENCODER. */
745
746static void
747output_refs (lto_symtab_encoder_t encoder)
748{
749 struct lto_simple_output_block *ob;
750 int count;
751 struct ipa_ref *ref;
752
753 ob = lto_create_simple_output_block (LTO_section_refs);
754
755 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
756 {
757 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
758
759 /* IPA_REF_ALIAS references are always preserved
760 in the boundary. An alias node can't have other references and
761 can always be handled as if it's not in the boundary. */
762 if (!node->alias && !lto_symtab_encoder_in_partition_p (encoder, node))
763 continue;
764
765 count = node->ref_list.nreferences ();
766 if (count)
767 {
768 streamer_write_gcov_count_stream (ob->main_stream, count);
769 streamer_write_uhwi_stream (ob->main_stream,
770 lto_symtab_encoder_lookup (encoder, node));
771 for (int i = 0; node->iterate_reference (i, ref); i++)
772 lto_output_ref (ob, ref, encoder);
773 }
774 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
775 if (cnode->declare_variant_alt)
776 omp_lto_output_declare_variant_alt (ob, cnode, encoder);
777 }
778
779 streamer_write_uhwi_stream (ob->main_stream, 0);
780
781 lto_destroy_simple_output_block (ob);
782}
783
784/* Add NODE into encoder as well as nodes it is cloned from.
785 Do it in a way so that the origin of a clone appears before the clone itself. */
786
787static void
788add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
789 bool include_body)
790{
791 if (node->clone_of)
792 add_node_to (encoder, node->clone_of, include_body);
793 else if (include_body)
794 lto_set_symtab_encoder_encode_body (encoder, node);
795 lto_symtab_encoder_encode (encoder, node);
796}
797
798/* Add all references in NODE to encoders. */
799
800static void
801create_references (lto_symtab_encoder_t encoder, symtab_node *node)
802{
803 int i;
804 struct ipa_ref *ref = NULL;
805 for (i = 0; node->iterate_reference (i, ref); i++)
806 if (is_a <cgraph_node *> (ref->referred))
807 add_node_to (encoder, dyn_cast <cgraph_node *> (ref->referred), false);
808 else
809 lto_symtab_encoder_encode (encoder, ref->referred);
810}
811
812/* Select what needs to be streamed out. In regular LTO mode stream everything;
813 in offload LTO mode stream only nodes marked as offloadable. */
814void
815select_what_to_stream (void)
816{
817 struct symtab_node *snode;
818 FOR_EACH_SYMBOL (snode)
819 snode->need_lto_streaming = !lto_stream_offload_p || snode->offloadable;
820}
821
822/* Find all symbols we want to stream into given partition and insert them
823 to encoders.
824
825 The function actually replaces IN_ENCODER by a new one. The reason is that
826 the streaming code needs a clone's origin to be streamed before the clone.
827 This means that we need to insert the nodes in a specific order. This order
828 is ignored by the partitioning logic earlier. */
829
830lto_symtab_encoder_t
831compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
832{
833 struct cgraph_edge *edge;
834 int i;
835 lto_symtab_encoder_t encoder;
836 lto_symtab_encoder_iterator lsei;
837 hash_set<void *> reachable_call_targets;
838
839 encoder = lto_symtab_encoder_new (false);
840
841 /* Go over all entries in the IN_ENCODER and duplicate them to
842 ENCODER. At the same time insert masters of clones so
843 every master appears before its clones. */
844 for (lsei = lsei_start_function_in_partition (in_encoder);
845 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
846 {
847 struct cgraph_node *node = lsei_cgraph_node (lsei);
848 if (!node->need_lto_streaming)
849 continue;
850 add_node_to (encoder, node, true);
851 lto_set_symtab_encoder_in_partition (encoder, node);
852 create_references (encoder, node);
853 }
854 for (lsei = lsei_start_variable_in_partition (in_encoder);
855 !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
856 {
857 varpool_node *vnode = lsei_varpool_node (lsei);
858
859 if (!vnode->need_lto_streaming)
860 continue;
861 lto_set_symtab_encoder_in_partition (encoder, vnode);
862 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
863 create_references (encoder, vnode);
864 }
865 /* Also pickle in the initializers of all referenced read-only variables
866 to help folding. Constant pool variables are not shared, so we must
867 pickle those too. */
868 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
869 {
870 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
871 if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
872 {
873 if (!lto_symtab_encoder_encode_initializer_p (encoder,
874 vnode)
875 && (((vnode->ctor_useable_for_folding_p ()
876 && (!DECL_VIRTUAL_P (vnode->decl)
877 || !flag_wpa
878 || flag_ltrans_devirtualize)))))
879 {
880 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
881 create_references (encoder, vnode);
882 }
883 }
884 }
885
886 /* Go over all the nodes again to include callees that are not in
887 SET. */
888 for (lsei = lsei_start_function_in_partition (encoder);
889 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
890 {
891 struct cgraph_node *node = lsei_cgraph_node (lsei);
892 for (edge = node->callees; edge; edge = edge->next_callee)
893 {
894 struct cgraph_node *callee = edge->callee;
895 if (!lto_symtab_encoder_in_partition_p (encoder, callee))
896 {
897 /* We should have moved all the inlines. */
898 gcc_assert (!callee->inlined_to);
899 add_node_to (encoder, callee, false);
900 }
901 }
902 /* Add all possible targets for late devirtualization. */
903 if (flag_ltrans_devirtualize || !flag_wpa)
904 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
905 if (edge->indirect_info->polymorphic)
906 {
907 unsigned int i;
908 void *cache_token;
909 bool final;
910 vec <cgraph_node *>targets
911 = possible_polymorphic_call_targets
912 (edge, &final, &cache_token);
913 if (!reachable_call_targets.add (cache_token))
914 {
915 for (i = 0; i < targets.length (); i++)
916 {
917 struct cgraph_node *callee = targets[i];
918
919 /* Adding an external declaration into the unit serves
920 no purpose and just increases its boundary. */
921 if (callee->definition
922 && !lto_symtab_encoder_in_partition_p
923 (encoder, callee))
924 {
925 gcc_assert (!callee->inlined_to);
926 add_node_to (encoder, callee, false);
927 }
928 }
929 }
930 }
931 }
932 /* Be sure to also insert alias targets and thunk callees. These need
933 to stay to aid local calling conventions. */
934 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
935 {
936 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
937 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
938
939 if (node->alias && node->analyzed)
940 create_references (encoder, node);
941 if (cnode
942 && cnode->thunk && !cnode->inlined_to)
943 add_node_to (encoder, cnode->callees->callee, false);
944 while (node->transparent_alias && node->analyzed)
945 {
946 node = node->get_alias_target ();
947 if (is_a <cgraph_node *> (node))
948 add_node_to (encoder, dyn_cast <cgraph_node *> (node),
949 false);
950 else
951 lto_symtab_encoder_encode (encoder, node);
952 }
953 }
954 lto_symtab_encoder_delete (in_encoder);
955 return encoder;
956}
957
958/* Output the part of the symtab in the current partition. */
959
960void
961output_symtab (void)
962{
963 struct cgraph_node *node;
964 struct lto_simple_output_block *ob;
965 int i, n_nodes;
966 lto_symtab_encoder_t encoder;
967
968 if (flag_wpa)
969 output_cgraph_opt_summary ();
970
971 ob = lto_create_simple_output_block (LTO_section_symtab_nodes);
972
973 output_profile_summary (ob);
974
975 /* An encoder for cgraph nodes should have been created by
976 ipa_write_summaries_1. */
977 gcc_assert (ob->decl_state->symtab_node_encoder);
978 encoder = ob->decl_state->symtab_node_encoder;
979
980 /* Write out the nodes. We must first output a node and then its clones,
981 otherwise at the time we read the node back there would be nothing to clone
982 from. */
983 n_nodes = lto_symtab_encoder_size (encoder);
984 for (i = 0; i < n_nodes; i++)
985 {
986 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
987 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
988 lto_output_node (ob, cnode, encoder);
989 else
990 lto_output_varpool_node (ob, dyn_cast<varpool_node *> (node), encoder);
991 }
992
993 /* Go over the nodes in SET again to write edges. */
994 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
995 {
996 node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder, i));
997 if (node
998 && ((node->thunk && !node->inlined_to)
999 || lto_symtab_encoder_in_partition_p (encoder, node)))
1000 {
1001 output_outgoing_cgraph_edges (node->callees, ob, encoder);
1002 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
1003 }
1004 }
1005
1006 streamer_write_uhwi_stream (ob->main_stream, 0);
1007
1008 lto_destroy_simple_output_block (ob);
1009
1010 /* Emit toplevel asms.
1011 When doing WPA we must output every asm just once. Since we do not partition asm
1012 nodes at all, output them to the first output. This is kind of a hack, but should work
1013 well. */
1014 if (!asm_nodes_output)
1015 {
1016 asm_nodes_output = true;
1017 lto_output_toplevel_asms ();
1018 }
1019
1020 output_refs (encoder);
1021}
1022
1023/* Return identifier encoded in IB as a plain string. */
1024
1025static tree
1026read_identifier (class lto_input_block *ib)
1027{
1028 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1029 tree id;
1030
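/* Strings are stored NUL-terminated within the section; a missing terminator before the end of the block means the stream is corrupt. */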
1031 if (ib->data[ib->p + len])
1032 lto_section_overrun (ib);
1033 if (!len)
1034 {
1035 ib->p++;
1036 return NULL;
1037 }
1038 id = get_identifier (ib->data + ib->p);
1039 ib->p += len + 1;
1040 return id;
1041}
1042
1043/* Return string encoded in IB, NULL if string is empty. */
1044
1045static const char *
1046read_string (class lto_input_block *ib)
1047{
1048 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1049 const char *str;
1050
1051 if (ib->data[ib->p + len])
1052 lto_section_overrun (ib);
1053 if (!len)
1054 {
1055 ib->p++;
1056 return NULL;
1057 }
1058 str = ib->data + ib->p;
1059 ib->p += len + 1;
1060 return str;
1061}
1062
1063/* Output function/variable tables that will allow libgomp to look up offload
1064 target code.
1065 OFFLOAD_FUNCS is filled in expand_omp_target, OFFLOAD_VARS is filled in
1066 varpool_node::get_create. In WHOPR (partitioned) mode during the WPA stage
1067 both OFFLOAD_FUNCS and OFFLOAD_VARS are filled by input_offload_tables. */
1068
1069void
1070output_offload_tables (void)
1071{
1072 bool output_requires = (flag_openmp
1073 && (omp_requires_mask & OMP_REQUIRES_TARGET_USED) != 0);
1074 if (vec_safe_is_empty (offload_funcs) && vec_safe_is_empty (offload_vars)
1075 && !output_requires)
1076 return;
1077
1078 struct lto_simple_output_block *ob
1079 = lto_create_simple_output_block (LTO_section_offload_table);
1080
1081 for (unsigned i = 0; i < vec_safe_length (offload_funcs); i++)
1082 {
1083 symtab_node *node = symtab_node::get ((*offload_funcs)[i]);
1084 if (!node)
1085 continue;
1086 node->force_output = true;
1087 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1088 LTO_symtab_last_tag, LTO_symtab_unavail_node);
1089 lto_output_fn_decl_ref (ob->decl_state, ob->main_stream,
1090 (*offload_funcs)[i]);
1091 }
1092
1093 for (unsigned i = 0; i < vec_safe_length (offload_vars); i++)
1094 {
1095 symtab_node *node = symtab_node::get ((*offload_vars)[i]);
1096 if (!node)
1097 continue;
1098 node->force_output = true;
1099 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1100 LTO_symtab_last_tag, LTO_symtab_variable);
1101 lto_output_var_decl_ref (ob->decl_state, ob->main_stream,
1102 (*offload_vars)[i]);
1103 }
1104
1105 if (output_requires)
1106 {
1107 HOST_WIDE_INT val = ((HOST_WIDE_INT) omp_requires_mask
1108 & (OMP_REQUIRES_UNIFIED_ADDRESS
1109 | OMP_REQUIRES_UNIFIED_SHARED_MEMORY
1110 | OMP_REQUIRES_REVERSE_OFFLOAD
1111 | OMP_REQUIRES_TARGET_USED));
1112 /* (Mis)use LTO_symtab_edge for this variable. */
1113 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1114 LTO_symtab_last_tag, LTO_symtab_edge);
1115 streamer_write_hwi_stream (ob->main_stream, val);
1116 }
1117
1118 streamer_write_uhwi_stream (ob->main_stream, 0);
1119 lto_destroy_simple_output_block (ob);
1120
1121 /* In WHOPR mode during the WPA stage the joint offload tables need to be
1122 streamed to one partition only. That's why we free offload_funcs and
1123 offload_vars after the first call of output_offload_tables. */
1124 if (flag_wpa)
1125 {
1126 vec_free (offload_funcs);
1127 vec_free (offload_vars);
1128 }
1129}
1130
1131/* Verify the partitioning of NODE. */
1132
1133static inline void
1134verify_node_partition (symtab_node *node)
1135{
1136 if (flag_ltrans)
1137 return;
1138
1139#ifdef ACCEL_COMPILER
1140 if (node->in_other_partition)
1141 {
1142 if (TREE_CODE (node->decl) == FUNCTION_DECL)
1143 error_at (DECL_SOURCE_LOCATION (node->decl),
1144 "function %qs has been referenced in offloaded code but"
1145 " hasn%'t been marked to be included in the offloaded code",
1146 node->name ());
1147 else if (VAR_P (node->decl))
1148 error_at (DECL_SOURCE_LOCATION (node->decl),
1149 "variable %qs has been referenced in offloaded code but"
1150 " hasn%'t been marked to be included in the offloaded code",
1151 node->name ());
1152 else
1153 gcc_unreachable ();
1154 }
1155#else
1156 gcc_assert (!node->in_other_partition
1157 && !node->used_from_other_partition);
1158#endif
1159}
1160
1161/* Overwrite the information in NODE based on FILE_DATA, TAG and the
1162 flags in BP. This is called either to initialize
1163 NODE or to replace the values in it, for instance because the first
1164 time we saw it, the function body was not available but now it
1165 is. BP is a bitpack with all the bitflags for NODE read from the
1166 stream. Initialize HAS_THUNK_INFO to indicate if thunk info should
1167 be streamed in. */
1168
1169static void
1170input_overwrite_node (struct lto_file_decl_data *file_data,
1171 struct cgraph_node *node,
1172 enum LTO_symtab_tags tag,
1173 struct bitpack_d *bp, bool *has_thunk_info)
1174{
1175 node->aux = (void *) tag;
1176 node->lto_file_data = file_data;
1177
1178 node->local = bp_unpack_value (bp, 1);
1179 node->externally_visible = bp_unpack_value (bp, 1);
1180 node->no_reorder = bp_unpack_value (bp, 1);
1181 node->definition = bp_unpack_value (bp, 1);
1182 node->versionable = bp_unpack_value (bp, 1);
1183 node->can_change_signature = bp_unpack_value (bp, 1);
1184 node->redefined_extern_inline = bp_unpack_value (bp, 1);
1185 node->force_output = bp_unpack_value (bp, 1);
1186 node->forced_by_abi = bp_unpack_value (bp, 1);
1187 node->unique_name = bp_unpack_value (bp, 1);
1188 node->body_removed = bp_unpack_value (bp, 1);
1189 node->semantic_interposition = bp_unpack_value (bp, 1);
1190 node->implicit_section = bp_unpack_value (bp, 1);
1191 node->address_taken = bp_unpack_value (bp, 1);
1192 node->used_from_other_partition = bp_unpack_value (bp, 1);
1193 node->lowered = bp_unpack_value (bp, 1);
1194 node->analyzed = tag == LTO_symtab_analyzed_node;
1195 node->in_other_partition = bp_unpack_value (bp, 1);
1196 if (node->in_other_partition
1197 /* Avoid updating decl when we are seeing just inline clone.
1198 When inlining function that has functions already inlined into it,
1199 we produce clones of inline clones.
1200
1201 WPA partitioning might put each clone into different unit and
1202 we might end up streaming inline clone from other partition
1203 to support clone we are interested in. */
1204 && (!node->clone_of
1205 || node->clone_of->decl != node->decl))
1206 {
1207 DECL_EXTERNAL (node->decl) = 1;
1208 TREE_STATIC (node->decl) = 0;
1209 }
1210 node->alias = bp_unpack_value (bp, 1);
1211 node->transparent_alias = bp_unpack_value (bp, 1);
1212 node->weakref = bp_unpack_value (bp, 1);
1213 node->symver = bp_unpack_value (bp, 1);
1214 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
1215 node->only_called_at_startup = bp_unpack_value (bp, 1);
1216 node->only_called_at_exit = bp_unpack_value (bp, 1);
1217 node->tm_clone = bp_unpack_value (bp, 1);
1218 node->calls_comdat_local = bp_unpack_value (bp, 1);
1219 node->icf_merged = bp_unpack_value (bp, 1);
1220 node->nonfreeing_fn = bp_unpack_value (bp, 1);
1221 node->merged_comdat = bp_unpack_value (bp, 1);
1222 node->merged_extern_inline = bp_unpack_value (bp, 1);
1223 node->thunk = bp_unpack_value (bp, 1);
1224 node->parallelized_function = bp_unpack_value (bp, 1);
1225 node->declare_variant_alt = bp_unpack_value (bp, 1);
1226 node->calls_declare_variant_alt = bp_unpack_value (bp, 1);
1227 *has_thunk_info = bp_unpack_value (bp, 1);
1228 node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
1229 LDPR_NUM_KNOWN);
1230 node->split_part = bp_unpack_value (bp, 1);
1231 verify_node_partition (node);
1232}
1233
1234/* Return the identifier of the symbol that DECL is an alias of. */
1235
1236static tree
1237get_alias_symbol (tree decl)
1238{
1239 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1240 return get_identifier (TREE_STRING_POINTER
1241 (TREE_VALUE (TREE_VALUE (alias))));
1242}
1243
1244/* Read a node from input_block IB. TAG is the node's tag just read.
1245 Return the node read or overwritten. */
1246
1247static struct cgraph_node *
1248input_node (struct lto_file_decl_data *file_data,
1249 class lto_input_block *ib,
1250 enum LTO_symtab_tags tag,
1251 vec<symtab_node *> nodes)
1252{
1253 gcc::pass_manager *passes = g->get_passes ();
1254 tree fn_decl;
1255 struct cgraph_node *node;
1256 struct bitpack_d bp;
1257 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
1258 int clone_ref;
1259 int order;
1260 int i, count;
1261 tree group;
1262 const char *section;
1263 order = streamer_read_hwi (ib) + file_data->order_base;
1264 clone_ref = streamer_read_hwi (ib);
1265 bool has_thunk_info;
1266
1267 fn_decl = lto_input_fn_decl_ref (ib, file_data);
1268
1269 if (clone_ref != LCC_NOT_FOUND)
1270 {
1271 node = dyn_cast<cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
1272 profile_count::uninitialized (), false,
1273 vNULL, false, NULL, NULL);
1274 }
1275 else
1276 {
1277 /* Declarations of functions can already be merged with a declaration
1278 from another input file. We keep the cgraph unmerged until after streaming
1279 of IPA passes is done. Always forcibly create a fresh node. */
1280 node = symtab->create_empty ();
1281 node->decl = fn_decl;
1282 if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (fn_decl)))
1283 node->ifunc_resolver = 1;
1284 node->register_symbol ();
1285 }
1286
1287 node->order = order;
1288 if (order >= symtab->order)
1289 symtab->order = order + 1;
1290
1291 node->count = profile_count::stream_in (ib);
1292 node->count_materialization_scale = streamer_read_hwi (ib);
1293
1294 count = streamer_read_hwi (ib);
1295 node->ipa_transforms_to_apply = vNULL;
1296 for (i = 0; i < count; i++)
1297 {
1298 opt_pass *pass;
1299 int pid = streamer_read_hwi (ib);
1300
1301 gcc_assert (pid < passes->passes_by_id_size);
1302 pass = passes->passes_by_id[pid];
1303 node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass_d *) pass);
1304 }
1305
1306 if (tag == LTO_symtab_analyzed_node)
1307 ref = streamer_read_hwi (ib);
1308
1309 group = read_identifier (ib);
1310 if (group)
1311 ref2 = streamer_read_hwi (ib);
1312
1313 /* Make sure that we have not read this node before. Nodes that
1314 have already been read will have their tag stored in the 'aux'
1315 field. Since built-in functions can be referenced in multiple
1316 functions, they are expected to be read more than once. */
1317 if (node->aux && !fndecl_built_in_p (node->decl))
1318 internal_error ("bytecode stream: found multiple instances of cgraph "
1319 "node with uid %d", node->get_uid ());
1320
1321 node->tp_first_run = streamer_read_uhwi (ib);
1322
1323 bp = streamer_read_bitpack (ib);
1324
1325 input_overwrite_node (file_data, node, tag, &bp, &has_thunk_info);
1326
1327 /* Store a reference for now, and fix up later to be a pointer. */
1328 node->inlined_to = (cgraph_node *) (intptr_t) ref;
1329
1330 if (group)
1331 {
1332 node->set_comdat_group (group);
1333 /* Store a reference for now, and fix up later to be a pointer. */
1334 node->same_comdat_group = (symtab_node *) (intptr_t) ref2;
1335 }
1336 else
1337 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1338 section = read_string (ib);
1339 if (section)
1340 node->set_section_for_node (section);
1341
1342 if (node->alias && !node->analyzed && node->weakref)
1343 node->alias_target = get_alias_symbol (node->decl);
1344 node->profile_id = streamer_read_hwi (ib);
1345 node->unit_id = streamer_read_hwi (ib) + file_data->unit_base;
1346 if (symtab->max_unit < node->unit_id)
1347 symtab->max_unit = node->unit_id;
1348 if (DECL_STATIC_CONSTRUCTOR (node->decl))
1349 node->set_init_priority (streamer_read_hwi (ib));
1350 if (DECL_STATIC_DESTRUCTOR (node->decl))
1351 node->set_fini_priority (streamer_read_hwi (ib));
1352
1353 if (has_thunk_info)
1354 thunk_info::get_create (node)->stream_in (ib);
1355
1356 return node;
1357}
1358
1359/* Read a varpool node from input_block IB.
1360 Return the node read or overwritten. */
1361
1362static varpool_node *
1363input_varpool_node (struct lto_file_decl_data *file_data,
1364 class lto_input_block *ib)
1365{
1366 tree var_decl;
1367 varpool_node *node;
1368 struct bitpack_d bp;
1369 int ref = LCC_NOT_FOUND;
1370 int order;
1371 tree group;
1372 const char *section;
1373
1374 order = streamer_read_hwi (ib) + file_data->order_base;
1375 var_decl = lto_input_var_decl_ref (ib, file_data);
1376
1377 /* Declarations of variables can already be merged with a declaration
1378 from another input file. We keep the symtab unmerged until after streaming
1379 of IPA passes is done. Always forcibly create a fresh node. */
1380 node = varpool_node::create_empty ();
1381 node->decl = var_decl;
1382 node->register_symbol ();
1383
1384 node->order = order;
1385 if (order >= symtab->order)
1386 symtab->order = order + 1;
1387 node->lto_file_data = file_data;
1388
1389 bp = streamer_read_bitpack (ib);
1390 node->externally_visible = bp_unpack_value (&bp, 1);
1391 node->no_reorder = bp_unpack_value (&bp, 1);
1392 node->force_output = bp_unpack_value (&bp, 1);
1393 node->forced_by_abi = bp_unpack_value (&bp, 1);
1394 node->unique_name = bp_unpack_value (&bp, 1);
1395 node->body_removed = bp_unpack_value (&bp, 1);
1396 node->semantic_interposition = bp_unpack_value (&bp, 1);
1397 node->implicit_section = bp_unpack_value (&bp, 1);
1398 node->writeonly = bp_unpack_value (&bp, 1);
1399 node->definition = bp_unpack_value (&bp, 1);
1400 node->alias = bp_unpack_value (&bp, 1);
1401 node->transparent_alias = bp_unpack_value (&bp, 1);
1402 node->weakref = bp_unpack_value (&bp, 1);
1403 node->symver = bp_unpack_value (&bp, 1);
1404 node->analyzed = bp_unpack_value (&bp, 1);
1405 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1406 node->in_other_partition = bp_unpack_value (&bp, 1);
1407 if (node->in_other_partition)
1408 {
1409 DECL_EXTERNAL (node->decl) = 1;
1410 TREE_STATIC (node->decl) = 0;
1411 }
1412 if (node->alias && !node->analyzed && node->weakref)
1413 node->alias_target = get_alias_symbol (node->decl);
1414 node->tls_model = (enum tls_model)bp_unpack_value (&bp, 3);
1415 node->used_by_single_function = bp_unpack_value (&bp, 1);
1416 node->dynamically_initialized = bp_unpack_value (&bp, 1);
1417 group = read_identifier (ib);
1418 if (group)
1419 {
1420 node->set_comdat_group (group);
1421 ref = streamer_read_hwi (ib);
1422 /* Store a reference for now, and fix up later to be a pointer. */
1423 node->same_comdat_group = (symtab_node *) (intptr_t) ref;
1424 }
1425 else
1426 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1427 section = read_string (ib);
1428 if (section)
1429 node->set_section_for_node (section);
1430 node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1431 LDPR_NUM_KNOWN);
1432 verify_node_partition (node);
1433 return node;
1434}
1435
1436/* Read a reference from input_block IB and attach it to REFERRING_NODE,
1437 using NODES to look up the referred symbol. */
1438
1439static void
1440input_ref (class lto_input_block *ib,
1441 symtab_node *referring_node,
1442 vec<symtab_node *> nodes)
1443{
1444 symtab_node *node = NULL;
1445 struct bitpack_d bp;
1446 enum ipa_ref_use use;
1447 bool speculative;
1448 struct ipa_ref *ref;
1449
1450 bp = streamer_read_bitpack (ib);
1451 use = (enum ipa_ref_use) bp_unpack_value (&bp, 3);
1452 speculative = bp_unpack_value (&bp, 1);
1453 node = nodes[streamer_read_hwi (ib)];
1454 ref = referring_node->create_reference (node, use);
1455 ref->speculative = speculative;
1456 if (is_a <cgraph_node *> (referring_node))
1457 {
1458 ref->lto_stmt_uid = streamer_read_hwi (ib);
1459 bp = streamer_read_bitpack (ib);
1460 ref->speculative_id = bp_unpack_value (&bp, 16);
1461 }
1462}
1463
1464/* Read an edge from IB. NODES points to a vector of previously read nodes for
1465 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1466 edge being read is indirect (in the sense that it has
1467 indirect_unknown_callee set). */
1468
1469static void
1470input_edge (class lto_input_block *ib, vec<symtab_node *> nodes,
1471 bool indirect)
1472{
1473 struct cgraph_node *caller, *callee;
1474 struct cgraph_edge *edge;
1475 unsigned int stmt_id, speculative_id;
1476 profile_count count;
1477 cgraph_inline_failed_t inline_failed;
1478 struct bitpack_d bp;
1479 int ecf_flags = 0;
1480
1481 caller = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
1482 if (caller == NULL || caller->decl == NULL_TREE)
1483 internal_error ("bytecode stream: no caller found while reading edge");
1484
1485 if (!indirect)
1486 {
1487 callee = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
1488 if (callee == NULL || callee->decl == NULL_TREE)
1489 internal_error ("bytecode stream: no callee found while reading edge");
1490 }
1491 else
1492 callee = NULL;
1493
1494 count = profile_count::stream_in (ib);
1495
1496 bp = streamer_read_bitpack (ib);
1497 inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
1498 stmt_id = bp_unpack_var_len_unsigned (&bp);
1499 speculative_id = bp_unpack_value (&bp, 16);
1500
1501 if (indirect)
1502 edge = caller->create_indirect_edge (NULL, 0, count);
1503 else
1504 edge = caller->create_edge (callee, NULL, count);
1505
1506 edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1507 edge->speculative = bp_unpack_value (&bp, 1);
1508 edge->lto_stmt_uid = stmt_id;
1509 edge->speculative_id = speculative_id;
1510 edge->inline_failed = inline_failed;
1511 edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1512 edge->can_throw_external = bp_unpack_value (&bp, 1);
1513 edge->in_polymorphic_cdtor = bp_unpack_value (&bp, 1);
1514 if (indirect)
1515 {
1516 if (bp_unpack_value (&bp, 1))
1517 ecf_flags |= ECF_CONST;
1518 if (bp_unpack_value (&bp, 1))
1519 ecf_flags |= ECF_PURE;
1520 if (bp_unpack_value (&bp, 1))
1521 ecf_flags |= ECF_NORETURN;
1522 if (bp_unpack_value (&bp, 1))
1523 ecf_flags |= ECF_MALLOC;
1524 if (bp_unpack_value (&bp, 1))
1525 ecf_flags |= ECF_NOTHROW;
1526 if (bp_unpack_value (&bp, 1))
1527 ecf_flags |= ECF_RETURNS_TWICE;
1528 edge->indirect_info->ecf_flags = ecf_flags;
1529
1530 edge->indirect_info->num_speculative_call_targets
1531 = bp_unpack_value (&bp, 16);
1532 }
1533}
1534
1535
1536/* Read a cgraph from IB using the info in FILE_DATA. */
1537
1538static vec<symtab_node *>
1539input_cgraph_1 (struct lto_file_decl_data *file_data,
1540 class lto_input_block *ib)
1541{
1542 enum LTO_symtab_tags tag;
1543 vec<symtab_node *> nodes = vNULL;
1544 symtab_node *node;
1545 unsigned i;
1546
1547 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
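/* Rebase symbol order and unit ids for this file so that they stay unique across all files read into the current link. */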
1548 file_data->order_base = symtab->order;
1549 file_data->unit_base = symtab->max_unit + 1;
1550 while (tag)
1551 {
1552 if (tag == LTO_symtab_edge)
1553 input_edge (ib, nodes, false);
1554 else if (tag == LTO_symtab_indirect_edge)
1555 input_edge (ib, nodes, true);
1556 else if (tag == LTO_symtab_variable)
1557 {
1558 node = input_varpool_node (file_data, ib);
1559 nodes.safe_push (node);
1560 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1561 }
1562 else
1563 {
1564 node = input_node (file_data, ib, tag, nodes);
1565 if (node == NULL || node->decl == NULL_TREE)
1566 internal_error ("bytecode stream: found empty cgraph node");
1567 nodes.safe_push (node);
1568 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1569 }
1570
1571 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1572 }
1573
1574 lto_input_toplevel_asms (file_data, file_data->order_base);
1575
1576 /* AUX pointers should all be non-zero for function nodes read from the stream. */
1577 if (flag_checking)
1578 {
1579 FOR_EACH_VEC_ELT (nodes, i, node)
1580 gcc_assert (node->aux || !is_a <cgraph_node *> (node));
1581 }
1582 FOR_EACH_VEC_ELT (nodes, i, node)
1583 {
1584 int ref;
1585 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1586 {
1587 ref = (int) (intptr_t) cnode->inlined_to;
1588
1589 /* We share declarations of builtins, so we may read the same node twice. */
1590 if (!node->aux)
1591 continue;
1592 node->aux = NULL;
1593
1594 /* Fixup inlined_to from reference to pointer. */
1595 if (ref != LCC_NOT_FOUND)
1596 dyn_cast<cgraph_node *> (node)->inlined_to
1597 = dyn_cast<cgraph_node *> (nodes[ref]);
1598 else
1599 cnode->inlined_to = NULL;
1600 }
1601
1602 ref = (int) (intptr_t) node->same_comdat_group;
1603
1604 /* Fixup same_comdat_group from reference to pointer. */
1605 if (ref != LCC_NOT_FOUND)
1606 node->same_comdat_group = nodes[ref];
1607 else
1608 node->same_comdat_group = NULL;
1609 }
1610 FOR_EACH_VEC_ELT (nodes, i, node)
1611 node->aux = is_a <cgraph_node *> (node) ? (void *)1 : NULL;
1612 return nodes;
1613}
1614
1615/* Input ipa_refs. */
1616
1617static void
1618input_refs (class lto_input_block *ib,
1619 vec<symtab_node *> nodes)
1620{
1621 int count;
1622 int idx;
1623 while (true)
1624 {
1625 symtab_node *node;
1626 count = streamer_read_uhwi (ib);
1627 if (!count)
1628 break;
1629 idx = streamer_read_uhwi (ib);
1630 node = nodes[idx];
1631 while (count)
1632 {
1633 input_ref (ib, node, nodes);
1634 count--;
1635 }
1636 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1637 if (cnode->declare_variant_alt)
1638 omp_lto_input_declare_variant_alt (ib, cnode, nodes);
1639 }
1640}
1641
1642/* Input profile_info from IB. */
1643static void
1644input_profile_summary (class lto_input_block *ib,
1645 struct lto_file_decl_data *file_data)
1646{
1647 unsigned int runs = streamer_read_uhwi (ib);
1648 if (runs)
1649 {
1650 file_data->profile_info.runs = runs;
1651
1652 /* IPA-profile computes hot bb threshold based on cumulated
1653 whole program profile. We need to stream it down to ltrans. */
1654 if (flag_ltrans)
1655 set_hot_bb_threshold (streamer_read_gcov_count (ib));
1656 }
1657
1658}
1659
1660/* Rescale profile summaries to the same number of runs in the whole unit. */
1661
1662static void
1663merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1664{
1665 struct lto_file_decl_data *file_data;
1666 unsigned int j;
1667 gcov_unsigned_t max_runs = 0;
1668 struct cgraph_node *node;
1669 struct cgraph_edge *edge;
1670
1671 /* Find unit with maximal number of runs. If we ever get serious about
1672 roundoff errors, we might also consider computing the least common
1673 multiple. */
1674 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1675 if (max_runs < file_data->profile_info.runs)
1676 max_runs = file_data->profile_info.runs;
1677
1678 if (!max_runs)
1679 return;
1680
1681 /* Simple overflow check. We probably don't need to support that many train
1682 runs. Such a large value probably implies data corruption anyway. */
1683 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1684 {
1685 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1686 INT_MAX / REG_BR_PROB_BASE);
1687 return;
1688 }
1689
1690 profile_info = XCNEW (gcov_summary);
1691 profile_info->runs = max_runs;
1692
1693  /* If merging already happened at WPA time, we are done. */
1694 if (flag_ltrans)
1695 return;
1696
1697 /* Now compute count_materialization_scale of each node.
1698 During LTRANS we already have values of count_materialization_scale
1699 computed, so just update them. */
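  /* For example, assuming the node starts with the default scale of
     REG_BR_PROB_BASE, a unit trained with 2 runs in a program whose maximum
     is 6 runs ends up with scale == 3 * REG_BR_PROB_BASE, i.e. its IPA
     counts below are multiplied by 3.  */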
1700 FOR_EACH_FUNCTION (node)
1701 if (node->lto_file_data
1702 && node->lto_file_data->profile_info.runs)
1703 {
1704 int scale;
1705
1706 scale = RDIV (node->count_materialization_scale * max_runs,
1707 node->lto_file_data->profile_info.runs);
1708 node->count_materialization_scale = scale;
1709 if (scale < 0)
1710 fatal_error (input_location, "Profile information in %s corrupted",
1711 file_data->file_name);
1712
1713 if (scale == REG_BR_PROB_BASE)
1714 continue;
1715 for (edge = node->callees; edge; edge = edge->next_callee)
1716 if (edge->count.ipa ().nonzero_p ())
1717 edge->count = edge->count.apply_scale (scale, REG_BR_PROB_BASE);
1718 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
1719 if (edge->count.ipa ().nonzero_p ())
1720 edge->count = edge->count.apply_scale (scale, REG_BR_PROB_BASE);
1721 if (node->count.ipa ().nonzero_p ())
1722 node->count = node->count.apply_scale (scale, REG_BR_PROB_BASE);
1723 }
1724}
1725
1726/* Input and merge the symtab from each of the .o files passed to
1727 lto1. */
1728
1729void
1730input_symtab (void)
1731{
1732 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1733 struct lto_file_decl_data *file_data;
1734 unsigned int j = 0;
1735 struct cgraph_node *node;
1736
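  /* For every LTO object file read the symtab nodes section followed by the
     IPA references section; the optimization summaries are read only during
     LTRANS.  */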
1737 while ((file_data = file_data_vec[j++]))
1738 {
1739 const char *data;
1740 size_t len;
1741 class lto_input_block *ib;
1742 vec<symtab_node *> nodes;
1743
1744 ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
1745 &data, &len);
1746 if (!ib)
1747 fatal_error (input_location,
1748 "cannot find LTO cgraph in %s", file_data->file_name);
1749 input_profile_summary (ib, file_data);
1750 file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
1751 nodes = input_cgraph_1 (file_data, ib);
1752 lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
1753 ib, data, len);
1754
1755 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1756 &data, &len);
1757 if (!ib)
1758 fatal_error (input_location, "cannot find LTO section refs in %s",
1759 file_data->file_name);
1760 input_refs (ib, nodes);
1761 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1762 ib, data, len);
1763 if (flag_ltrans)
1764 input_cgraph_opt_summary (nodes);
1765 nodes.release ();
1766 }
1767
1768 merge_profile_summaries (file_data_vec);
1769
1770 /* Clear out the aux field that was used to store enough state to
1771 tell which nodes should be overwritten. */
1772 FOR_EACH_FUNCTION (node)
1773 {
1774 /* Some nodes may have been created by cgraph_node. This
1775 happens when the callgraph contains nested functions. If the
1776 node for the parent function was never emitted to the gimple
1777 file, cgraph_node will create a node for it when setting the
1778 context of the nested function. */
1779 if (node->lto_file_data)
1780 node->aux = NULL;
1781 }
1782}
1783
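/* Print the textual form of the clauses set in REQUIRES_MASK into BUF, which
   has SIZE bytes available, separating them by ", "; e.g. a mask with
   GOMP_REQUIRES_UNIFIED_ADDRESS and GOMP_REQUIRES_REVERSE_OFFLOAD yields
   "unified_address, reverse_offload".  */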
1784static void
1785omp_requires_to_name (char *buf, size_t size, HOST_WIDE_INT requires_mask)
1786{
1787 char *end = buf + size, *p = buf;
1788 if (requires_mask & GOMP_REQUIRES_UNIFIED_ADDRESS)
1789 p += snprintf (p, end - p, "unified_address");
1790 if (requires_mask & GOMP_REQUIRES_UNIFIED_SHARED_MEMORY)
1791 p += snprintf (p, end - p, "%sunified_shared_memory",
1792 (p == buf ? "" : ", "));
1793 if (requires_mask & GOMP_REQUIRES_REVERSE_OFFLOAD)
1794 p += snprintf (p, end - p, "%sreverse_offload",
1795 (p == buf ? "" : ", "));
1796}
1797
1798/* Input function/variable tables that will allow libgomp to look up offload
1799 target code, and store them into OFFLOAD_FUNCS and OFFLOAD_VARS. */
1800
1801void
1802input_offload_tables (bool do_force_output)
1803{
1804 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1805 struct lto_file_decl_data *file_data;
1806 unsigned int j = 0;
1807 const char *requires_fn = NULL;
1808 tree requires_decl = NULL_TREE;
1809
1810 omp_requires_mask = (omp_requires) 0;
1811
1812 while ((file_data = file_data_vec[j++]))
1813 {
1814 const char *data;
1815 size_t len;
1816 class lto_input_block *ib
1817 = lto_create_simple_input_block (file_data, LTO_section_offload_table,
1818 &data, &len);
1819 if (!ib)
1820 continue;
1821
1822 tree tmp_decl = NULL_TREE;
1823 enum LTO_symtab_tags tag
1824 = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1825 while (tag)
1826 {
1827 if (tag == LTO_symtab_unavail_node)
1828 {
1829 tree fn_decl
1830 = lto_input_fn_decl_ref (ib, file_data);
1831 vec_safe_push (offload_funcs, fn_decl);
1832
1833              /* Prevent IPA from removing fn_decl as unreachable, since there
1834                 may be no refs from the parent function to fn_decl in offload
1835                 LTO mode. */
1836 if (do_force_output)
1837 cgraph_node::get (fn_decl)->mark_force_output ();
1838 tmp_decl = fn_decl;
1839 }
1840 else if (tag == LTO_symtab_variable)
1841 {
1842 tree var_decl
1843 = lto_input_var_decl_ref (ib, file_data);
1844 vec_safe_push (offload_vars, var_decl);
1845
1846 /* Prevent IPA from removing var_decl as unused, since there
1847 may be no refs to var_decl in offload LTO mode. */
1848 if (do_force_output)
1849 varpool_node::get (var_decl)->force_output = 1;
1850 tmp_decl = var_decl;
1851 }
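          /* Within the offload table the LTO_symtab_edge tag does not describe
             a call graph edge; the value that follows is the OpenMP 'requires'
             mask of the unit, checked for consistency across all units.  */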
1852 else if (tag == LTO_symtab_edge)
1853 {
1854 static bool error_emitted = false;
1855 HOST_WIDE_INT val = streamer_read_hwi (ib);
1856
1857 if (omp_requires_mask == 0)
1858 {
1859 omp_requires_mask = (omp_requires) val;
1860 requires_decl = tmp_decl;
1861 requires_fn = file_data->file_name;
1862 }
1863 else if (omp_requires_mask != val && !error_emitted)
1864 {
1865 const char *fn1 = requires_fn;
1866 if (requires_decl != NULL_TREE)
1867 {
1868 while (DECL_CONTEXT (requires_decl) != NULL_TREE
1869 && TREE_CODE (requires_decl) != TRANSLATION_UNIT_DECL)
1870 requires_decl = DECL_CONTEXT (requires_decl);
1871 if (requires_decl != NULL_TREE)
1872 fn1 = IDENTIFIER_POINTER (DECL_NAME (requires_decl));
1873 }
1874
1875 const char *fn2 = file_data->file_name;
1876 if (tmp_decl != NULL_TREE)
1877 {
1878 while (DECL_CONTEXT (tmp_decl) != NULL_TREE
1879 && TREE_CODE (tmp_decl) != TRANSLATION_UNIT_DECL)
1880 tmp_decl = DECL_CONTEXT (tmp_decl);
1881 if (tmp_decl != NULL_TREE)
1882                    fn2 = IDENTIFIER_POINTER (DECL_NAME (tmp_decl));
1883 }
1884
1885 char buf1[sizeof ("unified_address, unified_shared_memory, "
1886 "reverse_offload")];
1887 char buf2[sizeof ("unified_address, unified_shared_memory, "
1888 "reverse_offload")];
1889 omp_requires_to_name (buf2, sizeof (buf2),
1890 val != OMP_REQUIRES_TARGET_USED
1891 ? val
1892 : (HOST_WIDE_INT) omp_requires_mask);
1893 if (val != OMP_REQUIRES_TARGET_USED
1894 && omp_requires_mask != OMP_REQUIRES_TARGET_USED)
1895 {
1896 omp_requires_to_name (buf1, sizeof (buf1),
1897 omp_requires_mask);
1898 error ("OpenMP %<requires%> directive with non-identical "
1899 "clauses in multiple compilation units: %qs vs. "
1900 "%qs", buf1, buf2);
1901 inform (UNKNOWN_LOCATION, "%qs has %qs", fn1, buf1);
1902 inform (UNKNOWN_LOCATION, "%qs has %qs", fn2, buf2);
1903 }
1904 else
1905 {
1906 error ("OpenMP %<requires%> directive with %qs specified "
1907 "only in some compilation units", buf2);
1908 inform (UNKNOWN_LOCATION, "%qs has %qs",
1909 val != OMP_REQUIRES_TARGET_USED ? fn2 : fn1,
1910 buf2);
1911 inform (UNKNOWN_LOCATION, "but %qs has not",
1912 val != OMP_REQUIRES_TARGET_USED ? fn1 : fn2);
1913 }
1914 error_emitted = true;
1915 }
1916 }
1917 else
1918 fatal_error (input_location,
1919 "invalid offload table in %s", file_data->file_name);
1920
1921 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1922 }
1923
1924 lto_destroy_simple_input_block (file_data, LTO_section_offload_table,
1925 ib, data, len);
1926 }
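/* In the offload (accel) compiler pass the merged requires mask on via the
   file named by the GCC_OFFLOAD_OMP_REQUIRES_FILE environment variable;
   presumably the host-side tooling (mkoffload) reads it back from there.  */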
1927#ifdef ACCEL_COMPILER
1928 char *omp_requires_file = getenv ("GCC_OFFLOAD_OMP_REQUIRES_FILE");
1929 if (omp_requires_file == NULL || omp_requires_file[0] == '\0')
1930 fatal_error (input_location, "GCC_OFFLOAD_OMP_REQUIRES_FILE unset");
1931 FILE *f = fopen (omp_requires_file, "wb");
1932 if (!f)
1933    fatal_error (input_location, "cannot open omp_requires file %qs",
1934 omp_requires_file);
1935 uint32_t req_mask = omp_requires_mask;
1936 fwrite (&req_mask, sizeof (req_mask), 1, f);
1937 fclose (f);
1938#endif
1939}
1940
1941/* Return true when we need to output the optimization summary for NODE. */
1942
1943static bool
1944output_cgraph_opt_summary_p (struct cgraph_node *node)
1945{
1946 if (node->clone_of || node->former_clone_of)
1947 return true;
1948 clone_info *info = clone_info::get (node);
1949 return info && (info->tree_map || info->param_adjustments);
1950}
1951
1952/* Output optimization summary for EDGE to OB. */
1953static void
1954output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
1955 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
1956{
1957}
1958
1959/* Output optimization summary for NODE to OB. */
1960
1961static void
1962output_node_opt_summary (struct output_block *ob,
1963 struct cgraph_node *node,
1964 lto_symtab_encoder_t encoder)
1965{
1966 struct ipa_replace_map *map;
1967 int i;
1968 struct cgraph_edge *e;
1969
1970 /* TODO: Should this code be moved to ipa-param-manipulation? */
1971 struct bitpack_d bp;
1972 bp = bitpack_create (ob->main_stream);
1973 clone_info *info = clone_info::get (node);
1974
1975 bp_pack_value (&bp, (info && info->param_adjustments != NULL), 1);
1976 streamer_write_bitpack (&bp);
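  /* If the node has parameter adjustments, they are streamed as a count
     followed by one bit-packed record per adjusted parameter (plus trees for
     split and new parameters), then m_always_copy_start and a skip-return
     bit.  */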
1977 if (ipa_param_adjustments *adjustments
1978 = info ? info->param_adjustments : NULL)
1979 {
1980 streamer_write_uhwi (ob, vec_safe_length (adjustments->m_adj_params));
1981 ipa_adjusted_param *adj;
1982 FOR_EACH_VEC_SAFE_ELT (adjustments->m_adj_params, i, adj)
1983 {
1984 bp = bitpack_create (ob->main_stream);
1985 bp_pack_value (&bp, adj->base_index, IPA_PARAM_MAX_INDEX_BITS);
1986 bp_pack_value (&bp, adj->prev_clone_index, IPA_PARAM_MAX_INDEX_BITS);
1987 bp_pack_value (&bp, adj->op, 2);
1988 bp_pack_value (&bp, adj->param_prefix_index, 2);
1989 bp_pack_value (&bp, adj->prev_clone_adjustment, 1);
1990 bp_pack_value (&bp, adj->reverse, 1);
1991 bp_pack_value (&bp, adj->user_flag, 1);
1992 streamer_write_bitpack (&bp);
1993 if (adj->op == IPA_PARAM_OP_SPLIT
1994 || adj->op == IPA_PARAM_OP_NEW)
1995 {
1996 stream_write_tree (ob, adj->type, true);
1997 if (adj->op == IPA_PARAM_OP_SPLIT)
1998 {
1999 stream_write_tree (ob, adj->alias_ptr_type, true);
2000 streamer_write_uhwi (ob, adj->unit_offset);
2001 }
2002 }
2003 }
2004 streamer_write_hwi (ob, adjustments->m_always_copy_start);
2005 bp = bitpack_create (ob->main_stream);
2006 bp_pack_value (&bp, info->param_adjustments->m_skip_return, 1);
2007 streamer_write_bitpack (&bp);
2008 }
2009
2010 streamer_write_uhwi (ob, info ? vec_safe_length (info->tree_map) : 0);
2011 if (info)
2012 FOR_EACH_VEC_SAFE_ELT (info->tree_map, i, map)
2013 {
2014 streamer_write_uhwi (ob, map->parm_num);
2015 gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
2016 stream_write_tree (ob, map->new_tree, true);
2017 }
2018
2019 if (lto_symtab_encoder_in_partition_p (encoder, node))
2020 {
2021 for (e = node->callees; e; e = e->next_callee)
2022 output_edge_opt_summary (ob, e);
2023 for (e = node->indirect_calls; e; e = e->next_callee)
2024 output_edge_opt_summary (ob, e);
2025 }
2026}
2027
2028/* Output the optimization summaries stored in the callgraph.
2029   At the moment this is only the clone_info structure. */
2030
2031static void
2032output_cgraph_opt_summary (void)
2033{
2034 int i, n_nodes;
2035 lto_symtab_encoder_t encoder;
2036 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
2037 unsigned count = 0;
2038
2039 ob->symbol = NULL;
2040 encoder = ob->decl_state->symtab_node_encoder;
2041 n_nodes = lto_symtab_encoder_size (encoder);
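  /* First count the nodes that have a summary and stream that count, then
     stream an <encoder index, summary> pair for each such node.  */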
2042 for (i = 0; i < n_nodes; i++)
2043 {
2044 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
2045 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
2046 if (cnode && output_cgraph_opt_summary_p (cnode))
2047 count++;
2048 }
2049 streamer_write_uhwi (ob, count);
2050 for (i = 0; i < n_nodes; i++)
2051 {
2052 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
2053 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
2054 if (cnode && output_cgraph_opt_summary_p (cnode))
2055 {
2056 streamer_write_uhwi (ob, i);
2057 output_node_opt_summary (ob, cnode, encoder);
2058 }
2059 }
2060 produce_asm (ob, NULL);
2061 destroy_output_block (ob);
2062}
2063
2064/* Input optimization summary of EDGE. */
2065
2066static void
2067input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
2068 class lto_input_block *ib_main ATTRIBUTE_UNUSED)
2069{
2070}
2071
2072/* Input optimization summary of NODE. */
2073
2074static void
2075input_node_opt_summary (struct cgraph_node *node,
2076 class lto_input_block *ib_main,
2077 class data_in *data_in)
2078{
2079 int i;
2080 int count;
2081 struct cgraph_edge *e;
2082
2083 /* TODO: Should this code be moved to ipa-param-manipulation? */
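  /* The reads below must mirror the writes in output_node_opt_summary.  */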
2084 struct bitpack_d bp;
2085 bp = streamer_read_bitpack (ib_main);
2086 bool have_adjustments = bp_unpack_value (&bp, 1);
2087 clone_info *info = clone_info::get_create (node);
2088
2089 if (have_adjustments)
2090 {
2091 count = streamer_read_uhwi (ib_main);
2092 vec<ipa_adjusted_param, va_gc> *new_params = NULL;
2093 for (i = 0; i < count; i++)
2094 {
2095 ipa_adjusted_param adj;
2096 memset (&adj, 0, sizeof (adj));
2097 bp = streamer_read_bitpack (ib_main);
2098 adj.base_index = bp_unpack_value (&bp, IPA_PARAM_MAX_INDEX_BITS);
2099 adj.prev_clone_index
2100 = bp_unpack_value (&bp, IPA_PARAM_MAX_INDEX_BITS);
2101 adj.op = (enum ipa_parm_op) bp_unpack_value (&bp, 2);
2102 adj.param_prefix_index = bp_unpack_value (&bp, 2);
2103 adj.prev_clone_adjustment = bp_unpack_value (&bp, 1);
2104 adj.reverse = bp_unpack_value (&bp, 1);
2105 adj.user_flag = bp_unpack_value (&bp, 1);
2106 if (adj.op == IPA_PARAM_OP_SPLIT
2107 || adj.op == IPA_PARAM_OP_NEW)
2108 {
2109 adj.type = stream_read_tree (ib_main, data_in);
2110 if (adj.op == IPA_PARAM_OP_SPLIT)
2111 {
2112 adj.alias_ptr_type = stream_read_tree (ib_main, data_in);
2113 adj.unit_offset = streamer_read_uhwi (ib_main);
2114 }
2115 }
2116 vec_safe_push (new_params, adj);
2117 }
2118 int always_copy_start = streamer_read_hwi (ib_main);
2119 bp = streamer_read_bitpack (ib_main);
2120 bool skip_return = bp_unpack_value (&bp, 1);
2121 info->param_adjustments
2122 = (new (ggc_alloc <ipa_param_adjustments> ())
2123 ipa_param_adjustments (new_params, always_copy_start, skip_return));
2124 }
2125
2126 count = streamer_read_uhwi (ib_main);
2127 for (i = 0; i < count; i++)
2128 {
2129 struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
2130
2131 vec_safe_push (info->tree_map, map);
2132 map->parm_num = streamer_read_uhwi (ib_main);
2133 map->new_tree = stream_read_tree (ib_main, data_in);
2134 }
2135 for (e = node->callees; e; e = e->next_callee)
2136 input_edge_opt_summary (e, ib_main);
2137 for (e = node->indirect_calls; e; e = e->next_callee)
2138 input_edge_opt_summary (e, ib_main);
2139}
2140
2141/* Read the optimization summary section DATA of length LEN from FILE_DATA for NODES. */
2142
2143static void
2144input_cgraph_opt_section (struct lto_file_decl_data *file_data,
2145 const char *data, size_t len,
2146 vec<symtab_node *> nodes)
2147{
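  /* The section starts with an lto_function_header; the main stream follows
     the CFG area (unused here) and the string table sits at the end.  */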
2148 const struct lto_function_header *header =
2149 (const struct lto_function_header *) data;
2150 const int cfg_offset = sizeof (struct lto_function_header);
2151 const int main_offset = cfg_offset + header->cfg_size;
2152 const int string_offset = main_offset + header->main_size;
2153 class data_in *data_in;
2154 unsigned int i;
2155 unsigned int count;
2156
2157 lto_input_block ib_main ((const char *) data + main_offset,
2158 header->main_size, file_data->mode_table);
2159
2160 data_in =
2161 lto_data_in_create (file_data, (const char *) data + string_offset,
2162 header->string_size, vNULL);
2163 count = streamer_read_uhwi (&ib_main);
2164
2165 for (i = 0; i < count; i++)
2166 {
2167 int ref = streamer_read_uhwi (&ib_main);
2168 input_node_opt_summary (dyn_cast<cgraph_node *> (nodes[ref]),
2169 &ib_main, data_in);
2170 }
2171 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
2172 len);
2173 lto_data_in_delete (data_in);
2174}
2175
2176/* Input optimization summary of cgraph. */
2177
2178static void
2179input_cgraph_opt_summary (vec<symtab_node *> nodes)
2180{
2181 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2182 struct lto_file_decl_data *file_data;
2183 unsigned int j = 0;
2184
2185 while ((file_data = file_data_vec[j++]))
2186 {
2187 size_t len;
2188 const char *data
2189 = lto_get_summary_section_data (file_data, LTO_section_cgraph_opt_sum,
2190 &len);
2191 if (data)
2192 input_cgraph_opt_section (file_data, data, len, nodes);
2193 }
2194}