Fix streamer desynchronization caused by streamer debugging patch
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2020 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-streamer.h"
34 #include "alias.h"
35 #include "stor-layout.h"
36 #include "gimple-iterator.h"
37 #include "except.h"
38 #include "lto-symtab.h"
39 #include "cgraph.h"
40 #include "cfgloop.h"
41 #include "builtins.h"
42 #include "gomp-constants.h"
43 #include "debug.h"
44 #include "omp-offload.h"
45 #include "print-tree.h"
46 #include "tree-dfa.h"
47 #include "file-prefix-map.h" /* remap_debug_filename() */
48 #include "output.h"
49 #include "ipa-utils.h"
50
51
52 static void lto_write_tree (struct output_block*, tree, bool);
53
54 /* Clear the line info stored in OB. */
55
56 static void
57 clear_line_info (struct output_block *ob)
58 {
59 ob->current_file = NULL;
60 ob->current_line = 0;
61 ob->current_col = 0;
62 ob->current_sysp = false;
63 }
64
65
66 /* Create the output block and return it. SECTION_TYPE is
67 LTO_section_function_body or LTO_section_static_initializer. */
68
69 struct output_block *
70 create_output_block (enum lto_section_type section_type)
71 {
72 struct output_block *ob = XCNEW (struct output_block);
73 if (streamer_dump_file)
74 fprintf (streamer_dump_file, "Creating output block for %s\n",
75 lto_section_name[section_type]);
76
77 ob->section_type = section_type;
78 ob->decl_state = lto_get_out_decl_state ();
79 /* Only the global decl stream in non-WPA mode will ever be considered by
80 tree merging. */
81 if (!flag_wpa && section_type == LTO_section_decls)
82 ob->local_trees = new (hash_set <tree>);
83 ob->main_stream = XCNEW (struct lto_output_stream);
84 ob->string_stream = XCNEW (struct lto_output_stream);
85 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
86
87 if (section_type == LTO_section_function_body)
88 ob->cfg_stream = XCNEW (struct lto_output_stream);
89
90 clear_line_info (ob);
91
92 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
93 gcc_obstack_init (&ob->obstack);
94
95 return ob;
96 }
97
98
99 /* Destroy the output block OB. */
100
101 void
102 destroy_output_block (struct output_block *ob)
103 {
104 enum lto_section_type section_type = ob->section_type;
105
106 delete ob->string_hash_table;
107 ob->string_hash_table = NULL;
108 delete ob->local_trees;
109
110 free (ob->main_stream);
111 free (ob->string_stream);
112 if (section_type == LTO_section_function_body)
113 free (ob->cfg_stream);
114
115 streamer_tree_cache_delete (ob->writer_cache);
116 obstack_free (&ob->obstack, NULL);
117
118 free (ob);
119 }
120
121
122 /* Look up NODE in the type table and write the index for it to OB. */
123
124 static void
125 output_type_ref (struct output_block *ob, tree node)
126 {
127 streamer_write_record_start (ob, LTO_type_ref);
128 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
129 }
130
131 /* Wrapper around variably_modified_type_p avoiding type modification
132 during WPA streaming. */
133
134 static bool
135 lto_variably_modified_type_p (tree type)
136 {
137 return (in_lto_p
138 ? TYPE_LANG_FLAG_0 (TYPE_MAIN_VARIANT (type))
139 : variably_modified_type_p (type, NULL_TREE));
140 }
141
142
143 /* Return true if tree node T is written to various tables. For these
144 nodes, we sometimes want to write their physical representation
145 (via lto_output_tree), and sometimes we need to emit an index
146 reference into a table (via lto_output_tree_ref). */
147
148 static bool
149 tree_is_indexable (tree t)
150 {
151 /* Parameters and return values of functions of variably modified types
152 must go to global stream, because they may be used in the type
153 definition. */
154 if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
155 && DECL_CONTEXT (t))
156 return lto_variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)));
157 /* IMPORTED_DECL is put into BLOCK and thus it never can be shared.
158 We should no longer need to stream it. */
159 else if (TREE_CODE (t) == IMPORTED_DECL)
160 gcc_unreachable ();
161 else if (TREE_CODE (t) == LABEL_DECL)
162 return FORCED_LABEL (t) || DECL_NONLOCAL (t);
163 else if (((VAR_P (t) && !TREE_STATIC (t))
164 || TREE_CODE (t) == TYPE_DECL
165 || TREE_CODE (t) == CONST_DECL
166 || TREE_CODE (t) == NAMELIST_DECL)
167 && decl_function_context (t))
168 return false;
169 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
170 return false;
171 /* Variably modified types need to be streamed alongside function
172 bodies because they can refer to local entities. Together with
173 them we have to localize their members as well.
174 ??? In theory that includes non-FIELD_DECLs as well. */
175 else if (TYPE_P (t)
176 && lto_variably_modified_type_p (t))
177 return false;
178 else if (TREE_CODE (t) == FIELD_DECL
179 && lto_variably_modified_type_p (DECL_CONTEXT (t)))
180 return false;
181 else
182 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
183 }
184
185
186 /* Output info about new location into bitpack BP.
187 After outputting bitpack, lto_output_location_data has
188 to be done to output actual data. */
189
190 void
191 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
192 location_t loc)
193 {
194 expanded_location xloc;
195
196 loc = LOCATION_LOCUS (loc);
197 bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
198 loc < RESERVED_LOCATION_COUNT
199 ? loc : RESERVED_LOCATION_COUNT);
200 if (loc < RESERVED_LOCATION_COUNT)
201 return;
202
203 xloc = expand_location (loc);
204
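/* Pack one change bit each for file, line and column; values that did not
   change since the previously streamed location are not re-emitted, so the
   reader keeps reusing its current state.  */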
205 bp_pack_value (bp, ob->current_file != xloc.file, 1);
206 bp_pack_value (bp, ob->current_line != xloc.line, 1);
207 bp_pack_value (bp, ob->current_col != xloc.column, 1);
208
209 if (ob->current_file != xloc.file)
210 {
211 bp_pack_string (ob, bp, remap_debug_filename (xloc.file), true);
212 bp_pack_value (bp, xloc.sysp, 1);
213 }
214 ob->current_file = xloc.file;
215 ob->current_sysp = xloc.sysp;
216
217 if (ob->current_line != xloc.line)
218 bp_pack_var_len_unsigned (bp, xloc.line);
219 ob->current_line = xloc.line;
220
221 if (ob->current_col != xloc.column)
222 bp_pack_var_len_unsigned (bp, xloc.column);
223 ob->current_col = xloc.column;
224 }
225
226
227 /* If EXPR is an indexable tree node, output a reference to it to
228 output block OB. Otherwise, output the physical representation of
229 EXPR to OB. */
230
231 static void
232 lto_output_tree_ref (struct output_block *ob, tree expr)
233 {
234 enum tree_code code;
235
236 if (TYPE_P (expr))
237 {
238 output_type_ref (ob, expr);
239 return;
240 }
241
242 code = TREE_CODE (expr);
243 switch (code)
244 {
245 case SSA_NAME:
246 streamer_write_record_start (ob, LTO_ssa_name_ref);
247 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
248 break;
249
250 case FIELD_DECL:
251 streamer_write_record_start (ob, LTO_field_decl_ref);
252 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
253 break;
254
255 case FUNCTION_DECL:
256 streamer_write_record_start (ob, LTO_function_decl_ref);
257 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
258 break;
259
260 case VAR_DECL:
261 case DEBUG_EXPR_DECL:
262 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
263 /* FALLTHRU */
264 case PARM_DECL:
265 streamer_write_record_start (ob, LTO_global_decl_ref);
266 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
267 break;
268
269 case CONST_DECL:
270 streamer_write_record_start (ob, LTO_const_decl_ref);
271 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
272 break;
273
274 case IMPORTED_DECL:
275 gcc_assert (decl_function_context (expr) == NULL);
276 streamer_write_record_start (ob, LTO_imported_decl_ref);
277 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
278 break;
279
280 case TYPE_DECL:
281 streamer_write_record_start (ob, LTO_type_decl_ref);
282 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
283 break;
284
285 case NAMELIST_DECL:
286 streamer_write_record_start (ob, LTO_namelist_decl_ref);
287 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
288 break;
289
290 case NAMESPACE_DECL:
291 streamer_write_record_start (ob, LTO_namespace_decl_ref);
292 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
293 break;
294
295 case LABEL_DECL:
296 streamer_write_record_start (ob, LTO_label_decl_ref);
297 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
298 break;
299
300 case RESULT_DECL:
301 streamer_write_record_start (ob, LTO_result_decl_ref);
302 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
303 break;
304
305 case TRANSLATION_UNIT_DECL:
306 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
307 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
308 break;
309
310 default:
311 /* No other node is indexable, so it should have been handled by
312 lto_output_tree. */
313 gcc_unreachable ();
314 }
315 }
316
317
318 /* Return true if EXPR is a tree node that can be written to disk. */
319
320 static inline bool
321 lto_is_streamable (tree expr)
322 {
323 enum tree_code code = TREE_CODE (expr);
324
325 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
326 name version in lto_output_tree_ref (see output_ssa_names). */
327 return !is_lang_specific (expr)
328 && code != SSA_NAME
329 && code != LANG_TYPE
330 && code != MODIFY_EXPR
331 && code != INIT_EXPR
332 && code != TARGET_EXPR
333 && code != BIND_EXPR
334 && code != WITH_CLEANUP_EXPR
335 && code != STATEMENT_LIST
336 && (code == CASE_LABEL_EXPR
337 || code == DECL_EXPR
338 || TREE_CODE_CLASS (code) != tcc_statement);
339 }
340
341 /* Very rough estimate of streaming size of the initializer. If we ignored
342 presence of strings, we could simply count the number of non-indexable
343 tree nodes and the number of references to indexable nodes. Strings however
344 may be very large and we do not want to dump them into the global stream.
345
346 Count the size of the initializer as long as the size in DATA stays positive.
347
348 static tree
349 subtract_estimated_size (tree *tp, int *ws, void *data)
350 {
351 long *sum = (long *)data;
352 if (tree_is_indexable (*tp))
353 {
354 /* Indexable tree is one reference to global stream.
355 Guess it may be about 4 bytes. */
356 *sum -= 4;
357 *ws = 0;
358 }
359 /* String table entry + base of tree node needs to be streamed. */
360 if (TREE_CODE (*tp) == STRING_CST)
361 *sum -= TREE_STRING_LENGTH (*tp) + 8;
362 else
363 {
364 /* Identifiers are also variable length but should not appear
365 naked in constructor. */
366 gcc_checking_assert (TREE_CODE (*tp) != IDENTIFIER_NODE);
367 /* We do not really attempt to work out the size of the pickled tree, as
368 it is very variable. Make it bigger than the reference. */
369 *sum -= 16;
370 }
371 if (*sum < 0)
372 return *tp;
373 return NULL_TREE;
374 }
375
376
377 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
378
379 static tree
380 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
381 {
382 gcc_checking_assert (DECL_P (expr)
383 && TREE_CODE (expr) != FUNCTION_DECL
384 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
385
386 /* Handle DECL_INITIAL for symbols. */
387 tree initial = DECL_INITIAL (expr);
388 if (VAR_P (expr)
389 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
390 && !DECL_IN_CONSTANT_POOL (expr)
391 && initial)
392 {
393 varpool_node *vnode;
394 /* Extra section needs about 30 bytes; do not produce it for simple
395 scalar values. */
396 if (!(vnode = varpool_node::get (expr))
397 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
398 initial = error_mark_node;
399 if (initial != error_mark_node)
400 {
401 long max_size = 30;
402 if (walk_tree (&initial, subtract_estimated_size, (void *)&max_size,
403 NULL))
404 initial = error_mark_node;
405 }
406 }
407
408 return initial;
409 }
410
411
412 /* Write a physical representation of tree node EXPR to output block
413 OB. If REF_P is true, the leaves of EXPR are emitted as references
414 via lto_output_tree_ref. */
416
417 static void
418 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
419 {
420 if (streamer_dump_file)
421 {
422 print_node_brief (streamer_dump_file, " Streaming body of ",
423 expr, 4);
424 fprintf (streamer_dump_file, " to %s\n",
425 lto_section_name[ob->section_type]);
426 }
427
428 /* Pack all the non-pointer fields in EXPR into a bitpack and write
429 the resulting bitpack. */
430 streamer_write_tree_bitfields (ob, expr);
431
432 /* Write all the pointer fields in EXPR. */
433 streamer_write_tree_body (ob, expr, ref_p);
434
435 /* Write any LTO-specific data to OB. */
436 if (DECL_P (expr)
437 && TREE_CODE (expr) != FUNCTION_DECL
438 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
439 {
440 /* Handle DECL_INITIAL for symbols. */
441 tree initial = get_symbol_initial_value
442 (ob->decl_state->symtab_node_encoder, expr);
443 stream_write_tree (ob, initial, ref_p);
444 }
445
446 /* Stream references to early generated DIEs. Keep in sync with the
447 trees handled in dwarf2out_die_ref_for_decl. */
448 if ((DECL_P (expr)
449 && TREE_CODE (expr) != FIELD_DECL
450 && TREE_CODE (expr) != DEBUG_EXPR_DECL
451 && TREE_CODE (expr) != TYPE_DECL)
452 || TREE_CODE (expr) == BLOCK)
453 {
454 const char *sym;
455 unsigned HOST_WIDE_INT off;
456 if (debug_info_level > DINFO_LEVEL_NONE
457 && debug_hooks->die_ref_for_decl (expr, &sym, &off))
458 {
459 streamer_write_string (ob, ob->main_stream, sym, true);
460 streamer_write_uhwi (ob, off);
461 }
462 else
463 streamer_write_string (ob, ob->main_stream, NULL, true);
464 }
465 }
466
467 /* Write a physical representation of tree node EXPR to output block
468 OB. If REF_P is true, the leaves of EXPR are emitted as references
469 via lto_output_tree_ref. */
471
472 static void
473 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
474 {
475 if (!lto_is_streamable (expr))
476 internal_error ("tree code %qs is not supported in LTO streams",
477 get_tree_code_name (TREE_CODE (expr)));
478
479 /* Write the header, containing everything needed to materialize
480 EXPR on the reading side. */
481 streamer_write_tree_header (ob, expr);
482
483 lto_write_tree_1 (ob, expr, ref_p);
484 }
485
486 /* Emit the physical representation of tree node EXPR to output block OB.
487 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
488 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
489
490 static void
491 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
492 bool ref_p, bool this_ref_p)
493 {
494 unsigned ix;
495
496 gcc_checking_assert (expr != NULL_TREE
497 && !(this_ref_p && tree_is_indexable (expr)));
498
499 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
500 expr, hash, &ix);
501 gcc_assert (!exists_p);
502 if (TREE_CODE (expr) == INTEGER_CST
503 && !TREE_OVERFLOW (expr))
504 {
505 /* Shared INTEGER_CST nodes are special because they need their
506 original type to be materialized by the reader (to implement
507 TYPE_CACHED_VALUES). */
508 streamer_write_integer_cst (ob, expr, ref_p);
509 }
510 else
511 {
512 /* This is the first time we see EXPR, write its fields
513 to OB. */
514 lto_write_tree (ob, expr, ref_p);
515 }
516 }
517
518 class DFS
519 {
520 public:
521 DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
522 bool single_p);
523 ~DFS ();
524
525 struct scc_entry
526 {
527 tree t;
528 hashval_t hash;
529 };
530 auto_vec<scc_entry,32> sccstack;
531
532 private:
533 struct sccs
534 {
535 unsigned int dfsnum;
536 unsigned int low;
537 };
538 struct worklist
539 {
540 tree expr;
541 sccs *from_state;
542 sccs *cstate;
543 bool ref_p;
544 bool this_ref_p;
545 };
546 /* Maximum index of scc stack containing a local tree. */
547 int max_local_entry;
548
549 static int scc_entry_compare (const void *, const void *);
550
551 void DFS_write_tree_body (struct output_block *ob,
552 tree expr, sccs *expr_state, bool ref_p);
553
554 void DFS_write_tree (struct output_block *ob, sccs *from_state,
555 tree expr, bool ref_p, bool this_ref_p);
556
557 hashval_t
558 hash_scc (struct output_block *ob, unsigned first, unsigned size,
559 bool ref_p, bool this_ref_p);
560
561 hash_map<tree, sccs *> sccstate;
562 auto_vec<worklist, 32> worklist_vec;
563 struct obstack sccstate_obstack;
564 };
565
566 /* Return true if tree T cannot be merged with a structurally identical tree
567 in another translation unit. During stream-out this information is propagated
568 to all trees referring to T, and they are not streamed with the additional
569 information needed by the tree merging in lto-common.c (in particular,
570 SCC hash codes are not streamed).
571
572 TRANSLATION_UNIT_DECL is handled specially since references to it do
573 not make other trees local as well. */
574
575 static bool
576 local_tree_p (tree t)
577 {
578 switch (TREE_CODE (t))
579 {
580 case LABEL_DECL:
581 return true;
582 case NAMESPACE_DECL:
583 return !DECL_NAME (t);
584 case VAR_DECL:
585 case FUNCTION_DECL:
586 return !TREE_PUBLIC (t) && !DECL_EXTERNAL (t);
587 case RECORD_TYPE:
588 case UNION_TYPE:
589 case ENUMERAL_TYPE:
590 /* Anonymous namespace types are local.
591 Only work hard for main variants;
592 variant types will inherit locality. */
593 return TYPE_MAIN_VARIANT (t) == t
594 && odr_type_p (t) && type_with_linkage_p (t)
595 && type_in_anonymous_namespace_p (t);
596 default:
597 return false;
598 }
599 }
600
601 /* Emit the physical representation of tree node EXPR to output block OB,
602 using depth-first search on the subgraph. If THIS_REF_P is true, the
603 leaves of EXPR are emitted as references via lto_output_tree_ref.
604 REF_P is used for streaming siblings of EXPR. If SINGLE_P is true,
605 this is for a rewalk of a single leaf SCC. */
606
607 DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
608 bool single_p)
609 {
610 unsigned int next_dfs_num = 1;
611
612 max_local_entry = -1;
613 gcc_obstack_init (&sccstate_obstack);
614 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
615 while (!worklist_vec.is_empty ())
616 {
617 worklist &w = worklist_vec.last ();
618 expr = w.expr;
619 sccs *from_state = w.from_state;
620 sccs *cstate = w.cstate;
621 ref_p = w.ref_p;
622 this_ref_p = w.this_ref_p;
623 if (cstate == NULL)
624 {
625 sccs **slot = &sccstate.get_or_insert (expr);
626 cstate = *slot;
627 if (cstate)
628 {
629 gcc_checking_assert (from_state);
630 if (cstate->dfsnum < from_state->dfsnum)
631 from_state->low = MIN (cstate->dfsnum, from_state->low);
632 worklist_vec.pop ();
633 continue;
634 }
635
636 scc_entry e = { expr, 0 };
637 /* Not yet visited. DFS recurse and push it onto the stack. */
638 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
639 if (ob->local_trees && local_tree_p (expr))
640 max_local_entry = sccstack.length ();
641 sccstack.safe_push (e);
642 cstate->dfsnum = next_dfs_num++;
643 cstate->low = cstate->dfsnum;
644 w.cstate = cstate;
645
646 if (TREE_CODE (expr) == INTEGER_CST
647 && !TREE_OVERFLOW (expr))
648 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
649 else
650 {
651 DFS_write_tree_body (ob, expr, cstate, ref_p);
652
653 /* Walk any LTO-specific edges. */
654 if (DECL_P (expr)
655 && TREE_CODE (expr) != FUNCTION_DECL
656 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
657 {
658 /* Handle DECL_INITIAL for symbols. */
659 tree initial
660 = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
661 expr);
662 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
663 }
664 }
665 continue;
666 }
667
668 /* See if we found an SCC. */
669 if (cstate->low == cstate->dfsnum)
670 {
671 unsigned first, size;
672 tree x;
673
674 /* If we are re-walking a single leaf SCC just pop it,
675 let earlier worklist item access the sccstack. */
676 if (single_p)
677 {
678 worklist_vec.pop ();
679 continue;
680 }
681
682 /* Pop the SCC and compute its size. */
683 first = sccstack.length ();
684 do
685 {
686 x = sccstack[--first].t;
687 }
688 while (x != expr);
689 size = sccstack.length () - first;
690
691 /* No need to compute hashes for LTRANS units, we don't perform
692 any merging there. */
693 hashval_t scc_hash = 0;
694 unsigned scc_entry_len = 0;
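	  /* The SCC is local to this unit when local-tree tracking is disabled
	     for this stream, or when some tree at index FIRST or above on the
	     stack was recognized as local.  */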
695 bool local_to_unit = !ob->local_trees
696 || max_local_entry >= (int)first;
697
698 /* Remember that trees are local so info gets propagated to other
699 SCCs. */
700 if (local_to_unit && ob->local_trees)
701 {
702 for (unsigned i = 0; i < size; ++i)
703 ob->local_trees->add (sccstack[first + i].t);
704 }
705
706 /* As a special case do not stream TRANSLATION_UNIT_DECL as a shared
707 tree. We cannot mark it local because references to it do not
708 make other trees local (all global decls refer to it via
709 CONTEXT). */
710 if (size == 1
711 && TREE_CODE (sccstack[first].t) == TRANSLATION_UNIT_DECL)
712 local_to_unit = true;
713
714 if (!local_to_unit)
715 {
716 scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);
717
718 /* Put the entries with the least number of collisions first. */
719 unsigned entry_start = 0;
720 scc_entry_len = size + 1;
721 for (unsigned i = 0; i < size;)
722 {
723 unsigned from = i;
724 for (i = i + 1; i < size
725 && (sccstack[first + i].hash
726 == sccstack[first + from].hash); ++i)
727 ;
728 if (i - from < scc_entry_len)
729 {
730 scc_entry_len = i - from;
731 entry_start = from;
732 }
733 }
734 for (unsigned i = 0; i < scc_entry_len; ++i)
735 std::swap (sccstack[first + i],
736 sccstack[first + entry_start + i]);
737
738 /* We already sorted SCC deterministically in hash_scc. */
739
740 /* Check that we have only one SCC.
741 Naturally we may have conflicts if the hash function is not
742 strong enough. Let's see how far this gets. */
743 gcc_checking_assert (scc_entry_len == 1);
744 }
745
746 worklist_vec.pop ();
747
748 unsigned int prev_size = ob->main_stream->total_size;
749
750 /* Only global decl sections are considered by tree merging. */
751 if (ob->section_type != LTO_section_decls)
752 {
753 /* If this is the original tree we stream and it forms an SCC
754 by itself then we do not need to stream the SCC at all. */
755 if (worklist_vec.is_empty () && first == 0 && size == 1)
756 return;
757 if (streamer_dump_file)
758 {
759 fprintf (streamer_dump_file,
760 " Start of LTO_trees of size %i\n", size);
761 }
762 streamer_write_record_start (ob, LTO_trees);
763 streamer_write_uhwi (ob, size);
764 }
765 /* Write LTO_tree_scc if tree merging is going to be performed. */
766 else if (!local_to_unit
767 /* These are special since sharing is not done by the tree
768 merging machinery. We cannot special-case them earlier
769 because we still need to compute the hash for further sharing
770 of trees referring to them. */
771 && (size != 1
772 || (TREE_CODE (sccstack[first].t) != IDENTIFIER_NODE
773 && (TREE_CODE (sccstack[first].t) != INTEGER_CST
774 || TREE_OVERFLOW (sccstack[first].t)))))
775
776 {
777 gcc_checking_assert (ob->section_type == LTO_section_decls);
778 if (streamer_dump_file)
779 {
780 fprintf (streamer_dump_file,
781 " Start of LTO_tree_scc of size %i\n", size);
782 }
783 streamer_write_record_start (ob, LTO_tree_scc);
784 /* In the vast majority of cases scc_entry_len is 1 and size is a small
785 integer. Use the extra bit of size to stream info about the
786 exceptions. */
787 streamer_write_uhwi (ob, size * 2 + (scc_entry_len != 1));
788 if (scc_entry_len != 1)
789 streamer_write_uhwi (ob, scc_entry_len);
790 streamer_write_uhwi (ob, scc_hash);
791 }
792 /* Non-trivial SCCs must be packed into LTO_trees blocks so forward
793 references work correctly. */
794 else if (size != 1)
795 {
796 if (streamer_dump_file)
797 {
798 fprintf (streamer_dump_file,
799 " Start of LTO_trees of size %i\n", size);
800 }
801 streamer_write_record_start (ob, LTO_trees);
802 streamer_write_uhwi (ob, size);
803 }
804 else if (streamer_dump_file)
805 {
806 fprintf (streamer_dump_file, " Streaming single tree\n");
807 }
808
809 /* Write size-1 SCCs without wrapping them inside SCC bundles.
810 All INTEGER_CSTs need to be handled this way as we need
811 their type to materialize them. Also builtins are handled
812 this way. */
813 if (size == 1)
814 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
815 else
816 {
817
818 /* Write all headers and populate the streamer cache. */
819 for (unsigned i = 0; i < size; ++i)
820 {
821 hashval_t hash = sccstack[first+i].hash;
822 tree t = sccstack[first+i].t;
823 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
824 t, hash, NULL);
825 gcc_assert (!exists_p);
826
827 if (!lto_is_streamable (t))
828 internal_error ("tree code %qs is not supported "
829 "in LTO streams",
830 get_tree_code_name (TREE_CODE (t)));
831
832 /* Write the header, containing everything needed to
833 materialize EXPR on the reading side. */
834 streamer_write_tree_header (ob, t);
835 }
836
837 /* Write the bitpacks and tree references. */
838 for (unsigned i = 0; i < size; ++i)
839 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
840 }
841 if (streamer_dump_file)
842 fprintf (streamer_dump_file, " %u bytes\n",
843 ob->main_stream->total_size - prev_size);
844
845 /* Finally truncate the vector. */
846 sccstack.truncate (first);
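	  /* Everything at or above FIRST was just popped, so make sure the
	     local-tree watermark stays below the new top of the stack.  */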
847 if ((int)first <= max_local_entry)
848 max_local_entry = first - 1;
849
850 if (from_state)
851 from_state->low = MIN (from_state->low, cstate->low);
852 continue;
853 }
854
855 gcc_checking_assert (from_state);
856 from_state->low = MIN (from_state->low, cstate->low);
857 if (cstate->dfsnum < from_state->dfsnum)
858 from_state->low = MIN (cstate->dfsnum, from_state->low);
859 worklist_vec.pop ();
860 }
861 }
862
863 DFS::~DFS ()
864 {
865 obstack_free (&sccstate_obstack, NULL);
866 }
867
868 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
869 DFS recurse for all tree edges originating from it. */
870
871 void
872 DFS::DFS_write_tree_body (struct output_block *ob,
873 tree expr, sccs *expr_state, bool ref_p)
874 {
875 #define DFS_follow_tree_edge(DEST) \
876 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
877
878 enum tree_code code;
879
880 code = TREE_CODE (expr);
881
882 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
883 {
884 if (TREE_CODE (expr) != IDENTIFIER_NODE)
885 DFS_follow_tree_edge (TREE_TYPE (expr));
886 }
887
888 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
889 {
890 unsigned int count = vector_cst_encoded_nelts (expr);
891 for (unsigned int i = 0; i < count; ++i)
892 DFS_follow_tree_edge (VECTOR_CST_ENCODED_ELT (expr, i));
893 }
894
895 if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
896 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
897 DFS_follow_tree_edge (POLY_INT_CST_COEFF (expr, i));
898
899 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
900 {
901 DFS_follow_tree_edge (TREE_REALPART (expr));
902 DFS_follow_tree_edge (TREE_IMAGPART (expr));
903 }
904
905 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
906 {
907 /* Drop names that were created for anonymous entities. */
908 if (DECL_NAME (expr)
909 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
910 && IDENTIFIER_ANON_P (DECL_NAME (expr)))
911 ;
912 else
913 DFS_follow_tree_edge (DECL_NAME (expr));
914 if (TREE_CODE (expr) != TRANSLATION_UNIT_DECL
915 && ! DECL_CONTEXT (expr))
916 DFS_follow_tree_edge ((*all_translation_units)[0]);
917 else
918 DFS_follow_tree_edge (DECL_CONTEXT (expr));
919 }
920
921 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
922 {
923 DFS_follow_tree_edge (DECL_SIZE (expr));
924 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
925
926 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
927 special handling in LTO, it must be handled by streamer hooks. */
928
929 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
930
931 /* We use DECL_ABSTRACT_ORIGIN == error_mark_node to mark
932 declarations which should be eliminated by decl merging. Be sure none
933 leaks to this point. */
934 gcc_assert (DECL_ABSTRACT_ORIGIN (expr) != error_mark_node);
935 DFS_follow_tree_edge (DECL_ABSTRACT_ORIGIN (expr));
936
937 if ((VAR_P (expr)
938 || TREE_CODE (expr) == PARM_DECL)
939 && DECL_HAS_VALUE_EXPR_P (expr))
940 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
941 if (VAR_P (expr)
942 && DECL_HAS_DEBUG_EXPR_P (expr))
943 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
944 }
945
946 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
947 {
948 /* Make sure we don't inadvertently set the assembler name. */
949 if (DECL_ASSEMBLER_NAME_SET_P (expr))
950 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
951 }
952
953 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
954 {
955 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
956 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
957 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
958 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
959 gcc_checking_assert (!DECL_FCONTEXT (expr));
960 }
961
962 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
963 {
964 gcc_checking_assert (DECL_VINDEX (expr) == NULL);
965 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
966 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
967 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
968 }
969
970 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
971 {
972 DFS_follow_tree_edge (TYPE_SIZE (expr));
973 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
974 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
975 DFS_follow_tree_edge (TYPE_NAME (expr));
976 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
977 reconstructed during fixup. */
978 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
979 during fixup. */
980 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
981 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
982 /* TYPE_CANONICAL is re-computed during type merging, so no need
983 to follow it here. */
984 /* Do not stream TYPE_STUB_DECL; it is not needed by LTO but currently
985 it cannot be freed by free_lang_data without triggering ICEs in
986 langhooks. */
987 }
988
989 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
990 {
991 if (TREE_CODE (expr) == ENUMERAL_TYPE)
992 DFS_follow_tree_edge (TYPE_VALUES (expr));
993 else if (TREE_CODE (expr) == ARRAY_TYPE)
994 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
995 else if (RECORD_OR_UNION_TYPE_P (expr))
996 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
997 DFS_follow_tree_edge (t);
998 else if (TREE_CODE (expr) == FUNCTION_TYPE
999 || TREE_CODE (expr) == METHOD_TYPE)
1000 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
1001
1002 if (!POINTER_TYPE_P (expr))
1003 DFS_follow_tree_edge (TYPE_MIN_VALUE_RAW (expr));
1004 DFS_follow_tree_edge (TYPE_MAX_VALUE_RAW (expr));
1005 }
1006
1007 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1008 {
1009 DFS_follow_tree_edge (TREE_PURPOSE (expr));
1010 DFS_follow_tree_edge (TREE_VALUE (expr));
1011 DFS_follow_tree_edge (TREE_CHAIN (expr));
1012 }
1013
1014 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1015 {
1016 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
1017 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
1018 }
1019
1020 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1021 {
1022 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
1023 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
1024 DFS_follow_tree_edge (TREE_BLOCK (expr));
1025 }
1026
1027 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
1028 {
1029 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
1030 {
1031 /* We would have to stream externals in the block chain as
1032 non-references but we should have dropped them in
1033 free-lang-data. */
1034 gcc_assert (!VAR_OR_FUNCTION_DECL_P (t) || !DECL_EXTERNAL (t));
1035 DFS_follow_tree_edge (t);
1036 }
1037
1038 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
1039 DFS_follow_tree_edge (BLOCK_ABSTRACT_ORIGIN (expr));
1040
1041 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
1042 information for early inlined BLOCKs so drop it on the floor instead
1043 of ICEing in dwarf2out.c. */
1044
1045 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN are not live at LTO
1046 streaming time. */
1047
1048 /* Do not output BLOCK_SUBBLOCKS. Instead, on stream-in, this
1049 list is re-constructed from BLOCK_SUPERCONTEXT. */
1050 }
1051
1052 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1053 {
1054 unsigned i;
1055 tree t;
1056
1057 /* Note that the number of BINFO slots has already been emitted in
1058 EXPR's header (see streamer_write_tree_header) because this length
1059 is needed to build the empty BINFO node on the reader side. */
1060 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
1061 DFS_follow_tree_edge (t);
1062 DFS_follow_tree_edge (BINFO_OFFSET (expr));
1063 DFS_follow_tree_edge (BINFO_VTABLE (expr));
1064
1065 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX,
1066 BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
1067 by C++ FE only. */
1068 }
1069
1070 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1071 {
1072 unsigned i;
1073 tree index, value;
1074
1075 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
1076 {
1077 DFS_follow_tree_edge (index);
1078 DFS_follow_tree_edge (value);
1079 }
1080 }
1081
1082 if (code == OMP_CLAUSE)
1083 {
1084 int i;
1085 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
1086 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
1087 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
1088 }
1089
1090 #undef DFS_follow_tree_edge
1091 }
1092
1093 /* Return a hash value for the tree T.
1094 CACHE holds hash values of trees outside the current SCC. MAP, if non-NULL,
1095 may hold hash values of trees inside the current SCC. */
1096
1097 static hashval_t
1098 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
1099 {
1100 inchash::hash hstate;
1101
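/* Mix the hash of SIBLING into HSTATE: use the hash recorded in the streamer
   cache when SIBLING was already streamed, the tentative hash from MAP when
   it belongs to the current SCC, and a fixed constant when SIBLING is NULL
   or not yet known.  */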
1102 #define visit(SIBLING) \
1103 do { \
1104 unsigned ix; \
1105 if (!SIBLING) \
1106 hstate.add_int (0); \
1107 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
1108 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
1109 else if (map) \
1110 hstate.add_int (*map->get (SIBLING)); \
1111 else \
1112 hstate.add_int (1); \
1113 } while (0)
1114
1115 /* Hash TS_BASE. */
1116 enum tree_code code = TREE_CODE (t);
1117 hstate.add_int (code);
1118 if (!TYPE_P (t))
1119 {
1120 hstate.add_flag (TREE_SIDE_EFFECTS (t));
1121 hstate.add_flag (TREE_CONSTANT (t));
1122 hstate.add_flag (TREE_READONLY (t));
1123 hstate.add_flag (TREE_PUBLIC (t));
1124 }
1125 hstate.add_flag (TREE_ADDRESSABLE (t));
1126 hstate.add_flag (TREE_THIS_VOLATILE (t));
1127 if (DECL_P (t))
1128 hstate.add_flag (DECL_UNSIGNED (t));
1129 else if (TYPE_P (t))
1130 hstate.add_flag (TYPE_UNSIGNED (t));
1131 if (TYPE_P (t))
1132 hstate.add_flag (TYPE_ARTIFICIAL (t));
1133 else
1134 hstate.add_flag (TREE_NO_WARNING (t));
1135 hstate.add_flag (TREE_NOTHROW (t));
1136 hstate.add_flag (TREE_STATIC (t));
1137 hstate.add_flag (TREE_PROTECTED (t));
1138 hstate.add_flag (TREE_DEPRECATED (t));
1139 if (code != TREE_BINFO)
1140 hstate.add_flag (TREE_PRIVATE (t));
1141 if (TYPE_P (t))
1142 {
1143 hstate.add_flag (AGGREGATE_TYPE_P (t)
1144 ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
1145 hstate.add_flag (TYPE_ADDR_SPACE (t));
1146 }
1147 else if (code == SSA_NAME)
1148 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
1149 hstate.commit_flag ();
1150
1151 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1152 hstate.add_wide_int (wi::to_widest (t));
1153
1154 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1155 {
1156 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
1157 hstate.add_flag (r.cl);
1158 hstate.add_flag (r.sign);
1159 hstate.add_flag (r.signalling);
1160 hstate.add_flag (r.canonical);
1161 hstate.commit_flag ();
1162 hstate.add_int (r.uexp);
1163 hstate.add (r.sig, sizeof (r.sig));
1164 }
1165
1166 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1167 {
1168 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
1169 hstate.add_int (f.mode);
1170 hstate.add_int (f.data.low);
1171 hstate.add_int (f.data.high);
1172 }
1173
1174 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1175 {
1176 hstate.add_hwi (DECL_MODE (t));
1177 hstate.add_flag (DECL_NONLOCAL (t));
1178 hstate.add_flag (DECL_VIRTUAL_P (t));
1179 hstate.add_flag (DECL_IGNORED_P (t));
1180 hstate.add_flag (DECL_ABSTRACT_P (t));
1181 hstate.add_flag (DECL_ARTIFICIAL (t));
1182 hstate.add_flag (DECL_USER_ALIGN (t));
1183 hstate.add_flag (DECL_PRESERVE_P (t));
1184 hstate.add_flag (DECL_EXTERNAL (t));
1185 hstate.add_flag (DECL_NOT_GIMPLE_REG_P (t));
1186 hstate.commit_flag ();
1187 hstate.add_int (DECL_ALIGN (t));
1188 if (code == LABEL_DECL)
1189 {
1190 hstate.add_int (EH_LANDING_PAD_NR (t));
1191 hstate.add_int (LABEL_DECL_UID (t));
1192 }
1193 else if (code == FIELD_DECL)
1194 {
1195 hstate.add_flag (DECL_PACKED (t));
1196 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1197 hstate.add_flag (DECL_PADDING_P (t));
1198 hstate.add_flag (DECL_FIELD_ABI_IGNORED (t));
1199 hstate.add_int (DECL_OFFSET_ALIGN (t));
1200 }
1201 else if (code == VAR_DECL)
1202 {
1203 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1204 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1205 }
1206 if (code == RESULT_DECL
1207 || code == PARM_DECL
1208 || code == VAR_DECL)
1209 {
1210 hstate.add_flag (DECL_BY_REFERENCE (t));
1211 if (code == VAR_DECL
1212 || code == PARM_DECL)
1213 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1214 }
1215 hstate.commit_flag ();
1216 }
1217
1218 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1219 hstate.add_int (DECL_REGISTER (t));
1220
1221 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1222 {
1223 hstate.add_flag (DECL_COMMON (t));
1224 hstate.add_flag (DECL_DLLIMPORT_P (t));
1225 hstate.add_flag (DECL_WEAK (t));
1226 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1227 hstate.add_flag (DECL_COMDAT (t));
1228 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1229 hstate.add_int (DECL_VISIBILITY (t));
1230 if (code == VAR_DECL)
1231 {
1232 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1233 hstate.add_flag (DECL_HARD_REGISTER (t));
1234 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1235 }
1236 if (TREE_CODE (t) == FUNCTION_DECL)
1237 {
1238 hstate.add_flag (DECL_FINAL_P (t));
1239 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1240 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1241 }
1242 hstate.commit_flag ();
1243 }
1244
1245 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1246 {
1247 hstate.add_int (DECL_BUILT_IN_CLASS (t));
1248 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1249 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1250 hstate.add_flag (FUNCTION_DECL_DECL_TYPE (t));
1251 hstate.add_flag (DECL_UNINLINABLE (t));
1252 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1253 hstate.add_flag (DECL_IS_NOVOPS (t));
1254 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1255 hstate.add_flag (DECL_IS_MALLOC (t));
1256 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1257 hstate.add_flag (DECL_STATIC_CHAIN (t));
1258 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1259 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1260 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1261 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1262 hstate.add_flag (DECL_PURE_P (t));
1263 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1264 hstate.commit_flag ();
1265 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1266 hstate.add_int (DECL_UNCHECKED_FUNCTION_CODE (t));
1267 }
1268
1269 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1270 {
1271 hstate.add_hwi (TYPE_MODE (t));
1272 /* TYPE_NO_FORCE_BLK is private to stor-layout and needs
1273 no streaming. */
1274 hstate.add_flag (TYPE_PACKED (t));
1275 hstate.add_flag (TYPE_RESTRICT (t));
1276 hstate.add_flag (TYPE_USER_ALIGN (t));
1277 hstate.add_flag (TYPE_READONLY (t));
1278 if (RECORD_OR_UNION_TYPE_P (t))
1279 {
1280 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1281 hstate.add_flag (TYPE_FINAL_P (t));
1282 hstate.add_flag (TYPE_CXX_ODR_P (t));
1283 }
1284 else if (code == ARRAY_TYPE)
1285 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1286 if (code == ARRAY_TYPE || code == INTEGER_TYPE)
1287 hstate.add_flag (TYPE_STRING_FLAG (t));
1288 if (AGGREGATE_TYPE_P (t))
1289 hstate.add_flag (TYPE_TYPELESS_STORAGE (t));
1290 hstate.commit_flag ();
1291 hstate.add_int (TYPE_PRECISION (t));
1292 hstate.add_int (TYPE_ALIGN (t));
1293 hstate.add_int (TYPE_EMPTY_P (t));
1294 }
1295
1296 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1297 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1298 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1299
1300 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1301 /* We don't stream these when passing things to a different target. */
1302 && !lto_stream_offload_p)
1303 hstate.add_hwi (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1304
1305 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1306 hstate.add_hwi (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1307
1308 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1309 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1310
1311 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1312 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1313
1314 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1315 {
1316 if (code != IDENTIFIER_NODE)
1317 visit (TREE_TYPE (t));
1318 }
1319
1320 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1321 {
1322 unsigned int count = vector_cst_encoded_nelts (t);
1323 for (unsigned int i = 0; i < count; ++i)
1324 visit (VECTOR_CST_ENCODED_ELT (t, i));
1325 }
1326
1327 if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
1328 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1329 visit (POLY_INT_CST_COEFF (t, i));
1330
1331 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1332 {
1333 visit (TREE_REALPART (t));
1334 visit (TREE_IMAGPART (t));
1335 }
1336
1337 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1338 {
1339 /* Drop names that were created for anonymous entities. */
1340 if (DECL_NAME (t)
1341 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1342 && IDENTIFIER_ANON_P (DECL_NAME (t)))
1343 ;
1344 else
1345 visit (DECL_NAME (t));
1346 if (DECL_FILE_SCOPE_P (t))
1347 ;
1348 else
1349 visit (DECL_CONTEXT (t));
1350 }
1351
1352 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1353 {
1354 visit (DECL_SIZE (t));
1355 visit (DECL_SIZE_UNIT (t));
1356 visit (DECL_ATTRIBUTES (t));
1357 if ((code == VAR_DECL
1358 || code == PARM_DECL)
1359 && DECL_HAS_VALUE_EXPR_P (t))
1360 visit (DECL_VALUE_EXPR (t));
1361 if (code == VAR_DECL
1362 && DECL_HAS_DEBUG_EXPR_P (t))
1363 visit (DECL_DEBUG_EXPR (t));
1364 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
1365 be able to call get_symbol_initial_value. */
1366 }
1367
1368 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1369 {
1370 if (DECL_ASSEMBLER_NAME_SET_P (t))
1371 visit (DECL_ASSEMBLER_NAME (t));
1372 }
1373
1374 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1375 {
1376 visit (DECL_FIELD_OFFSET (t));
1377 visit (DECL_BIT_FIELD_TYPE (t));
1378 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1379 visit (DECL_FIELD_BIT_OFFSET (t));
1380 }
1381
1382 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1383 {
1384 visit (DECL_FUNCTION_PERSONALITY (t));
1385 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1386 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1387 }
1388
1389 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1390 {
1391 visit (TYPE_SIZE (t));
1392 visit (TYPE_SIZE_UNIT (t));
1393 visit (TYPE_ATTRIBUTES (t));
1394 visit (TYPE_NAME (t));
1395 visit (TYPE_MAIN_VARIANT (t));
1396 if (TYPE_FILE_SCOPE_P (t))
1397 ;
1398 else
1399 visit (TYPE_CONTEXT (t));
1400 }
1401
1402 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1403 {
1404 if (code == ENUMERAL_TYPE)
1405 visit (TYPE_VALUES (t));
1406 else if (code == ARRAY_TYPE)
1407 visit (TYPE_DOMAIN (t));
1408 else if (RECORD_OR_UNION_TYPE_P (t))
1409 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1410 visit (f);
1411 else if (code == FUNCTION_TYPE
1412 || code == METHOD_TYPE)
1413 visit (TYPE_ARG_TYPES (t));
1414 if (!POINTER_TYPE_P (t))
1415 visit (TYPE_MIN_VALUE_RAW (t));
1416 visit (TYPE_MAX_VALUE_RAW (t));
1417 }
1418
1419 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1420 {
1421 visit (TREE_PURPOSE (t));
1422 visit (TREE_VALUE (t));
1423 visit (TREE_CHAIN (t));
1424 }
1425
1426 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1427 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1428 visit (TREE_VEC_ELT (t, i));
1429
1430 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1431 {
1432 hstate.add_hwi (TREE_OPERAND_LENGTH (t));
1433 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1434 visit (TREE_OPERAND (t, i));
1435 }
1436
1437 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1438 {
1439 unsigned i;
1440 tree b;
1441 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1442 visit (b);
1443 visit (BINFO_OFFSET (t));
1444 visit (BINFO_VTABLE (t));
1445 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1446 BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
1447 by C++ FE only. */
1448 }
1449
1450 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1451 {
1452 unsigned i;
1453 tree index, value;
1454 hstate.add_hwi (CONSTRUCTOR_NELTS (t));
1455 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1456 {
1457 visit (index);
1458 visit (value);
1459 }
1460 }
1461
1462 if (code == OMP_CLAUSE)
1463 {
1464 int i;
1465 HOST_WIDE_INT val;
1466
1467 hstate.add_hwi (OMP_CLAUSE_CODE (t));
1468 switch (OMP_CLAUSE_CODE (t))
1469 {
1470 case OMP_CLAUSE_DEFAULT:
1471 val = OMP_CLAUSE_DEFAULT_KIND (t);
1472 break;
1473 case OMP_CLAUSE_SCHEDULE:
1474 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1475 break;
1476 case OMP_CLAUSE_DEPEND:
1477 val = OMP_CLAUSE_DEPEND_KIND (t);
1478 break;
1479 case OMP_CLAUSE_MAP:
1480 val = OMP_CLAUSE_MAP_KIND (t);
1481 break;
1482 case OMP_CLAUSE_PROC_BIND:
1483 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1484 break;
1485 case OMP_CLAUSE_REDUCTION:
1486 case OMP_CLAUSE_TASK_REDUCTION:
1487 case OMP_CLAUSE_IN_REDUCTION:
1488 val = OMP_CLAUSE_REDUCTION_CODE (t);
1489 break;
1490 default:
1491 val = 0;
1492 break;
1493 }
1494 hstate.add_hwi (val);
1495 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1496 visit (OMP_CLAUSE_OPERAND (t, i));
1497 visit (OMP_CLAUSE_CHAIN (t));
1498 }
1499
1500 return hstate.end ();
1501
1502 #undef visit
1503 }
1504
1505 /* Compare two SCC entries by their hash value for qsorting them. */
1506
1507 int
1508 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1509 {
1510 const scc_entry *p1 = (const scc_entry *) p1_;
1511 const scc_entry *p2 = (const scc_entry *) p2_;
1512 if (p1->hash < p2->hash)
1513 return -1;
1514 else if (p1->hash > p2->hash)
1515 return 1;
1516 return 0;
1517 }
1518
1519 /* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
1520 THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST. */
1521
1522 hashval_t
1523 DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
1524 bool ref_p, bool this_ref_p)
1525 {
1526 unsigned int last_classes = 0, iterations = 0;
1527
1528 /* Compute hash values for the SCC members. */
1529 for (unsigned i = 0; i < size; ++i)
1530 sccstack[first+i].hash
1531 = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);
1532
1533 if (size == 1)
1534 return sccstack[first].hash;
1535
1536 /* We aim to get unique hash for every tree within SCC and compute hash value
1537 of the whole SCC by combining all values together in a stable (entry-point
1538 independent) order. This guarantees that the same SCC regions within
1539 different translation units will get the same hash values and therefore
1540 will be merged at WPA time.
1541
1542 Often the hashes are already unique. In that case we compute the SCC hash
1543 by combining individual hash values in an increasing order.
1544
1545 If there are duplicates, we seek at least one tree with unique hash (and
1546 pick one with minimal hash and this property). Then we obtain a stable
1547 order by DFS walk starting from this unique tree and then use the index
1548 within this order to make individual hash values unique.
1549
1550 If there is no tree with unique hash, we iteratively propagate the hash
1551 values across the internal edges of SCC. This usually quickly leads
1552 to unique hashes. Consider, for example, an SCC containing two pointers
1553 that are identical except for the types they point to and assume that
1554 these types are also part of the SCC. The propagation will add the
1555 points-to type information into their hash values. */
1556 do
1557 {
1558 /* Sort the SCC so we can easily check for uniqueness. */
1559 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1560
1561 unsigned int classes = 1;
1562 int firstunique = -1;
1563
1564 /* Find the tree with lowest unique hash (if it exists) and compute
1565 the number of equivalence classes. */
1566 if (sccstack[first].hash != sccstack[first+1].hash)
1567 firstunique = 0;
1568 for (unsigned i = 1; i < size; ++i)
1569 if (sccstack[first+i-1].hash != sccstack[first+i].hash)
1570 {
1571 classes++;
1572 if (firstunique == -1
1573 && (i == size - 1
1574 || sccstack[first+i+1].hash != sccstack[first+i].hash))
1575 firstunique = i;
1576 }
1577
1578 /* If we found a tree with unique hash, stop the iteration. */
1579 if (firstunique != -1
1580 /* Also terminate if we run out of iterations or if the number of
1581 equivalence classes is no longer increasing.
1582 For example a cyclic list of trees that are all equivalent will
1583 never have unique entry point; we however do not build such SCCs
1584 in our IL. */
1585 || classes <= last_classes || iterations > 16)
1586 {
1587 hashval_t scc_hash;
1588
1589 /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
1590 starting from FIRSTUNIQUE to obtain a stable order. */
1591 if (classes != size && firstunique != -1)
1592 {
1593 hash_map <tree, hashval_t> map(size*2);
1594
1595 /* Store hash values into a map, so we can associate them with
1596 the reordered SCC. */
1597 for (unsigned i = 0; i < size; ++i)
1598 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1599
1600 DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
1601 true);
1602 gcc_assert (again.sccstack.length () == size);
1603
1604 memcpy (sccstack.address () + first,
1605 again.sccstack.address (),
1606 sizeof (scc_entry) * size);
1607
1608 /* Update hash values of individual members by hashing in the
1609 index within the stable order. This ensures uniqueness.
1610 Also compute the SCC hash by mixing in all hash values in
1611 the stable order we obtained. */
1612 sccstack[first].hash = *map.get (sccstack[first].t);
1613 scc_hash = sccstack[first].hash;
1614 for (unsigned i = 1; i < size; ++i)
1615 {
1616 sccstack[first+i].hash
1617 = iterative_hash_hashval_t (i,
1618 *map.get (sccstack[first+i].t));
1619 scc_hash
1620 = iterative_hash_hashval_t (scc_hash,
1621 sccstack[first+i].hash);
1622 }
1623 }
1624 /* If we got a unique hash value for each tree, then sort already
1625 ensured entry-point independent order. Only compute the final
1626 SCC hash.
1627
1628 If we failed to find the unique entry point, we go by the same
1629 route. We will eventually introduce unwanted hash conflicts. */
1630 else
1631 {
1632 scc_hash = sccstack[first].hash;
1633 for (unsigned i = 1; i < size; ++i)
1634 scc_hash
1635 = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);
1636
1637 /* We cannot 100% guarantee that the hash won't conflict so as
1638 to make it impossible to find a unique hash. This however
1639 should be an extremely rare case. ICE for now so possible
1640 issues are found and evaluated. */
1641 gcc_checking_assert (classes == size);
1642 }
1643
1644 /* To avoid conflicts across SCCs, iteratively hash the whole SCC
1645 hash into the hash of each element. */
1646 for (unsigned i = 0; i < size; ++i)
1647 sccstack[first+i].hash
1648 = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
1649 return scc_hash;
1650 }
1651
1652 last_classes = classes;
1653 iterations++;
1654
1655 /* We failed to identify the entry point; propagate hash values across
1656 the edges. */
1657 hash_map <tree, hashval_t> map(size*2);
1658
1659 for (unsigned i = 0; i < size; ++i)
1660 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1661
1662 for (unsigned i = 0; i < size; i++)
1663 sccstack[first+i].hash
1664 = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
1665 }
1666 while (true);
1667 }
1668
1669 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1670 already in the streamer cache. Main routine called for
1671 each visit of EXPR. */
1672
1673 void
1674 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1675 tree expr, bool ref_p, bool this_ref_p)
1676 {
1677 /* Handle special cases. */
1678 if (expr == NULL_TREE)
1679 return;
1680
1681 /* Do not DFS walk into indexable trees. */
1682 if (this_ref_p && tree_is_indexable (expr))
1683 return;
1684
1685 /* Check if we already streamed EXPR. */
1686 if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1687 {
1688 /* A reference to a local tree makes the entry local as well. We always process
1689 the top-of-stack entry, so set max to the number of entries in the stack - 1. */
1690 if (ob->local_trees
1691 && ob->local_trees->contains (expr))
1692 max_local_entry = sccstack.length () - 1;
1693 return;
1694 }
1695
1696 worklist w;
1697 w.expr = expr;
1698 w.from_state = from_state;
1699 w.cstate = NULL;
1700 w.ref_p = ref_p;
1701 w.this_ref_p = this_ref_p;
1702 worklist_vec.safe_push (w);
1703 }
1704
1705
1706 /* Emit the physical representation of tree node EXPR to output block OB.
1707 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
1708 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1709
1710 void
1711 lto_output_tree (struct output_block *ob, tree expr,
1712 bool ref_p, bool this_ref_p)
1713 {
1714 unsigned ix;
1715 bool existed_p;
1716 unsigned int size = ob->main_stream->total_size;
1717 /* This is the first time we see EXPR, write all reachable
1718 trees to OB. */
1719 static bool in_dfs_walk;
1720
1721 if (expr == NULL_TREE)
1722 {
1723 streamer_write_record_start (ob, LTO_null);
1724 return;
1725 }
1726
1727 if (this_ref_p && tree_is_indexable (expr))
1728 {
1729 lto_output_tree_ref (ob, expr);
1730 return;
1731 }
1732
1733 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1734 if (existed_p)
1735 {
1736 if (streamer_dump_file)
1737 {
1738 if (in_dfs_walk)
1739 print_node_brief (streamer_dump_file, " Streaming ref to ",
1740 expr, 4);
1741 else
1742 print_node_brief (streamer_dump_file, " Streaming ref to ",
1743 expr, 4);
1744 fprintf (streamer_dump_file, "\n");
1745 }
1746 /* If a node has already been streamed out, make sure that
1747 we don't write it more than once. Otherwise, the reader
1748 will instantiate two different nodes for the same object. */
1749 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1750 streamer_write_uhwi (ob, ix);
1751 if (streamer_debugging)
1752 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1753 lto_tree_code_to_tag (TREE_CODE (expr)));
1754 lto_stats.num_pickle_refs_output++;
1755 }
1756 else
1757 {
1758 /* Protect against recursion, which would mean a disconnect between
1759 the tree edges we walk in the DFS walk and the edges
1760 we stream out. */
1761 gcc_assert (!in_dfs_walk);
1762
1763 if (streamer_dump_file)
1764 {
1765 print_node_brief (streamer_dump_file, " Streaming tree ",
1766 expr, 4);
1767 fprintf (streamer_dump_file, "\n");
1768 }
1769
1770 /* Start the DFS walk. */
1771 /* Save ob state ... */
1772 /* let's see ... */
1773 in_dfs_walk = true;
1774 DFS (ob, expr, ref_p, this_ref_p, false);
1775
1776 /* Finally append a reference to the tree we were writing. */
1777 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1778
1779 /* DFS walk above possibly skipped streaming EXPR itself to let us inline
1780 it. */
1781 if (!existed_p)
1782 lto_output_tree_1 (ob, expr, 0, ref_p, this_ref_p);
1783 else if (this_ref_p)
1784 {
1785 if (streamer_dump_file)
1786 {
1787 print_node_brief (streamer_dump_file,
1788 " Streaming final ref to ",
1789 expr, 4);
1790 fprintf (streamer_dump_file, "\n");
1791 }
1792 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1793 streamer_write_uhwi (ob, ix);
1794 if (streamer_debugging)
1795 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1796 lto_tree_code_to_tag (TREE_CODE (expr)));
1797 }
1798 in_dfs_walk = false;
1799 lto_stats.num_pickle_refs_output++;
1800 }
1801 if (streamer_dump_file && !in_dfs_walk)
1802 fprintf (streamer_dump_file, " %u bytes\n",
1803 ob->main_stream->total_size - size);
1804 }
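/* Informal sketch of what lto_output_tree emits on the main stream for a
   single call, as read off the branches above (a reading aid only, not a
   normative description of the LTO format):

     EXPR == NULL_TREE               -> LTO_null
     indexable EXPR with THIS_REF_P  -> out-of-band reference via
                                        lto_output_tree_ref
     EXPR already in writer cache    -> LTO_tree_pickle_reference, uhwi
                                        cache slot, plus the tree tag when
                                        streamer_debugging is enabled
     otherwise                       -> DFS-stream all SCCs reachable from
                                        EXPR, then either inline EXPR via
                                        lto_output_tree_1 or append a final
                                        pickle reference as above.  */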
1805
1806
1807 /* Output to OB a list of try/catch handlers starting with FIRST. */
1808
1809 static void
1810 output_eh_try_list (struct output_block *ob, eh_catch first)
1811 {
1812 eh_catch n;
1813
1814 for (n = first; n; n = n->next_catch)
1815 {
1816 streamer_write_record_start (ob, LTO_eh_catch);
1817 stream_write_tree (ob, n->type_list, true);
1818 stream_write_tree (ob, n->filter_list, true);
1819 stream_write_tree (ob, n->label, true);
1820 }
1821
1822 streamer_write_record_start (ob, LTO_null);
1823 }
1824
1825
1826 /* Output EH region R to OB. R's index in its function's
1827 EH->REGION_ARRAY is emitted, together with the indices of its
1828 neighbors, so that the reader can detect EH region sharing. */
1829
1830 static void
1831 output_eh_region (struct output_block *ob, eh_region r)
1832 {
1833 enum LTO_tags tag;
1834
1835 if (r == NULL)
1836 {
1837 streamer_write_record_start (ob, LTO_null);
1838 return;
1839 }
1840
1841 if (r->type == ERT_CLEANUP)
1842 tag = LTO_ert_cleanup;
1843 else if (r->type == ERT_TRY)
1844 tag = LTO_ert_try;
1845 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1846 tag = LTO_ert_allowed_exceptions;
1847 else if (r->type == ERT_MUST_NOT_THROW)
1848 tag = LTO_ert_must_not_throw;
1849 else
1850 gcc_unreachable ();
1851
1852 streamer_write_record_start (ob, tag);
1853 streamer_write_hwi (ob, r->index);
1854
1855 if (r->outer)
1856 streamer_write_hwi (ob, r->outer->index);
1857 else
1858 streamer_write_zero (ob);
1859
1860 if (r->inner)
1861 streamer_write_hwi (ob, r->inner->index);
1862 else
1863 streamer_write_zero (ob);
1864
1865 if (r->next_peer)
1866 streamer_write_hwi (ob, r->next_peer->index);
1867 else
1868 streamer_write_zero (ob);
1869
1870 if (r->type == ERT_TRY)
1871 {
1872 output_eh_try_list (ob, r->u.eh_try.first_catch);
1873 }
1874 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1875 {
1876 stream_write_tree (ob, r->u.allowed.type_list, true);
1877 stream_write_tree (ob, r->u.allowed.label, true);
1878 streamer_write_uhwi (ob, r->u.allowed.filter);
1879 }
1880 else if (r->type == ERT_MUST_NOT_THROW)
1881 {
1882 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1883 bitpack_d bp = bitpack_create (ob->main_stream);
1884 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1885 streamer_write_bitpack (&bp);
1886 }
1887
1888 if (r->landing_pads)
1889 streamer_write_hwi (ob, r->landing_pads->index);
1890 else
1891 streamer_write_zero (ob);
1892 }
1893
1894
1895 /* Output landing pad LP to OB. */
1896
1897 static void
1898 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1899 {
1900 if (lp == NULL)
1901 {
1902 streamer_write_record_start (ob, LTO_null);
1903 return;
1904 }
1905
1906 streamer_write_record_start (ob, LTO_eh_landing_pad);
1907 streamer_write_hwi (ob, lp->index);
1908 if (lp->next_lp)
1909 streamer_write_hwi (ob, lp->next_lp->index);
1910 else
1911 streamer_write_zero (ob);
1912
1913 if (lp->region)
1914 streamer_write_hwi (ob, lp->region->index);
1915 else
1916 streamer_write_zero (ob);
1917
1918 stream_write_tree (ob, lp->post_landing_pad, true);
1919 }
1920
1921
1922 /* Output the existing eh_table to OB. */
1923
1924 static void
1925 output_eh_regions (struct output_block *ob, struct function *fn)
1926 {
1927 if (fn->eh && fn->eh->region_tree)
1928 {
1929 unsigned i;
1930 eh_region eh;
1931 eh_landing_pad lp;
1932 tree ttype;
1933
1934 streamer_write_record_start (ob, LTO_eh_table);
1935
1936 /* Emit the index of the root of the EH region tree. */
1937 streamer_write_hwi (ob, fn->eh->region_tree->index);
1938
1939 /* Emit all the EH regions in the region array. */
1940 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1941 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1942 output_eh_region (ob, eh);
1943
1944 /* Emit all landing pads. */
1945 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1946 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1947 output_eh_lp (ob, lp);
1948
1949 /* Emit all the runtime type data. */
1950 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1951 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1952 stream_write_tree (ob, ttype, true);
1953
1954 /* Emit the table of action chains. */
1955 if (targetm.arm_eabi_unwinder)
1956 {
1957 tree t;
1958 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1959 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1960 stream_write_tree (ob, t, true);
1961 }
1962 else
1963 {
1964 uchar c;
1965 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1966 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1967 streamer_write_char_stream (ob->main_stream, c);
1968 }
1969 }
1970
1971 /* The LTO_null either terminates the record or indicates that there
1972 are no eh_records at all. */
1973 streamer_write_record_start (ob, LTO_null);
1974 }
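/* Rough shape of the EH data written above when FN has an EH tree,
   inferred from the writer calls (the reader side is expected to mirror
   this; this comment is only a reading aid):

     LTO_eh_table
     hwi index of the region tree root
     hwi length of region_array, then one output_eh_region record per slot
     hwi length of lp_array, then one output_eh_lp record per slot
     hwi length of ttype_data, then one tree per slot
     ehspec data: trees (ARM EABI unwinder) or raw bytes (otherwise)

   and finally an LTO_null that either terminates the table or, when FN
   has no EH tree, is the only thing written.  */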
1975
1976
1977 /* Output all of the active ssa names to the ssa_names stream. */
1978
1979 static void
1980 output_ssa_names (struct output_block *ob, struct function *fn)
1981 {
1982 unsigned int i, len;
1983
1984 len = vec_safe_length (SSANAMES (fn));
1985 streamer_write_uhwi (ob, len);
1986
1987 for (i = 1; i < len; i++)
1988 {
1989 tree ptr = (*SSANAMES (fn))[i];
1990
1991 if (ptr == NULL_TREE
1992 || SSA_NAME_IN_FREE_LIST (ptr)
1993 || virtual_operand_p (ptr)
1994 /* Simply skip unreleased SSA names. */
1995 || (! SSA_NAME_IS_DEFAULT_DEF (ptr)
1996 && (! SSA_NAME_DEF_STMT (ptr)
1997 || ! gimple_bb (SSA_NAME_DEF_STMT (ptr)))))
1998 continue;
1999
2000 streamer_write_uhwi (ob, i);
2001 streamer_write_char_stream (ob->main_stream,
2002 SSA_NAME_IS_DEFAULT_DEF (ptr));
2003 if (SSA_NAME_VAR (ptr))
2004 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
2005 else
2006 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
2007 stream_write_tree (ob, TREE_TYPE (ptr), true);
2008 }
2009
2010 streamer_write_zero (ob);
2011 }
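/* Sketch of the SSA name stream written above (informal): a uhwi with the
   length of the SSANAMES vector, then, for every name that is not skipped
   (NULL, freed, virtual, or unreleased names are skipped), a uhwi index,
   a one-byte SSA_NAME_IS_DEFAULT_DEF flag and either SSA_NAME_VAR or, if
   that is missing, TREE_TYPE of the name; a zero terminates the list.  */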
2012
2013
2014
2015 /* Output the cfg. */
2016
2017 static void
2018 output_cfg (struct output_block *ob, struct function *fn)
2019 {
2020 struct lto_output_stream *tmp_stream = ob->main_stream;
2021 basic_block bb;
2022
2023 ob->main_stream = ob->cfg_stream;
2024
2025 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
2026 profile_status_for_fn (fn));
2027
2028 /* Output the number of the highest basic block. */
2029 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
2030
2031 FOR_ALL_BB_FN (bb, fn)
2032 {
2033 edge_iterator ei;
2034 edge e;
2035
2036 streamer_write_hwi (ob, bb->index);
2037
2038 /* Output the successors and the edge flags. */
2039 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
2040 FOR_EACH_EDGE (e, ei, bb->succs)
2041 {
2042 streamer_write_uhwi (ob, e->dest->index);
2043 e->probability.stream_out (ob);
2044 streamer_write_uhwi (ob, e->flags);
2045 }
2046 }
2047
2048 streamer_write_hwi (ob, -1);
2049
2050 bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
2051 while (bb->next_bb)
2052 {
2053 streamer_write_hwi (ob, bb->next_bb->index);
2054 bb = bb->next_bb;
2055 }
2056
2057 streamer_write_hwi (ob, -1);
2058
2059 /* Output the number of loops. */
2060 streamer_write_uhwi (ob, number_of_loops (fn));
2061
2062 /* Output each loop, skipping the tree root which has number zero. */
2063 for (unsigned i = 1; i < number_of_loops (fn); ++i)
2064 {
2065 class loop *loop = get_loop (fn, i);
2066
2067 /* Write the index of the loop header. That's enough to rebuild
2068 the loop tree on the reader side. Stream -1 for an unused
2069 loop entry. */
2070 if (!loop)
2071 {
2072 streamer_write_hwi (ob, -1);
2073 continue;
2074 }
2075 else
2076 streamer_write_hwi (ob, loop->header->index);
2077
2078 /* Write everything copy_loop_info copies. */
2079 streamer_write_enum (ob->main_stream,
2080 loop_estimation, EST_LAST, loop->estimate_state);
2081 streamer_write_hwi (ob, loop->any_upper_bound);
2082 if (loop->any_upper_bound)
2083 streamer_write_widest_int (ob, loop->nb_iterations_upper_bound);
2084 streamer_write_hwi (ob, loop->any_likely_upper_bound);
2085 if (loop->any_likely_upper_bound)
2086 streamer_write_widest_int (ob, loop->nb_iterations_likely_upper_bound);
2087 streamer_write_hwi (ob, loop->any_estimate);
2088 if (loop->any_estimate)
2089 streamer_write_widest_int (ob, loop->nb_iterations_estimate);
2090
2091 /* Write OMP SIMD related info. */
2092 streamer_write_hwi (ob, loop->safelen);
2093 streamer_write_hwi (ob, loop->unroll);
2094 streamer_write_hwi (ob, loop->owned_clique);
2095 streamer_write_hwi (ob, loop->dont_vectorize);
2096 streamer_write_hwi (ob, loop->force_vectorize);
2097 streamer_write_hwi (ob, loop->finite_p);
2098 stream_write_tree (ob, loop->simduid, true);
2099 }
2100
2101 ob->main_stream = tmp_stream;
2102 }
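/* Informal layout of the cfg stream filled in above: the profile status
   enum, a uhwi with the highest basic block number, then for every block
   its hwi index, a uhwi successor count and, per edge, the destination
   index, the edge probability and the edge flags; a -1 terminates the
   block records.  This is followed by the ->next_bb chain as a -1
   terminated list of indices, the uhwi number of loops, and one record
   per loop (header index, or -1 for unused slots, plus the fields that
   copy_loop_info copies).  */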
2103
2104
2105 /* Create the header in the file using OB. If the section type is for
2106 a function, FN must be the decl of that function. */
2107
2108 void
2109 produce_asm (struct output_block *ob, tree fn)
2110 {
2111 enum lto_section_type section_type = ob->section_type;
2112 struct lto_function_header header;
2113 char *section_name;
2114
2115 if (section_type == LTO_section_function_body)
2116 {
2117 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
2118 section_name = lto_get_section_name (section_type, name,
2119 symtab_node::get (fn)->order,
2120 NULL);
2121 }
2122 else
2123 section_name = lto_get_section_name (section_type, NULL, 0, NULL);
2124
2125 lto_begin_section (section_name, !flag_wpa);
2126 free (section_name);
2127
2128 /* The entire header stream is computed here. */
2129 memset (&header, 0, sizeof (struct lto_function_header));
2130
2131 if (section_type == LTO_section_function_body)
2132 header.cfg_size = ob->cfg_stream->total_size;
2133 header.main_size = ob->main_stream->total_size;
2134 header.string_size = ob->string_stream->total_size;
2135 lto_write_data (&header, sizeof header);
2136
2137 /* Put all of the gimple and the string table out to the asm file as a
2138 block of text. */
2139 if (section_type == LTO_section_function_body)
2140 lto_write_stream (ob->cfg_stream);
2141 lto_write_stream (ob->main_stream);
2142 lto_write_stream (ob->string_stream);
2143
2144 lto_end_section ();
2145 }
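/* The section written above therefore consists of a struct
   lto_function_header (cfg_size is only meaningful for function bodies),
   followed by the cfg stream (function bodies only), the main stream and
   the string stream, in that order.  This is just a summary of the calls
   above, not an independent format definition.  */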
2146
2147
2148 /* Output the base body of struct function FN using output block OB. */
2149
2150 static void
2151 output_struct_function_base (struct output_block *ob, struct function *fn)
2152 {
2153 struct bitpack_d bp;
2154 unsigned i;
2155 tree t;
2156
2157 /* Output the static chain and non-local goto save area. */
2158 stream_write_tree (ob, fn->static_chain_decl, true);
2159 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
2160
2161 /* Output all the local variables in the function. */
2162 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
2163 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
2164 stream_write_tree (ob, t, true);
2165
2166 /* Output current IL state of the function. */
2167 streamer_write_uhwi (ob, fn->curr_properties);
2168
2169 /* Write all the attributes for FN. */
2170 bp = bitpack_create (ob->main_stream);
2171 bp_pack_value (&bp, fn->is_thunk, 1);
2172 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
2173 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
2174 bp_pack_value (&bp, fn->returns_struct, 1);
2175 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
2176 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
2177 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
2178 bp_pack_value (&bp, fn->after_inlining, 1);
2179 bp_pack_value (&bp, fn->stdarg, 1);
2180 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
2181 bp_pack_value (&bp, fn->has_forced_label_in_static, 1);
2182 bp_pack_value (&bp, fn->calls_alloca, 1);
2183 bp_pack_value (&bp, fn->calls_setjmp, 1);
2184 bp_pack_value (&bp, fn->calls_eh_return, 1);
2185 bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
2186 bp_pack_value (&bp, fn->has_simduid_loops, 1);
2187 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
2188 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
2189 bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);
2190
2191 /* Output the function start and end loci. */
2192 stream_output_location (ob, &bp, fn->function_start_locus);
2193 stream_output_location (ob, &bp, fn->function_end_locus);
2194
2195 /* Save the instance discriminator if present. */
2196 int *instance_number_p = NULL;
2197 if (decl_to_instance_map)
2198 instance_number_p = decl_to_instance_map->get (fn->decl);
2199 bp_pack_value (&bp, !!instance_number_p, 1);
2200 if (instance_number_p)
2201 bp_pack_value (&bp, *instance_number_p, sizeof (int) * CHAR_BIT);
2202
2203 streamer_write_bitpack (&bp);
2204 }
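/* For reference, the bitpack above carries, in this order: sixteen
   single-bit function flags, the two 8-bit va_list size fields,
   last_clique, the function start and end locations and finally an
   optional instance discriminator guarded by a presence bit.  This list
   is presumably mirrored by the reader in lto-streamer-in.c, so keep the
   two in sync.  */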
2205
2206
2207 /* Collect all leaf BLOCKs beyond ROOT into LEAFS. */
2208
2209 static void
2210 collect_block_tree_leafs (tree root, vec<tree> &leafs)
2211 {
2212 for (root = BLOCK_SUBBLOCKS (root); root; root = BLOCK_CHAIN (root))
2213 if (! BLOCK_SUBBLOCKS (root))
2214 leafs.safe_push (root);
2215 else
2216 collect_block_tree_leafs (BLOCK_SUBBLOCKS (root), leafs);
2217 }
2218
2219 /* This performs function body modifications that are needed for streaming
2220 to work. */
2221
2222 void
2223 lto_prepare_function_for_streaming (struct cgraph_node *node)
2224 {
2225 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2226 basic_block bb;
2227
2228 if (number_of_loops (fn))
2229 {
2230 push_cfun (fn);
2231 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
2232 loop_optimizer_finalize ();
2233 pop_cfun ();
2234 }
2235 /* We will renumber the statements. The code that does this uses
2236 the same ordering that we use for serializing them so we can use
2237 the same code on the other end and not have to write out the
2238 statement numbers. We do not assign UIDs to PHIs here because
2239 virtual PHIs get re-computed on-the-fly which would make numbers
2240 inconsistent. */
2241 set_gimple_stmt_max_uid (fn, 0);
2242 FOR_ALL_BB_FN (bb, fn)
2243 {
2244 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2245 gsi_next (&gsi))
2246 {
2247 gphi *stmt = gsi.phi ();
2248
2249 /* Virtual PHIs are not going to be streamed. */
2250 if (!virtual_operand_p (gimple_phi_result (stmt)))
2251 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fn));
2252 }
2253 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
2254 gsi_next (&gsi))
2255 {
2256 gimple *stmt = gsi_stmt (gsi);
2257 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fn));
2258 }
2259 }
2260 /* To avoid keeping duplicate gimple IDs in the statements, renumber
2261 virtual phis now. */
2262 FOR_ALL_BB_FN (bb, fn)
2263 {
2264 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2265 gsi_next (&gsi))
2266 {
2267 gphi *stmt = gsi.phi ();
2268 if (virtual_operand_p (gimple_phi_result (stmt)))
2269 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fn));
2270 }
2271 }
2272
2273 }
2274
2275 /* Output the body of function NODE->DECL. */
2276
2277 static void
2278 output_function (struct cgraph_node *node)
2279 {
2280 tree function;
2281 struct function *fn;
2282 basic_block bb;
2283 struct output_block *ob;
2284
2285 if (streamer_dump_file)
2286 fprintf (streamer_dump_file, "\nStreaming body of %s\n",
2287 node->dump_name ());
2288
2289 function = node->decl;
2290 fn = DECL_STRUCT_FUNCTION (function);
2291 ob = create_output_block (LTO_section_function_body);
2292
2293 clear_line_info (ob);
2294 ob->symbol = node;
2295
2296 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
2297
2298 /* Make string 0 be a NULL string. */
2299 streamer_write_char_stream (ob->string_stream, 0);
2300
2301 streamer_write_record_start (ob, LTO_function);
2302
2303 /* Output decls for parameters and args. */
2304 stream_write_tree (ob, DECL_RESULT (function), true);
2305 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
2306
2307 /* Output debug args if available. */
2308 vec<tree, va_gc> **debugargs = decl_debug_args_lookup (function);
2309 if (! debugargs)
2310 streamer_write_uhwi (ob, 0);
2311 else
2312 {
2313 streamer_write_uhwi (ob, (*debugargs)->length ());
2314 for (unsigned i = 0; i < (*debugargs)->length (); ++i)
2315 stream_write_tree (ob, (**debugargs)[i], true);
2316 }
2317
2318 /* Output DECL_INITIAL for the function, which contains the tree of
2319 lexical scopes. */
2320 stream_write_tree (ob, DECL_INITIAL (function), true);
2321 /* As we do not recurse into BLOCK_SUBBLOCKS but only into BLOCK_SUPERCONTEXT,
2322 collect the block tree leaves and stream those. */
2323 auto_vec<tree> block_tree_leafs;
2324 if (DECL_INITIAL (function))
2325 collect_block_tree_leafs (DECL_INITIAL (function), block_tree_leafs);
2326 streamer_write_uhwi (ob, block_tree_leafs.length ());
2327 for (unsigned i = 0; i < block_tree_leafs.length (); ++i)
2328 stream_write_tree (ob, block_tree_leafs[i], true);
2329
2330 /* We also stream abstract functions where we stream only stuff needed for
2331 debug info. */
2332 if (gimple_has_body_p (function))
2333 {
2334 streamer_write_uhwi (ob, 1);
2335 output_struct_function_base (ob, fn);
2336
2337 /* Output all the SSA names used in the function. */
2338 output_ssa_names (ob, fn);
2339
2340 /* Output any exception handling regions. */
2341 output_eh_regions (ob, fn);
2342
2343 /* Output the code for the function. */
2344 FOR_ALL_BB_FN (bb, fn)
2345 output_bb (ob, bb, fn);
2346
2347 /* The terminator for this function. */
2348 streamer_write_record_start (ob, LTO_null);
2349
2350 output_cfg (ob, fn);
2351 }
2352 else
2353 streamer_write_uhwi (ob, 0);
2354
2355 /* Create a section to hold the pickled output of this function. */
2356 produce_asm (ob, function);
2357
2358 destroy_output_block (ob);
2359 if (streamer_dump_file)
2360 fprintf (streamer_dump_file, "Finished streaming %s\n",
2361 node->dump_name ());
2362 }
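/* Putting the pieces together, a function body section written above
   contains (informally): the LTO_function record, DECL_RESULT and the
   DECL_ARGUMENTS chain, the debug args, DECL_INITIAL plus the collected
   block tree leaves, and then either a uhwi 0 for abstract functions or
   a uhwi 1 followed by the function base data, the SSA names, the EH
   regions, all basic blocks, an LTO_null terminator and the cfg stream.
   This mirrors the calls above and is only meant as a reading aid.  */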
2363
2364 /* Output the initializer (constructor) of variable NODE->DECL. */
2365
2366 static void
2367 output_constructor (struct varpool_node *node)
2368 {
2369 tree var = node->decl;
2370 struct output_block *ob;
2371
2372 if (streamer_dump_file)
2373 fprintf (streamer_dump_file, "\nStreaming constructor of %s\n",
2374 node->dump_name ());
2375
2376 timevar_push (TV_IPA_LTO_CTORS_OUT);
2377 ob = create_output_block (LTO_section_function_body);
2378
2379 clear_line_info (ob);
2380 ob->symbol = node;
2381
2382 /* Make string 0 be a NULL string. */
2383 streamer_write_char_stream (ob->string_stream, 0);
2384
2385 /* Output DECL_INITIAL for the variable, which holds the
2386 initializer. */
2387 stream_write_tree (ob, DECL_INITIAL (var), true);
2388
2389 /* Create a section to hold the pickled output of this initializer. */
2390 produce_asm (ob, var);
2391
2392 destroy_output_block (ob);
2393 if (streamer_dump_file)
2394 fprintf (streamer_dump_file, "Finished streaming %s\n",
2395 node->dump_name ());
2396 timevar_pop (TV_IPA_LTO_CTORS_OUT);
2397 }
2398
2399
2400 /* Emit toplevel asms. */
2401
2402 void
2403 lto_output_toplevel_asms (void)
2404 {
2405 struct output_block *ob;
2406 struct asm_node *can;
2407 char *section_name;
2408 struct lto_simple_header_with_strings header;
2409
2410 if (!symtab->first_asm_symbol ())
2411 return;
2412
2413 ob = create_output_block (LTO_section_asm);
2414
2415 /* Make string 0 be a NULL string. */
2416 streamer_write_char_stream (ob->string_stream, 0);
2417
2418 for (can = symtab->first_asm_symbol (); can; can = can->next)
2419 {
2420 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2421 streamer_write_hwi (ob, can->order);
2422 }
2423
2424 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2425
2426 section_name = lto_get_section_name (LTO_section_asm, NULL, 0, NULL);
2427 lto_begin_section (section_name, !flag_wpa);
2428 free (section_name);
2429
2430 /* The entire header stream is computed here. */
2431 memset (&header, 0, sizeof (header));
2432
2433 header.main_size = ob->main_stream->total_size;
2434 header.string_size = ob->string_stream->total_size;
2435 lto_write_data (&header, sizeof header);
2436
2437 /* Put all of the gimple and the string table out to the asm file as a
2438 block of text. */
2439 lto_write_stream (ob->main_stream);
2440 lto_write_stream (ob->string_stream);
2441
2442 lto_end_section ();
2443
2444 destroy_output_block (ob);
2445 }
2446
2447
2448 /* Copy the function body or variable constructor of NODE without deserializing. */
2449
2450 static void
2451 copy_function_or_variable (struct symtab_node *node)
2452 {
2453 tree function = node->decl;
2454 struct lto_file_decl_data *file_data = node->lto_file_data;
2455 const char *data;
2456 size_t len;
2457 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2458 char *section_name =
2459 lto_get_section_name (LTO_section_function_body, name, node->order, NULL);
2460 size_t i, j;
2461 struct lto_in_decl_state *in_state;
2462 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2463
2464 if (streamer_dump_file)
2465 fprintf (streamer_dump_file, "Copying section for %s\n", name);
2466 lto_begin_section (section_name, false);
2467 free (section_name);
2468
2469 /* We may have renamed the declaration, e.g., a static function. */
2470 name = lto_get_decl_name_mapping (file_data, name);
2471
2472 data = lto_get_raw_section_data (file_data, LTO_section_function_body,
2473 name, node->order - file_data->order_base,
2474 &len);
2475 gcc_assert (data);
2476
2477 /* Do a bit copy of the function body. */
2478 lto_write_raw_data (data, len);
2479
2480 /* Copy decls. */
2481 in_state =
2482 lto_get_function_in_decl_state (node->lto_file_data, function);
2483 gcc_assert (in_state);
2484 out_state->compressed = in_state->compressed;
2485
2486 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2487 {
2488 size_t n = vec_safe_length (in_state->streams[i]);
2489 vec<tree, va_gc> *trees = in_state->streams[i];
2490 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2491
2492 /* The out state must have the same indices as the in state,
2493 so just copy the vector. All the encoders in the out state
2494 must be empty when we reach here. */
2495 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2496 encoder->trees.reserve_exact (n);
2497 for (j = 0; j < n; j++)
2498 encoder->trees.safe_push ((*trees)[j]);
2499 }
2500
2501 lto_free_raw_section_data (file_data, LTO_section_function_body, name,
2502 data, len);
2503 lto_end_section ();
2504 }
2505
2506 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2507
2508 static tree
2509 wrap_refs (tree *tp, int *ws, void *)
2510 {
2511 tree t = *tp;
2512 if (handled_component_p (t)
2513 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
2514 && TREE_PUBLIC (TREE_OPERAND (t, 0)))
2515 {
2516 tree decl = TREE_OPERAND (t, 0);
2517 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2518 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2519 build1 (ADDR_EXPR, ptrtype, decl),
2520 build_int_cst (ptrtype, 0));
2521 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2522 *ws = 0;
2523 }
2524 else if (TREE_CODE (t) == CONSTRUCTOR)
2525 ;
2526 else if (!EXPR_P (t))
2527 *ws = 0;
2528 return NULL_TREE;
2529 }
2530
2531 /* Remove functions that are no longer used from offload_funcs, and mark the
2532 remaining ones with DECL_PRESERVE_P. */
2533
2534 static void
2535 prune_offload_funcs (void)
2536 {
2537 if (!offload_funcs)
2538 return;
2539
2540 unsigned ix, ix2;
2541 tree *elem_ptr;
2542 VEC_ORDERED_REMOVE_IF (*offload_funcs, ix, ix2, elem_ptr,
2543 cgraph_node::get (*elem_ptr) == NULL);
2544
2545 tree fn_decl;
2546 FOR_EACH_VEC_ELT (*offload_funcs, ix, fn_decl)
2547 DECL_PRESERVE_P (fn_decl) = 1;
2548 }
2549
2550 /* Produce LTO section that contains global information
2551 about LTO bytecode. */
2552
2553 static void
2554 produce_lto_section ()
2555 {
2556 /* Stream LTO meta section. */
2557 output_block *ob = create_output_block (LTO_section_lto);
2558
2559 char * section_name = lto_get_section_name (LTO_section_lto, NULL, 0, NULL);
2560 lto_begin_section (section_name, false);
2561 free (section_name);
2562
2563 #ifdef HAVE_ZSTD_H
2564 lto_compression compression = ZSTD;
2565 #else
2566 lto_compression compression = ZLIB;
2567 #endif
2568
2569 bool slim_object = flag_generate_lto && !flag_fat_lto_objects;
2570 lto_section s
2571 = { LTO_major_version, LTO_minor_version, slim_object, 0 };
2572 s.set_compression (compression);
2573 lto_write_data (&s, sizeof s);
2574 lto_end_section ();
2575 destroy_output_block (ob);
2576 }
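/* The resulting section is simply one struct lto_section: the LTO major
   and minor versions, the slim_object flag and the compression kind,
   which is ZSTD when GCC was configured with zstd support (HAVE_ZSTD_H)
   and ZLIB otherwise.  */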
2577
2578 /* Compare symbols to get them sorted by file name (to optimize streaming). */
2579
2580 static int
2581 cmp_symbol_files (const void *pn1, const void *pn2)
2582 {
2583 const symtab_node *n1 = *(const symtab_node * const *)pn1;
2584 const symtab_node *n2 = *(const symtab_node * const *)pn2;
2585
2586 int file_order1 = n1->lto_file_data ? n1->lto_file_data->order : -1;
2587 int file_order2 = n2->lto_file_data ? n2->lto_file_data->order : -1;
2588
2589 /* Order files same way as they appeared in the command line to reduce
2590 seeking while copying sections. */
2591 if (file_order1 != file_order2)
2592 return file_order1 - file_order2;
2593
2594 /* Order within static library. */
2595 if (n1->lto_file_data && n1->lto_file_data->id != n2->lto_file_data->id)
2596 {
2597 if (n1->lto_file_data->id > n2->lto_file_data->id)
2598 return 1;
2599 if (n1->lto_file_data->id < n2->lto_file_data->id)
2600 return -1;
2601 }
2602
2603 /* And finally order by the definition order. */
2604 return n1->order - n2->order;
2605 }
2606
2607 /* Main entry point from the pass manager. */
2608
2609 void
2610 lto_output (void)
2611 {
2612 struct lto_out_decl_state *decl_state;
2613 bitmap output = NULL;
2614 bitmap_obstack output_obstack;
2615 unsigned int i, n_nodes;
2616 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2617 auto_vec<symtab_node *> symbols_to_copy;
2618
2619 prune_offload_funcs ();
2620
2621 if (flag_checking)
2622 {
2623 bitmap_obstack_initialize (&output_obstack);
2624 output = BITMAP_ALLOC (&output_obstack);
2625 }
2626
2627 /* Initialize the streamer. */
2628 lto_streamer_init ();
2629
2630 produce_lto_section ();
2631
2632 n_nodes = lto_symtab_encoder_size (encoder);
2633 /* Prepare vector of functions to output and then sort it to optimize
2634 section copying. */
2635 for (i = 0; i < n_nodes; i++)
2636 {
2637 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2638 if (snode->alias)
2639 continue;
2640 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2641 {
2642 if (lto_symtab_encoder_encode_body_p (encoder, node))
2643 symbols_to_copy.safe_push (node);
2644 }
2645 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2646 {
2647 /* Wrap symbol references inside the ctor in a type
2648 preserving MEM_REF. */
2649 tree ctor = DECL_INITIAL (node->decl);
2650 if (ctor && !in_lto_p)
2651 walk_tree (&ctor, wrap_refs, NULL, NULL);
2652 if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2653 && lto_symtab_encoder_encode_initializer_p (encoder, node))
2654 symbols_to_copy.safe_push (node);
2655 }
2656 }
2657 symbols_to_copy.qsort (cmp_symbol_files);
2658 for (i = 0; i < symbols_to_copy.length (); i++)
2659 {
2660 symtab_node *snode = symbols_to_copy[i];
2661 cgraph_node *cnode;
2662 varpool_node *vnode;
2663
2664 if (flag_checking)
2665 gcc_assert (bitmap_set_bit (output, DECL_UID (snode->decl)));
2666
2667 decl_state = lto_new_out_decl_state ();
2668 lto_push_out_decl_state (decl_state);
2669
2670 if ((cnode = dyn_cast <cgraph_node *> (snode))
2671 && (gimple_has_body_p (cnode->decl)
2672 || (!flag_wpa
2673 && flag_incremental_link != INCREMENTAL_LINK_LTO)
2674 /* Thunks have no body but they may be synthesized
2675 at WPA time. */
2676 || DECL_ARGUMENTS (cnode->decl)))
2677 output_function (cnode);
2678 else if ((vnode = dyn_cast <varpool_node *> (snode))
2679 && (DECL_INITIAL (vnode->decl) != error_mark_node
2680 || (!flag_wpa
2681 && flag_incremental_link != INCREMENTAL_LINK_LTO)))
2682 output_constructor (vnode);
2683 else
2684 copy_function_or_variable (snode);
2685 gcc_assert (lto_get_out_decl_state () == decl_state);
2686 lto_pop_out_decl_state ();
2687 lto_record_function_out_decl_state (snode->decl, decl_state);
2688 }
2689
2690 /* Emit the callgraph after emitting function bodies. This needs to
2691 be done now to make sure that all the statements in every function
2692 have been renumbered so that edges can be associated with call
2693 statements using the statement UIDs. */
2694 output_symtab ();
2695
2696 output_offload_tables ();
2697
2698 if (flag_checking)
2699 {
2700 BITMAP_FREE (output);
2701 bitmap_obstack_release (&output_obstack);
2702 }
2703 }
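/* In summary, and purely as a reading aid: lto_output first emits the
   LTO meta section, then collects all symbols whose body or initializer
   must be written, sorts them with cmp_symbol_files so that sections
   coming from the same input file stay together, streams or raw-copies
   each of them under its own out-decl state, and finally emits the
   symtab (callgraph) and the offload tables.  */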
2704
2705 /* Write each node encoded by ENCODER to OB, as well as those reachable
2706 from it and required for correct representation of its semantics.
2707 Each node in ENCODER must be a global declaration or a type. A node
2708 is written only once, even if it appears multiple times in the
2709 vector. Certain transitively-reachable nodes, such as those
2710 representing expressions, may be duplicated, but such nodes
2711 must not appear in ENCODER itself. */
2712
2713 static void
2714 write_global_stream (struct output_block *ob,
2715 struct lto_tree_ref_encoder *encoder)
2716 {
2717 tree t;
2718 size_t index;
2719 const size_t size = lto_tree_ref_encoder_size (encoder);
2720
2721 for (index = 0; index < size; index++)
2722 {
2723 t = lto_tree_ref_encoder_get_tree (encoder, index);
2724 if (streamer_dump_file)
2725 {
2726 fprintf (streamer_dump_file, " %i:", (int)index);
2727 print_node_brief (streamer_dump_file, "", t, 4);
2728 fprintf (streamer_dump_file, "\n");
2729 }
2730 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2731 stream_write_tree (ob, t, false);
2732 }
2733 }
2734
2735
2736 /* Write a sequence of indices into the globals vector corresponding
2737 to the trees in ENCODER. These are used by the reader to map the
2738 indices used to refer to global entities within function bodies to
2739 their referents. */
2740
2741 static void
2742 write_global_references (struct output_block *ob,
2743 struct lto_tree_ref_encoder *encoder)
2744 {
2745 tree t;
2746 uint32_t index;
2747 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2748
2749 /* Write size and slot indexes as 32-bit unsigned numbers. */
2750 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2751 data[0] = size;
2752
2753 for (index = 0; index < size; index++)
2754 {
2755 unsigned slot_num;
2756
2757 t = lto_tree_ref_encoder_get_tree (encoder, index);
2758 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2759 gcc_assert (slot_num != (unsigned)-1);
2760 data[index + 1] = slot_num;
2761 }
2762
2763 lto_write_data (data, sizeof (int32_t) * (size + 1));
2764 free (data);
2765 }
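/* The wire format produced above is a flat array of SIZE + 1 32-bit
   words: word 0 holds SIZE and each following word holds the writer
   cache slot number of the corresponding tree in ENCODER.  */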
2766
2767
2768 /* Write all the streams in an lto_out_decl_state STATE using
2769 output block OB. */
2770
2771 void
2772 lto_output_decl_state_streams (struct output_block *ob,
2773 struct lto_out_decl_state *state)
2774 {
2775 int i;
2776
2777 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2778 write_global_stream (ob, &state->streams[i]);
2779 }
2780
2781
2782 /* Write all the references in an lto_out_decl_state STATE using
2783 output block OB. */
2784
2785 void
2786 lto_output_decl_state_refs (struct output_block *ob,
2787 struct lto_out_decl_state *state)
2788 {
2789 unsigned i;
2790 unsigned ref;
2791 tree decl;
2792
2793 /* Write a reference to the FUNCTION_DECL. If there is no function,
2794 write a reference to void_type_node instead. */
2795 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2796 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2797 gcc_assert (ref != (unsigned)-1);
2798 ref = ref * 2 + (state->compressed ? 1 : 0);
2799 lto_write_data (&ref, sizeof (uint32_t));
2800
2801 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2802 write_global_references (ob, &state->streams[i]);
2803 }
2804
2805
2806 /* Return the written size of STATE. */
2807
2808 static size_t
2809 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2810 {
2811 int i;
2812 size_t size;
2813
2814 size = sizeof (int32_t); /* fn_ref. */
2815 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2816 {
2817 size += sizeof (int32_t); /* vector size. */
2818 size += (lto_tree_ref_encoder_size (&state->streams[i])
2819 * sizeof (int32_t));
2820 }
2821 return size;
2822 }
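/* In other words, written size = 4 bytes for the function reference
   (which also encodes the compressed bit) plus, for each of the
   LTO_N_DECL_STREAMS streams, 4 bytes for the vector length and 4 bytes
   per recorded tree.  This must stay in sync with what
   lto_output_decl_state_refs writes above.  */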
2823
2824
2825 /* Write symbol T to the symbol table section, looking up its slot in CACHE.
2826 SEEN specifies symbols we wrote so far; ALIAS indicates an alias. */
2827
2828 static void
2829 write_symbol (struct streamer_tree_cache_d *cache,
2830 tree t, hash_set<const char *> *seen, bool alias)
2831 {
2832 const char *name;
2833 enum gcc_plugin_symbol_kind kind;
2834 enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
2835 unsigned slot_num;
2836 uint64_t size;
2837 const char *comdat;
2838 unsigned char c;
2839
2840 gcc_assert (VAR_OR_FUNCTION_DECL_P (t));
2841
2842 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2843
2844 /* This behaves like assemble_name_raw in varasm.c, performing the
2845 same name manipulations that ASM_OUTPUT_LABELREF does. */
2846 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2847
2848 if (seen->add (name))
2849 return;
2850
2851 streamer_tree_cache_lookup (cache, t, &slot_num);
2852 gcc_assert (slot_num != (unsigned)-1);
2853
2854 if (DECL_EXTERNAL (t))
2855 {
2856 if (DECL_WEAK (t))
2857 kind = GCCPK_WEAKUNDEF;
2858 else
2859 kind = GCCPK_UNDEF;
2860 }
2861 else
2862 {
2863 if (DECL_WEAK (t))
2864 kind = GCCPK_WEAKDEF;
2865 else if (DECL_COMMON (t))
2866 kind = GCCPK_COMMON;
2867 else
2868 kind = GCCPK_DEF;
2869
2870 /* When something is defined, it should have a node attached. */
2871 gcc_assert (alias || !VAR_P (t) || varpool_node::get (t)->definition);
2872 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2873 || (cgraph_node::get (t)
2874 && cgraph_node::get (t)->definition));
2875 }
2876
2877 /* Imitate what default_elf_asm_output_external does.
2878 When a symbol is external, we need to output it with DEFAULT visibility
2879 when compiling with -fvisibility=default, but with HIDDEN visibility
2880 when the symbol has attribute (visibility("hidden")) specified.
2881 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2882 right. */
2883
2884 if (DECL_EXTERNAL (t)
2885 && !targetm.binds_local_p (t))
2886 visibility = GCCPV_DEFAULT;
2887 else
2888 switch (DECL_VISIBILITY (t))
2889 {
2890 case VISIBILITY_DEFAULT:
2891 visibility = GCCPV_DEFAULT;
2892 break;
2893 case VISIBILITY_PROTECTED:
2894 visibility = GCCPV_PROTECTED;
2895 break;
2896 case VISIBILITY_HIDDEN:
2897 visibility = GCCPV_HIDDEN;
2898 break;
2899 case VISIBILITY_INTERNAL:
2900 visibility = GCCPV_INTERNAL;
2901 break;
2902 }
2903
2904 if (kind == GCCPK_COMMON
2905 && DECL_SIZE_UNIT (t)
2906 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2907 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2908 else
2909 size = 0;
2910
2911 if (DECL_ONE_ONLY (t))
2912 comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
2913 else
2914 comdat = "";
2915
2916 lto_write_data (name, strlen (name) + 1);
2917 lto_write_data (comdat, strlen (comdat) + 1);
2918 c = (unsigned char) kind;
2919 lto_write_data (&c, 1);
2920 c = (unsigned char) visibility;
2921 lto_write_data (&c, 1);
2922 lto_write_data (&size, 8);
2923 lto_write_data (&slot_num, 4);
2924 }
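/* The resulting symbol table entry, as written by the lto_write_data
   calls above, is: the NUL-terminated assembler name, the NUL-terminated
   comdat group (an empty string if none), one byte of symbol kind, one
   byte of visibility, an 8-byte size and a 4-byte slot number.  */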
2925
2926 /* Write extension information for symbols (symbol type, section flags). */
2927
2928 static void
2929 write_symbol_extension_info (tree t)
2930 {
2931 unsigned char c;
2932 c = ((unsigned char) TREE_CODE (t) == VAR_DECL
2933 ? GCCST_VARIABLE : GCCST_FUNCTION);
2934 lto_write_data (&c, 1);
2935 unsigned char section_kind = 0;
2936 if (TREE_CODE (t) == VAR_DECL)
2937 {
2938 section *s = get_variable_section (t, false);
2939 if (s->common.flags & SECTION_BSS)
2940 section_kind |= GCCSSK_BSS;
2941 }
2942 lto_write_data (&section_kind, 1);
2943 }
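/* Each extension entry is two bytes: the symbol type (GCCST_VARIABLE or
   GCCST_FUNCTION) followed by the section kind flags, currently only
   GCCSSK_BSS for variables placed in a BSS section.  */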
2944
2945 /* Write an IL symbol table to OB. Return the number of
2946 symbols streamed. */
2947
2948 static unsigned int
2949 produce_symtab (struct output_block *ob)
2950 {
2951 unsigned int streamed_symbols = 0;
2952 struct streamer_tree_cache_d *cache = ob->writer_cache;
2953 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, 0, NULL);
2954 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2955 lto_symtab_encoder_iterator lsei;
2956
2957 lto_begin_section (section_name, false);
2958 free (section_name);
2959
2960 hash_set<const char *> seen;
2961
2962 /* Write the symbol table.
2963 First write everything defined and then all declarations.
2964 This is necessary to handle cases where we have duplicated symbols. */
2965 for (lsei = lsei_start (encoder);
2966 !lsei_end_p (lsei); lsei_next (&lsei))
2967 {
2968 symtab_node *node = lsei_node (lsei);
2969
2970 if (DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
2971 continue;
2972 write_symbol (cache, node->decl, &seen, false);
2973 ++streamed_symbols;
2974 }
2975 for (lsei = lsei_start (encoder);
2976 !lsei_end_p (lsei); lsei_next (&lsei))
2977 {
2978 symtab_node *node = lsei_node (lsei);
2979
2980 if (!DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
2981 continue;
2982 write_symbol (cache, node->decl, &seen, false);
2983 ++streamed_symbols;
2984 }
2985
2986 lto_end_section ();
2987
2988 return streamed_symbols;
2989 }
2990
2991 /* Symtab extension version. */
2992 #define LTO_SYMTAB_EXTENSION_VERSION 1
2993
2994 /* Write an IL symbol table extension to OB. PREVIOUS_STREAMED_SYMBOLS
2995 is the number of symbols produce_symtab streamed, used as a sanity check. */
2996
2997 static void
2998 produce_symtab_extension (struct output_block *ob,
2999 unsigned int previous_streamed_symbols)
3000 {
3001 unsigned int streamed_symbols = 0;
3002 char *section_name = lto_get_section_name (LTO_section_symtab_extension,
3003 NULL, 0, NULL);
3004 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
3005 lto_symtab_encoder_iterator lsei;
3006
3007 lto_begin_section (section_name, false);
3008 free (section_name);
3009
3010 unsigned char version = LTO_SYMTAB_EXTENSION_VERSION;
3011 lto_write_data (&version, 1);
3012
3013 /* Write the symbol table.
3014 First write everything defined and then all declarations.
3015 This is necessary to handle cases where we have duplicated symbols. */
3016 for (lsei = lsei_start (encoder);
3017 !lsei_end_p (lsei); lsei_next (&lsei))
3018 {
3019 symtab_node *node = lsei_node (lsei);
3020
3021 if (DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
3022 continue;
3023 write_symbol_extension_info (node->decl);
3024 ++streamed_symbols;
3025 }
3026 for (lsei = lsei_start (encoder);
3027 !lsei_end_p (lsei); lsei_next (&lsei))
3028 {
3029 symtab_node *node = lsei_node (lsei);
3030
3031 if (!DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
3032 continue;
3033 write_symbol_extension_info (node->decl);
3034 ++streamed_symbols;
3035 }
3036
3037 gcc_assert (previous_streamed_symbols == streamed_symbols);
3038 lto_end_section ();
3039 }
3040
3041
3042 /* Init the streamer_mode_table for output, where we collect info on what
3043 machine_mode values have been streamed. */
3044 void
3045 lto_output_init_mode_table (void)
3046 {
3047 memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
3048 }
3049
3050
3051 /* Write the mode table. */
3052 static void
3053 lto_write_mode_table (void)
3054 {
3055 struct output_block *ob;
3056 ob = create_output_block (LTO_section_mode_table);
3057 bitpack_d bp = bitpack_create (ob->main_stream);
3058
3059 /* Ensure that for GET_MODE_INNER (m) != m we also have
3060 the inner mode marked. */
3061 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
3062 if (streamer_mode_table[i])
3063 {
3064 machine_mode m = (machine_mode) i;
3065 machine_mode inner_m = GET_MODE_INNER (m);
3066 if (inner_m != m)
3067 streamer_mode_table[(int) inner_m] = 1;
3068 }
3069 /* First stream modes that have GET_MODE_INNER (m) == m,
3070 so that we can refer to them afterwards. */
3071 for (int pass = 0; pass < 2; pass++)
3072 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
3073 if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
3074 {
3075 machine_mode m = (machine_mode) i;
3076 if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
3077 continue;
3078 bp_pack_value (&bp, m, 8);
3079 bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
3080 bp_pack_poly_value (&bp, GET_MODE_SIZE (m), 16);
3081 bp_pack_poly_value (&bp, GET_MODE_PRECISION (m), 16);
3082 bp_pack_value (&bp, GET_MODE_INNER (m), 8);
3083 bp_pack_poly_value (&bp, GET_MODE_NUNITS (m), 16);
3084 switch (GET_MODE_CLASS (m))
3085 {
3086 case MODE_FRACT:
3087 case MODE_UFRACT:
3088 case MODE_ACCUM:
3089 case MODE_UACCUM:
3090 bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
3091 bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
3092 break;
3093 case MODE_FLOAT:
3094 case MODE_DECIMAL_FLOAT:
3095 bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
3096 break;
3097 default:
3098 break;
3099 }
3100 bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
3101 }
3102 bp_pack_value (&bp, VOIDmode, 8);
3103
3104 streamer_write_bitpack (&bp);
3105
3106 char *section_name
3107 = lto_get_section_name (LTO_section_mode_table, NULL, 0, NULL);
3108 lto_begin_section (section_name, !flag_wpa);
3109 free (section_name);
3110
3111 /* The entire header stream is computed here. */
3112 struct lto_simple_header_with_strings header;
3113 memset (&header, 0, sizeof (header));
3114
3115 header.main_size = ob->main_stream->total_size;
3116 header.string_size = ob->string_stream->total_size;
3117 lto_write_data (&header, sizeof header);
3118
3119 /* Put all of the gimple and the string table out to the asm file as a
3120 block of text. */
3121 lto_write_stream (ob->main_stream);
3122 lto_write_stream (ob->string_stream);
3123
3124 lto_end_section ();
3125 destroy_output_block (ob);
3126 }
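/* Sketch of one mode record as packed above: the 8-bit mode number, its
   mode class, the poly size and precision, the 8-bit inner mode, the poly
   number of units, class-specific extras (ibit/fbit for fixed-point
   classes, the real format name for float classes), and the mode name
   string.  The table is terminated by an 8-bit VOIDmode.  */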
3127
3128
3129 /* This pass is run after all of the functions are serialized and all
3130 of the IPA passes have written their serialized forms. This pass
3131 causes the vector of all of the global decls and types used from
3132 this file to be written into a section that can then be read in to
3133 recover these on the other side. */
3134
3135 void
3136 produce_asm_for_decls (void)
3137 {
3138 struct lto_out_decl_state *out_state;
3139 struct lto_out_decl_state *fn_out_state;
3140 struct lto_decl_header header;
3141 char *section_name;
3142 struct output_block *ob;
3143 unsigned idx, num_fns;
3144 size_t decl_state_size;
3145 int32_t num_decl_states;
3146
3147 ob = create_output_block (LTO_section_decls);
3148
3149 memset (&header, 0, sizeof (struct lto_decl_header));
3150
3151 section_name = lto_get_section_name (LTO_section_decls, NULL, 0, NULL);
3152 lto_begin_section (section_name, !flag_wpa);
3153 free (section_name);
3154
3155 /* Make string 0 be a NULL string. */
3156 streamer_write_char_stream (ob->string_stream, 0);
3157
3158 gcc_assert (!alias_pairs);
3159
3160 /* Get rid of the global decl state hash tables to save some memory. */
3161 out_state = lto_get_out_decl_state ();
3162 for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
3163 if (out_state->streams[i].tree_hash_table)
3164 {
3165 delete out_state->streams[i].tree_hash_table;
3166 out_state->streams[i].tree_hash_table = NULL;
3167 }
3168
3169 /* Write the global symbols. */
3170 if (streamer_dump_file)
3171 fprintf (streamer_dump_file, "Outputting global stream\n");
3172 lto_output_decl_state_streams (ob, out_state);
3173 num_fns = lto_function_decl_states.length ();
3174 for (idx = 0; idx < num_fns; idx++)
3175 {
3176 fn_out_state =
3177 lto_function_decl_states[idx];
3178 if (streamer_dump_file)
3179 fprintf (streamer_dump_file, "Outputting stream for %s\n",
3180 IDENTIFIER_POINTER
3181 (DECL_ASSEMBLER_NAME (fn_out_state->fn_decl)));
3182 lto_output_decl_state_streams (ob, fn_out_state);
3183 }
3184
3185 /* Currently not used. This field would allow us to preallocate
3186 the globals vector, so that it need not be resized as it is extended. */
3187 header.num_nodes = -1;
3188
3189 /* Compute the total size of all decl out states. */
3190 decl_state_size = sizeof (int32_t);
3191 decl_state_size += lto_out_decl_state_written_size (out_state);
3192 for (idx = 0; idx < num_fns; idx++)
3193 {
3194 fn_out_state =
3195 lto_function_decl_states[idx];
3196 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
3197 }
3198 header.decl_state_size = decl_state_size;
3199
3200 header.main_size = ob->main_stream->total_size;
3201 header.string_size = ob->string_stream->total_size;
3202
3203 lto_write_data (&header, sizeof header);
3204
3205 /* Write the main out-decl state, followed by out-decl states of
3206 functions. */
3207 num_decl_states = num_fns + 1;
3208 lto_write_data (&num_decl_states, sizeof (num_decl_states));
3209 lto_output_decl_state_refs (ob, out_state);
3210 for (idx = 0; idx < num_fns; idx++)
3211 {
3212 fn_out_state = lto_function_decl_states[idx];
3213 lto_output_decl_state_refs (ob, fn_out_state);
3214 }
3215
3216 lto_write_stream (ob->main_stream);
3217 lto_write_stream (ob->string_stream);
3218
3219 lto_end_section ();
3220
3221 /* Write the symbol table. It is used by the linker to determine
3222 dependencies, and thus we can skip it for WPA. */
3223 if (!flag_wpa)
3224 {
3225 unsigned int streamed_symbols = produce_symtab (ob);
3226 produce_symtab_extension (ob, streamed_symbols);
3227 }
3228
3229 /* Write command line opts. */
3230 lto_write_options ();
3231
3232 /* Deallocate memory and clean up. */
3233 for (idx = 0; idx < num_fns; idx++)
3234 {
3235 fn_out_state =
3236 lto_function_decl_states[idx];
3237 lto_delete_out_decl_state (fn_out_state);
3238 }
3239 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
3240 lto_function_decl_states.release ();
3241 destroy_output_block (ob);
3242 if (lto_stream_offload_p)
3243 lto_write_mode_table ();
3244 }