gcc/lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2020 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-streamer.h"
34 #include "alias.h"
35 #include "stor-layout.h"
36 #include "gimple-iterator.h"
37 #include "except.h"
38 #include "lto-symtab.h"
39 #include "cgraph.h"
40 #include "cfgloop.h"
41 #include "builtins.h"
42 #include "gomp-constants.h"
43 #include "debug.h"
44 #include "omp-offload.h"
45 #include "print-tree.h"
46 #include "tree-dfa.h"
47 #include "file-prefix-map.h" /* remap_debug_filename() */
48 #include "output.h"
49 #include "ipa-utils.h"
50
51
52 static void lto_write_tree (struct output_block*, tree, bool);
53
 54 /* Clear the line info stored in OB. */
55
56 static void
57 clear_line_info (struct output_block *ob)
58 {
59 ob->current_file = NULL;
60 ob->current_line = 0;
61 ob->current_col = 0;
62 ob->current_sysp = false;
63 }
64
65
 66 /* Create the output block and return it. SECTION_TYPE is
 67 LTO_section_function_body or LTO_section_static_initializer. */
68
69 struct output_block *
70 create_output_block (enum lto_section_type section_type)
71 {
72 struct output_block *ob = XCNEW (struct output_block);
73 if (streamer_dump_file)
74 fprintf (streamer_dump_file, "Creating output block for %s\n",
75 lto_section_name[section_type]);
76
77 ob->section_type = section_type;
78 ob->decl_state = lto_get_out_decl_state ();
 79 /* Only the global decl stream in non-WPA mode will ever be considered
 80 by tree merging. */
81 if (!flag_wpa && section_type == LTO_section_decls)
82 ob->local_trees = new (hash_set <tree>);
83 ob->main_stream = XCNEW (struct lto_output_stream);
84 ob->string_stream = XCNEW (struct lto_output_stream);
85 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
86
87 if (section_type == LTO_section_function_body)
88 ob->cfg_stream = XCNEW (struct lto_output_stream);
89
90 clear_line_info (ob);
91
92 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
93 gcc_obstack_init (&ob->obstack);
94
95 return ob;
96 }
97
98
99 /* Destroy the output block OB. */
100
101 void
102 destroy_output_block (struct output_block *ob)
103 {
104 enum lto_section_type section_type = ob->section_type;
105
106 delete ob->string_hash_table;
107 ob->string_hash_table = NULL;
108 delete ob->local_trees;
109
110 free (ob->main_stream);
111 free (ob->string_stream);
112 if (section_type == LTO_section_function_body)
113 free (ob->cfg_stream);
114
115 streamer_tree_cache_delete (ob->writer_cache);
116 obstack_free (&ob->obstack, NULL);
117
118 free (ob);
119 }
120
121
122 /* Wrapper around variably_modified_type_p avoiding type modification
123 during WPA streaming. */
124
125 static bool
126 lto_variably_modified_type_p (tree type)
127 {
128 return (in_lto_p
129 ? TYPE_LANG_FLAG_0 (TYPE_MAIN_VARIANT (type))
130 : variably_modified_type_p (type, NULL_TREE));
131 }
132
133
134 /* Return true if tree node T is written to various tables. For these
 135 nodes, we sometimes want to write their physical representation
136 (via lto_output_tree), and sometimes we need to emit an index
137 reference into a table (via lto_output_tree_ref). */
138
139 static bool
140 tree_is_indexable (tree t)
141 {
142 /* Parameters and return values of functions of variably modified types
143 must go to global stream, because they may be used in the type
144 definition. */
145 if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
146 && DECL_CONTEXT (t))
147 return lto_variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)));
 148 /* IMPORTED_DECL is put into BLOCK and thus it can never be shared.
149 We should no longer need to stream it. */
150 else if (TREE_CODE (t) == IMPORTED_DECL)
151 gcc_unreachable ();
152 else if (TREE_CODE (t) == LABEL_DECL)
153 return FORCED_LABEL (t) || DECL_NONLOCAL (t);
154 else if (((VAR_P (t) && !TREE_STATIC (t))
155 || TREE_CODE (t) == TYPE_DECL
156 || TREE_CODE (t) == CONST_DECL
157 || TREE_CODE (t) == NAMELIST_DECL)
158 && decl_function_context (t))
159 return false;
160 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
161 return false;
162 /* Variably modified types need to be streamed alongside function
163 bodies because they can refer to local entities. Together with
164 them we have to localize their members as well.
165 ??? In theory that includes non-FIELD_DECLs as well. */
166 else if (TYPE_P (t)
167 && lto_variably_modified_type_p (t))
168 return false;
169 else if (TREE_CODE (t) == FIELD_DECL
170 && lto_variably_modified_type_p (DECL_CONTEXT (t)))
171 return false;
172 else
173 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
174 }
175
176
177 /* Output info about new location into bitpack BP.
178 After outputting bitpack, lto_output_location_data has
179 to be done to output actual data. */
180
181 void
182 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
183 location_t loc)
184 {
185 expanded_location xloc;
186
187 loc = LOCATION_LOCUS (loc);
188 bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
189 loc < RESERVED_LOCATION_COUNT
190 ? loc : RESERVED_LOCATION_COUNT);
191 if (loc < RESERVED_LOCATION_COUNT)
192 return;
193
194 xloc = expand_location (loc);
195
196 bp_pack_value (bp, ob->current_file != xloc.file, 1);
197 bp_pack_value (bp, ob->current_line != xloc.line, 1);
198 bp_pack_value (bp, ob->current_col != xloc.column, 1);
199
200 if (ob->current_file != xloc.file)
201 {
202 bp_pack_string (ob, bp, remap_debug_filename (xloc.file), true);
203 bp_pack_value (bp, xloc.sysp, 1);
204 }
205 ob->current_file = xloc.file;
206 ob->current_sysp = xloc.sysp;
207
208 if (ob->current_line != xloc.line)
209 bp_pack_var_len_unsigned (bp, xloc.line);
210 ob->current_line = xloc.line;
211
212 if (ob->current_col != xloc.column)
213 bp_pack_var_len_unsigned (bp, xloc.column);
214 ob->current_col = xloc.column;
215 }
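/* An illustrative example of the encoding above: for a run of expressions
   sharing the same file and line, each location after the first costs only
   the reserved-location marker and the three change bits, plus the
   variable-length column when it differs; the file name and sysp flag are
   re-streamed only when the file changes relative to OB's current state. */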
216
217
 218 /* Look up T in ENCODER. If T is not found, create a new entry in
 219 ENCODER for T with the next available index and push T onto the
 220 encoder's tree vector.
 221 Return the index. */
222
223
224 static unsigned
225 lto_get_index (struct lto_tree_ref_encoder *encoder, tree t)
226 {
227 bool existed_p;
228
229 unsigned int &index
230 = encoder->tree_hash_table->get_or_insert (t, &existed_p);
231 if (!existed_p)
232 {
233 index = encoder->trees.length ();
234 if (streamer_dump_file)
235 {
236 print_node_brief (streamer_dump_file, " Encoding indexable ",
237 t, 4);
238 fprintf (streamer_dump_file, " as %i \n", index);
239 }
240 encoder->trees.safe_push (t);
241 }
242
243 return index;
244 }
245
246
 247 /* For the indexable tree node EXPR, compute the tag and index used to
 248 reference it from output block OB and store them in *TAG
 249 and *INDEX. */
250
251 static void
252 lto_indexable_tree_ref (struct output_block *ob, tree expr,
253 enum LTO_tags *tag, unsigned *index)
254 {
255 gcc_checking_assert (tree_is_indexable (expr));
256
257 if (TREE_CODE (expr) == SSA_NAME)
258 {
259 *tag = LTO_ssa_name_ref;
260 *index = SSA_NAME_VERSION (expr);
261 }
262 else
263 {
264 *tag = LTO_global_stream_ref;
265 *index = lto_get_index (&ob->decl_state->streams[LTO_DECL_STREAM], expr);
266 }
267 }
268
269
 270 /* Output a reference to the static or extern var DECL to OBS. */
271
272 void
273 lto_output_var_decl_ref (struct lto_out_decl_state *decl_state,
274 struct lto_output_stream * obs, tree decl)
275 {
276 gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
277 streamer_write_uhwi_stream
278 (obs, lto_get_index (&decl_state->streams[LTO_DECL_STREAM],
279 decl));
280 }
281
282
 283 /* Output a reference to the function DECL to OBS. */
284
285 void
286 lto_output_fn_decl_ref (struct lto_out_decl_state *decl_state,
287 struct lto_output_stream * obs, tree decl)
288 {
289 gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL);
290 streamer_write_uhwi_stream
291 (obs, lto_get_index (&decl_state->streams[LTO_DECL_STREAM], decl));
292 }
293
294 /* Return true if EXPR is a tree node that can be written to disk. */
295
296 static inline bool
297 lto_is_streamable (tree expr)
298 {
299 enum tree_code code = TREE_CODE (expr);
300
301 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
302 name version in lto_output_tree_ref (see output_ssa_names). */
303 return !is_lang_specific (expr)
304 && code != SSA_NAME
305 && code != LANG_TYPE
306 && code != MODIFY_EXPR
307 && code != INIT_EXPR
308 && code != TARGET_EXPR
309 && code != BIND_EXPR
310 && code != WITH_CLEANUP_EXPR
311 && code != STATEMENT_LIST
312 && (code == CASE_LABEL_EXPR
313 || code == DECL_EXPR
314 || TREE_CODE_CLASS (code) != tcc_statement);
315 }
316
 317 /* Very rough estimate of the streaming size of the initializer. If we
 318 ignored the presence of strings, we could simply count the number of
 319 non-indexable tree nodes and the number of references to indexable nodes.
 320 Strings however may be very large and we do not want to dump them into
 321 the global stream.
 322 Walk the initializer, stopping once the size budget in DATA drops below zero. */
323
324 static tree
325 subtract_estimated_size (tree *tp, int *ws, void *data)
326 {
327 long *sum = (long *)data;
328 if (tree_is_indexable (*tp))
329 {
330 /* Indexable tree is one reference to global stream.
331 Guess it may be about 4 bytes. */
332 *sum -= 4;
333 *ws = 0;
334 }
335 /* String table entry + base of tree node needs to be streamed. */
336 if (TREE_CODE (*tp) == STRING_CST)
337 *sum -= TREE_STRING_LENGTH (*tp) + 8;
338 else
339 {
340 /* Identifiers are also variable length but should not appear
341 naked in constructor. */
342 gcc_checking_assert (TREE_CODE (*tp) != IDENTIFIER_NODE);
 343 /* We do not really attempt to work out the size of the pickled tree, as
344 it is very variable. Make it bigger than the reference. */
345 *sum -= 16;
346 }
347 if (*sum < 0)
348 return *tp;
349 return NULL_TREE;
350 }
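/* An illustrative example of the estimate above: with the 30-byte budget
   used by get_symbol_initial_value, an initializer containing a 100-byte
   STRING_CST subtracts 108 and stops the walk immediately, so the
   initializer is replaced by error_mark_node and kept out of the global
   decl stream. */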
351
352
 353 /* For EXPR, look up and return what we want to stream out as its DECL_INITIAL. */
354
355 static tree
356 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
357 {
358 gcc_checking_assert (DECL_P (expr)
359 && TREE_CODE (expr) != FUNCTION_DECL
360 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
361
362 /* Handle DECL_INITIAL for symbols. */
363 tree initial = DECL_INITIAL (expr);
364 if (VAR_P (expr)
365 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
366 && !DECL_IN_CONSTANT_POOL (expr)
367 && initial)
368 {
369 varpool_node *vnode;
370 /* Extra section needs about 30 bytes; do not produce it for simple
371 scalar values. */
372 if (!(vnode = varpool_node::get (expr))
373 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
374 initial = error_mark_node;
375 if (initial != error_mark_node)
376 {
377 long max_size = 30;
378 if (walk_tree (&initial, subtract_estimated_size, (void *)&max_size,
379 NULL))
380 initial = error_mark_node;
381 }
382 }
383
384 return initial;
385 }
386
387
388 /* Output reference to tree T to the stream.
389 Assume that T is already in encoder cache.
 390 This is used to stream tree bodies where we know the DFS walk has
 391 arranged everything into the cache. Must be matched with stream_read_tree_ref. */
392
393 void
394 stream_write_tree_ref (struct output_block *ob, tree t)
395 {
396 if (!t)
397 streamer_write_zero (ob);
398 else
399 {
400 unsigned int ix;
401 bool existed_p = streamer_tree_cache_lookup (ob->writer_cache, t, &ix);
402 if (existed_p)
403 streamer_write_hwi (ob, ix + 1);
404 else
405 {
406 enum LTO_tags tag;
407 unsigned ix;
408 int id = 0;
409
410 lto_indexable_tree_ref (ob, t, &tag, &ix);
411 if (tag == LTO_ssa_name_ref)
412 id = 1;
413 else
414 gcc_checking_assert (tag == LTO_global_stream_ref);
415 streamer_write_hwi (ob, -(int)(ix * 2 + id + 1));
416 }
417 if (streamer_debugging)
418 streamer_write_uhwi (ob, TREE_CODE (t));
419 }
420 }
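/* An illustrative example of the encoding above: a NULL tree is written as
   0, a tree found in the writer cache at index IX is written as IX + 1, and
   an indexable tree is written as a negative value, -(IX * 2 + 1) for a
   global-stream reference or -(IX * 2 + 2) for the SSA name with version IX,
   so the reader can distinguish the cases by sign and parity. */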
421
422
423
424 /* Write a physical representation of tree node EXPR to output block
425 OB. If REF_P is true, the leaves of EXPR are emitted as references
 426 via lto_output_tree_ref. EXPR must already have been inserted into
 427 the streamer cache by the caller. */
428
429 static void
430 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
431 {
432 if (streamer_dump_file)
433 {
434 print_node_brief (streamer_dump_file, " Streaming body of ",
435 expr, 4);
436 fprintf (streamer_dump_file, " to %s\n",
437 lto_section_name[ob->section_type]);
438 }
439
440 /* Pack all the non-pointer fields in EXPR into a bitpack and write
441 the resulting bitpack. */
442 streamer_write_tree_bitfields (ob, expr);
443
444 /* Write all the pointer fields in EXPR. */
445 streamer_write_tree_body (ob, expr);
446
447 /* Write any LTO-specific data to OB. */
448 if (DECL_P (expr)
449 && TREE_CODE (expr) != FUNCTION_DECL
450 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
451 {
452 /* Handle DECL_INITIAL for symbols. */
453 tree initial = get_symbol_initial_value
454 (ob->decl_state->symtab_node_encoder, expr);
455 stream_write_tree (ob, initial, ref_p);
456 }
457
458 /* Stream references to early generated DIEs. Keep in sync with the
459 trees handled in dwarf2out_die_ref_for_decl. */
460 if ((DECL_P (expr)
461 && TREE_CODE (expr) != FIELD_DECL
462 && TREE_CODE (expr) != DEBUG_EXPR_DECL
463 && TREE_CODE (expr) != TYPE_DECL)
464 || TREE_CODE (expr) == BLOCK)
465 {
466 const char *sym;
467 unsigned HOST_WIDE_INT off;
468 if (debug_info_level > DINFO_LEVEL_NONE
469 && debug_hooks->die_ref_for_decl (expr, &sym, &off))
470 {
471 streamer_write_string (ob, ob->main_stream, sym, true);
472 streamer_write_uhwi (ob, off);
473 }
474 else
475 streamer_write_string (ob, ob->main_stream, NULL, true);
476 }
477 }
478
479 /* Write a physical representation of tree node EXPR to output block
480 OB. If REF_P is true, the leaves of EXPR are emitted as references
 481 via lto_output_tree_ref. EXPR must already have been inserted into
 482 the streamer cache by the caller. */
483
484 static void
485 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
486 {
487 if (!lto_is_streamable (expr))
488 internal_error ("tree code %qs is not supported in LTO streams",
489 get_tree_code_name (TREE_CODE (expr)));
490
491 /* Write the header, containing everything needed to materialize
492 EXPR on the reading side. */
493 streamer_write_tree_header (ob, expr);
494
495 lto_write_tree_1 (ob, expr, ref_p);
496 }
497
 498 /* Emit the physical representation of tree node EXPR to output block OB.
499 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
500 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
501
502 static void
503 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
504 bool ref_p, bool this_ref_p)
505 {
506 unsigned ix;
507
508 gcc_checking_assert (expr != NULL_TREE
509 && !(this_ref_p && tree_is_indexable (expr)));
510
511 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
512 expr, hash, &ix);
513 gcc_assert (!exists_p);
514 if (TREE_CODE (expr) == INTEGER_CST
515 && !TREE_OVERFLOW (expr))
516 {
517 /* Shared INTEGER_CST nodes are special because they need their
518 original type to be materialized by the reader (to implement
519 TYPE_CACHED_VALUES). */
520 streamer_write_integer_cst (ob, expr);
521 }
522 else
523 {
524 /* This is the first time we see EXPR, write its fields
525 to OB. */
526 lto_write_tree (ob, expr, ref_p);
527 }
528 }
529
530 class DFS
531 {
532 public:
533 DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
534 bool single_p);
535 ~DFS ();
536
537 struct scc_entry
538 {
539 tree t;
540 hashval_t hash;
541 };
542 auto_vec<scc_entry,32> sccstack;
543
544 private:
545 struct sccs
546 {
547 unsigned int dfsnum;
548 unsigned int low;
549 };
550 struct worklist
551 {
552 tree expr;
553 sccs *from_state;
554 sccs *cstate;
555 bool ref_p;
556 bool this_ref_p;
557 };
558 /* Maximum index of scc stack containing a local tree. */
559 int max_local_entry;
560
561 static int scc_entry_compare (const void *, const void *);
562
563 void DFS_write_tree_body (struct output_block *ob,
564 tree expr, sccs *expr_state, bool ref_p);
565
566 void DFS_write_tree (struct output_block *ob, sccs *from_state,
567 tree expr, bool ref_p, bool this_ref_p);
568
569 hashval_t
570 hash_scc (struct output_block *ob, unsigned first, unsigned size,
571 bool ref_p, bool this_ref_p);
572
573 hash_map<tree, sccs *> sccstate;
574 auto_vec<worklist, 32> worklist_vec;
575 struct obstack sccstate_obstack;
576 };
577
 578 /* Return true if tree T cannot be merged with a structurally identical
 579 tree in another translation unit. During stream-out this information is
 580 propagated to all trees referring to T and they are not streamed with the
 581 additional information needed by the tree merging in lto-common.c (in
 582 particular, SCC hash codes are not streamed).
 583 
 584 TRANSLATION_UNIT_DECL is handled specially since references to it do
 585 not make other trees local as well. */
586
587 static bool
588 local_tree_p (tree t)
589 {
590 switch (TREE_CODE (t))
591 {
592 case LABEL_DECL:
593 return true;
594 case NAMESPACE_DECL:
595 return !DECL_NAME (t);
596 case VAR_DECL:
597 case FUNCTION_DECL:
598 return !TREE_PUBLIC (t) && !DECL_EXTERNAL (t);
599 case RECORD_TYPE:
600 case UNION_TYPE:
601 case ENUMERAL_TYPE:
602 /* Anonymous namespace types are local.
603 Only work hard for main variants;
604 variant types will inherit locality. */
605 return TYPE_MAIN_VARIANT (t) == t
606 && odr_type_p (t) && type_with_linkage_p (t)
607 && type_in_anonymous_namespace_p (t);
608 default:
609 return false;
610 }
611 }
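/* Illustrative examples of trees considered local by local_tree_p above:
   labels, unnamed namespaces, non-public non-external (static) variables
   and functions, and main variants of ODR types defined in an anonymous
   namespace. SCCs containing such trees are streamed without the hashes
   used for WPA tree merging. */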
612
613 /* Emit the physical representation of tree node EXPR to output block OB,
614 using depth-first search on the subgraph. If THIS_REF_P is true, the
615 leaves of EXPR are emitted as references via lto_output_tree_ref.
616 REF_P is used for streaming siblings of EXPR. If SINGLE_P is true,
617 this is for a rewalk of a single leaf SCC. */
618
619 DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
620 bool single_p)
621 {
622 unsigned int next_dfs_num = 1;
623
624 max_local_entry = -1;
625 gcc_obstack_init (&sccstate_obstack);
626 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
627 while (!worklist_vec.is_empty ())
628 {
629 worklist &w = worklist_vec.last ();
630 expr = w.expr;
631 sccs *from_state = w.from_state;
632 sccs *cstate = w.cstate;
633 ref_p = w.ref_p;
634 this_ref_p = w.this_ref_p;
635 if (cstate == NULL)
636 {
637 sccs **slot = &sccstate.get_or_insert (expr);
638 cstate = *slot;
639 if (cstate)
640 {
641 gcc_checking_assert (from_state);
642 if (cstate->dfsnum < from_state->dfsnum)
643 from_state->low = MIN (cstate->dfsnum, from_state->low);
644 worklist_vec.pop ();
645 continue;
646 }
647
648 scc_entry e = { expr, 0 };
649 /* Not yet visited. DFS recurse and push it onto the stack. */
650 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
651 if (ob->local_trees && local_tree_p (expr))
652 max_local_entry = sccstack.length ();
653 sccstack.safe_push (e);
654 cstate->dfsnum = next_dfs_num++;
655 cstate->low = cstate->dfsnum;
656 w.cstate = cstate;
657
658 if (TREE_CODE (expr) == INTEGER_CST
659 && !TREE_OVERFLOW (expr))
660 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
661 else
662 {
663 DFS_write_tree_body (ob, expr, cstate, ref_p);
664
665 /* Walk any LTO-specific edges. */
666 if (DECL_P (expr)
667 && TREE_CODE (expr) != FUNCTION_DECL
668 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
669 {
670 /* Handle DECL_INITIAL for symbols. */
671 tree initial
672 = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
673 expr);
674 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
675 }
676 }
677 continue;
678 }
679
680 /* See if we found an SCC. */
681 if (cstate->low == cstate->dfsnum)
682 {
683 unsigned first, size;
684 tree x;
685
 686 /* If we are re-walking a single leaf SCC, just pop it and
 687 let the earlier worklist item access the sccstack. */
688 if (single_p)
689 {
690 worklist_vec.pop ();
691 continue;
692 }
693
694 /* Pop the SCC and compute its size. */
695 first = sccstack.length ();
696 do
697 {
698 x = sccstack[--first].t;
699 }
700 while (x != expr);
701 size = sccstack.length () - first;
702
703 /* No need to compute hashes for LTRANS units, we don't perform
704 any merging there. */
705 hashval_t scc_hash = 0;
706 unsigned scc_entry_len = 0;
707 bool local_to_unit = !ob->local_trees
708 || max_local_entry >= (int)first;
709
710 /* Remember that trees are local so info gets propagated to other
711 SCCs. */
712 if (local_to_unit && ob->local_trees)
713 {
714 for (unsigned i = 0; i < size; ++i)
715 ob->local_trees->add (sccstack[first + i].t);
716 }
717
 718 /* As a special case do not stream TRANSLATION_UNIT_DECL as a shared
 719 tree. We cannot mark it local because references to it do not
 720 make other trees local (all global decls refer to it via
 721 CONTEXT). */
722 if (size == 1
723 && TREE_CODE (sccstack[first].t) == TRANSLATION_UNIT_DECL)
724 local_to_unit = true;
725
726 if (!local_to_unit)
727 {
728 scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);
729
730 /* Put the entries with the least number of collisions first. */
731 unsigned entry_start = 0;
732 scc_entry_len = size + 1;
733 for (unsigned i = 0; i < size;)
734 {
735 unsigned from = i;
736 for (i = i + 1; i < size
737 && (sccstack[first + i].hash
738 == sccstack[first + from].hash); ++i)
739 ;
740 if (i - from < scc_entry_len)
741 {
742 scc_entry_len = i - from;
743 entry_start = from;
744 }
745 }
746 for (unsigned i = 0; i < scc_entry_len; ++i)
747 std::swap (sccstack[first + i],
748 sccstack[first + entry_start + i]);
749
750 /* We already sorted SCC deterministically in hash_scc. */
751
 752 /* Check that the entry with the minimal hash is unique within the SCC.
 753 Naturally we may have conflicts if the hash function is not
 754 strong enough. Let's see how far this gets. */
755 gcc_checking_assert (scc_entry_len == 1);
756 }
757
758 worklist_vec.pop ();
759
760 unsigned int prev_size = ob->main_stream->total_size;
761
762 /* Only global decl sections are considered by tree merging. */
763 if (ob->section_type != LTO_section_decls)
764 {
 765 /* If this is the original tree we are streaming and it forms an SCC
 766 by itself, then we do not need to stream the SCC at all. */
767 if (worklist_vec.is_empty () && first == 0 && size == 1)
768 return;
769 if (streamer_dump_file)
770 {
771 fprintf (streamer_dump_file,
772 " Start of LTO_trees of size %i\n", size);
773 }
774 streamer_write_record_start (ob, LTO_trees);
775 streamer_write_uhwi (ob, size);
776 }
777 /* Write LTO_tree_scc if tree merging is going to be performed. */
778 else if (!local_to_unit
779 /* These are special since sharing is not done by tree
 780 merging machinery. We cannot special-case them earlier
 781 because we still need to compute the hash for further sharing
782 of trees referring to them. */
783 && (size != 1
784 || (TREE_CODE (sccstack[first].t) != IDENTIFIER_NODE
785 && (TREE_CODE (sccstack[first].t) != INTEGER_CST
786 || TREE_OVERFLOW (sccstack[first].t)))))
787
788 {
789 gcc_checking_assert (ob->section_type == LTO_section_decls);
790 if (streamer_dump_file)
791 {
792 fprintf (streamer_dump_file,
793 " Start of LTO_tree_scc of size %i\n", size);
794 }
795 streamer_write_record_start (ob, LTO_tree_scc);
 796 /* In the vast majority of cases scc_entry_len is 1 and size is a small
 797 integer. Use the extra bit of size to stream info about the
 798 exceptions. */
799 streamer_write_uhwi (ob, size * 2 + (scc_entry_len != 1));
800 if (scc_entry_len != 1)
801 streamer_write_uhwi (ob, scc_entry_len);
802 streamer_write_uhwi (ob, scc_hash);
803 }
 804 /* Non-trivial SCCs must be packed into trees blocks so that forward
 805 references work correctly. */
806 else if (size != 1)
807 {
808 if (streamer_dump_file)
809 {
810 fprintf (streamer_dump_file,
811 " Start of LTO_trees of size %i\n", size);
812 }
813 streamer_write_record_start (ob, LTO_trees);
814 streamer_write_uhwi (ob, size);
815 }
816 else if (streamer_dump_file)
817 {
818 fprintf (streamer_dump_file, " Streaming single tree\n");
819 }
820
821 /* Write size-1 SCCs without wrapping them inside SCC bundles.
822 All INTEGER_CSTs need to be handled this way as we need
823 their type to materialize them. Also builtins are handled
824 this way. */
825 if (size == 1)
826 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
827 else
828 {
829
830 /* Write all headers and populate the streamer cache. */
831 for (unsigned i = 0; i < size; ++i)
832 {
833 hashval_t hash = sccstack[first+i].hash;
834 tree t = sccstack[first+i].t;
835 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
836 t, hash, NULL);
837 gcc_assert (!exists_p);
838
839 if (!lto_is_streamable (t))
840 internal_error ("tree code %qs is not supported "
841 "in LTO streams",
842 get_tree_code_name (TREE_CODE (t)));
843
844 /* Write the header, containing everything needed to
845 materialize EXPR on the reading side. */
846 streamer_write_tree_header (ob, t);
847 }
848
849 /* Write the bitpacks and tree references. */
850 for (unsigned i = 0; i < size; ++i)
851 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
852 }
853 if (streamer_dump_file)
854 fprintf (streamer_dump_file, " %u bytes\n",
855 ob->main_stream->total_size - prev_size);
856
857 /* Finally truncate the vector. */
858 sccstack.truncate (first);
859 if ((int)first <= max_local_entry)
860 max_local_entry = first - 1;
861
862 if (from_state)
863 from_state->low = MIN (from_state->low, cstate->low);
864 continue;
865 }
866
867 gcc_checking_assert (from_state);
868 from_state->low = MIN (from_state->low, cstate->low);
869 if (cstate->dfsnum < from_state->dfsnum)
870 from_state->low = MIN (cstate->dfsnum, from_state->low);
871 worklist_vec.pop ();
872 }
873 }
874
875 DFS::~DFS ()
876 {
877 obstack_free (&sccstate_obstack, NULL);
878 }
879
880 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
881 DFS recurse for all tree edges originating from it. */
882
883 void
884 DFS::DFS_write_tree_body (struct output_block *ob,
885 tree expr, sccs *expr_state, bool ref_p)
886 {
887 #define DFS_follow_tree_edge(DEST) \
888 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
889
890 enum tree_code code;
891
892 code = TREE_CODE (expr);
893
894 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
895 {
896 if (TREE_CODE (expr) != IDENTIFIER_NODE)
897 DFS_follow_tree_edge (TREE_TYPE (expr));
898 }
899
900 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
901 {
902 unsigned int count = vector_cst_encoded_nelts (expr);
903 for (unsigned int i = 0; i < count; ++i)
904 DFS_follow_tree_edge (VECTOR_CST_ENCODED_ELT (expr, i));
905 }
906
907 if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
908 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
909 DFS_follow_tree_edge (POLY_INT_CST_COEFF (expr, i));
910
911 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
912 {
913 DFS_follow_tree_edge (TREE_REALPART (expr));
914 DFS_follow_tree_edge (TREE_IMAGPART (expr));
915 }
916
917 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
918 {
919 /* Drop names that were created for anonymous entities. */
920 if (DECL_NAME (expr)
921 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
922 && IDENTIFIER_ANON_P (DECL_NAME (expr)))
923 ;
924 else
925 DFS_follow_tree_edge (DECL_NAME (expr));
926 if (TREE_CODE (expr) != TRANSLATION_UNIT_DECL
927 && ! DECL_CONTEXT (expr))
928 DFS_follow_tree_edge ((*all_translation_units)[0]);
929 else
930 DFS_follow_tree_edge (DECL_CONTEXT (expr));
931 }
932
933 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
934 {
935 DFS_follow_tree_edge (DECL_SIZE (expr));
936 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
937
938 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
939 special handling in LTO, it must be handled by streamer hooks. */
940
941 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
942
943 /* We use DECL_ABSTRACT_ORIGIN == error_mark_node to mark
944 declarations which should be eliminated by decl merging. Be sure none
945 leaks to this point. */
946 gcc_assert (DECL_ABSTRACT_ORIGIN (expr) != error_mark_node);
947 DFS_follow_tree_edge (DECL_ABSTRACT_ORIGIN (expr));
948
949 if ((VAR_P (expr)
950 || TREE_CODE (expr) == PARM_DECL)
951 && DECL_HAS_VALUE_EXPR_P (expr))
952 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
953 if (VAR_P (expr)
954 && DECL_HAS_DEBUG_EXPR_P (expr))
955 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
956 }
957
958 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
959 {
960 /* Make sure we don't inadvertently set the assembler name. */
961 if (DECL_ASSEMBLER_NAME_SET_P (expr))
962 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
963 }
964
965 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
966 {
967 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
968 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
969 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
970 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
971 gcc_checking_assert (!DECL_FCONTEXT (expr));
972 }
973
974 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
975 {
976 gcc_checking_assert (DECL_VINDEX (expr) == NULL);
977 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
978 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
979 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
980 }
981
982 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
983 {
984 DFS_follow_tree_edge (TYPE_SIZE (expr));
985 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
986 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
987 DFS_follow_tree_edge (TYPE_NAME (expr));
988 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
989 reconstructed during fixup. */
990 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
991 during fixup. */
992 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
993 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
994 /* TYPE_CANONICAL is re-computed during type merging, so no need
995 to follow it here. */
996 /* Do not stream TYPE_STUB_DECL; it is not needed by LTO but currently
997 it cannot be freed by free_lang_data without triggering ICEs in
998 langhooks. */
999 }
1000
1001 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1002 {
1003 if (TREE_CODE (expr) == ARRAY_TYPE)
1004 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
1005 else if (RECORD_OR_UNION_TYPE_P (expr))
1006 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
1007 DFS_follow_tree_edge (t);
1008 else if (TREE_CODE (expr) == FUNCTION_TYPE
1009 || TREE_CODE (expr) == METHOD_TYPE)
1010 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
1011
1012 if (!POINTER_TYPE_P (expr))
1013 DFS_follow_tree_edge (TYPE_MIN_VALUE_RAW (expr));
1014 DFS_follow_tree_edge (TYPE_MAX_VALUE_RAW (expr));
1015 }
1016
1017 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1018 {
1019 DFS_follow_tree_edge (TREE_PURPOSE (expr));
1020 DFS_follow_tree_edge (TREE_VALUE (expr));
1021 DFS_follow_tree_edge (TREE_CHAIN (expr));
1022 }
1023
1024 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1025 {
1026 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
1027 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
1028 }
1029
1030 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1031 {
1032 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
1033 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
1034 DFS_follow_tree_edge (TREE_BLOCK (expr));
1035 }
1036
1037 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
1038 {
1039 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
1040 {
1041 /* We would have to stream externals in the block chain as
1042 non-references but we should have dropped them in
1043 free-lang-data. */
1044 gcc_assert (!VAR_OR_FUNCTION_DECL_P (t) || !DECL_EXTERNAL (t));
1045 DFS_follow_tree_edge (t);
1046 }
1047
1048 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
1049 DFS_follow_tree_edge (BLOCK_ABSTRACT_ORIGIN (expr));
1050
1051 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
1052 information for early inlined BLOCKs so drop it on the floor instead
1053 of ICEing in dwarf2out.c. */
1054
 1055 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN are not live at LTO
1056 streaming time. */
1057
 1058 /* Do not output BLOCK_SUBBLOCKS. Instead, on stream-in this
 1059 list is reconstructed from BLOCK_SUPERCONTEXT. */
1060 }
1061
1062 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1063 {
1064 unsigned i;
1065 tree t;
1066
1067 /* Note that the number of BINFO slots has already been emitted in
1068 EXPR's header (see streamer_write_tree_header) because this length
1069 is needed to build the empty BINFO node on the reader side. */
1070 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
1071 DFS_follow_tree_edge (t);
1072 DFS_follow_tree_edge (BINFO_OFFSET (expr));
1073 DFS_follow_tree_edge (BINFO_VTABLE (expr));
1074
1075 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX,
1076 BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
1077 by C++ FE only. */
1078 }
1079
1080 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1081 {
1082 unsigned i;
1083 tree index, value;
1084
1085 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
1086 {
1087 DFS_follow_tree_edge (index);
1088 DFS_follow_tree_edge (value);
1089 }
1090 }
1091
1092 if (code == OMP_CLAUSE)
1093 {
1094 int i;
1095 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
1096 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
1097 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
1098 }
1099
1100 #undef DFS_follow_tree_edge
1101 }
1102
1103 /* Return a hash value for the tree T.
 1104 CACHE holds hash values of trees outside the current SCC. MAP, if
 1105 non-NULL, may hold hash values of trees inside the current SCC. */
1106
1107 static hashval_t
1108 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
1109 {
1110 inchash::hash hstate;
1111
1112 #define visit(SIBLING) \
1113 do { \
1114 unsigned ix; \
1115 if (!SIBLING) \
1116 hstate.add_int (0); \
1117 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
1118 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
1119 else if (map) \
1120 hstate.add_int (*map->get (SIBLING)); \
1121 else \
1122 hstate.add_int (1); \
1123 } while (0)
1124
1125 /* Hash TS_BASE. */
1126 enum tree_code code = TREE_CODE (t);
1127 hstate.add_int (code);
1128 if (!TYPE_P (t))
1129 {
1130 hstate.add_flag (TREE_SIDE_EFFECTS (t));
1131 hstate.add_flag (TREE_CONSTANT (t));
1132 hstate.add_flag (TREE_READONLY (t));
1133 hstate.add_flag (TREE_PUBLIC (t));
1134 }
1135 hstate.add_flag (TREE_ADDRESSABLE (t));
1136 hstate.add_flag (TREE_THIS_VOLATILE (t));
1137 if (DECL_P (t))
1138 hstate.add_flag (DECL_UNSIGNED (t));
1139 else if (TYPE_P (t))
1140 hstate.add_flag (TYPE_UNSIGNED (t));
1141 if (TYPE_P (t))
1142 hstate.add_flag (TYPE_ARTIFICIAL (t));
1143 else
1144 hstate.add_flag (TREE_NO_WARNING (t));
1145 hstate.add_flag (TREE_NOTHROW (t));
1146 hstate.add_flag (TREE_STATIC (t));
1147 hstate.add_flag (TREE_PROTECTED (t));
1148 hstate.add_flag (TREE_DEPRECATED (t));
1149 if (code != TREE_BINFO)
1150 hstate.add_flag (TREE_PRIVATE (t));
1151 if (TYPE_P (t))
1152 {
1153 hstate.add_flag (AGGREGATE_TYPE_P (t)
1154 ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
1155 hstate.add_flag (TYPE_ADDR_SPACE (t));
1156 }
1157 else if (code == SSA_NAME)
1158 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
1159 hstate.commit_flag ();
1160
1161 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1162 hstate.add_wide_int (wi::to_widest (t));
1163
1164 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1165 {
1166 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
1167 hstate.add_flag (r.cl);
1168 hstate.add_flag (r.sign);
1169 hstate.add_flag (r.signalling);
1170 hstate.add_flag (r.canonical);
1171 hstate.commit_flag ();
1172 hstate.add_int (r.uexp);
1173 hstate.add (r.sig, sizeof (r.sig));
1174 }
1175
1176 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1177 {
1178 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
1179 hstate.add_int (f.mode);
1180 hstate.add_int (f.data.low);
1181 hstate.add_int (f.data.high);
1182 }
1183
1184 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1185 {
1186 hstate.add_hwi (DECL_MODE (t));
1187 hstate.add_flag (DECL_NONLOCAL (t));
1188 hstate.add_flag (DECL_VIRTUAL_P (t));
1189 hstate.add_flag (DECL_IGNORED_P (t));
1190 hstate.add_flag (DECL_ABSTRACT_P (t));
1191 hstate.add_flag (DECL_ARTIFICIAL (t));
1192 hstate.add_flag (DECL_USER_ALIGN (t));
1193 hstate.add_flag (DECL_PRESERVE_P (t));
1194 hstate.add_flag (DECL_EXTERNAL (t));
1195 hstate.add_flag (DECL_NOT_GIMPLE_REG_P (t));
1196 hstate.commit_flag ();
1197 hstate.add_int (DECL_ALIGN (t));
1198 if (code == LABEL_DECL)
1199 {
1200 hstate.add_int (EH_LANDING_PAD_NR (t));
1201 hstate.add_int (LABEL_DECL_UID (t));
1202 }
1203 else if (code == FIELD_DECL)
1204 {
1205 hstate.add_flag (DECL_PACKED (t));
1206 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1207 hstate.add_flag (DECL_PADDING_P (t));
1208 hstate.add_flag (DECL_FIELD_ABI_IGNORED (t));
1209 hstate.add_int (DECL_OFFSET_ALIGN (t));
1210 }
1211 else if (code == VAR_DECL)
1212 {
1213 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1214 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1215 }
1216 if (code == RESULT_DECL
1217 || code == PARM_DECL
1218 || code == VAR_DECL)
1219 {
1220 hstate.add_flag (DECL_BY_REFERENCE (t));
1221 if (code == VAR_DECL
1222 || code == PARM_DECL)
1223 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1224 }
1225 hstate.commit_flag ();
1226 }
1227
1228 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1229 hstate.add_int (DECL_REGISTER (t));
1230
1231 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1232 {
1233 hstate.add_flag (DECL_COMMON (t));
1234 hstate.add_flag (DECL_DLLIMPORT_P (t));
1235 hstate.add_flag (DECL_WEAK (t));
1236 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1237 hstate.add_flag (DECL_COMDAT (t));
1238 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1239 hstate.add_int (DECL_VISIBILITY (t));
1240 if (code == VAR_DECL)
1241 {
1242 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1243 hstate.add_flag (DECL_HARD_REGISTER (t));
1244 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1245 }
1246 if (TREE_CODE (t) == FUNCTION_DECL)
1247 {
1248 hstate.add_flag (DECL_FINAL_P (t));
1249 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1250 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1251 }
1252 hstate.commit_flag ();
1253 }
1254
1255 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1256 {
1257 hstate.add_int (DECL_BUILT_IN_CLASS (t));
1258 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1259 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1260 hstate.add_flag (FUNCTION_DECL_DECL_TYPE (t));
1261 hstate.add_flag (DECL_UNINLINABLE (t));
1262 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1263 hstate.add_flag (DECL_IS_NOVOPS (t));
1264 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1265 hstate.add_flag (DECL_IS_MALLOC (t));
1266 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1267 hstate.add_flag (DECL_STATIC_CHAIN (t));
1268 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1269 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1270 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1271 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1272 hstate.add_flag (DECL_PURE_P (t));
1273 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1274 hstate.commit_flag ();
1275 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1276 hstate.add_int (DECL_UNCHECKED_FUNCTION_CODE (t));
1277 }
1278
1279 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1280 {
1281 hstate.add_hwi (TYPE_MODE (t));
 1282 /* TYPE_NO_FORCE_BLK is private to stor-layout and needs
1283 no streaming. */
1284 hstate.add_flag (TYPE_PACKED (t));
1285 hstate.add_flag (TYPE_RESTRICT (t));
1286 hstate.add_flag (TYPE_USER_ALIGN (t));
1287 hstate.add_flag (TYPE_READONLY (t));
1288 if (RECORD_OR_UNION_TYPE_P (t))
1289 {
1290 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1291 hstate.add_flag (TYPE_FINAL_P (t));
1292 hstate.add_flag (TYPE_CXX_ODR_P (t));
1293 }
1294 else if (code == ARRAY_TYPE)
1295 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1296 if (code == ARRAY_TYPE || code == INTEGER_TYPE)
1297 hstate.add_flag (TYPE_STRING_FLAG (t));
1298 if (AGGREGATE_TYPE_P (t))
1299 hstate.add_flag (TYPE_TYPELESS_STORAGE (t));
1300 hstate.commit_flag ();
1301 hstate.add_int (TYPE_PRECISION (t));
1302 hstate.add_int (TYPE_ALIGN (t));
1303 hstate.add_int (TYPE_EMPTY_P (t));
1304 }
1305
1306 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1307 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1308 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1309
1310 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1311 /* We don't stream these when passing things to a different target. */
1312 && !lto_stream_offload_p)
1313 hstate.add_hwi (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1314
1315 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1316 hstate.add_hwi (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1317
1318 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1319 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1320
1321 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1322 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1323
1324 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1325 {
1326 if (code != IDENTIFIER_NODE)
1327 visit (TREE_TYPE (t));
1328 }
1329
1330 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1331 {
1332 unsigned int count = vector_cst_encoded_nelts (t);
1333 for (unsigned int i = 0; i < count; ++i)
1334 visit (VECTOR_CST_ENCODED_ELT (t, i));
1335 }
1336
1337 if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
1338 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1339 visit (POLY_INT_CST_COEFF (t, i));
1340
1341 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1342 {
1343 visit (TREE_REALPART (t));
1344 visit (TREE_IMAGPART (t));
1345 }
1346
1347 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1348 {
1349 /* Drop names that were created for anonymous entities. */
1350 if (DECL_NAME (t)
1351 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1352 && IDENTIFIER_ANON_P (DECL_NAME (t)))
1353 ;
1354 else
1355 visit (DECL_NAME (t));
1356 if (DECL_FILE_SCOPE_P (t))
1357 ;
1358 else
1359 visit (DECL_CONTEXT (t));
1360 }
1361
1362 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1363 {
1364 visit (DECL_SIZE (t));
1365 visit (DECL_SIZE_UNIT (t));
1366 visit (DECL_ATTRIBUTES (t));
1367 if ((code == VAR_DECL
1368 || code == PARM_DECL)
1369 && DECL_HAS_VALUE_EXPR_P (t))
1370 visit (DECL_VALUE_EXPR (t));
1371 if (code == VAR_DECL
1372 && DECL_HAS_DEBUG_EXPR_P (t))
1373 visit (DECL_DEBUG_EXPR (t));
1374 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
1375 be able to call get_symbol_initial_value. */
1376 }
1377
1378 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1379 {
1380 if (DECL_ASSEMBLER_NAME_SET_P (t))
1381 visit (DECL_ASSEMBLER_NAME (t));
1382 }
1383
1384 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1385 {
1386 visit (DECL_FIELD_OFFSET (t));
1387 visit (DECL_BIT_FIELD_TYPE (t));
1388 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1389 visit (DECL_FIELD_BIT_OFFSET (t));
1390 }
1391
1392 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1393 {
1394 visit (DECL_FUNCTION_PERSONALITY (t));
1395 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1396 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1397 }
1398
1399 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1400 {
1401 visit (TYPE_SIZE (t));
1402 visit (TYPE_SIZE_UNIT (t));
1403 visit (TYPE_ATTRIBUTES (t));
1404 visit (TYPE_NAME (t));
1405 visit (TYPE_MAIN_VARIANT (t));
1406 if (TYPE_FILE_SCOPE_P (t))
1407 ;
1408 else
1409 visit (TYPE_CONTEXT (t));
1410 }
1411
1412 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1413 {
1414 if (code == ARRAY_TYPE)
1415 visit (TYPE_DOMAIN (t));
1416 else if (RECORD_OR_UNION_TYPE_P (t))
1417 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1418 visit (f);
1419 else if (code == FUNCTION_TYPE
1420 || code == METHOD_TYPE)
1421 visit (TYPE_ARG_TYPES (t));
1422 if (!POINTER_TYPE_P (t))
1423 visit (TYPE_MIN_VALUE_RAW (t));
1424 visit (TYPE_MAX_VALUE_RAW (t));
1425 }
1426
1427 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1428 {
1429 visit (TREE_PURPOSE (t));
1430 visit (TREE_VALUE (t));
1431 visit (TREE_CHAIN (t));
1432 }
1433
1434 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1435 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1436 visit (TREE_VEC_ELT (t, i));
1437
1438 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1439 {
1440 hstate.add_hwi (TREE_OPERAND_LENGTH (t));
1441 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1442 visit (TREE_OPERAND (t, i));
1443 }
1444
1445 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1446 {
1447 unsigned i;
1448 tree b;
1449 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1450 visit (b);
1451 visit (BINFO_OFFSET (t));
1452 visit (BINFO_VTABLE (t));
1453 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1454 BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
1455 by C++ FE only. */
1456 }
1457
1458 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1459 {
1460 unsigned i;
1461 tree index, value;
1462 hstate.add_hwi (CONSTRUCTOR_NELTS (t));
1463 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1464 {
1465 visit (index);
1466 visit (value);
1467 }
1468 }
1469
1470 if (code == OMP_CLAUSE)
1471 {
1472 int i;
1473 HOST_WIDE_INT val;
1474
1475 hstate.add_hwi (OMP_CLAUSE_CODE (t));
1476 switch (OMP_CLAUSE_CODE (t))
1477 {
1478 case OMP_CLAUSE_DEFAULT:
1479 val = OMP_CLAUSE_DEFAULT_KIND (t);
1480 break;
1481 case OMP_CLAUSE_SCHEDULE:
1482 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1483 break;
1484 case OMP_CLAUSE_DEPEND:
1485 val = OMP_CLAUSE_DEPEND_KIND (t);
1486 break;
1487 case OMP_CLAUSE_MAP:
1488 val = OMP_CLAUSE_MAP_KIND (t);
1489 break;
1490 case OMP_CLAUSE_PROC_BIND:
1491 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1492 break;
1493 case OMP_CLAUSE_REDUCTION:
1494 case OMP_CLAUSE_TASK_REDUCTION:
1495 case OMP_CLAUSE_IN_REDUCTION:
1496 val = OMP_CLAUSE_REDUCTION_CODE (t);
1497 break;
1498 default:
1499 val = 0;
1500 break;
1501 }
1502 hstate.add_hwi (val);
1503 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1504 visit (OMP_CLAUSE_OPERAND (t, i));
1505 visit (OMP_CLAUSE_CHAIN (t));
1506 }
1507
1508 return hstate.end ();
1509
1510 #undef visit
1511 }
1512
1513 /* Compare two SCC entries by their hash value for qsorting them. */
1514
1515 int
1516 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1517 {
1518 const scc_entry *p1 = (const scc_entry *) p1_;
1519 const scc_entry *p2 = (const scc_entry *) p2_;
1520 if (p1->hash < p2->hash)
1521 return -1;
1522 else if (p1->hash > p2->hash)
1523 return 1;
1524 return 0;
1525 }
1526
1527 /* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
1528 THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST. */
1529
1530 hashval_t
1531 DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
1532 bool ref_p, bool this_ref_p)
1533 {
1534 unsigned int last_classes = 0, iterations = 0;
1535
1536 /* Compute hash values for the SCC members. */
1537 for (unsigned i = 0; i < size; ++i)
1538 sccstack[first+i].hash
1539 = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);
1540
1541 if (size == 1)
1542 return sccstack[first].hash;
1543
1544 /* We aim to get unique hash for every tree within SCC and compute hash value
1545 of the whole SCC by combining all values together in a stable (entry-point
1546 independent) order. This guarantees that the same SCC regions within
1547 different translation units will get the same hash values and therefore
1548 will be merged at WPA time.
1549
1550 Often the hashes are already unique. In that case we compute the SCC hash
1551 by combining individual hash values in an increasing order.
1552
1553 If there are duplicates, we seek at least one tree with unique hash (and
1554 pick one with minimal hash and this property). Then we obtain a stable
1555 order by DFS walk starting from this unique tree and then use the index
1556 within this order to make individual hash values unique.
1557
1558 If there is no tree with unique hash, we iteratively propagate the hash
1559 values across the internal edges of SCC. This usually quickly leads
1560 to unique hashes. Consider, for example, an SCC containing two pointers
1561 that are identical except for the types they point to and assume that
1562 these types are also part of the SCC. The propagation will add the
1563 points-to type information into their hash values. */
1564 do
1565 {
1566 /* Sort the SCC so we can easily check for uniqueness. */
1567 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1568
1569 unsigned int classes = 1;
1570 int firstunique = -1;
1571
1572 /* Find the tree with lowest unique hash (if it exists) and compute
1573 the number of equivalence classes. */
1574 if (sccstack[first].hash != sccstack[first+1].hash)
1575 firstunique = 0;
1576 for (unsigned i = 1; i < size; ++i)
1577 if (sccstack[first+i-1].hash != sccstack[first+i].hash)
1578 {
1579 classes++;
1580 if (firstunique == -1
1581 && (i == size - 1
1582 || sccstack[first+i+1].hash != sccstack[first+i].hash))
1583 firstunique = i;
1584 }
1585
1586 /* If we found a tree with unique hash, stop the iteration. */
1587 if (firstunique != -1
1588 /* Also terminate if we run out of iterations or if the number of
1589 equivalence classes is no longer increasing.
1590 For example a cyclic list of trees that are all equivalent will
1591 never have unique entry point; we however do not build such SCCs
1592 in our IL. */
1593 || classes <= last_classes || iterations > 16)
1594 {
1595 hashval_t scc_hash;
1596
1597 /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
1598 starting from FIRSTUNIQUE to obtain a stable order. */
1599 if (classes != size && firstunique != -1)
1600 {
1601 hash_map <tree, hashval_t> map(size*2);
1602
1603 /* Store hash values into a map, so we can associate them with
1604 the reordered SCC. */
1605 for (unsigned i = 0; i < size; ++i)
1606 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1607
1608 DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
1609 true);
1610 gcc_assert (again.sccstack.length () == size);
1611
1612 memcpy (sccstack.address () + first,
1613 again.sccstack.address (),
1614 sizeof (scc_entry) * size);
1615
1616 /* Update hash values of individual members by hashing in the
1617 index within the stable order. This ensures uniqueness.
1618 Also compute the SCC hash by mixing in all hash values in
1619 the stable order we obtained. */
1620 sccstack[first].hash = *map.get (sccstack[first].t);
1621 scc_hash = sccstack[first].hash;
1622 for (unsigned i = 1; i < size; ++i)
1623 {
1624 sccstack[first+i].hash
1625 = iterative_hash_hashval_t (i,
1626 *map.get (sccstack[first+i].t));
1627 scc_hash
1628 = iterative_hash_hashval_t (scc_hash,
1629 sccstack[first+i].hash);
1630 }
1631 }
 1632 /* If we got a unique hash value for each tree, then the sort already
 1633 ensured an entry-point independent order. Only compute the final
1634 SCC hash.
1635
1636 If we failed to find the unique entry point, we go by the same
1637 route. We will eventually introduce unwanted hash conflicts. */
1638 else
1639 {
1640 scc_hash = sccstack[first].hash;
1641 for (unsigned i = 1; i < size; ++i)
1642 scc_hash
1643 = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);
1644
 1645 /* We cannot 100% guarantee that hashes will never collide in a way
 1646 that makes it impossible to find a unique entry. This however
1647 should be an extremely rare case. ICE for now so possible
1648 issues are found and evaluated. */
1649 gcc_checking_assert (classes == size);
1650 }
1651
1652 /* To avoid conflicts across SCCs, iteratively hash the whole SCC
1653 hash into the hash of each element. */
1654 for (unsigned i = 0; i < size; ++i)
1655 sccstack[first+i].hash
1656 = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
1657 return scc_hash;
1658 }
1659
1660 last_classes = classes;
1661 iterations++;
1662
1663 /* We failed to identify the entry point; propagate hash values across
1664 the edges. */
1665 hash_map <tree, hashval_t> map(size*2);
1666
1667 for (unsigned i = 0; i < size; ++i)
1668 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1669
1670 for (unsigned i = 0; i < size; i++)
1671 sccstack[first+i].hash
1672 = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
1673 }
1674 while (true);
1675 }
1676
1677 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1678 already in the streamer cache. Main routine called for
1679 each visit of EXPR. */
1680
1681 void
1682 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1683 tree expr, bool ref_p, bool this_ref_p)
1684 {
1685 /* Handle special cases. */
1686 if (expr == NULL_TREE)
1687 return;
1688
1689 /* Do not DFS walk into indexable trees. */
1690 if (this_ref_p && tree_is_indexable (expr))
1691 return;
1692
1693 /* Check if we already streamed EXPR. */
1694 if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1695 {
 1696 /* A reference to a local tree makes the entry local as well. We always
 1697 process the top-of-stack entry, so set max to the number of entries in the stack - 1. */
1698 if (ob->local_trees
1699 && ob->local_trees->contains (expr))
1700 max_local_entry = sccstack.length () - 1;
1701 return;
1702 }
1703
1704 worklist w;
1705 w.expr = expr;
1706 w.from_state = from_state;
1707 w.cstate = NULL;
1708 w.ref_p = ref_p;
1709 w.this_ref_p = this_ref_p;
1710 worklist_vec.safe_push (w);
1711 }
1712
1713
1714 /* Emit the physical representation of tree node EXPR to output block OB.
1715 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
1716 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1717
1718 void
1719 lto_output_tree (struct output_block *ob, tree expr,
1720 bool ref_p, bool this_ref_p)
1721 {
1722 unsigned ix;
1723 bool existed_p;
1724 unsigned int size = ob->main_stream->total_size;
1725 /* This is the first time we see EXPR, write all reachable
1726 trees to OB. */
1727 static bool in_dfs_walk;
1728
1729 if (expr == NULL_TREE)
1730 {
1731 streamer_write_record_start (ob, LTO_null);
1732 return;
1733 }
1734
1735 if (this_ref_p && tree_is_indexable (expr))
1736 {
1737 enum LTO_tags tag;
1738 unsigned ix;
1739
1740 lto_indexable_tree_ref (ob, expr, &tag, &ix);
1741 streamer_write_record_start (ob, tag);
1742 streamer_write_uhwi (ob, ix);
1743 return;
1744 }
1745
1746 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1747 if (existed_p)
1748 {
1749 if (streamer_dump_file)
1750 {
1751 if (in_dfs_walk)
1752 print_node_brief (streamer_dump_file, " Streaming ref to ",
1753 expr, 4);
1754 else
1755 print_node_brief (streamer_dump_file, " Streaming ref to ",
1756 expr, 4);
1757 fprintf (streamer_dump_file, "\n");
1758 }
1759 /* If a node has already been streamed out, make sure that
1760 we don't write it more than once. Otherwise, the reader
1761 will instantiate two different nodes for the same object. */
1762 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1763 streamer_write_uhwi (ob, ix);
1764 if (streamer_debugging)
1765 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1766 lto_tree_code_to_tag (TREE_CODE (expr)));
1767 lto_stats.num_pickle_refs_output++;
1768 }
1769 else
1770 {
 1771 /* Protect against recursion, which would mean a disconnect between
 1772 the tree edges we walk in the DFS walk and the edges
 1773 we stream out. */
1774 gcc_assert (!in_dfs_walk);
1775
1776 if (streamer_dump_file)
1777 {
1778 print_node_brief (streamer_dump_file, " Streaming tree ",
1779 expr, 4);
1780 fprintf (streamer_dump_file, "\n");
1781 }
1782
1783 /* Start the DFS walk. */
1784 /* Save ob state ... */
1785 /* let's see ... */
1786 in_dfs_walk = true;
1787 DFS (ob, expr, ref_p, this_ref_p, false);
1788
1789 /* Finally append a reference to the tree we were writing. */
1790 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1791
1792 /* DFS walk above possibly skipped streaming EXPR itself to let us inline
1793 it. */
1794 if (!existed_p)
1795 lto_output_tree_1 (ob, expr, 0, ref_p, this_ref_p);
1796 else if (this_ref_p)
1797 {
1798 if (streamer_dump_file)
1799 {
1800 print_node_brief (streamer_dump_file,
1801 " Streaming final ref to ",
1802 expr, 4);
1803 fprintf (streamer_dump_file, "\n");
1804 }
1805 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1806 streamer_write_uhwi (ob, ix);
1807 if (streamer_debugging)
1808 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1809 lto_tree_code_to_tag (TREE_CODE (expr)));
1810 }
1811 in_dfs_walk = false;
1812 lto_stats.num_pickle_refs_output++;
1813 }
1814 if (streamer_dump_file && !in_dfs_walk)
1815 fprintf (streamer_dump_file, " %u bytes\n",
1816 ob->main_stream->total_size - size);
1817 }
1818
1819
1820 /* Output to OB a list of try/catch handlers starting with FIRST. */
1821
1822 static void
1823 output_eh_try_list (struct output_block *ob, eh_catch first)
1824 {
1825 eh_catch n;
1826
1827 for (n = first; n; n = n->next_catch)
1828 {
1829 streamer_write_record_start (ob, LTO_eh_catch);
1830 stream_write_tree (ob, n->type_list, true);
1831 stream_write_tree (ob, n->filter_list, true);
1832 stream_write_tree (ob, n->label, true);
1833 }
1834
1835 streamer_write_record_start (ob, LTO_null);
1836 }
1837
1838
1839 /* Output EH region R to OB. Nested regions, peers and landing pads are
1840 referred to by their indices in the enclosing function's EH arrays. */
1842
1843 static void
1844 output_eh_region (struct output_block *ob, eh_region r)
1845 {
1846 enum LTO_tags tag;
1847
1848 if (r == NULL)
1849 {
1850 streamer_write_record_start (ob, LTO_null);
1851 return;
1852 }
1853
1854 if (r->type == ERT_CLEANUP)
1855 tag = LTO_ert_cleanup;
1856 else if (r->type == ERT_TRY)
1857 tag = LTO_ert_try;
1858 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1859 tag = LTO_ert_allowed_exceptions;
1860 else if (r->type == ERT_MUST_NOT_THROW)
1861 tag = LTO_ert_must_not_throw;
1862 else
1863 gcc_unreachable ();
1864
1865 streamer_write_record_start (ob, tag);
1866 streamer_write_hwi (ob, r->index);
1867
1868 if (r->outer)
1869 streamer_write_hwi (ob, r->outer->index);
1870 else
1871 streamer_write_zero (ob);
1872
1873 if (r->inner)
1874 streamer_write_hwi (ob, r->inner->index);
1875 else
1876 streamer_write_zero (ob);
1877
1878 if (r->next_peer)
1879 streamer_write_hwi (ob, r->next_peer->index);
1880 else
1881 streamer_write_zero (ob);
1882
1883 if (r->type == ERT_TRY)
1884 {
1885 output_eh_try_list (ob, r->u.eh_try.first_catch);
1886 }
1887 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1888 {
1889 stream_write_tree (ob, r->u.allowed.type_list, true);
1890 stream_write_tree (ob, r->u.allowed.label, true);
1891 streamer_write_uhwi (ob, r->u.allowed.filter);
1892 }
1893 else if (r->type == ERT_MUST_NOT_THROW)
1894 {
1895 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1896 bitpack_d bp = bitpack_create (ob->main_stream);
1897 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1898 streamer_write_bitpack (&bp);
1899 }
1900
1901 if (r->landing_pads)
1902 streamer_write_hwi (ob, r->landing_pads->index);
1903 else
1904 streamer_write_zero (ob);
1905 }
1906
1907
1908 /* Output landing pad LP to OB. */
1909
1910 static void
1911 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1912 {
1913 if (lp == NULL)
1914 {
1915 streamer_write_record_start (ob, LTO_null);
1916 return;
1917 }
1918
1919 streamer_write_record_start (ob, LTO_eh_landing_pad);
1920 streamer_write_hwi (ob, lp->index);
1921 if (lp->next_lp)
1922 streamer_write_hwi (ob, lp->next_lp->index);
1923 else
1924 streamer_write_zero (ob);
1925
1926 if (lp->region)
1927 streamer_write_hwi (ob, lp->region->index);
1928 else
1929 streamer_write_zero (ob);
1930
1931 stream_write_tree (ob, lp->post_landing_pad, true);
1932 }
1933
1934
1935 /* Output the existing EH table of function FN to OB. */
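/* The record emitted below is, roughly: LTO_eh_table, the index of the
   region tree root, the length of the region array followed by each region,
   the length of the landing pad array followed by each pad, the runtime
   type data, the exception-specification data, and a terminating
   LTO_null.  */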
1936
1937 static void
1938 output_eh_regions (struct output_block *ob, struct function *fn)
1939 {
1940 if (fn->eh && fn->eh->region_tree)
1941 {
1942 unsigned i;
1943 eh_region eh;
1944 eh_landing_pad lp;
1945 tree ttype;
1946
1947 streamer_write_record_start (ob, LTO_eh_table);
1948
1949 /* Emit the index of the root of the EH region tree. */
1950 streamer_write_hwi (ob, fn->eh->region_tree->index);
1951
1952 /* Emit all the EH regions in the region array. */
1953 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1954 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1955 output_eh_region (ob, eh);
1956
1957 /* Emit all landing pads. */
1958 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1959 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1960 output_eh_lp (ob, lp);
1961
1962 /* Emit all the runtime type data. */
1963 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1964 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1965 stream_write_tree (ob, ttype, true);
1966
1967 /* Emit the table of action chains. */
1968 if (targetm.arm_eabi_unwinder)
1969 {
1970 tree t;
1971 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1972 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1973 stream_write_tree (ob, t, true);
1974 }
1975 else
1976 {
1977 uchar c;
1978 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1979 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1980 streamer_write_char_stream (ob->main_stream, c);
1981 }
1982 }
1983
1984 /* The LTO_null either terminates the record or indicates that there
1985 are no eh_records at all. */
1986 streamer_write_record_start (ob, LTO_null);
1987 }
1988
1989
1990 /* Output all of the active ssa names of function FN to OB. */
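/* The format is the length of the SSA name vector followed by, for each
   streamed name, its index, one byte saying whether it is a default
   definition, and either SSA_NAME_VAR or, failing that, its type; a zero
   index terminates the list.  */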
1991
1992 static void
1993 output_ssa_names (struct output_block *ob, struct function *fn)
1994 {
1995 unsigned int i, len;
1996
1997 len = vec_safe_length (SSANAMES (fn));
1998 streamer_write_uhwi (ob, len);
1999
2000 for (i = 1; i < len; i++)
2001 {
2002 tree ptr = (*SSANAMES (fn))[i];
2003
2004 if (ptr == NULL_TREE
2005 || SSA_NAME_IN_FREE_LIST (ptr)
2006 || virtual_operand_p (ptr)
2007 /* Simply skip unreleased SSA names. */
2008 || (! SSA_NAME_IS_DEFAULT_DEF (ptr)
2009 && (! SSA_NAME_DEF_STMT (ptr)
2010 || ! gimple_bb (SSA_NAME_DEF_STMT (ptr)))))
2011 continue;
2012
2013 streamer_write_uhwi (ob, i);
2014 streamer_write_char_stream (ob->main_stream,
2015 SSA_NAME_IS_DEFAULT_DEF (ptr));
2016 if (SSA_NAME_VAR (ptr))
2017 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
2018 else
2019 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
2020 stream_write_tree (ob, TREE_TYPE (ptr), true);
2021 }
2022
2023 streamer_write_zero (ob);
2024 }
2025
2026
2027
2028 /* Output the CFG of function FN to OB's CFG stream. */
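/* In outline: the profile status, the highest basic block number, then for
   every block its index, its successor count and one (destination index,
   probability, flags) triple per edge, a -1 sentinel, the linear next_bb
   chain terminated by another -1, and finally the loop tree data that
   copy_loop_info copies.  */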
2029
2030 static void
2031 output_cfg (struct output_block *ob, struct function *fn)
2032 {
2033 struct lto_output_stream *tmp_stream = ob->main_stream;
2034 basic_block bb;
2035
2036 ob->main_stream = ob->cfg_stream;
2037
2038 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
2039 profile_status_for_fn (fn));
2040
2041 /* Output the number of the highest basic block. */
2042 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
2043
2044 FOR_ALL_BB_FN (bb, fn)
2045 {
2046 edge_iterator ei;
2047 edge e;
2048
2049 streamer_write_hwi (ob, bb->index);
2050
2051 /* Output the successors and the edge flags. */
2052 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
2053 FOR_EACH_EDGE (e, ei, bb->succs)
2054 {
2055 streamer_write_uhwi (ob, e->dest->index);
2056 e->probability.stream_out (ob);
2057 streamer_write_uhwi (ob, e->flags);
2058 }
2059 }
2060
2061 streamer_write_hwi (ob, -1);
2062
2063 bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
2064 while (bb->next_bb)
2065 {
2066 streamer_write_hwi (ob, bb->next_bb->index);
2067 bb = bb->next_bb;
2068 }
2069
2070 streamer_write_hwi (ob, -1);
2071
2072 /* Output the number of loops. */
2073 streamer_write_uhwi (ob, number_of_loops (fn));
2074
2075 /* Output each loop, skipping the tree root which has number zero. */
2076 for (unsigned i = 1; i < number_of_loops (fn); ++i)
2077 {
2078 class loop *loop = get_loop (fn, i);
2079
2080 /* Write the index of the loop header. That's enough to rebuild
2081 the loop tree on the reader side. Stream -1 for an unused
2082 loop entry. */
2083 if (!loop)
2084 {
2085 streamer_write_hwi (ob, -1);
2086 continue;
2087 }
2088 else
2089 streamer_write_hwi (ob, loop->header->index);
2090
2091 /* Write everything copy_loop_info copies. */
2092 streamer_write_enum (ob->main_stream,
2093 loop_estimation, EST_LAST, loop->estimate_state);
2094 streamer_write_hwi (ob, loop->any_upper_bound);
2095 if (loop->any_upper_bound)
2096 streamer_write_widest_int (ob, loop->nb_iterations_upper_bound);
2097 streamer_write_hwi (ob, loop->any_likely_upper_bound);
2098 if (loop->any_likely_upper_bound)
2099 streamer_write_widest_int (ob, loop->nb_iterations_likely_upper_bound);
2100 streamer_write_hwi (ob, loop->any_estimate);
2101 if (loop->any_estimate)
2102 streamer_write_widest_int (ob, loop->nb_iterations_estimate);
2103
2104 /* Write OMP SIMD related info. */
2105 streamer_write_hwi (ob, loop->safelen);
2106 streamer_write_hwi (ob, loop->unroll);
2107 streamer_write_hwi (ob, loop->owned_clique);
2108 streamer_write_hwi (ob, loop->dont_vectorize);
2109 streamer_write_hwi (ob, loop->force_vectorize);
2110 streamer_write_hwi (ob, loop->finite_p);
2111 stream_write_tree (ob, loop->simduid, true);
2112 }
2113
2114 ob->main_stream = tmp_stream;
2115 }
2116
2117
2118 /* Create the section for OB and write out its header and streams. If the
2119 section type is for a function body, FN is the decl of that function. */
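/* The section begins with an lto_function_header recording the sizes of
   the CFG, main and string streams, which are then written back to back
   in that order so the reader can locate each sub-stream.  */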
2120
2121 void
2122 produce_asm (struct output_block *ob, tree fn)
2123 {
2124 enum lto_section_type section_type = ob->section_type;
2125 struct lto_function_header header;
2126 char *section_name;
2127
2128 if (section_type == LTO_section_function_body)
2129 {
2130 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
2131 section_name = lto_get_section_name (section_type, name,
2132 symtab_node::get (fn)->order,
2133 NULL);
2134 }
2135 else
2136 section_name = lto_get_section_name (section_type, NULL, 0, NULL);
2137
2138 lto_begin_section (section_name, !flag_wpa);
2139 free (section_name);
2140
2141 /* The entire header stream is computed here. */
2142 memset (&header, 0, sizeof (struct lto_function_header));
2143
2144 if (section_type == LTO_section_function_body)
2145 header.cfg_size = ob->cfg_stream->total_size;
2146 header.main_size = ob->main_stream->total_size;
2147 header.string_size = ob->string_stream->total_size;
2148 lto_write_data (&header, sizeof header);
2149
2150 /* Put all of the gimple and the string table out to the asm file as a
2151 block of text. */
2152 if (section_type == LTO_section_function_body)
2153 lto_write_stream (ob->cfg_stream);
2154 lto_write_stream (ob->main_stream);
2155 lto_write_stream (ob->string_stream);
2156
2157 lto_end_section ();
2158 }
2159
2160
2161 /* Output the base body of struct function FN using output block OB. */
2162
2163 static void
2164 output_struct_function_base (struct output_block *ob, struct function *fn)
2165 {
2166 struct bitpack_d bp;
2167 unsigned i;
2168 tree t;
2169
2170 /* Output the static chain and non-local goto save area. */
2171 stream_write_tree (ob, fn->static_chain_decl, true);
2172 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
2173
2174 /* Output all the local variables in the function. */
2175 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
2176 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
2177 stream_write_tree (ob, t, true);
2178
2179 /* Output current IL state of the function. */
2180 streamer_write_uhwi (ob, fn->curr_properties);
2181
2182 /* Write all the attributes for FN. */
2183 bp = bitpack_create (ob->main_stream);
2184 bp_pack_value (&bp, fn->is_thunk, 1);
2185 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
2186 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
2187 bp_pack_value (&bp, fn->returns_struct, 1);
2188 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
2189 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
2190 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
2191 bp_pack_value (&bp, fn->after_inlining, 1);
2192 bp_pack_value (&bp, fn->stdarg, 1);
2193 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
2194 bp_pack_value (&bp, fn->has_forced_label_in_static, 1);
2195 bp_pack_value (&bp, fn->calls_alloca, 1);
2196 bp_pack_value (&bp, fn->calls_setjmp, 1);
2197 bp_pack_value (&bp, fn->calls_eh_return, 1);
2198 bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
2199 bp_pack_value (&bp, fn->has_simduid_loops, 1);
2200 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
2201 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
2202 bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);
2203
2204 /* Output the function start and end loci. */
2205 stream_output_location (ob, &bp, fn->function_start_locus);
2206 stream_output_location (ob, &bp, fn->function_end_locus);
2207
2208 /* Save the instance discriminator if present. */
2209 int *instance_number_p = NULL;
2210 if (decl_to_instance_map)
2211 instance_number_p = decl_to_instance_map->get (fn->decl);
2212 bp_pack_value (&bp, !!instance_number_p, 1);
2213 if (instance_number_p)
2214 bp_pack_value (&bp, *instance_number_p, sizeof (int) * CHAR_BIT);
2215
2216 streamer_write_bitpack (&bp);
2217 }
2218
2219
2220 /* Collect all leaf BLOCKs beyond ROOT into LEAFS. */
2221
2222 static void
2223 collect_block_tree_leafs (tree root, vec<tree> &leafs)
2224 {
2225 for (root = BLOCK_SUBBLOCKS (root); root; root = BLOCK_CHAIN (root))
2226 if (! BLOCK_SUBBLOCKS (root))
2227 leafs.safe_push (root);
2228 else
2229 collect_block_tree_leafs (BLOCK_SUBBLOCKS (root), leafs);
2230 }
2231
2232 /* This performs function body modifications that are needed for streaming
2233 to work. */
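/* Concretely, the loop structures are rebuilt and released so the loop tree
   is in a canonical shape, and statement UIDs are assigned in the same
   order the streamer will emit them: non-virtual PHIs and ordinary
   statements first, then virtual PHIs, which are not streamed and are
   recomputed on the reader side.  */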
2234
2235 void
2236 lto_prepare_function_for_streaming (struct cgraph_node *node)
2237 {
2238 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2239 basic_block bb;
2240
2241 if (number_of_loops (fn))
2242 {
2243 push_cfun (fn);
2244 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
2245 loop_optimizer_finalize ();
2246 pop_cfun ();
2247 }
2248 /* We will renumber the statements. The code that does this uses
2249 the same ordering that we use for serializing them so we can use
2250 the same code on the other end and not have to write out the
2251 statement numbers. We do not assign UIDs to PHIs here because
2252 virtual PHIs get re-computed on-the-fly which would make numbers
2253 inconsistent. */
2254 set_gimple_stmt_max_uid (fn, 0);
2255 FOR_ALL_BB_FN (bb, fn)
2256 {
2257 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2258 gsi_next (&gsi))
2259 {
2260 gphi *stmt = gsi.phi ();
2261
2262 /* Virtual PHIs are not going to be streamed. */
2263 if (!virtual_operand_p (gimple_phi_result (stmt)))
2264 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fn));
2265 }
2266 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
2267 gsi_next (&gsi))
2268 {
2269 gimple *stmt = gsi_stmt (gsi);
2270 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fn));
2271 }
2272 }
2273 /* To avoid keeping duplicate gimple IDs in the statements, renumber
2274 virtual phis now. */
2275 FOR_ALL_BB_FN (bb, fn)
2276 {
2277 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2278 gsi_next (&gsi))
2279 {
2280 gphi *stmt = gsi.phi ();
2281 if (virtual_operand_p (gimple_phi_result (stmt)))
2282 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fn));
2283 }
2284 }
2285
2286 }
2287
2288 /* Emit the chain of tree nodes starting at T. OB is the output block
2289 to write to. REF_P is true if chain elements should be emitted
2290 as references. */
2291
2292 static void
2293 streamer_write_chain (struct output_block *ob, tree t, bool ref_p)
2294 {
2295 while (t)
2296 {
2297 /* We avoid outputting external vars or functions by reference
2298 to the global decls section as we do not want to have them
2299 enter decl merging. We should not need to do this anymore because
2300 free_lang_data removes them from block scopes. */
2301 gcc_assert (!VAR_OR_FUNCTION_DECL_P (t) || !DECL_EXTERNAL (t));
2302 stream_write_tree (ob, t, ref_p);
2303
2304 t = TREE_CHAIN (t);
2305 }
2306
2307 /* Write a sentinel to terminate the chain. */
2308 stream_write_tree (ob, NULL_TREE, ref_p);
2309 }
2310
2311 /* Output the body of function NODE->DECL. */
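/* In outline, the LTO_function record contains DECL_RESULT, the chain of
   DECL_ARGUMENTS, any debug args, DECL_INITIAL plus the collected leaf
   BLOCKs, and then, when the function has a GIMPLE body, the struct
   function bits, the SSA names, the EH regions, every basic block, a
   terminating LTO_null and the CFG stream.  */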
2312
2313 static void
2314 output_function (struct cgraph_node *node)
2315 {
2316 tree function;
2317 struct function *fn;
2318 basic_block bb;
2319 struct output_block *ob;
2320
2321 if (streamer_dump_file)
2322 fprintf (streamer_dump_file, "\nStreaming body of %s\n",
2323 node->dump_name ());
2324
2325 function = node->decl;
2326 fn = DECL_STRUCT_FUNCTION (function);
2327 ob = create_output_block (LTO_section_function_body);
2328
2329 clear_line_info (ob);
2330 ob->symbol = node;
2331
2332 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
2333
2334 /* Make string 0 be a NULL string. */
2335 streamer_write_char_stream (ob->string_stream, 0);
2336
2337 streamer_write_record_start (ob, LTO_function);
2338
2339 /* Output decls for parameters and args. */
2340 stream_write_tree (ob, DECL_RESULT (function), true);
2341 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
2342
2343 /* Output debug args if available. */
2344 vec<tree, va_gc> **debugargs = decl_debug_args_lookup (function);
2345 if (! debugargs)
2346 streamer_write_uhwi (ob, 0);
2347 else
2348 {
2349 streamer_write_uhwi (ob, (*debugargs)->length ());
2350 for (unsigned i = 0; i < (*debugargs)->length (); ++i)
2351 stream_write_tree (ob, (**debugargs)[i], true);
2352 }
2353
2354 /* Output DECL_INITIAL for the function, which contains the tree of
2355 lexical scopes. */
2356 stream_write_tree (ob, DECL_INITIAL (function), true);
2357 /* As we do not recurse into BLOCK_SUBBLOCKS but only into BLOCK_SUPERCONTEXT,
2358 collect the block tree leaves and stream those. */
2359 auto_vec<tree> block_tree_leafs;
2360 if (DECL_INITIAL (function))
2361 collect_block_tree_leafs (DECL_INITIAL (function), block_tree_leafs);
2362 streamer_write_uhwi (ob, block_tree_leafs.length ());
2363 for (unsigned i = 0; i < block_tree_leafs.length (); ++i)
2364 stream_write_tree (ob, block_tree_leafs[i], true);
2365
2366 /* We also stream abstract functions where we stream only stuff needed for
2367 debug info. */
2368 if (gimple_has_body_p (function))
2369 {
2370 streamer_write_uhwi (ob, 1);
2371 output_struct_function_base (ob, fn);
2372
2373 /* Output all the SSA names used in the function. */
2374 output_ssa_names (ob, fn);
2375
2376 /* Output any exception handling regions. */
2377 output_eh_regions (ob, fn);
2378
2379 /* Output the code for the function. */
2380 FOR_ALL_BB_FN (bb, fn)
2381 output_bb (ob, bb, fn);
2382
2383 /* The terminator for this function. */
2384 streamer_write_record_start (ob, LTO_null);
2385
2386 output_cfg (ob, fn);
2387 }
2388 else
2389 streamer_write_uhwi (ob, 0);
2390
2391 /* Create a section to hold the pickled output of this function. */
2392 produce_asm (ob, function);
2393
2394 destroy_output_block (ob);
2395 if (streamer_dump_file)
2396 fprintf (streamer_dump_file, "Finished streaming %s\n",
2397 node->dump_name ());
2398 }
2399
2400 /* Output the constructor (DECL_INITIAL) of variable NODE->DECL. */
2401
2402 static void
2403 output_constructor (struct varpool_node *node)
2404 {
2405 tree var = node->decl;
2406 struct output_block *ob;
2407
2408 if (streamer_dump_file)
2409 fprintf (streamer_dump_file, "\nStreaming constructor of %s\n",
2410 node->dump_name ());
2411
2412 timevar_push (TV_IPA_LTO_CTORS_OUT);
2413 ob = create_output_block (LTO_section_function_body);
2414
2415 clear_line_info (ob);
2416 ob->symbol = node;
2417
2418 /* Make string 0 be a NULL string. */
2419 streamer_write_char_stream (ob->string_stream, 0);
2420
2421 /* Output DECL_INITIAL for the variable, which contains its
2422 constructor. */
2423 stream_write_tree (ob, DECL_INITIAL (var), true);
2424
2425 /* Create a section to hold the pickled output of this constructor. */
2426 produce_asm (ob, var);
2427
2428 destroy_output_block (ob);
2429 if (streamer_dump_file)
2430 fprintf (streamer_dump_file, "Finished streaming %s\n",
2431 node->dump_name ());
2432 timevar_pop (TV_IPA_LTO_CTORS_OUT);
2433 }
2434
2435
2436 /* Emit toplevel asms. */
2437
2438 void
2439 lto_output_toplevel_asms (void)
2440 {
2441 struct output_block *ob;
2442 struct asm_node *can;
2443 char *section_name;
2444 struct lto_simple_header_with_strings header;
2445
2446 if (!symtab->first_asm_symbol ())
2447 return;
2448
2449 ob = create_output_block (LTO_section_asm);
2450
2451 /* Make string 0 be a NULL string. */
2452 streamer_write_char_stream (ob->string_stream, 0);
2453
2454 for (can = symtab->first_asm_symbol (); can; can = can->next)
2455 {
2456 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2457 streamer_write_hwi (ob, can->order);
2458 }
2459
2460 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2461
2462 section_name = lto_get_section_name (LTO_section_asm, NULL, 0, NULL);
2463 lto_begin_section (section_name, !flag_wpa);
2464 free (section_name);
2465
2466 /* The entire header stream is computed here. */
2467 memset (&header, 0, sizeof (header));
2468
2469 header.main_size = ob->main_stream->total_size;
2470 header.string_size = ob->string_stream->total_size;
2471 lto_write_data (&header, sizeof header);
2472
2473 /* Put all of the gimple and the string table out to the asm file as a
2474 block of text. */
2475 lto_write_stream (ob->main_stream);
2476 lto_write_stream (ob->string_stream);
2477
2478 lto_end_section ();
2479
2480 destroy_output_block (ob);
2481 }
2482
2483
2484 /* Copy the function body or variable constructor of NODE without deserializing. */
2485
2486 static void
2487 copy_function_or_variable (struct symtab_node *node)
2488 {
2489 tree function = node->decl;
2490 struct lto_file_decl_data *file_data = node->lto_file_data;
2491 const char *data;
2492 size_t len;
2493 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2494 char *section_name =
2495 lto_get_section_name (LTO_section_function_body, name, node->order, NULL);
2496 size_t i, j;
2497 struct lto_in_decl_state *in_state;
2498 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2499
2500 if (streamer_dump_file)
2501 fprintf (streamer_dump_file, "Copying section for %s\n", name);
2502 lto_begin_section (section_name, false);
2503 free (section_name);
2504
2505 /* We may have renamed the declaration, e.g., a static function. */
2506 name = lto_get_decl_name_mapping (file_data, name);
2507
2508 data = lto_get_raw_section_data (file_data, LTO_section_function_body,
2509 name, node->order - file_data->order_base,
2510 &len);
2511 gcc_assert (data);
2512
2513 /* Do a bit copy of the function body. */
2514 lto_write_raw_data (data, len);
2515
2516 /* Copy decls. */
2517 in_state =
2518 lto_get_function_in_decl_state (node->lto_file_data, function);
2519 gcc_assert (in_state);
2520 out_state->compressed = in_state->compressed;
2521
2522 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2523 {
2524 size_t n = vec_safe_length (in_state->streams[i]);
2525 vec<tree, va_gc> *trees = in_state->streams[i];
2526 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2527
2528 /* The out state must have the same indices as the in state.
2529 So just copy the vector. All the encoders in the out state
2530 must be empty when we reach here. */
2531 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2532 encoder->trees.reserve_exact (n);
2533 for (j = 0; j < n; j++)
2534 encoder->trees.safe_push ((*trees)[j]);
2535 }
2536
2537 lto_free_raw_section_data (file_data, LTO_section_function_body, name,
2538 data, len);
2539 lto_end_section ();
2540 }
2541
2542 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
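/* This applies to component references based on public variables;
   presumably keeping the access type in an explicit MEM_REF keeps the
   reference well typed even if LTO symbol merging later picks a prevailing
   definition of the variable with a different type.  */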
2543
2544 static tree
2545 wrap_refs (tree *tp, int *ws, void *)
2546 {
2547 tree t = *tp;
2548 if (handled_component_p (t)
2549 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
2550 && TREE_PUBLIC (TREE_OPERAND (t, 0)))
2551 {
2552 tree decl = TREE_OPERAND (t, 0);
2553 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2554 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2555 build1 (ADDR_EXPR, ptrtype, decl),
2556 build_int_cst (ptrtype, 0));
2557 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2558 *ws = 0;
2559 }
2560 else if (TREE_CODE (t) == CONSTRUCTOR)
2561 ;
2562 else if (!EXPR_P (t))
2563 *ws = 0;
2564 return NULL_TREE;
2565 }
2566
2567 /* Remove functions that are no longer used from offload_funcs, and mark the
2568 remaining ones with DECL_PRESERVE_P. */
2569
2570 static void
2571 prune_offload_funcs (void)
2572 {
2573 if (!offload_funcs)
2574 return;
2575
2576 unsigned ix, ix2;
2577 tree *elem_ptr;
2578 VEC_ORDERED_REMOVE_IF (*offload_funcs, ix, ix2, elem_ptr,
2579 cgraph_node::get (*elem_ptr) == NULL);
2580
2581 tree fn_decl;
2582 FOR_EACH_VEC_ELT (*offload_funcs, ix, fn_decl)
2583 DECL_PRESERVE_P (fn_decl) = 1;
2584 }
2585
2586 /* Produce LTO section that contains global information
2587 about LTO bytecode. */
2588
2589 static void
2590 produce_lto_section ()
2591 {
2592 /* Stream LTO meta section. */
2593 output_block *ob = create_output_block (LTO_section_lto);
2594
2595 char * section_name = lto_get_section_name (LTO_section_lto, NULL, 0, NULL);
2596 lto_begin_section (section_name, false);
2597 free (section_name);
2598
2599 #ifdef HAVE_ZSTD_H
2600 lto_compression compression = ZSTD;
2601 #else
2602 lto_compression compression = ZLIB;
2603 #endif
2604
2605 bool slim_object = flag_generate_lto && !flag_fat_lto_objects;
2606 lto_section s
2607 = { LTO_major_version, LTO_minor_version, slim_object, 0 };
2608 s.set_compression (compression);
2609 lto_write_data (&s, sizeof s);
2610 lto_end_section ();
2611 destroy_output_block (ob);
2612 }
2613
2614 /* Compare symbols to get them sorted by the file they come from (to optimize streaming). */
2615
2616 static int
2617 cmp_symbol_files (const void *pn1, const void *pn2)
2618 {
2619 const symtab_node *n1 = *(const symtab_node * const *)pn1;
2620 const symtab_node *n2 = *(const symtab_node * const *)pn2;
2621
2622 int file_order1 = n1->lto_file_data ? n1->lto_file_data->order : -1;
2623 int file_order2 = n2->lto_file_data ? n2->lto_file_data->order : -1;
2624
2625 /* Order files the same way as they appeared on the command line, to reduce
2626 seeking while copying sections. */
2627 if (file_order1 != file_order2)
2628 return file_order1 - file_order2;
2629
2630 /* Order within static library. */
2631 if (n1->lto_file_data && n1->lto_file_data->id != n2->lto_file_data->id)
2632 {
2633 if (n1->lto_file_data->id > n2->lto_file_data->id)
2634 return 1;
2635 if (n1->lto_file_data->id < n2->lto_file_data->id)
2636 return -1;
2637 }
2638
2639 /* And finally order by the definition order. */
2640 return n1->order - n2->order;
2641 }
2642
2643 /* Main entry point from the pass manager. */
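/* In outline: emit the LTO meta section, collect every non-alias symbol
   whose body or initializer needs to be streamed, sort them by the file
   they came from, stream each one under its own out-decl state (falling
   back to a raw copy of the original section when the body is not
   available in GIMPLE form), and finish with the symbol table and the
   offload tables.  */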
2644
2645 void
2646 lto_output (void)
2647 {
2648 struct lto_out_decl_state *decl_state;
2649 bitmap output = NULL;
2650 bitmap_obstack output_obstack;
2651 unsigned int i, n_nodes;
2652 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2653 auto_vec<symtab_node *> symbols_to_copy;
2654
2655 prune_offload_funcs ();
2656
2657 if (flag_checking)
2658 {
2659 bitmap_obstack_initialize (&output_obstack);
2660 output = BITMAP_ALLOC (&output_obstack);
2661 }
2662
2663 /* Initialize the streamer. */
2664 lto_streamer_init ();
2665
2666 produce_lto_section ();
2667
2668 n_nodes = lto_symtab_encoder_size (encoder);
2669 /* Prepare vector of functions to output and then sort it to optimize
2670 section copying. */
2671 for (i = 0; i < n_nodes; i++)
2672 {
2673 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2674 if (snode->alias)
2675 continue;
2676 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2677 {
2678 if (lto_symtab_encoder_encode_body_p (encoder, node))
2679 symbols_to_copy.safe_push (node);
2680 }
2681 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2682 {
2683 /* Wrap symbol references inside the ctor in a type
2684 preserving MEM_REF. */
2685 tree ctor = DECL_INITIAL (node->decl);
2686 if (ctor && !in_lto_p)
2687 walk_tree (&ctor, wrap_refs, NULL, NULL);
2688 if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2689 && lto_symtab_encoder_encode_initializer_p (encoder, node))
2690 symbols_to_copy.safe_push (node);
2691 }
2692 }
2693 symbols_to_copy.qsort (cmp_symbol_files);
2694 for (i = 0; i < symbols_to_copy.length (); i++)
2695 {
2696 symtab_node *snode = symbols_to_copy[i];
2697 cgraph_node *cnode;
2698 varpool_node *vnode;
2699
2700 if (flag_checking)
2701 gcc_assert (bitmap_set_bit (output, DECL_UID (snode->decl)));
2702
2703 decl_state = lto_new_out_decl_state ();
2704 lto_push_out_decl_state (decl_state);
2705
2706 if ((cnode = dyn_cast <cgraph_node *> (snode))
2707 && (gimple_has_body_p (cnode->decl)
2708 || (!flag_wpa
2709 && flag_incremental_link != INCREMENTAL_LINK_LTO)
2710 /* Thunks have no body but they may be synthesized
2711 at WPA time. */
2712 || DECL_ARGUMENTS (cnode->decl)))
2713 output_function (cnode);
2714 else if ((vnode = dyn_cast <varpool_node *> (snode))
2715 && (DECL_INITIAL (vnode->decl) != error_mark_node
2716 || (!flag_wpa
2717 && flag_incremental_link != INCREMENTAL_LINK_LTO)))
2718 output_constructor (vnode);
2719 else
2720 copy_function_or_variable (snode);
2721 gcc_assert (lto_get_out_decl_state () == decl_state);
2722 lto_pop_out_decl_state ();
2723 lto_record_function_out_decl_state (snode->decl, decl_state);
2724 }
2725
2726 /* Emit the callgraph after emitting function bodies. This needs to
2727 be done now to make sure that all the statements in every function
2728 have been renumbered so that edges can be associated with call
2729 statements using the statement UIDs. */
2730 output_symtab ();
2731
2732 output_offload_tables ();
2733
2734 if (flag_checking)
2735 {
2736 BITMAP_FREE (output);
2737 bitmap_obstack_release (&output_obstack);
2738 }
2739 }
2740
2741 /* Write each node encoded in ENCODER to OB, as well as those reachable
2742 from it and required for correct representation of its semantics.
2743 Each node in ENCODER must be a global declaration or a type. A node
2744 is written only once, even if it appears multiple times in the
2745 vector. Certain transitively-reachable nodes, such as those
2746 representing expressions, may be duplicated, but such nodes
2747 must not appear in ENCODER itself. */
2748
2749 static void
2750 write_global_stream (struct output_block *ob,
2751 struct lto_tree_ref_encoder *encoder)
2752 {
2753 tree t;
2754 size_t index;
2755 const size_t size = lto_tree_ref_encoder_size (encoder);
2756
2757 for (index = 0; index < size; index++)
2758 {
2759 t = lto_tree_ref_encoder_get_tree (encoder, index);
2760 if (streamer_dump_file)
2761 {
2762 fprintf (streamer_dump_file, " %i:", (int)index);
2763 print_node_brief (streamer_dump_file, "", t, 4);
2764 fprintf (streamer_dump_file, "\n");
2765 }
2766 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2767 stream_write_tree (ob, t, false);
2768 }
2769 }
2770
2771
2772 /* Write a sequence of indices into the globals vector corresponding
2773 to the trees in ENCODER. These are used by the reader to map the
2774 indices used to refer to global entities within function bodies to
2775 their referents. */
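/* The on-disk form is simply SIZE followed by SIZE cache slot numbers,
   all written as 32-bit unsigned integers.  */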
2776
2777 static void
2778 write_global_references (struct output_block *ob,
2779 struct lto_tree_ref_encoder *encoder)
2780 {
2781 tree t;
2782 uint32_t index;
2783 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2784
2785 /* Write size and slot indexes as 32-bit unsigned numbers. */
2786 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2787 data[0] = size;
2788
2789 for (index = 0; index < size; index++)
2790 {
2791 unsigned slot_num;
2792
2793 t = lto_tree_ref_encoder_get_tree (encoder, index);
2794 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2795 gcc_assert (slot_num != (unsigned)-1);
2796 data[index + 1] = slot_num;
2797 }
2798
2799 lto_write_data (data, sizeof (uint32_t) * (size + 1));
2800 free (data);
2801 }
2802
2803
2804 /* Write all the streams in an lto_out_decl_state STATE using
2805 output block OB. */
2806
2807 void
2808 lto_output_decl_state_streams (struct output_block *ob,
2809 struct lto_out_decl_state *state)
2810 {
2811 int i;
2812
2813 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2814 write_global_stream (ob, &state->streams[i]);
2815 }
2816
2817
2818 /* Write all the references in an lto_out_decl_state STATE using
2819 output block OB. */
2820
2821 void
2822 lto_output_decl_state_refs (struct output_block *ob,
2823 struct lto_out_decl_state *state)
2824 {
2825 unsigned i;
2826 unsigned ref;
2827 tree decl;
2828
2829 /* Write a reference to the FUNCTION_DECL. If there is no function,
2830 write a reference to void_type_node. */
2831 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2832 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2833 gcc_assert (ref != (unsigned)-1);
2834 ref = ref * 2 + (state->compressed ? 1 : 0);
2835 lto_write_data (&ref, sizeof (uint32_t));
2836
2837 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2838 write_global_references (ob, &state->streams[i]);
2839 }
2840
2841
2842 /* Return the written size of STATE. */
2843
2844 static size_t
2845 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2846 {
2847 int i;
2848 size_t size;
2849
2850 size = sizeof (int32_t); /* fn_ref. */
2851 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2852 {
2853 size += sizeof (int32_t); /* vector size. */
2854 size += (lto_tree_ref_encoder_size (&state->streams[i])
2855 * sizeof (int32_t));
2856 }
2857 return size;
2858 }
2859
2860
2861 /* Write symbol T into STREAM in CACHE. SEEN specifies symbols we wrote
2862 so far. */
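/* Each entry written below consists of the NUL-terminated assembler name,
   the NUL-terminated comdat group name (empty if none), one byte each for
   the symbol kind and visibility, an 8-byte size and a 4-byte slot number
   into the writer cache.  */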
2863
2864 static void
2865 write_symbol (struct streamer_tree_cache_d *cache,
2866 tree t, hash_set<const char *> *seen, bool alias)
2867 {
2868 const char *name;
2869 enum gcc_plugin_symbol_kind kind;
2870 enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
2871 unsigned slot_num;
2872 uint64_t size;
2873 const char *comdat;
2874 unsigned char c;
2875
2876 gcc_assert (VAR_OR_FUNCTION_DECL_P (t));
2877
2878 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2879
2880 /* This behaves like assemble_name_raw in varasm.c, performing the
2881 same name manipulations that ASM_OUTPUT_LABELREF does. */
2882 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2883
2884 if (seen->add (name))
2885 return;
2886
2887 streamer_tree_cache_lookup (cache, t, &slot_num);
2888 gcc_assert (slot_num != (unsigned)-1);
2889
2890 if (DECL_EXTERNAL (t))
2891 {
2892 if (DECL_WEAK (t))
2893 kind = GCCPK_WEAKUNDEF;
2894 else
2895 kind = GCCPK_UNDEF;
2896 }
2897 else
2898 {
2899 if (DECL_WEAK (t))
2900 kind = GCCPK_WEAKDEF;
2901 else if (DECL_COMMON (t))
2902 kind = GCCPK_COMMON;
2903 else
2904 kind = GCCPK_DEF;
2905
2906 /* When something is defined, it should have a symtab node attached. */
2907 gcc_assert (alias || !VAR_P (t) || varpool_node::get (t)->definition);
2908 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2909 || (cgraph_node::get (t)
2910 && cgraph_node::get (t)->definition));
2911 }
2912
2913 /* Imitate what default_elf_asm_output_external does.
2914 When a symbol is external, we need to output it with DEFAULT visibility
2915 when compiling with -fvisibility=default, but with HIDDEN visibility
2916 when the symbol has attribute ((visibility ("hidden"))) specified.
2917 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2918 right. */
2919
2920 if (DECL_EXTERNAL (t)
2921 && !targetm.binds_local_p (t))
2922 visibility = GCCPV_DEFAULT;
2923 else
2924 switch (DECL_VISIBILITY (t))
2925 {
2926 case VISIBILITY_DEFAULT:
2927 visibility = GCCPV_DEFAULT;
2928 break;
2929 case VISIBILITY_PROTECTED:
2930 visibility = GCCPV_PROTECTED;
2931 break;
2932 case VISIBILITY_HIDDEN:
2933 visibility = GCCPV_HIDDEN;
2934 break;
2935 case VISIBILITY_INTERNAL:
2936 visibility = GCCPV_INTERNAL;
2937 break;
2938 }
2939
2940 if (kind == GCCPK_COMMON
2941 && DECL_SIZE_UNIT (t)
2942 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2943 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2944 else
2945 size = 0;
2946
2947 if (DECL_ONE_ONLY (t))
2948 comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
2949 else
2950 comdat = "";
2951
2952 lto_write_data (name, strlen (name) + 1);
2953 lto_write_data (comdat, strlen (comdat) + 1);
2954 c = (unsigned char) kind;
2955 lto_write_data (&c, 1);
2956 c = (unsigned char) visibility;
2957 lto_write_data (&c, 1);
2958 lto_write_data (&size, 8);
2959 lto_write_data (&slot_num, 4);
2960 }
2961
2962 /* Write extension information for symbols (symbol type, section flags). */
2963
2964 static void
2965 write_symbol_extension_info (tree t)
2966 {
2967 unsigned char c;
2968 c = (unsigned char) (TREE_CODE (t) == VAR_DECL
2969 ? GCCST_VARIABLE : GCCST_FUNCTION);
2970 lto_write_data (&c, 1);
2971 unsigned char section_kind = 0;
2972 if (TREE_CODE (t) == VAR_DECL)
2973 {
2974 section *s = get_variable_section (t, false);
2975 if (s->common.flags & SECTION_BSS)
2976 section_kind |= GCCSSK_BSS;
2977 }
2978 lto_write_data (&section_kind, 1);
2979 }
2980
2981 /* Write an IL symbol table to OB. The symbols to write are taken from
2982 the symtab node encoder in OB's decl state. */
2983
2984 static unsigned int
2985 produce_symtab (struct output_block *ob)
2986 {
2987 unsigned int streamed_symbols = 0;
2988 struct streamer_tree_cache_d *cache = ob->writer_cache;
2989 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, 0, NULL);
2990 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2991 lto_symtab_encoder_iterator lsei;
2992
2993 lto_begin_section (section_name, false);
2994 free (section_name);
2995
2996 hash_set<const char *> seen;
2997
2998 /* Write the symbol table.
2999 First write everything defined and then all declarations.
3000 This is necessary to handle cases where we have duplicated symbols. */
3001 for (lsei = lsei_start (encoder);
3002 !lsei_end_p (lsei); lsei_next (&lsei))
3003 {
3004 symtab_node *node = lsei_node (lsei);
3005
3006 if (DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
3007 continue;
3008 write_symbol (cache, node->decl, &seen, false);
3009 ++streamed_symbols;
3010 }
3011 for (lsei = lsei_start (encoder);
3012 !lsei_end_p (lsei); lsei_next (&lsei))
3013 {
3014 symtab_node *node = lsei_node (lsei);
3015
3016 if (!DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
3017 continue;
3018 write_symbol (cache, node->decl, &seen, false);
3019 ++streamed_symbols;
3020 }
3021
3022 lto_end_section ();
3023
3024 return streamed_symbols;
3025 }
3026
3027 /* Symtab extension version. */
3028 #define LTO_SYMTAB_EXTENSION_VERSION 1
3029
3030 /* Write an IL symbol table extension to OB. PREVIOUS_STREAMED_SYMBOLS is
3031 the number of symbols produce_symtab wrote, used as a consistency check. */
3032
3033 static void
3034 produce_symtab_extension (struct output_block *ob,
3035 unsigned int previous_streamed_symbols)
3036 {
3037 unsigned int streamed_symbols = 0;
3038 char *section_name = lto_get_section_name (LTO_section_symtab_extension,
3039 NULL, 0, NULL);
3040 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
3041 lto_symtab_encoder_iterator lsei;
3042
3043 lto_begin_section (section_name, false);
3044 free (section_name);
3045
3046 unsigned char version = LTO_SYMTAB_EXTENSION_VERSION;
3047 lto_write_data (&version, 1);
3048
3049 /* Write the symbol table.
3050 First write everything defined and then all declarations.
3051 This is necessary to handle cases where we have duplicated symbols. */
3052 for (lsei = lsei_start (encoder);
3053 !lsei_end_p (lsei); lsei_next (&lsei))
3054 {
3055 symtab_node *node = lsei_node (lsei);
3056
3057 if (DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
3058 continue;
3059 write_symbol_extension_info (node->decl);
3060 ++streamed_symbols;
3061 }
3062 for (lsei = lsei_start (encoder);
3063 !lsei_end_p (lsei); lsei_next (&lsei))
3064 {
3065 symtab_node *node = lsei_node (lsei);
3066
3067 if (!DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
3068 continue;
3069 write_symbol_extension_info (node->decl);
3070 ++streamed_symbols;
3071 }
3072
3073 gcc_assert (previous_streamed_symbols == streamed_symbols);
3074 lto_end_section ();
3075 }
3076
3077
3078 /* Init the streamer_mode_table for output, where we collect info on what
3079 machine_mode values have been streamed. */
3080 void
3081 lto_output_init_mode_table (void)
3082 {
3083 memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
3084 }
3085
3086
3087 /* Write the mode table. */
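/* Only modes marked in streamer_mode_table are emitted.  Each one is
   bit-packed as its value, class, size, precision, inner mode, number of
   units, class-specific data (ibit/fbit or the real format name) and its
   name; modes that are their own inner mode go out first so later entries
   can refer to them, and a VOIDmode entry terminates the table.  */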
3088 static void
3089 lto_write_mode_table (void)
3090 {
3091 struct output_block *ob;
3092 ob = create_output_block (LTO_section_mode_table);
3093 bitpack_d bp = bitpack_create (ob->main_stream);
3094
3095 /* Ensure that for GET_MODE_INNER (m) != m we also have
3096 the inner mode marked. */
3097 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
3098 if (streamer_mode_table[i])
3099 {
3100 machine_mode m = (machine_mode) i;
3101 machine_mode inner_m = GET_MODE_INNER (m);
3102 if (inner_m != m)
3103 streamer_mode_table[(int) inner_m] = 1;
3104 }
3105 /* First stream modes that have GET_MODE_INNER (m) == m,
3106 so that we can refer to them afterwards. */
3107 for (int pass = 0; pass < 2; pass++)
3108 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
3109 if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
3110 {
3111 machine_mode m = (machine_mode) i;
3112 if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
3113 continue;
3114 bp_pack_value (&bp, m, 8);
3115 bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
3116 bp_pack_poly_value (&bp, GET_MODE_SIZE (m), 16);
3117 bp_pack_poly_value (&bp, GET_MODE_PRECISION (m), 16);
3118 bp_pack_value (&bp, GET_MODE_INNER (m), 8);
3119 bp_pack_poly_value (&bp, GET_MODE_NUNITS (m), 16);
3120 switch (GET_MODE_CLASS (m))
3121 {
3122 case MODE_FRACT:
3123 case MODE_UFRACT:
3124 case MODE_ACCUM:
3125 case MODE_UACCUM:
3126 bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
3127 bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
3128 break;
3129 case MODE_FLOAT:
3130 case MODE_DECIMAL_FLOAT:
3131 bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
3132 break;
3133 default:
3134 break;
3135 }
3136 bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
3137 }
3138 bp_pack_value (&bp, VOIDmode, 8);
3139
3140 streamer_write_bitpack (&bp);
3141
3142 char *section_name
3143 = lto_get_section_name (LTO_section_mode_table, NULL, 0, NULL);
3144 lto_begin_section (section_name, !flag_wpa);
3145 free (section_name);
3146
3147 /* The entire header stream is computed here. */
3148 struct lto_simple_header_with_strings header;
3149 memset (&header, 0, sizeof (header));
3150
3151 header.main_size = ob->main_stream->total_size;
3152 header.string_size = ob->string_stream->total_size;
3153 lto_write_data (&header, sizeof header);
3154
3155 /* Put all of the gimple and the string table out to the asm file as a
3156 block of text. */
3157 lto_write_stream (ob->main_stream);
3158 lto_write_stream (ob->string_stream);
3159
3160 lto_end_section ();
3161 destroy_output_block (ob);
3162 }
3163
3164
3165 /* This pass is run after all of the functions are serialized and all
3166 of the IPA passes have written their serialized forms. This pass
3167 causes the vector of all of the global decls and types used from
3168 this file to be written into a section that can then be read back
3169 to recover them on the other side. */
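/* In outline, the decls section consists of an lto_decl_header, the number
   of decl states, the reference vectors of the global decl state followed
   by those of every per-function state, and finally the main and string
   streams.  The symbol table, its extension (both skipped for WPA) and the
   saved command line options are written as separate sections
   afterwards.  */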
3170
3171 void
3172 produce_asm_for_decls (void)
3173 {
3174 struct lto_out_decl_state *out_state;
3175 struct lto_out_decl_state *fn_out_state;
3176 struct lto_decl_header header;
3177 char *section_name;
3178 struct output_block *ob;
3179 unsigned idx, num_fns;
3180 size_t decl_state_size;
3181 int32_t num_decl_states;
3182
3183 ob = create_output_block (LTO_section_decls);
3184
3185 memset (&header, 0, sizeof (struct lto_decl_header));
3186
3187 section_name = lto_get_section_name (LTO_section_decls, NULL, 0, NULL);
3188 lto_begin_section (section_name, !flag_wpa);
3189 free (section_name);
3190
3191 /* Make string 0 be a NULL string. */
3192 streamer_write_char_stream (ob->string_stream, 0);
3193
3194 gcc_assert (!alias_pairs);
3195
3196 /* Get rid of the global decl state hash tables to save some memory. */
3197 out_state = lto_get_out_decl_state ();
3198 for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
3199 if (out_state->streams[i].tree_hash_table)
3200 {
3201 delete out_state->streams[i].tree_hash_table;
3202 out_state->streams[i].tree_hash_table = NULL;
3203 }
3204
3205 /* Write the global symbols. */
3206 if (streamer_dump_file)
3207 fprintf (streamer_dump_file, "Outputting global stream\n");
3208 lto_output_decl_state_streams (ob, out_state);
3209 num_fns = lto_function_decl_states.length ();
3210 for (idx = 0; idx < num_fns; idx++)
3211 {
3212 fn_out_state =
3213 lto_function_decl_states[idx];
3214 if (streamer_dump_file)
3215 fprintf (streamer_dump_file, "Outputting stream for %s\n",
3216 IDENTIFIER_POINTER
3217 (DECL_ASSEMBLER_NAME (fn_out_state->fn_decl)));
3218 lto_output_decl_state_streams (ob, fn_out_state);
3219 }
3220
3221 /* Currently not used. This field would allow us to preallocate
3222 the globals vector, so that it need not be resized as it is extended. */
3223 header.num_nodes = -1;
3224
3225 /* Compute the total size of all decl out states. */
3226 decl_state_size = sizeof (int32_t);
3227 decl_state_size += lto_out_decl_state_written_size (out_state);
3228 for (idx = 0; idx < num_fns; idx++)
3229 {
3230 fn_out_state =
3231 lto_function_decl_states[idx];
3232 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
3233 }
3234 header.decl_state_size = decl_state_size;
3235
3236 header.main_size = ob->main_stream->total_size;
3237 header.string_size = ob->string_stream->total_size;
3238
3239 lto_write_data (&header, sizeof header);
3240
3241 /* Write the main out-decl state, followed by out-decl states of
3242 functions. */
3243 num_decl_states = num_fns + 1;
3244 lto_write_data (&num_decl_states, sizeof (num_decl_states));
3245 lto_output_decl_state_refs (ob, out_state);
3246 for (idx = 0; idx < num_fns; idx++)
3247 {
3248 fn_out_state = lto_function_decl_states[idx];
3249 lto_output_decl_state_refs (ob, fn_out_state);
3250 }
3251
3252 lto_write_stream (ob->main_stream);
3253 lto_write_stream (ob->string_stream);
3254
3255 lto_end_section ();
3256
3257 /* Write the symbol table. It is used by the linker to determine
3258 dependencies, and thus we can skip it for WPA. */
3259 if (!flag_wpa)
3260 {
3261 unsigned int streamed_symbols = produce_symtab (ob);
3262 produce_symtab_extension (ob, streamed_symbols);
3263 }
3264
3265 /* Write command line opts. */
3266 lto_write_options ();
3267
3268 /* Deallocate memory and clean up. */
3269 for (idx = 0; idx < num_fns; idx++)
3270 {
3271 fn_out_state =
3272 lto_function_decl_states[idx];
3273 lto_delete_out_decl_state (fn_out_state);
3274 }
3275 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
3276 lto_function_decl_states.release ();
3277 destroy_output_block (ob);
3278 if (lto_stream_offload_p)
3279 lto_write_mode_table ();
3280 }