]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/lto-streamer-out.c
Merge with trunk.
[thirdparty/gcc.git] / gcc / lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2013 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "expr.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "hashtab.h"
33 #include "basic-block.h"
34 #include "gimple.h"
35 #include "gimple-ssa.h"
36 #include "tree-ssanames.h"
37 #include "tree-pass.h"
38 #include "function.h"
39 #include "ggc.h"
40 #include "diagnostic-core.h"
41 #include "except.h"
42 #include "vec.h"
43 #include "lto-symtab.h"
44 #include "lto-streamer.h"
45 #include "data-streamer.h"
46 #include "gimple-streamer.h"
47 #include "tree-streamer.h"
48 #include "streamer-hooks.h"
49 #include "cfgloop.h"
50
51
52 /* Clear the line info stored in DATA_IN. */
53
54 static void
55 clear_line_info (struct output_block *ob)
56 {
57 ob->current_file = NULL;
58 ob->current_line = 0;
59 ob->current_col = 0;
60 }
61
62
63 /* Create the output block and return it. SECTION_TYPE is
64 LTO_section_function_body or LTO_static_initializer. */
65
66 struct output_block *
67 create_output_block (enum lto_section_type section_type)
68 {
69 struct output_block *ob = XCNEW (struct output_block);
70
71 ob->section_type = section_type;
72 ob->decl_state = lto_get_out_decl_state ();
73 ob->main_stream = XCNEW (struct lto_output_stream);
74 ob->string_stream = XCNEW (struct lto_output_stream);
75 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true);
76
77 if (section_type == LTO_section_function_body)
78 ob->cfg_stream = XCNEW (struct lto_output_stream);
79
80 clear_line_info (ob);
81
82 ob->string_hash_table.create (37);
83 gcc_obstack_init (&ob->obstack);
84
85 return ob;
86 }
87
88
89 /* Destroy the output block OB. */
90
91 void
92 destroy_output_block (struct output_block *ob)
93 {
94 enum lto_section_type section_type = ob->section_type;
95
96 ob->string_hash_table.dispose ();
97
98 free (ob->main_stream);
99 free (ob->string_stream);
100 if (section_type == LTO_section_function_body)
101 free (ob->cfg_stream);
102
103 streamer_tree_cache_delete (ob->writer_cache);
104 obstack_free (&ob->obstack, NULL);
105
106 free (ob);
107 }
108
109
110 /* Look up NODE in the type table and write the index for it to OB. */
111
112 static void
113 output_type_ref (struct output_block *ob, tree node)
114 {
115 streamer_write_record_start (ob, LTO_type_ref);
116 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
117 }
118
119
120 /* Return true if tree node T is written to various tables. For these
121 nodes, we sometimes want to write their phyiscal representation
122 (via lto_output_tree), and sometimes we need to emit an index
123 reference into a table (via lto_output_tree_ref). */
124
125 static bool
126 tree_is_indexable (tree t)
127 {
128 /* Parameters and return values of functions of variably modified types
129 must go to global stream, because they may be used in the type
130 definition. */
131 if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
132 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
133 else if (TREE_CODE (t) == VAR_DECL && decl_function_context (t)
134 && !TREE_STATIC (t))
135 return false;
136 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
137 return false;
138 /* Variably modified types need to be streamed alongside function
139 bodies because they can refer to local entities. Together with
140 them we have to localize their members as well.
141 ??? In theory that includes non-FIELD_DECLs as well. */
142 else if (TYPE_P (t)
143 && variably_modified_type_p (t, NULL_TREE))
144 return false;
145 else if (TREE_CODE (t) == FIELD_DECL
146 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
147 return false;
148 else
149 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
150 }
151
152
153 /* Output info about new location into bitpack BP.
154 After outputting bitpack, lto_output_location_data has
155 to be done to output actual data. */
156
157 void
158 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
159 location_t loc)
160 {
161 expanded_location xloc;
162
163 loc = LOCATION_LOCUS (loc);
164 bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
165 if (loc == UNKNOWN_LOCATION)
166 return;
167
168 xloc = expand_location (loc);
169
170 bp_pack_value (bp, ob->current_file != xloc.file, 1);
171 bp_pack_value (bp, ob->current_line != xloc.line, 1);
172 bp_pack_value (bp, ob->current_col != xloc.column, 1);
173
174 if (ob->current_file != xloc.file)
175 bp_pack_var_len_unsigned (bp,
176 streamer_string_index (ob, xloc.file,
177 strlen (xloc.file) + 1,
178 true));
179 ob->current_file = xloc.file;
180
181 if (ob->current_line != xloc.line)
182 bp_pack_var_len_unsigned (bp, xloc.line);
183 ob->current_line = xloc.line;
184
185 if (ob->current_col != xloc.column)
186 bp_pack_var_len_unsigned (bp, xloc.column);
187 ob->current_col = xloc.column;
188 }
189
190
191 /* If EXPR is an indexable tree node, output a reference to it to
192 output block OB. Otherwise, output the physical representation of
193 EXPR to OB. */
194
195 static void
196 lto_output_tree_ref (struct output_block *ob, tree expr)
197 {
198 enum tree_code code;
199
200 if (TYPE_P (expr))
201 {
202 output_type_ref (ob, expr);
203 return;
204 }
205
206 code = TREE_CODE (expr);
207 switch (code)
208 {
209 case SSA_NAME:
210 streamer_write_record_start (ob, LTO_ssa_name_ref);
211 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
212 break;
213
214 case FIELD_DECL:
215 streamer_write_record_start (ob, LTO_field_decl_ref);
216 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
217 break;
218
219 case FUNCTION_DECL:
220 streamer_write_record_start (ob, LTO_function_decl_ref);
221 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
222 break;
223
224 case VAR_DECL:
225 case DEBUG_EXPR_DECL:
226 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
227 case PARM_DECL:
228 streamer_write_record_start (ob, LTO_global_decl_ref);
229 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
230 break;
231
232 case CONST_DECL:
233 streamer_write_record_start (ob, LTO_const_decl_ref);
234 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
235 break;
236
237 case IMPORTED_DECL:
238 gcc_assert (decl_function_context (expr) == NULL);
239 streamer_write_record_start (ob, LTO_imported_decl_ref);
240 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
241 break;
242
243 case TYPE_DECL:
244 streamer_write_record_start (ob, LTO_type_decl_ref);
245 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
246 break;
247
248 case NAMESPACE_DECL:
249 streamer_write_record_start (ob, LTO_namespace_decl_ref);
250 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
251 break;
252
253 case LABEL_DECL:
254 streamer_write_record_start (ob, LTO_label_decl_ref);
255 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
256 break;
257
258 case RESULT_DECL:
259 streamer_write_record_start (ob, LTO_result_decl_ref);
260 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
261 break;
262
263 case TRANSLATION_UNIT_DECL:
264 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
265 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
266 break;
267
268 default:
269 /* No other node is indexable, so it should have been handled by
270 lto_output_tree. */
271 gcc_unreachable ();
272 }
273 }
274
275
276 /* Return true if EXPR is a tree node that can be written to disk. */
277
278 static inline bool
279 lto_is_streamable (tree expr)
280 {
281 enum tree_code code = TREE_CODE (expr);
282
283 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
284 name version in lto_output_tree_ref (see output_ssa_names). */
285 return !is_lang_specific (expr)
286 && code != SSA_NAME
287 && code != CALL_EXPR
288 && code != LANG_TYPE
289 && code != MODIFY_EXPR
290 && code != INIT_EXPR
291 && code != TARGET_EXPR
292 && code != BIND_EXPR
293 && code != WITH_CLEANUP_EXPR
294 && code != STATEMENT_LIST
295 && code != OMP_CLAUSE
296 && (code == CASE_LABEL_EXPR
297 || code == DECL_EXPR
298 || TREE_CODE_CLASS (code) != tcc_statement);
299 }
300
301
302 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
303
304 static tree
305 get_symbol_initial_value (struct output_block *ob, tree expr)
306 {
307 gcc_checking_assert (DECL_P (expr)
308 && TREE_CODE (expr) != FUNCTION_DECL
309 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
310
311 /* Handle DECL_INITIAL for symbols. */
312 tree initial = DECL_INITIAL (expr);
313 if (TREE_CODE (expr) == VAR_DECL
314 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
315 && !DECL_IN_CONSTANT_POOL (expr)
316 && initial)
317 {
318 lto_symtab_encoder_t encoder;
319 struct varpool_node *vnode;
320
321 encoder = ob->decl_state->symtab_node_encoder;
322 vnode = varpool_get_node (expr);
323 if (!vnode
324 || !lto_symtab_encoder_encode_initializer_p (encoder,
325 vnode))
326 initial = error_mark_node;
327 }
328
329 return initial;
330 }
331
332
333 /* Write a physical representation of tree node EXPR to output block
334 OB. If REF_P is true, the leaves of EXPR are emitted as references
335 via lto_output_tree_ref. IX is the index into the streamer cache
336 where EXPR is stored. */
337
338 static void
339 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
340 {
341 /* Pack all the non-pointer fields in EXPR into a bitpack and write
342 the resulting bitpack. */
343 bitpack_d bp = bitpack_create (ob->main_stream);
344 streamer_pack_tree_bitfields (ob, &bp, expr);
345 streamer_write_bitpack (&bp);
346
347 /* Write all the pointer fields in EXPR. */
348 streamer_write_tree_body (ob, expr, ref_p);
349
350 /* Write any LTO-specific data to OB. */
351 if (DECL_P (expr)
352 && TREE_CODE (expr) != FUNCTION_DECL
353 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
354 {
355 /* Handle DECL_INITIAL for symbols. */
356 tree initial = get_symbol_initial_value (ob, expr);
357 stream_write_tree (ob, initial, ref_p);
358 }
359 }
360
361 /* Write a physical representation of tree node EXPR to output block
362 OB. If REF_P is true, the leaves of EXPR are emitted as references
363 via lto_output_tree_ref. IX is the index into the streamer cache
364 where EXPR is stored. */
365
366 static void
367 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
368 {
369 if (!lto_is_streamable (expr))
370 internal_error ("tree code %qs is not supported in LTO streams",
371 get_tree_code_name (TREE_CODE (expr)));
372
373 /* Write the header, containing everything needed to materialize
374 EXPR on the reading side. */
375 streamer_write_tree_header (ob, expr);
376
377 lto_write_tree_1 (ob, expr, ref_p);
378
379 /* Mark the end of EXPR. */
380 streamer_write_zero (ob);
381 }
382
383 /* Emit the physical representation of tree node EXPR to output block
384 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
385 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
386
387 static void
388 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
389 bool ref_p, bool this_ref_p)
390 {
391 unsigned ix;
392
393 gcc_checking_assert (expr != NULL_TREE
394 && !(this_ref_p && tree_is_indexable (expr)));
395
396 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
397 expr, hash, &ix);
398 gcc_assert (!exists_p);
399 if (streamer_handle_as_builtin_p (expr))
400 {
401 /* MD and NORMAL builtins do not need to be written out
402 completely as they are always instantiated by the
403 compiler on startup. The only builtins that need to
404 be written out are BUILT_IN_FRONTEND. For all other
405 builtins, we simply write the class and code. */
406 streamer_write_builtin (ob, expr);
407 }
408 else if (TREE_CODE (expr) == INTEGER_CST
409 && !TREE_OVERFLOW (expr))
410 {
411 /* Shared INTEGER_CST nodes are special because they need their
412 original type to be materialized by the reader (to implement
413 TYPE_CACHED_VALUES). */
414 streamer_write_integer_cst (ob, expr, ref_p);
415 }
416 else
417 {
418 /* This is the first time we see EXPR, write its fields
419 to OB. */
420 lto_write_tree (ob, expr, ref_p);
421 }
422 }
423
424 struct sccs
425 {
426 unsigned int dfsnum;
427 unsigned int low;
428 };
429
430 struct scc_entry
431 {
432 tree t;
433 hashval_t hash;
434 };
435
436 static unsigned int next_dfs_num;
437 static vec<scc_entry> sccstack;
438 static struct pointer_map_t *sccstate;
439 static struct obstack sccstate_obstack;
440
441 static void
442 DFS_write_tree (struct output_block *ob, sccs *from_state,
443 tree expr, bool ref_p, bool this_ref_p);
444
445 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
446 DFS recurse for all tree edges originating from it. */
447
448 static void
449 DFS_write_tree_body (struct output_block *ob,
450 tree expr, sccs *expr_state, bool ref_p)
451 {
452 #define DFS_follow_tree_edge(DEST) \
453 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
454
455 enum tree_code code;
456
457 code = TREE_CODE (expr);
458
459 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
460 {
461 if (TREE_CODE (expr) != IDENTIFIER_NODE)
462 DFS_follow_tree_edge (TREE_TYPE (expr));
463 }
464
465 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
466 {
467 for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
468 DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
469 }
470
471 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
472 {
473 DFS_follow_tree_edge (TREE_REALPART (expr));
474 DFS_follow_tree_edge (TREE_IMAGPART (expr));
475 }
476
477 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
478 {
479 /* Drop names that were created for anonymous entities. */
480 if (DECL_NAME (expr)
481 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
482 && ANON_AGGRNAME_P (DECL_NAME (expr)))
483 ;
484 else
485 DFS_follow_tree_edge (DECL_NAME (expr));
486 DFS_follow_tree_edge (DECL_CONTEXT (expr));
487 }
488
489 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
490 {
491 DFS_follow_tree_edge (DECL_SIZE (expr));
492 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
493
494 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
495 special handling in LTO, it must be handled by streamer hooks. */
496
497 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
498
499 /* Do not follow DECL_ABSTRACT_ORIGIN. We cannot handle debug information
500 for early inlining so drop it on the floor instead of ICEing in
501 dwarf2out.c. */
502
503 if ((TREE_CODE (expr) == VAR_DECL
504 || TREE_CODE (expr) == PARM_DECL)
505 && DECL_HAS_VALUE_EXPR_P (expr))
506 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
507 if (TREE_CODE (expr) == VAR_DECL)
508 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
509 }
510
511 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
512 {
513 if (TREE_CODE (expr) == TYPE_DECL)
514 DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
515 DFS_follow_tree_edge (DECL_VINDEX (expr));
516 }
517
518 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
519 {
520 /* Make sure we don't inadvertently set the assembler name. */
521 if (DECL_ASSEMBLER_NAME_SET_P (expr))
522 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
523 DFS_follow_tree_edge (DECL_SECTION_NAME (expr));
524 DFS_follow_tree_edge (DECL_COMDAT_GROUP (expr));
525 }
526
527 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
528 {
529 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
530 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
531 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
532 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
533 DFS_follow_tree_edge (DECL_FCONTEXT (expr));
534 }
535
536 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
537 {
538 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
539 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
540 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
541 }
542
543 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
544 {
545 DFS_follow_tree_edge (TYPE_SIZE (expr));
546 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
547 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
548 DFS_follow_tree_edge (TYPE_NAME (expr));
549 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
550 reconstructed during fixup. */
551 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
552 during fixup. */
553 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
554 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
555 /* TYPE_CANONICAL is re-computed during type merging, so no need
556 to follow it here. */
557 DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
558 }
559
560 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
561 {
562 if (TREE_CODE (expr) == ENUMERAL_TYPE)
563 DFS_follow_tree_edge (TYPE_VALUES (expr));
564 else if (TREE_CODE (expr) == ARRAY_TYPE)
565 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
566 else if (RECORD_OR_UNION_TYPE_P (expr))
567 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
568 DFS_follow_tree_edge (t);
569 else if (TREE_CODE (expr) == FUNCTION_TYPE
570 || TREE_CODE (expr) == METHOD_TYPE)
571 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
572
573 if (!POINTER_TYPE_P (expr))
574 DFS_follow_tree_edge (TYPE_MINVAL (expr));
575 DFS_follow_tree_edge (TYPE_MAXVAL (expr));
576 if (RECORD_OR_UNION_TYPE_P (expr))
577 DFS_follow_tree_edge (TYPE_BINFO (expr));
578 }
579
580 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
581 {
582 DFS_follow_tree_edge (TREE_PURPOSE (expr));
583 DFS_follow_tree_edge (TREE_VALUE (expr));
584 DFS_follow_tree_edge (TREE_CHAIN (expr));
585 }
586
587 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
588 {
589 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
590 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
591 }
592
593 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
594 {
595 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
596 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
597 DFS_follow_tree_edge (TREE_BLOCK (expr));
598 }
599
600 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
601 {
602 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
603 /* ??? FIXME. See also streamer_write_chain. */
604 if (!(VAR_OR_FUNCTION_DECL_P (t)
605 && DECL_EXTERNAL (t)))
606 DFS_follow_tree_edge (t);
607
608 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
609
610 /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
611 handle - those that represent inlined function scopes.
612 For the drop rest them on the floor instead of ICEing
613 in dwarf2out.c. */
614 if (inlined_function_outer_scope_p (expr))
615 {
616 tree ultimate_origin = block_ultimate_origin (expr);
617 DFS_follow_tree_edge (ultimate_origin);
618 }
619 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
620 information for early inlined BLOCKs so drop it on the floor instead
621 of ICEing in dwarf2out.c. */
622
623 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
624 streaming time. */
625
626 /* Do not output BLOCK_SUBBLOCKS. Instead on streaming-in this
627 list is re-constructed from BLOCK_SUPERCONTEXT. */
628 }
629
630 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
631 {
632 unsigned i;
633 tree t;
634
635 /* Note that the number of BINFO slots has already been emitted in
636 EXPR's header (see streamer_write_tree_header) because this length
637 is needed to build the empty BINFO node on the reader side. */
638 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
639 DFS_follow_tree_edge (t);
640 DFS_follow_tree_edge (BINFO_OFFSET (expr));
641 DFS_follow_tree_edge (BINFO_VTABLE (expr));
642 DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));
643
644 /* The number of BINFO_BASE_ACCESSES has already been emitted in
645 EXPR's bitfield section. */
646 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
647 DFS_follow_tree_edge (t);
648
649 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
650 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
651 }
652
653 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
654 {
655 unsigned i;
656 tree index, value;
657
658 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
659 {
660 DFS_follow_tree_edge (index);
661 DFS_follow_tree_edge (value);
662 }
663 }
664
665 #undef DFS_follow_tree_edge
666 }
667
668 /* Return a hash value for the tree T. */
669
670 static hashval_t
671 hash_tree (struct streamer_tree_cache_d *cache, tree t)
672 {
673 #define visit(SIBLING) \
674 do { \
675 unsigned ix; \
676 if (SIBLING && streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
677 v = iterative_hash_hashval_t (streamer_tree_cache_get_hash (cache, ix), v); \
678 } while (0)
679
680 /* Hash TS_BASE. */
681 enum tree_code code = TREE_CODE (t);
682 hashval_t v = iterative_hash_host_wide_int (code, 0);
683 if (!TYPE_P (t))
684 {
685 v = iterative_hash_host_wide_int (TREE_SIDE_EFFECTS (t)
686 | (TREE_CONSTANT (t) << 1)
687 | (TREE_READONLY (t) << 2)
688 | (TREE_PUBLIC (t) << 3), v);
689 }
690 v = iterative_hash_host_wide_int (TREE_ADDRESSABLE (t)
691 | (TREE_THIS_VOLATILE (t) << 1), v);
692 if (DECL_P (t))
693 v = iterative_hash_host_wide_int (DECL_UNSIGNED (t), v);
694 else if (TYPE_P (t))
695 v = iterative_hash_host_wide_int (TYPE_UNSIGNED (t), v);
696 if (TYPE_P (t))
697 v = iterative_hash_host_wide_int (TYPE_ARTIFICIAL (t), v);
698 else
699 v = iterative_hash_host_wide_int (TREE_NO_WARNING (t), v);
700 v = iterative_hash_host_wide_int (TREE_NOTHROW (t)
701 | (TREE_STATIC (t) << 1)
702 | (TREE_PROTECTED (t) << 2)
703 | (TREE_DEPRECATED (t) << 3), v);
704 if (code != TREE_BINFO)
705 v = iterative_hash_host_wide_int (TREE_PRIVATE (t), v);
706 if (TYPE_P (t))
707 v = iterative_hash_host_wide_int (TYPE_SATURATING (t)
708 | (TYPE_ADDR_SPACE (t) << 1), v);
709 else if (code == SSA_NAME)
710 v = iterative_hash_host_wide_int (SSA_NAME_IS_DEFAULT_DEF (t), v);
711
712 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
713 {
714 int i;
715 v = iterative_hash_host_wide_int (TREE_INT_CST_NUNITS (t), v);
716 v = iterative_hash_host_wide_int (TREE_INT_CST_EXT_NUNITS (t), v);
717 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
718 v = iterative_hash_host_wide_int (TREE_INT_CST_ELT (t, i), v);
719 }
720
721 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
722 {
723 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
724 v = iterative_hash_host_wide_int (r.cl, v);
725 v = iterative_hash_host_wide_int (r.decimal
726 | (r.sign << 1)
727 | (r.signalling << 2)
728 | (r.canonical << 3), v);
729 v = iterative_hash_host_wide_int (r.uexp, v);
730 for (unsigned i = 0; i < SIGSZ; ++i)
731 v = iterative_hash_host_wide_int (r.sig[i], v);
732 }
733
734 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
735 {
736 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
737 v = iterative_hash_host_wide_int (f.mode, v);
738 v = iterative_hash_host_wide_int (f.data.low, v);
739 v = iterative_hash_host_wide_int (f.data.high, v);
740 }
741
742 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
743 {
744 v = iterative_hash_host_wide_int (DECL_MODE (t), v);
745 v = iterative_hash_host_wide_int (DECL_NONLOCAL (t)
746 | (DECL_VIRTUAL_P (t) << 1)
747 | (DECL_IGNORED_P (t) << 2)
748 | (DECL_ABSTRACT (t) << 3)
749 | (DECL_ARTIFICIAL (t) << 4)
750 | (DECL_USER_ALIGN (t) << 5)
751 | (DECL_PRESERVE_P (t) << 6)
752 | (DECL_EXTERNAL (t) << 7)
753 | (DECL_GIMPLE_REG_P (t) << 8), v);
754 v = iterative_hash_host_wide_int (DECL_ALIGN (t), v);
755 if (code == LABEL_DECL)
756 {
757 v = iterative_hash_host_wide_int (EH_LANDING_PAD_NR (t), v);
758 v = iterative_hash_host_wide_int (LABEL_DECL_UID (t), v);
759 }
760 else if (code == FIELD_DECL)
761 {
762 v = iterative_hash_host_wide_int (DECL_PACKED (t)
763 | (DECL_NONADDRESSABLE_P (t) << 1),
764 v);
765 v = iterative_hash_host_wide_int (DECL_OFFSET_ALIGN (t), v);
766 }
767 else if (code == VAR_DECL)
768 {
769 v = iterative_hash_host_wide_int (DECL_HAS_DEBUG_EXPR_P (t)
770 | (DECL_NONLOCAL_FRAME (t) << 1),
771 v);
772 }
773 if (code == RESULT_DECL
774 || code == PARM_DECL
775 || code == VAR_DECL)
776 {
777 v = iterative_hash_host_wide_int (DECL_BY_REFERENCE (t), v);
778 if (code == VAR_DECL
779 || code == PARM_DECL)
780 v = iterative_hash_host_wide_int (DECL_HAS_VALUE_EXPR_P (t), v);
781 }
782 }
783
784 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
785 v = iterative_hash_host_wide_int (DECL_REGISTER (t), v);
786
787 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
788 {
789 v = iterative_hash_host_wide_int ((DECL_COMMON (t))
790 | (DECL_DLLIMPORT_P (t) << 1)
791 | (DECL_WEAK (t) << 2)
792 | (DECL_SEEN_IN_BIND_EXPR_P (t) << 3)
793 | (DECL_COMDAT (t) << 4)
794 | (DECL_VISIBILITY_SPECIFIED (t) << 6),
795 v);
796 v = iterative_hash_host_wide_int (DECL_VISIBILITY (t), v);
797 if (code == VAR_DECL)
798 {
799 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
800 v = iterative_hash_host_wide_int (DECL_HARD_REGISTER (t)
801 | (DECL_IN_CONSTANT_POOL (t) << 1),
802 v);
803 v = iterative_hash_host_wide_int (DECL_TLS_MODEL (t), v);
804 }
805 if (TREE_CODE (t) == FUNCTION_DECL)
806 v = iterative_hash_host_wide_int (DECL_FINAL_P (t)
807 | (DECL_CXX_CONSTRUCTOR_P (t) << 1)
808 | (DECL_CXX_DESTRUCTOR_P (t) << 2),
809 v);
810 if (VAR_OR_FUNCTION_DECL_P (t))
811 v = iterative_hash_host_wide_int (DECL_INIT_PRIORITY (t), v);
812 }
813
814 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
815 {
816 v = iterative_hash_host_wide_int (DECL_BUILT_IN_CLASS (t), v);
817 v = iterative_hash_host_wide_int (DECL_STATIC_CONSTRUCTOR (t)
818 | (DECL_STATIC_DESTRUCTOR (t) << 1)
819 | (DECL_UNINLINABLE (t) << 2)
820 | (DECL_POSSIBLY_INLINED (t) << 3)
821 | (DECL_IS_NOVOPS (t) << 4)
822 | (DECL_IS_RETURNS_TWICE (t) << 5)
823 | (DECL_IS_MALLOC (t) << 6)
824 | (DECL_IS_OPERATOR_NEW (t) << 7)
825 | (DECL_DECLARED_INLINE_P (t) << 8)
826 | (DECL_STATIC_CHAIN (t) << 9)
827 | (DECL_NO_INLINE_WARNING_P (t) << 10)
828 | (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t) << 11)
829 | (DECL_NO_LIMIT_STACK (t) << 12)
830 | (DECL_DISREGARD_INLINE_LIMITS (t) << 13)
831 | (DECL_PURE_P (t) << 14)
832 | (DECL_LOOPING_CONST_OR_PURE_P (t) << 15), v);
833 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
834 v = iterative_hash_host_wide_int (DECL_FUNCTION_CODE (t), v);
835 if (DECL_STATIC_DESTRUCTOR (t))
836 v = iterative_hash_host_wide_int (DECL_FINI_PRIORITY (t), v);
837 }
838
839 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
840 {
841 v = iterative_hash_host_wide_int (TYPE_MODE (t), v);
842 v = iterative_hash_host_wide_int (TYPE_STRING_FLAG (t)
843 | (TYPE_NO_FORCE_BLK (t) << 1)
844 | (TYPE_NEEDS_CONSTRUCTING (t) << 2)
845 | (TYPE_PACKED (t) << 3)
846 | (TYPE_RESTRICT (t) << 4)
847 | (TYPE_USER_ALIGN (t) << 5)
848 | (TYPE_READONLY (t) << 6), v);
849 if (RECORD_OR_UNION_TYPE_P (t))
850 {
851 v = iterative_hash_host_wide_int (TYPE_TRANSPARENT_AGGR (t)
852 | (TYPE_FINAL_P (t) << 1), v);
853 }
854 else if (code == ARRAY_TYPE)
855 v = iterative_hash_host_wide_int (TYPE_NONALIASED_COMPONENT (t), v);
856 v = iterative_hash_host_wide_int (TYPE_PRECISION (t), v);
857 v = iterative_hash_host_wide_int (TYPE_ALIGN (t), v);
858 v = iterative_hash_host_wide_int ((TYPE_ALIAS_SET (t) == 0
859 || (!in_lto_p
860 && get_alias_set (t) == 0))
861 ? 0 : -1, v);
862 }
863
864 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
865 v = iterative_hash (TRANSLATION_UNIT_LANGUAGE (t),
866 strlen (TRANSLATION_UNIT_LANGUAGE (t)), v);
867
868 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
869 v = iterative_hash (t, sizeof (struct cl_target_option), v);
870
871 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
872 v = iterative_hash (t, sizeof (struct cl_optimization), v);
873
874 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
875 v = iterative_hash_host_wide_int (IDENTIFIER_HASH_VALUE (t), v);
876
877 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
878 v = iterative_hash (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t), v);
879
880 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
881 {
882 if (POINTER_TYPE_P (t))
883 {
884 /* For pointers factor in the pointed-to type recursively as
885 we cannot recurse through only pointers.
886 ??? We can generalize this by keeping track of the
887 in-SCC edges for each tree (or arbitrarily the first
888 such edge) and hashing that in in a second stage
889 (instead of the quadratic mixing of the SCC we do now). */
890 hashval_t x;
891 unsigned ix;
892 if (streamer_tree_cache_lookup (cache, TREE_TYPE (t), &ix))
893 x = streamer_tree_cache_get_hash (cache, ix);
894 else
895 x = hash_tree (cache, TREE_TYPE (t));
896 v = iterative_hash_hashval_t (x, v);
897 }
898 else if (code != IDENTIFIER_NODE)
899 visit (TREE_TYPE (t));
900 }
901
902 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
903 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
904 visit (VECTOR_CST_ELT (t, i));
905
906 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
907 {
908 visit (TREE_REALPART (t));
909 visit (TREE_IMAGPART (t));
910 }
911
912 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
913 {
914 /* Drop names that were created for anonymous entities. */
915 if (DECL_NAME (t)
916 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
917 && ANON_AGGRNAME_P (DECL_NAME (t)))
918 ;
919 else
920 visit (DECL_NAME (t));
921 if (DECL_FILE_SCOPE_P (t))
922 ;
923 else
924 visit (DECL_CONTEXT (t));
925 }
926
927 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
928 {
929 visit (DECL_SIZE (t));
930 visit (DECL_SIZE_UNIT (t));
931 visit (DECL_ATTRIBUTES (t));
932 if ((code == VAR_DECL
933 || code == PARM_DECL)
934 && DECL_HAS_VALUE_EXPR_P (t))
935 visit (DECL_VALUE_EXPR (t));
936 if (code == VAR_DECL
937 && DECL_HAS_DEBUG_EXPR_P (t))
938 visit (DECL_DEBUG_EXPR (t));
939 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
940 be able to call get_symbol_initial_value. */
941 }
942
943 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
944 {
945 if (code == TYPE_DECL)
946 visit (DECL_ORIGINAL_TYPE (t));
947 visit (DECL_VINDEX (t));
948 }
949
950 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
951 {
952 if (DECL_ASSEMBLER_NAME_SET_P (t))
953 visit (DECL_ASSEMBLER_NAME (t));
954 visit (DECL_SECTION_NAME (t));
955 visit (DECL_COMDAT_GROUP (t));
956 }
957
958 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
959 {
960 visit (DECL_FIELD_OFFSET (t));
961 visit (DECL_BIT_FIELD_TYPE (t));
962 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
963 visit (DECL_FIELD_BIT_OFFSET (t));
964 visit (DECL_FCONTEXT (t));
965 }
966
967 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
968 {
969 visit (DECL_FUNCTION_PERSONALITY (t));
970 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
971 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
972 }
973
974 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
975 {
976 visit (TYPE_SIZE (t));
977 visit (TYPE_SIZE_UNIT (t));
978 visit (TYPE_ATTRIBUTES (t));
979 visit (TYPE_NAME (t));
980 visit (TYPE_MAIN_VARIANT (t));
981 if (TYPE_FILE_SCOPE_P (t))
982 ;
983 else
984 visit (TYPE_CONTEXT (t));
985 visit (TYPE_STUB_DECL (t));
986 }
987
988 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
989 {
990 if (code == ENUMERAL_TYPE)
991 visit (TYPE_VALUES (t));
992 else if (code == ARRAY_TYPE)
993 visit (TYPE_DOMAIN (t));
994 else if (RECORD_OR_UNION_TYPE_P (t))
995 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
996 visit (f);
997 else if (code == FUNCTION_TYPE
998 || code == METHOD_TYPE)
999 visit (TYPE_ARG_TYPES (t));
1000 if (!POINTER_TYPE_P (t))
1001 visit (TYPE_MINVAL (t));
1002 visit (TYPE_MAXVAL (t));
1003 if (RECORD_OR_UNION_TYPE_P (t))
1004 visit (TYPE_BINFO (t));
1005 }
1006
1007 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1008 {
1009 visit (TREE_PURPOSE (t));
1010 visit (TREE_VALUE (t));
1011 visit (TREE_CHAIN (t));
1012 }
1013
1014 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1015 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1016 visit (TREE_VEC_ELT (t, i));
1017
1018 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1019 {
1020 v = iterative_hash_host_wide_int (TREE_OPERAND_LENGTH (t), v);
1021 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1022 visit (TREE_OPERAND (t, i));
1023 }
1024
1025 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1026 {
1027 unsigned i;
1028 tree b;
1029 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1030 visit (b);
1031 visit (BINFO_OFFSET (t));
1032 visit (BINFO_VTABLE (t));
1033 visit (BINFO_VPTR_FIELD (t));
1034 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1035 visit (b);
1036 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1037 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1038 }
1039
1040 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1041 {
1042 unsigned i;
1043 tree index, value;
1044 v = iterative_hash_host_wide_int (CONSTRUCTOR_NELTS (t), v);
1045 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1046 {
1047 visit (index);
1048 visit (value);
1049 }
1050 }
1051
1052 return v;
1053
1054 #undef visit
1055 }
1056
1057 /* Compare two SCC entries by their hash value for qsorting them. */
1058
1059 static int
1060 scc_entry_compare (const void *p1_, const void *p2_)
1061 {
1062 const scc_entry *p1 = (const scc_entry *) p1_;
1063 const scc_entry *p2 = (const scc_entry *) p2_;
1064 if (p1->hash < p2->hash)
1065 return -1;
1066 else if (p1->hash > p2->hash)
1067 return 1;
1068 return 0;
1069 }
1070
1071 /* Return a hash value for the SCC on the SCC stack from FIRST with
1072 size SIZE. */
1073
1074 static hashval_t
1075 hash_scc (struct streamer_tree_cache_d *cache, unsigned first, unsigned size)
1076 {
1077 /* Compute hash values for the SCC members. */
1078 for (unsigned i = 0; i < size; ++i)
1079 sccstack[first+i].hash = hash_tree (cache, sccstack[first+i].t);
1080
1081 if (size == 1)
1082 return sccstack[first].hash;
1083
1084 /* Sort the SCC of type, hash pairs so that when we mix in
1085 all members of the SCC the hash value becomes independent on
1086 the order we visited the SCC. Disregard hashes equal to
1087 the hash of the tree we mix into because we cannot guarantee
1088 a stable sort for those across different TUs. */
1089 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1090 hashval_t *tem = XALLOCAVEC (hashval_t, size);
1091 for (unsigned i = 0; i < size; ++i)
1092 {
1093 hashval_t hash = sccstack[first+i].hash;
1094 hashval_t orig_hash = hash;
1095 unsigned j;
1096 /* Skip same hashes. */
1097 for (j = i + 1;
1098 j < size && sccstack[first+j].hash == orig_hash; ++j)
1099 ;
1100 for (; j < size; ++j)
1101 hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
1102 for (j = 0; sccstack[first+j].hash != orig_hash; ++j)
1103 hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
1104 tem[i] = hash;
1105 }
1106 hashval_t scc_hash = 0;
1107 for (unsigned i = 0; i < size; ++i)
1108 {
1109 sccstack[first+i].hash = tem[i];
1110 scc_hash = iterative_hash_hashval_t (tem[i], scc_hash);
1111 }
1112 return scc_hash;
1113 }
1114
1115 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1116 already in the streamer cache. Main routine called for
1117 each visit of EXPR. */
1118
1119 static void
1120 DFS_write_tree (struct output_block *ob, sccs *from_state,
1121 tree expr, bool ref_p, bool this_ref_p)
1122 {
1123 unsigned ix;
1124 sccs **slot;
1125
1126 /* Handle special cases. */
1127 if (expr == NULL_TREE)
1128 return;
1129
1130 /* Do not DFS walk into indexable trees. */
1131 if (this_ref_p && tree_is_indexable (expr))
1132 return;
1133
1134 /* Check if we already streamed EXPR. */
1135 if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
1136 return;
1137
1138 slot = (sccs **)pointer_map_insert (sccstate, expr);
1139 sccs *cstate = *slot;
1140 if (!cstate)
1141 {
1142 scc_entry e = { expr, 0 };
1143 /* Not yet visited. DFS recurse and push it onto the stack. */
1144 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
1145 sccstack.safe_push (e);
1146 cstate->dfsnum = next_dfs_num++;
1147 cstate->low = cstate->dfsnum;
1148
1149 if (streamer_handle_as_builtin_p (expr))
1150 ;
1151 else if (TREE_CODE (expr) == INTEGER_CST
1152 && !TREE_OVERFLOW (expr))
1153 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
1154 else
1155 {
1156 DFS_write_tree_body (ob, expr, cstate, ref_p);
1157
1158 /* Walk any LTO-specific edges. */
1159 if (DECL_P (expr)
1160 && TREE_CODE (expr) != FUNCTION_DECL
1161 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
1162 {
1163 /* Handle DECL_INITIAL for symbols. */
1164 tree initial = get_symbol_initial_value (ob, expr);
1165 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
1166 }
1167 }
1168
1169 /* See if we found an SCC. */
1170 if (cstate->low == cstate->dfsnum)
1171 {
1172 unsigned first, size;
1173 tree x;
1174
1175 /* Pop the SCC and compute its size. */
1176 first = sccstack.length ();
1177 do
1178 {
1179 x = sccstack[--first].t;
1180 }
1181 while (x != expr);
1182 size = sccstack.length () - first;
1183
1184 /* No need to compute hashes for LTRANS units, we don't perform
1185 any merging there. */
1186 hashval_t scc_hash = 0;
1187 unsigned scc_entry_len = 0;
1188 if (!flag_wpa)
1189 {
1190 scc_hash = hash_scc (ob->writer_cache, first, size);
1191
1192 /* Put the entries with the least number of collisions first. */
1193 unsigned entry_start = 0;
1194 scc_entry_len = size + 1;
1195 for (unsigned i = 0; i < size;)
1196 {
1197 unsigned from = i;
1198 for (i = i + 1; i < size
1199 && (sccstack[first + i].hash
1200 == sccstack[first + from].hash); ++i)
1201 ;
1202 if (i - from < scc_entry_len)
1203 {
1204 scc_entry_len = i - from;
1205 entry_start = from;
1206 }
1207 }
1208 for (unsigned i = 0; i < scc_entry_len; ++i)
1209 {
1210 scc_entry tem = sccstack[first + i];
1211 sccstack[first + i] = sccstack[first + entry_start + i];
1212 sccstack[first + entry_start + i] = tem;
1213 }
1214 }
1215
1216 /* Write LTO_tree_scc. */
1217 streamer_write_record_start (ob, LTO_tree_scc);
1218 streamer_write_uhwi (ob, size);
1219 streamer_write_uhwi (ob, scc_hash);
1220
1221 /* Write size-1 SCCs without wrapping them inside SCC bundles.
1222 All INTEGER_CSTs need to be handled this way as we need
1223 their type to materialize them. Also builtins are handled
1224 this way.
1225 ??? We still wrap these in LTO_tree_scc so at the
1226 input side we can properly identify the tree we want
1227 to ultimatively return. */
1228 size_t old_len = ob->writer_cache->nodes.length ();
1229 if (size == 1)
1230 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
1231 else
1232 {
1233 /* Write the size of the SCC entry candidates. */
1234 streamer_write_uhwi (ob, scc_entry_len);
1235
1236 /* Write all headers and populate the streamer cache. */
1237 for (unsigned i = 0; i < size; ++i)
1238 {
1239 hashval_t hash = sccstack[first+i].hash;
1240 tree t = sccstack[first+i].t;
1241 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
1242 t, hash, &ix);
1243 gcc_assert (!exists_p);
1244
1245 if (!lto_is_streamable (t))
1246 internal_error ("tree code %qs is not supported "
1247 "in LTO streams",
1248 get_tree_code_name (TREE_CODE (t)));
1249
1250 gcc_checking_assert (!streamer_handle_as_builtin_p (t));
1251
1252 /* Write the header, containing everything needed to
1253 materialize EXPR on the reading side. */
1254 streamer_write_tree_header (ob, t);
1255 }
1256
1257 /* Write the bitpacks and tree references. */
1258 for (unsigned i = 0; i < size; ++i)
1259 {
1260 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
1261
1262 /* Mark the end of the tree. */
1263 streamer_write_zero (ob);
1264 }
1265 }
1266 gcc_assert (old_len + size == ob->writer_cache->nodes.length ());
1267
1268 /* Finally truncate the vector. */
1269 sccstack.truncate (first);
1270
1271 if (from_state)
1272 from_state->low = MIN (from_state->low, cstate->low);
1273 return;
1274 }
1275
1276 if (from_state)
1277 from_state->low = MIN (from_state->low, cstate->low);
1278 }
1279 gcc_checking_assert (from_state);
1280 if (cstate->dfsnum < from_state->dfsnum)
1281 from_state->low = MIN (cstate->dfsnum, from_state->low);
1282 }
1283
1284
1285 /* Emit the physical representation of tree node EXPR to output block
1286 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
1287 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1288
1289 void
1290 lto_output_tree (struct output_block *ob, tree expr,
1291 bool ref_p, bool this_ref_p)
1292 {
1293 unsigned ix;
1294 bool existed_p;
1295
1296 if (expr == NULL_TREE)
1297 {
1298 streamer_write_record_start (ob, LTO_null);
1299 return;
1300 }
1301
1302 if (this_ref_p && tree_is_indexable (expr))
1303 {
1304 lto_output_tree_ref (ob, expr);
1305 return;
1306 }
1307
1308 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1309 if (existed_p)
1310 {
1311 /* If a node has already been streamed out, make sure that
1312 we don't write it more than once. Otherwise, the reader
1313 will instantiate two different nodes for the same object. */
1314 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1315 streamer_write_uhwi (ob, ix);
1316 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1317 lto_tree_code_to_tag (TREE_CODE (expr)));
1318 lto_stats.num_pickle_refs_output++;
1319 }
1320 else
1321 {
1322 /* This is the first time we see EXPR, write all reachable
1323 trees to OB. */
1324 static bool in_dfs_walk;
1325
1326 /* Protect against recursion which means disconnect between
1327 what tree edges we walk in the DFS walk and what edges
1328 we stream out. */
1329 gcc_assert (!in_dfs_walk);
1330
1331 /* Start the DFS walk. */
1332 /* Save ob state ... */
1333 /* let's see ... */
1334 in_dfs_walk = true;
1335 sccstate = pointer_map_create ();
1336 gcc_obstack_init (&sccstate_obstack);
1337 next_dfs_num = 1;
1338 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
1339 sccstack.release ();
1340 pointer_map_destroy (sccstate);
1341 obstack_free (&sccstate_obstack, NULL);
1342 in_dfs_walk = false;
1343
1344 /* Finally append a reference to the tree we were writing.
1345 ??? If expr ended up as a singleton we could have
1346 inlined it here and avoid outputting a reference. */
1347 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1348 gcc_assert (existed_p);
1349 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1350 streamer_write_uhwi (ob, ix);
1351 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1352 lto_tree_code_to_tag (TREE_CODE (expr)));
1353 lto_stats.num_pickle_refs_output++;
1354 }
1355 }
1356
1357
1358 /* Output to OB a list of try/catch handlers starting with FIRST. */
1359
1360 static void
1361 output_eh_try_list (struct output_block *ob, eh_catch first)
1362 {
1363 eh_catch n;
1364
1365 for (n = first; n; n = n->next_catch)
1366 {
1367 streamer_write_record_start (ob, LTO_eh_catch);
1368 stream_write_tree (ob, n->type_list, true);
1369 stream_write_tree (ob, n->filter_list, true);
1370 stream_write_tree (ob, n->label, true);
1371 }
1372
1373 streamer_write_record_start (ob, LTO_null);
1374 }
1375
1376
1377 /* Output EH region R in function FN to OB. CURR_RN is the slot index
1378 that is being emitted in FN->EH->REGION_ARRAY. This is used to
1379 detect EH region sharing. */
1380
1381 static void
1382 output_eh_region (struct output_block *ob, eh_region r)
1383 {
1384 enum LTO_tags tag;
1385
1386 if (r == NULL)
1387 {
1388 streamer_write_record_start (ob, LTO_null);
1389 return;
1390 }
1391
1392 if (r->type == ERT_CLEANUP)
1393 tag = LTO_ert_cleanup;
1394 else if (r->type == ERT_TRY)
1395 tag = LTO_ert_try;
1396 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1397 tag = LTO_ert_allowed_exceptions;
1398 else if (r->type == ERT_MUST_NOT_THROW)
1399 tag = LTO_ert_must_not_throw;
1400 else
1401 gcc_unreachable ();
1402
1403 streamer_write_record_start (ob, tag);
1404 streamer_write_hwi (ob, r->index);
1405
1406 if (r->outer)
1407 streamer_write_hwi (ob, r->outer->index);
1408 else
1409 streamer_write_zero (ob);
1410
1411 if (r->inner)
1412 streamer_write_hwi (ob, r->inner->index);
1413 else
1414 streamer_write_zero (ob);
1415
1416 if (r->next_peer)
1417 streamer_write_hwi (ob, r->next_peer->index);
1418 else
1419 streamer_write_zero (ob);
1420
1421 if (r->type == ERT_TRY)
1422 {
1423 output_eh_try_list (ob, r->u.eh_try.first_catch);
1424 }
1425 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1426 {
1427 stream_write_tree (ob, r->u.allowed.type_list, true);
1428 stream_write_tree (ob, r->u.allowed.label, true);
1429 streamer_write_uhwi (ob, r->u.allowed.filter);
1430 }
1431 else if (r->type == ERT_MUST_NOT_THROW)
1432 {
1433 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1434 bitpack_d bp = bitpack_create (ob->main_stream);
1435 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1436 streamer_write_bitpack (&bp);
1437 }
1438
1439 if (r->landing_pads)
1440 streamer_write_hwi (ob, r->landing_pads->index);
1441 else
1442 streamer_write_zero (ob);
1443 }
1444
1445
1446 /* Output landing pad LP to OB. */
1447
1448 static void
1449 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1450 {
1451 if (lp == NULL)
1452 {
1453 streamer_write_record_start (ob, LTO_null);
1454 return;
1455 }
1456
1457 streamer_write_record_start (ob, LTO_eh_landing_pad);
1458 streamer_write_hwi (ob, lp->index);
1459 if (lp->next_lp)
1460 streamer_write_hwi (ob, lp->next_lp->index);
1461 else
1462 streamer_write_zero (ob);
1463
1464 if (lp->region)
1465 streamer_write_hwi (ob, lp->region->index);
1466 else
1467 streamer_write_zero (ob);
1468
1469 stream_write_tree (ob, lp->post_landing_pad, true);
1470 }
1471
1472
1473 /* Output the existing eh_table to OB. */
1474
1475 static void
1476 output_eh_regions (struct output_block *ob, struct function *fn)
1477 {
1478 if (fn->eh && fn->eh->region_tree)
1479 {
1480 unsigned i;
1481 eh_region eh;
1482 eh_landing_pad lp;
1483 tree ttype;
1484
1485 streamer_write_record_start (ob, LTO_eh_table);
1486
1487 /* Emit the index of the root of the EH region tree. */
1488 streamer_write_hwi (ob, fn->eh->region_tree->index);
1489
1490 /* Emit all the EH regions in the region array. */
1491 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1492 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1493 output_eh_region (ob, eh);
1494
1495 /* Emit all landing pads. */
1496 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1497 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1498 output_eh_lp (ob, lp);
1499
1500 /* Emit all the runtime type data. */
1501 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1502 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1503 stream_write_tree (ob, ttype, true);
1504
1505 /* Emit the table of action chains. */
1506 if (targetm.arm_eabi_unwinder)
1507 {
1508 tree t;
1509 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1510 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1511 stream_write_tree (ob, t, true);
1512 }
1513 else
1514 {
1515 uchar c;
1516 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1517 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1518 streamer_write_char_stream (ob->main_stream, c);
1519 }
1520 }
1521
1522 /* The LTO_null either terminates the record or indicates that there
1523 are no eh_records at all. */
1524 streamer_write_record_start (ob, LTO_null);
1525 }
1526
1527
1528 /* Output all of the active ssa names to the ssa_names stream. */
1529
1530 static void
1531 output_ssa_names (struct output_block *ob, struct function *fn)
1532 {
1533 unsigned int i, len;
1534
1535 len = vec_safe_length (SSANAMES (fn));
1536 streamer_write_uhwi (ob, len);
1537
1538 for (i = 1; i < len; i++)
1539 {
1540 tree ptr = (*SSANAMES (fn))[i];
1541
1542 if (ptr == NULL_TREE
1543 || SSA_NAME_IN_FREE_LIST (ptr)
1544 || virtual_operand_p (ptr))
1545 continue;
1546
1547 streamer_write_uhwi (ob, i);
1548 streamer_write_char_stream (ob->main_stream,
1549 SSA_NAME_IS_DEFAULT_DEF (ptr));
1550 if (SSA_NAME_VAR (ptr))
1551 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1552 else
1553 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1554 stream_write_tree (ob, TREE_TYPE (ptr), true);
1555 }
1556
1557 streamer_write_zero (ob);
1558 }
1559
1560
1561 /* Output the cfg. */
1562
1563 static void
1564 output_cfg (struct output_block *ob, struct function *fn)
1565 {
1566 struct lto_output_stream *tmp_stream = ob->main_stream;
1567 basic_block bb;
1568
1569 ob->main_stream = ob->cfg_stream;
1570
1571 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1572 profile_status_for_function (fn));
1573
1574 /* Output the number of the highest basic block. */
1575 streamer_write_uhwi (ob, last_basic_block_for_function (fn));
1576
1577 FOR_ALL_BB_FN (bb, fn)
1578 {
1579 edge_iterator ei;
1580 edge e;
1581
1582 streamer_write_hwi (ob, bb->index);
1583
1584 /* Output the successors and the edge flags. */
1585 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1586 FOR_EACH_EDGE (e, ei, bb->succs)
1587 {
1588 streamer_write_uhwi (ob, e->dest->index);
1589 streamer_write_hwi (ob, e->probability);
1590 streamer_write_gcov_count (ob, e->count);
1591 streamer_write_uhwi (ob, e->flags);
1592 }
1593 }
1594
1595 streamer_write_hwi (ob, -1);
1596
1597 bb = ENTRY_BLOCK_PTR;
1598 while (bb->next_bb)
1599 {
1600 streamer_write_hwi (ob, bb->next_bb->index);
1601 bb = bb->next_bb;
1602 }
1603
1604 streamer_write_hwi (ob, -1);
1605
1606 /* ??? The cfgloop interface is tied to cfun. */
1607 gcc_assert (cfun == fn);
1608
1609 /* Output the number of loops. */
1610 streamer_write_uhwi (ob, number_of_loops (fn));
1611
1612 /* Output each loop, skipping the tree root which has number zero. */
1613 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1614 {
1615 struct loop *loop = get_loop (fn, i);
1616
1617 /* Write the index of the loop header. That's enough to rebuild
1618 the loop tree on the reader side. Stream -1 for an unused
1619 loop entry. */
1620 if (!loop)
1621 {
1622 streamer_write_hwi (ob, -1);
1623 continue;
1624 }
1625 else
1626 streamer_write_hwi (ob, loop->header->index);
1627
1628 /* Write everything copy_loop_info copies. */
1629 streamer_write_enum (ob->main_stream,
1630 loop_estimation, EST_LAST, loop->estimate_state);
1631 streamer_write_hwi (ob, loop->any_upper_bound);
1632 if (loop->any_upper_bound)
1633 {
1634 int len = loop->nb_iterations_upper_bound.get_len ();
1635 int i;
1636
1637 streamer_write_uhwi (ob, loop->nb_iterations_upper_bound.get_precision ());
1638 streamer_write_uhwi (ob, len);
1639 for (i = 0; i < len; i++)
1640 streamer_write_hwi (ob, loop->nb_iterations_upper_bound.elt (i));
1641 }
1642 streamer_write_hwi (ob, loop->any_estimate);
1643 if (loop->any_estimate)
1644 {
1645 int len = loop->nb_iterations_estimate.get_len ();
1646 int i;
1647
1648 streamer_write_uhwi (ob, loop->nb_iterations_estimate.get_precision ());
1649 streamer_write_uhwi (ob, len);
1650 for (i = 0; i < len; i++)
1651 streamer_write_hwi (ob, loop->nb_iterations_estimate.elt (i));
1652 }
1653 }
1654
1655 ob->main_stream = tmp_stream;
1656 }
1657
1658
1659 /* Create the header in the file using OB. If the section type is for
1660 a function, set FN to the decl for that function. */
1661
1662 void
1663 produce_asm (struct output_block *ob, tree fn)
1664 {
1665 enum lto_section_type section_type = ob->section_type;
1666 struct lto_function_header header;
1667 char *section_name;
1668 struct lto_output_stream *header_stream;
1669
1670 if (section_type == LTO_section_function_body)
1671 {
1672 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1673 section_name = lto_get_section_name (section_type, name, NULL);
1674 }
1675 else
1676 section_name = lto_get_section_name (section_type, NULL, NULL);
1677
1678 lto_begin_section (section_name, !flag_wpa);
1679 free (section_name);
1680
1681 /* The entire header is stream computed here. */
1682 memset (&header, 0, sizeof (struct lto_function_header));
1683
1684 /* Write the header. */
1685 header.lto_header.major_version = LTO_major_version;
1686 header.lto_header.minor_version = LTO_minor_version;
1687
1688 header.compressed_size = 0;
1689
1690 if (section_type == LTO_section_function_body)
1691 header.cfg_size = ob->cfg_stream->total_size;
1692 header.main_size = ob->main_stream->total_size;
1693 header.string_size = ob->string_stream->total_size;
1694
1695 header_stream = XCNEW (struct lto_output_stream);
1696 lto_output_data_stream (header_stream, &header, sizeof header);
1697 lto_write_stream (header_stream);
1698 free (header_stream);
1699
1700 /* Put all of the gimple and the string table out the asm file as a
1701 block of text. */
1702 if (section_type == LTO_section_function_body)
1703 lto_write_stream (ob->cfg_stream);
1704 lto_write_stream (ob->main_stream);
1705 lto_write_stream (ob->string_stream);
1706
1707 lto_end_section ();
1708 }
1709
1710
1711 /* Output the base body of struct function FN using output block OB. */
1712
1713 static void
1714 output_struct_function_base (struct output_block *ob, struct function *fn)
1715 {
1716 struct bitpack_d bp;
1717 unsigned i;
1718 tree t;
1719
1720 /* Output the static chain and non-local goto save area. */
1721 stream_write_tree (ob, fn->static_chain_decl, true);
1722 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
1723
1724 /* Output all the local variables in the function. */
1725 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
1726 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
1727 stream_write_tree (ob, t, true);
1728
1729 /* Output current IL state of the function. */
1730 streamer_write_uhwi (ob, fn->curr_properties);
1731
1732 /* Write all the attributes for FN. */
1733 bp = bitpack_create (ob->main_stream);
1734 bp_pack_value (&bp, fn->is_thunk, 1);
1735 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
1736 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
1737 bp_pack_value (&bp, fn->returns_struct, 1);
1738 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
1739 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
1740 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
1741 bp_pack_value (&bp, fn->after_inlining, 1);
1742 bp_pack_value (&bp, fn->stdarg, 1);
1743 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
1744 bp_pack_value (&bp, fn->calls_alloca, 1);
1745 bp_pack_value (&bp, fn->calls_setjmp, 1);
1746 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
1747 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
1748
1749 /* Output the function start and end loci. */
1750 stream_output_location (ob, &bp, fn->function_start_locus);
1751 stream_output_location (ob, &bp, fn->function_end_locus);
1752
1753 streamer_write_bitpack (&bp);
1754 }
1755
1756
1757 /* Output the body of function NODE->DECL. */
1758
1759 static void
1760 output_function (struct cgraph_node *node)
1761 {
1762 tree function;
1763 struct function *fn;
1764 basic_block bb;
1765 struct output_block *ob;
1766
1767 function = node->decl;
1768 fn = DECL_STRUCT_FUNCTION (function);
1769 ob = create_output_block (LTO_section_function_body);
1770
1771 clear_line_info (ob);
1772 ob->cgraph_node = node;
1773
1774 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
1775
1776 /* Set current_function_decl and cfun. */
1777 push_cfun (fn);
1778
1779 /* Make string 0 be a NULL string. */
1780 streamer_write_char_stream (ob->string_stream, 0);
1781
1782 streamer_write_record_start (ob, LTO_function);
1783
1784 /* Output decls for parameters and args. */
1785 stream_write_tree (ob, DECL_RESULT (function), true);
1786 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
1787
1788 /* Output DECL_INITIAL for the function, which contains the tree of
1789 lexical scopes. */
1790 stream_write_tree (ob, DECL_INITIAL (function), true);
1791
1792 /* We also stream abstract functions where we stream only stuff needed for
1793 debug info. */
1794 if (gimple_has_body_p (function))
1795 {
1796 streamer_write_uhwi (ob, 1);
1797 output_struct_function_base (ob, fn);
1798
1799 /* Output all the SSA names used in the function. */
1800 output_ssa_names (ob, fn);
1801
1802 /* Output any exception handling regions. */
1803 output_eh_regions (ob, fn);
1804
1805
1806 /* We will renumber the statements. The code that does this uses
1807 the same ordering that we use for serializing them so we can use
1808 the same code on the other end and not have to write out the
1809 statement numbers. We do not assign UIDs to PHIs here because
1810 virtual PHIs get re-computed on-the-fly which would make numbers
1811 inconsistent. */
1812 set_gimple_stmt_max_uid (cfun, 0);
1813 FOR_ALL_BB (bb)
1814 {
1815 gimple_stmt_iterator gsi;
1816 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1817 {
1818 gimple stmt = gsi_stmt (gsi);
1819
1820 /* Virtual PHIs are not going to be streamed. */
1821 if (!virtual_operand_p (gimple_phi_result (stmt)))
1822 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1823 }
1824 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1825 {
1826 gimple stmt = gsi_stmt (gsi);
1827 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1828 }
1829 }
1830 /* To avoid keeping duplicate gimple IDs in the statements, renumber
1831 virtual phis now. */
1832 FOR_ALL_BB (bb)
1833 {
1834 gimple_stmt_iterator gsi;
1835 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1836 {
1837 gimple stmt = gsi_stmt (gsi);
1838 if (virtual_operand_p (gimple_phi_result (stmt)))
1839 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1840 }
1841 }
1842
1843 /* Output the code for the function. */
1844 FOR_ALL_BB_FN (bb, fn)
1845 output_bb (ob, bb, fn);
1846
1847 /* The terminator for this function. */
1848 streamer_write_record_start (ob, LTO_null);
1849
1850 output_cfg (ob, fn);
1851
1852 pop_cfun ();
1853 }
1854 else
1855 streamer_write_uhwi (ob, 0);
1856
1857 /* Create a section to hold the pickled output of this function. */
1858 produce_asm (ob, function);
1859
1860 destroy_output_block (ob);
1861 }
1862
1863
1864 /* Emit toplevel asms. */
1865
1866 void
1867 lto_output_toplevel_asms (void)
1868 {
1869 struct output_block *ob;
1870 struct asm_node *can;
1871 char *section_name;
1872 struct lto_output_stream *header_stream;
1873 struct lto_asm_header header;
1874
1875 if (! asm_nodes)
1876 return;
1877
1878 ob = create_output_block (LTO_section_asm);
1879
1880 /* Make string 0 be a NULL string. */
1881 streamer_write_char_stream (ob->string_stream, 0);
1882
1883 for (can = asm_nodes; can; can = can->next)
1884 {
1885 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
1886 streamer_write_hwi (ob, can->order);
1887 }
1888
1889 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
1890
1891 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
1892 lto_begin_section (section_name, !flag_wpa);
1893 free (section_name);
1894
1895 /* The entire header stream is computed here. */
1896 memset (&header, 0, sizeof (header));
1897
1898 /* Write the header. */
1899 header.lto_header.major_version = LTO_major_version;
1900 header.lto_header.minor_version = LTO_minor_version;
1901
1902 header.main_size = ob->main_stream->total_size;
1903 header.string_size = ob->string_stream->total_size;
1904
1905 header_stream = XCNEW (struct lto_output_stream);
1906 lto_output_data_stream (header_stream, &header, sizeof (header));
1907 lto_write_stream (header_stream);
1908 free (header_stream);
1909
1910 /* Put all of the gimple and the string table out the asm file as a
1911 block of text. */
1912 lto_write_stream (ob->main_stream);
1913 lto_write_stream (ob->string_stream);
1914
1915 lto_end_section ();
1916
1917 destroy_output_block (ob);
1918 }
1919
1920
1921 /* Copy the function body of NODE without deserializing. */
1922
1923 static void
1924 copy_function (struct cgraph_node *node)
1925 {
1926 tree function = node->decl;
1927 struct lto_file_decl_data *file_data = node->lto_file_data;
1928 struct lto_output_stream *output_stream = XCNEW (struct lto_output_stream);
1929 const char *data;
1930 size_t len;
1931 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
1932 char *section_name =
1933 lto_get_section_name (LTO_section_function_body, name, NULL);
1934 size_t i, j;
1935 struct lto_in_decl_state *in_state;
1936 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
1937
1938 lto_begin_section (section_name, !flag_wpa);
1939 free (section_name);
1940
1941 /* We may have renamed the declaration, e.g., a static function. */
1942 name = lto_get_decl_name_mapping (file_data, name);
1943
1944 data = lto_get_section_data (file_data, LTO_section_function_body,
1945 name, &len);
1946 gcc_assert (data);
1947
1948 /* Do a bit copy of the function body. */
1949 lto_output_data_stream (output_stream, data, len);
1950 lto_write_stream (output_stream);
1951
1952 /* Copy decls. */
1953 in_state =
1954 lto_get_function_in_decl_state (node->lto_file_data, function);
1955 gcc_assert (in_state);
1956
1957 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
1958 {
1959 size_t n = in_state->streams[i].size;
1960 tree *trees = in_state->streams[i].trees;
1961 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
1962
1963 /* The out state must have the same indices and the in state.
1964 So just copy the vector. All the encoders in the in state
1965 must be empty where we reach here. */
1966 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
1967 encoder->trees.reserve_exact (n);
1968 for (j = 0; j < n; j++)
1969 encoder->trees.safe_push (trees[j]);
1970 }
1971
1972 lto_free_section_data (file_data, LTO_section_function_body, name,
1973 data, len);
1974 free (output_stream);
1975 lto_end_section ();
1976 }
1977
1978
1979 /* Main entry point from the pass manager. */
1980
1981 static void
1982 lto_output (void)
1983 {
1984 struct lto_out_decl_state *decl_state;
1985 #ifdef ENABLE_CHECKING
1986 bitmap output = lto_bitmap_alloc ();
1987 #endif
1988 int i, n_nodes;
1989 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
1990
1991 /* Initialize the streamer. */
1992 lto_streamer_init ();
1993
1994 n_nodes = lto_symtab_encoder_size (encoder);
1995 /* Process only the functions with bodies. */
1996 for (i = 0; i < n_nodes; i++)
1997 {
1998 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
1999 cgraph_node *node = dyn_cast <cgraph_node> (snode);
2000 if (node
2001 && lto_symtab_encoder_encode_body_p (encoder, node)
2002 && !node->alias)
2003 {
2004 #ifdef ENABLE_CHECKING
2005 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2006 bitmap_set_bit (output, DECL_UID (node->decl));
2007 #endif
2008 decl_state = lto_new_out_decl_state ();
2009 lto_push_out_decl_state (decl_state);
2010 if (gimple_has_body_p (node->decl) || !flag_wpa)
2011 output_function (node);
2012 else
2013 copy_function (node);
2014 gcc_assert (lto_get_out_decl_state () == decl_state);
2015 lto_pop_out_decl_state ();
2016 lto_record_function_out_decl_state (node->decl, decl_state);
2017 }
2018 }
2019
2020 /* Emit the callgraph after emitting function bodies. This needs to
2021 be done now to make sure that all the statements in every function
2022 have been renumbered so that edges can be associated with call
2023 statements using the statement UIDs. */
2024 output_symtab ();
2025
2026 #ifdef ENABLE_CHECKING
2027 lto_bitmap_free (output);
2028 #endif
2029 }
2030
2031 namespace {
2032
2033 const pass_data pass_data_ipa_lto_gimple_out =
2034 {
2035 IPA_PASS, /* type */
2036 "lto_gimple_out", /* name */
2037 OPTGROUP_NONE, /* optinfo_flags */
2038 true, /* has_gate */
2039 false, /* has_execute */
2040 TV_IPA_LTO_GIMPLE_OUT, /* tv_id */
2041 0, /* properties_required */
2042 0, /* properties_provided */
2043 0, /* properties_destroyed */
2044 0, /* todo_flags_start */
2045 0, /* todo_flags_finish */
2046 };
2047
2048 class pass_ipa_lto_gimple_out : public ipa_opt_pass_d
2049 {
2050 public:
2051 pass_ipa_lto_gimple_out (gcc::context *ctxt)
2052 : ipa_opt_pass_d (pass_data_ipa_lto_gimple_out, ctxt,
2053 NULL, /* generate_summary */
2054 lto_output, /* write_summary */
2055 NULL, /* read_summary */
2056 lto_output, /* write_optimization_summary */
2057 NULL, /* read_optimization_summary */
2058 NULL, /* stmt_fixup */
2059 0, /* function_transform_todo_flags_start */
2060 NULL, /* function_transform */
2061 NULL) /* variable_transform */
2062 {}
2063
2064 /* opt_pass methods: */
2065 bool gate () { return gate_lto_out (); }
2066
2067 }; // class pass_ipa_lto_gimple_out
2068
2069 } // anon namespace
2070
2071 ipa_opt_pass_d *
2072 make_pass_ipa_lto_gimple_out (gcc::context *ctxt)
2073 {
2074 return new pass_ipa_lto_gimple_out (ctxt);
2075 }
2076
2077
2078 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2079 from it and required for correct representation of its semantics.
2080 Each node in ENCODER must be a global declaration or a type. A node
2081 is written only once, even if it appears multiple times in the
2082 vector. Certain transitively-reachable nodes, such as those
2083 representing expressions, may be duplicated, but such nodes
2084 must not appear in ENCODER itself. */
2085
2086 static void
2087 write_global_stream (struct output_block *ob,
2088 struct lto_tree_ref_encoder *encoder)
2089 {
2090 tree t;
2091 size_t index;
2092 const size_t size = lto_tree_ref_encoder_size (encoder);
2093
2094 for (index = 0; index < size; index++)
2095 {
2096 t = lto_tree_ref_encoder_get_tree (encoder, index);
2097 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2098 stream_write_tree (ob, t, false);
2099 }
2100 }
2101
2102
2103 /* Write a sequence of indices into the globals vector corresponding
2104 to the trees in ENCODER. These are used by the reader to map the
2105 indices used to refer to global entities within function bodies to
2106 their referents. */
2107
2108 static void
2109 write_global_references (struct output_block *ob,
2110 struct lto_output_stream *ref_stream,
2111 struct lto_tree_ref_encoder *encoder)
2112 {
2113 tree t;
2114 uint32_t index;
2115 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2116
2117 /* Write size as 32-bit unsigned. */
2118 lto_output_data_stream (ref_stream, &size, sizeof (int32_t));
2119
2120 for (index = 0; index < size; index++)
2121 {
2122 uint32_t slot_num;
2123
2124 t = lto_tree_ref_encoder_get_tree (encoder, index);
2125 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2126 gcc_assert (slot_num != (unsigned)-1);
2127 lto_output_data_stream (ref_stream, &slot_num, sizeof slot_num);
2128 }
2129 }
2130
2131
2132 /* Write all the streams in an lto_out_decl_state STATE using
2133 output block OB and output stream OUT_STREAM. */
2134
2135 void
2136 lto_output_decl_state_streams (struct output_block *ob,
2137 struct lto_out_decl_state *state)
2138 {
2139 int i;
2140
2141 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2142 write_global_stream (ob, &state->streams[i]);
2143 }
2144
2145
2146 /* Write all the references in an lto_out_decl_state STATE using
2147 output block OB and output stream OUT_STREAM. */
2148
2149 void
2150 lto_output_decl_state_refs (struct output_block *ob,
2151 struct lto_output_stream *out_stream,
2152 struct lto_out_decl_state *state)
2153 {
2154 unsigned i;
2155 uint32_t ref;
2156 tree decl;
2157
2158 /* Write reference to FUNCTION_DECL. If there is not function,
2159 write reference to void_type_node. */
2160 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2161 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2162 gcc_assert (ref != (unsigned)-1);
2163 lto_output_data_stream (out_stream, &ref, sizeof (uint32_t));
2164
2165 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2166 write_global_references (ob, out_stream, &state->streams[i]);
2167 }
2168
2169
2170 /* Return the written size of STATE. */
2171
2172 static size_t
2173 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2174 {
2175 int i;
2176 size_t size;
2177
2178 size = sizeof (int32_t); /* fn_ref. */
2179 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2180 {
2181 size += sizeof (int32_t); /* vector size. */
2182 size += (lto_tree_ref_encoder_size (&state->streams[i])
2183 * sizeof (int32_t));
2184 }
2185 return size;
2186 }
2187
2188
2189 /* Write symbol T into STREAM in CACHE. SEEN specifies symbols we wrote
2190 so far. */
2191
2192 static void
2193 write_symbol (struct streamer_tree_cache_d *cache,
2194 struct lto_output_stream *stream,
2195 tree t, struct pointer_set_t *seen, bool alias)
2196 {
2197 const char *name;
2198 enum gcc_plugin_symbol_kind kind;
2199 enum gcc_plugin_symbol_visibility visibility;
2200 unsigned slot_num;
2201 unsigned HOST_WIDEST_INT size;
2202 const char *comdat;
2203 unsigned char c;
2204
2205 /* None of the following kinds of symbols are needed in the
2206 symbol table. */
2207 if (!TREE_PUBLIC (t)
2208 || is_builtin_fn (t)
2209 || DECL_ABSTRACT (t)
2210 || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
2211 return;
2212 gcc_assert (TREE_CODE (t) != RESULT_DECL);
2213
2214 gcc_assert (TREE_CODE (t) == VAR_DECL
2215 || TREE_CODE (t) == FUNCTION_DECL);
2216
2217 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2218
2219 /* This behaves like assemble_name_raw in varasm.c, performing the
2220 same name manipulations that ASM_OUTPUT_LABELREF does. */
2221 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2222
2223 if (pointer_set_contains (seen, name))
2224 return;
2225 pointer_set_insert (seen, name);
2226
2227 streamer_tree_cache_lookup (cache, t, &slot_num);
2228 gcc_assert (slot_num != (unsigned)-1);
2229
2230 if (DECL_EXTERNAL (t))
2231 {
2232 if (DECL_WEAK (t))
2233 kind = GCCPK_WEAKUNDEF;
2234 else
2235 kind = GCCPK_UNDEF;
2236 }
2237 else
2238 {
2239 if (DECL_WEAK (t))
2240 kind = GCCPK_WEAKDEF;
2241 else if (DECL_COMMON (t))
2242 kind = GCCPK_COMMON;
2243 else
2244 kind = GCCPK_DEF;
2245
2246 /* When something is defined, it should have node attached. */
2247 gcc_assert (alias || TREE_CODE (t) != VAR_DECL
2248 || varpool_get_node (t)->definition);
2249 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2250 || (cgraph_get_node (t)
2251 && cgraph_get_node (t)->definition));
2252 }
2253
2254 /* Imitate what default_elf_asm_output_external do.
2255 When symbol is external, we need to output it with DEFAULT visibility
2256 when compiling with -fvisibility=default, while with HIDDEN visibility
2257 when symbol has attribute (visibility("hidden")) specified.
2258 targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
2259 right. */
2260
2261 if (DECL_EXTERNAL (t)
2262 && !targetm.binds_local_p (t))
2263 visibility = GCCPV_DEFAULT;
2264 else
2265 switch (DECL_VISIBILITY (t))
2266 {
2267 case VISIBILITY_DEFAULT:
2268 visibility = GCCPV_DEFAULT;
2269 break;
2270 case VISIBILITY_PROTECTED:
2271 visibility = GCCPV_PROTECTED;
2272 break;
2273 case VISIBILITY_HIDDEN:
2274 visibility = GCCPV_HIDDEN;
2275 break;
2276 case VISIBILITY_INTERNAL:
2277 visibility = GCCPV_INTERNAL;
2278 break;
2279 }
2280
2281 if (kind == GCCPK_COMMON
2282 && DECL_SIZE_UNIT (t)
2283 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2284 size = tree_to_hwi (DECL_SIZE_UNIT (t));
2285 else
2286 size = 0;
2287
2288 if (DECL_ONE_ONLY (t))
2289 comdat = IDENTIFIER_POINTER (DECL_COMDAT_GROUP (t));
2290 else
2291 comdat = "";
2292
2293 lto_output_data_stream (stream, name, strlen (name) + 1);
2294 lto_output_data_stream (stream, comdat, strlen (comdat) + 1);
2295 c = (unsigned char) kind;
2296 lto_output_data_stream (stream, &c, 1);
2297 c = (unsigned char) visibility;
2298 lto_output_data_stream (stream, &c, 1);
2299 lto_output_data_stream (stream, &size, 8);
2300 lto_output_data_stream (stream, &slot_num, 4);
2301 }
2302
2303 /* Return true if NODE should appear in the plugin symbol table. */
2304
2305 bool
2306 output_symbol_p (symtab_node *node)
2307 {
2308 struct cgraph_node *cnode;
2309 if (!symtab_real_symbol_p (node))
2310 return false;
2311 /* We keep external functions in symtab for sake of inlining
2312 and devirtualization. We do not want to see them in symbol table as
2313 references unless they are really used. */
2314 cnode = dyn_cast <cgraph_node> (node);
2315 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2316 && cnode->callers)
2317 return true;
2318
2319 /* Ignore all references from external vars initializers - they are not really
2320 part of the compilation unit until they are used by folding. Some symbols,
2321 like references to external construction vtables can not be referred to at all.
2322 We decide this at can_refer_decl_in_current_unit_p. */
2323 if (!node->definition || DECL_EXTERNAL (node->decl))
2324 {
2325 int i;
2326 struct ipa_ref *ref;
2327 for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list,
2328 i, ref); i++)
2329 {
2330 if (ref->use == IPA_REF_ALIAS)
2331 continue;
2332 if (is_a <cgraph_node> (ref->referring))
2333 return true;
2334 if (!DECL_EXTERNAL (ref->referring->decl))
2335 return true;
2336 }
2337 return false;
2338 }
2339 return true;
2340 }
2341
2342
2343 /* Write an IL symbol table to OB.
2344 SET and VSET are cgraph/varpool node sets we are outputting. */
2345
2346 static void
2347 produce_symtab (struct output_block *ob)
2348 {
2349 struct streamer_tree_cache_d *cache = ob->writer_cache;
2350 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2351 struct pointer_set_t *seen;
2352 struct lto_output_stream stream;
2353 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2354 lto_symtab_encoder_iterator lsei;
2355
2356 lto_begin_section (section_name, false);
2357 free (section_name);
2358
2359 seen = pointer_set_create ();
2360 memset (&stream, 0, sizeof (stream));
2361
2362 /* Write the symbol table.
2363 First write everything defined and then all declarations.
2364 This is necessary to handle cases where we have duplicated symbols. */
2365 for (lsei = lsei_start (encoder);
2366 !lsei_end_p (lsei); lsei_next (&lsei))
2367 {
2368 symtab_node *node = lsei_node (lsei);
2369
2370 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2371 continue;
2372 write_symbol (cache, &stream, node->decl, seen, false);
2373 }
2374 for (lsei = lsei_start (encoder);
2375 !lsei_end_p (lsei); lsei_next (&lsei))
2376 {
2377 symtab_node *node = lsei_node (lsei);
2378
2379 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2380 continue;
2381 write_symbol (cache, &stream, node->decl, seen, false);
2382 }
2383
2384 lto_write_stream (&stream);
2385 pointer_set_destroy (seen);
2386
2387 lto_end_section ();
2388 }
2389
2390
2391 /* This pass is run after all of the functions are serialized and all
2392 of the IPA passes have written their serialized forms. This pass
2393 causes the vector of all of the global decls and types used from
2394 this file to be written in to a section that can then be read in to
2395 recover these on other side. */
2396
2397 static void
2398 produce_asm_for_decls (void)
2399 {
2400 struct lto_out_decl_state *out_state;
2401 struct lto_out_decl_state *fn_out_state;
2402 struct lto_decl_header header;
2403 char *section_name;
2404 struct output_block *ob;
2405 struct lto_output_stream *header_stream, *decl_state_stream;
2406 unsigned idx, num_fns;
2407 size_t decl_state_size;
2408 int32_t num_decl_states;
2409
2410 ob = create_output_block (LTO_section_decls);
2411 ob->global = true;
2412
2413 memset (&header, 0, sizeof (struct lto_decl_header));
2414
2415 section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2416 lto_begin_section (section_name, !flag_wpa);
2417 free (section_name);
2418
2419 /* Make string 0 be a NULL string. */
2420 streamer_write_char_stream (ob->string_stream, 0);
2421
2422 gcc_assert (!alias_pairs);
2423
2424 /* Write the global symbols. */
2425 out_state = lto_get_out_decl_state ();
2426 num_fns = lto_function_decl_states.length ();
2427 lto_output_decl_state_streams (ob, out_state);
2428 for (idx = 0; idx < num_fns; idx++)
2429 {
2430 fn_out_state =
2431 lto_function_decl_states[idx];
2432 lto_output_decl_state_streams (ob, fn_out_state);
2433 }
2434
2435 header.lto_header.major_version = LTO_major_version;
2436 header.lto_header.minor_version = LTO_minor_version;
2437
2438 /* Currently not used. This field would allow us to preallocate
2439 the globals vector, so that it need not be resized as it is extended. */
2440 header.num_nodes = -1;
2441
2442 /* Compute the total size of all decl out states. */
2443 decl_state_size = sizeof (int32_t);
2444 decl_state_size += lto_out_decl_state_written_size (out_state);
2445 for (idx = 0; idx < num_fns; idx++)
2446 {
2447 fn_out_state =
2448 lto_function_decl_states[idx];
2449 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2450 }
2451 header.decl_state_size = decl_state_size;
2452
2453 header.main_size = ob->main_stream->total_size;
2454 header.string_size = ob->string_stream->total_size;
2455
2456 header_stream = XCNEW (struct lto_output_stream);
2457 lto_output_data_stream (header_stream, &header, sizeof header);
2458 lto_write_stream (header_stream);
2459 free (header_stream);
2460
2461 /* Write the main out-decl state, followed by out-decl states of
2462 functions. */
2463 decl_state_stream = XCNEW (struct lto_output_stream);
2464 num_decl_states = num_fns + 1;
2465 lto_output_data_stream (decl_state_stream, &num_decl_states,
2466 sizeof (num_decl_states));
2467 lto_output_decl_state_refs (ob, decl_state_stream, out_state);
2468 for (idx = 0; idx < num_fns; idx++)
2469 {
2470 fn_out_state =
2471 lto_function_decl_states[idx];
2472 lto_output_decl_state_refs (ob, decl_state_stream, fn_out_state);
2473 }
2474 lto_write_stream (decl_state_stream);
2475 free (decl_state_stream);
2476
2477 lto_write_stream (ob->main_stream);
2478 lto_write_stream (ob->string_stream);
2479
2480 lto_end_section ();
2481
2482 /* Write the symbol table. It is used by linker to determine dependencies
2483 and thus we can skip it for WPA. */
2484 if (!flag_wpa)
2485 produce_symtab (ob);
2486
2487 /* Write command line opts. */
2488 lto_write_options ();
2489
2490 /* Deallocate memory and clean up. */
2491 for (idx = 0; idx < num_fns; idx++)
2492 {
2493 fn_out_state =
2494 lto_function_decl_states[idx];
2495 lto_delete_out_decl_state (fn_out_state);
2496 }
2497 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2498 lto_function_decl_states.release ();
2499 destroy_output_block (ob);
2500 }
2501
2502
2503 namespace {
2504
2505 const pass_data pass_data_ipa_lto_finish_out =
2506 {
2507 IPA_PASS, /* type */
2508 "lto_decls_out", /* name */
2509 OPTGROUP_NONE, /* optinfo_flags */
2510 true, /* has_gate */
2511 false, /* has_execute */
2512 TV_IPA_LTO_DECL_OUT, /* tv_id */
2513 0, /* properties_required */
2514 0, /* properties_provided */
2515 0, /* properties_destroyed */
2516 0, /* todo_flags_start */
2517 0, /* todo_flags_finish */
2518 };
2519
2520 class pass_ipa_lto_finish_out : public ipa_opt_pass_d
2521 {
2522 public:
2523 pass_ipa_lto_finish_out (gcc::context *ctxt)
2524 : ipa_opt_pass_d (pass_data_ipa_lto_finish_out, ctxt,
2525 NULL, /* generate_summary */
2526 produce_asm_for_decls, /* write_summary */
2527 NULL, /* read_summary */
2528 produce_asm_for_decls, /* write_optimization_summary */
2529 NULL, /* read_optimization_summary */
2530 NULL, /* stmt_fixup */
2531 0, /* function_transform_todo_flags_start */
2532 NULL, /* function_transform */
2533 NULL) /* variable_transform */
2534 {}
2535
2536 /* opt_pass methods: */
2537 bool gate () { return gate_lto_out (); }
2538
2539 }; // class pass_ipa_lto_finish_out
2540
2541 } // anon namespace
2542
2543 ipa_opt_pass_d *
2544 make_pass_ipa_lto_finish_out (gcc::context *ctxt)
2545 {
2546 return new pass_ipa_lto_finish_out (ctxt);
2547 }