]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/lto-streamer-out.c
Remove tree_to_hwi.
[thirdparty/gcc.git] / gcc / lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2013 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "expr.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "hashtab.h"
33 #include "basic-block.h"
34 #include "gimple.h"
35 #include "gimple-iterator.h"
36 #include "gimple-ssa.h"
37 #include "tree-ssanames.h"
38 #include "tree-pass.h"
39 #include "function.h"
40 #include "ggc.h"
41 #include "diagnostic-core.h"
42 #include "except.h"
43 #include "vec.h"
44 #include "lto-symtab.h"
45 #include "lto-streamer.h"
46 #include "data-streamer.h"
47 #include "gimple-streamer.h"
48 #include "tree-streamer.h"
49 #include "streamer-hooks.h"
50 #include "cfgloop.h"
51
52
53 /* Clear the line info stored in DATA_IN. */
54
55 static void
56 clear_line_info (struct output_block *ob)
57 {
58 ob->current_file = NULL;
59 ob->current_line = 0;
60 ob->current_col = 0;
61 }
62
63
64 /* Create the output block and return it. SECTION_TYPE is
65 LTO_section_function_body or LTO_static_initializer. */
66
67 struct output_block *
68 create_output_block (enum lto_section_type section_type)
69 {
70 struct output_block *ob = XCNEW (struct output_block);
71
72 ob->section_type = section_type;
73 ob->decl_state = lto_get_out_decl_state ();
74 ob->main_stream = XCNEW (struct lto_output_stream);
75 ob->string_stream = XCNEW (struct lto_output_stream);
76 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true);
77
78 if (section_type == LTO_section_function_body)
79 ob->cfg_stream = XCNEW (struct lto_output_stream);
80
81 clear_line_info (ob);
82
83 ob->string_hash_table.create (37);
84 gcc_obstack_init (&ob->obstack);
85
86 return ob;
87 }
88
89
90 /* Destroy the output block OB. */
91
92 void
93 destroy_output_block (struct output_block *ob)
94 {
95 enum lto_section_type section_type = ob->section_type;
96
97 ob->string_hash_table.dispose ();
98
99 free (ob->main_stream);
100 free (ob->string_stream);
101 if (section_type == LTO_section_function_body)
102 free (ob->cfg_stream);
103
104 streamer_tree_cache_delete (ob->writer_cache);
105 obstack_free (&ob->obstack, NULL);
106
107 free (ob);
108 }
109
110
111 /* Look up NODE in the type table and write the index for it to OB. */
112
static void
output_type_ref (struct output_block *ob, tree node)
{
  /* Emit an LTO_type_ref marker followed by NODE's index in the
     output decl state's type table.  */
  streamer_write_record_start (ob, LTO_type_ref);
  lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
}
119
120
121 /* Return true if tree node T is written to various tables. For these
122 nodes, we sometimes want to write their physical representation
123 (via lto_output_tree), and sometimes we need to emit an index
124 reference into a table (via lto_output_tree_ref). */
125
126 static bool
127 tree_is_indexable (tree t)
128 {
129 /* Parameters and return values of functions of variably modified types
130 must go to global stream, because they may be used in the type
131 definition. */
132 if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
133 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
134 else if (TREE_CODE (t) == VAR_DECL && decl_function_context (t)
135 && !TREE_STATIC (t))
136 return false;
137 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
138 return false;
139 /* Variably modified types need to be streamed alongside function
140 bodies because they can refer to local entities. Together with
141 them we have to localize their members as well.
142 ??? In theory that includes non-FIELD_DECLs as well. */
143 else if (TYPE_P (t)
144 && variably_modified_type_p (t, NULL_TREE))
145 return false;
146 else if (TREE_CODE (t) == FIELD_DECL
147 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
148 return false;
149 else
150 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
151 }
152
153
154 /* Output info about new location into bitpack BP.
155 After outputting bitpack, lto_output_location_data has
156 to be done to output actual data. */
157
158 void
159 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
160 location_t loc)
161 {
162 expanded_location xloc;
163
164 loc = LOCATION_LOCUS (loc);
165 bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
166 if (loc == UNKNOWN_LOCATION)
167 return;
168
169 xloc = expand_location (loc);
170
171 bp_pack_value (bp, ob->current_file != xloc.file, 1);
172 bp_pack_value (bp, ob->current_line != xloc.line, 1);
173 bp_pack_value (bp, ob->current_col != xloc.column, 1);
174
175 if (ob->current_file != xloc.file)
176 bp_pack_var_len_unsigned (bp,
177 streamer_string_index (ob, xloc.file,
178 strlen (xloc.file) + 1,
179 true));
180 ob->current_file = xloc.file;
181
182 if (ob->current_line != xloc.line)
183 bp_pack_var_len_unsigned (bp, xloc.line);
184 ob->current_line = xloc.line;
185
186 if (ob->current_col != xloc.column)
187 bp_pack_var_len_unsigned (bp, xloc.column);
188 ob->current_col = xloc.column;
189 }
190
191
192 /* If EXPR is an indexable tree node, output a reference to it to
193 output block OB. Otherwise, output the physical representation of
194 EXPR to OB. */
195
static void
lto_output_tree_ref (struct output_block *ob, tree expr)
{
  enum tree_code code;

  /* All types are emitted as indices into the type table.  */
  if (TYPE_P (expr))
    {
      output_type_ref (ob, expr);
      return;
    }

  code = TREE_CODE (expr);
  switch (code)
    {
    case SSA_NAME:
      /* SSA names are referenced by version number only; the name
	 itself is materialized from the function body section.  */
      streamer_write_record_start (ob, LTO_ssa_name_ref);
      streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
      break;

    case FIELD_DECL:
      streamer_write_record_start (ob, LTO_field_decl_ref);
      lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case FUNCTION_DECL:
      streamer_write_record_start (ob, LTO_function_decl_ref);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case VAR_DECL:
    case DEBUG_EXPR_DECL:
      /* Only file-scope or static variables may be indexed; automatic
	 variables must be streamed with the function body.  */
      gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
      /* Fallthrough: VAR_DECLs, DEBUG_EXPR_DECLs and PARM_DECLs all
	 share the global decl reference encoding.  */
    case PARM_DECL:
      streamer_write_record_start (ob, LTO_global_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case CONST_DECL:
      streamer_write_record_start (ob, LTO_const_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case IMPORTED_DECL:
      gcc_assert (decl_function_context (expr) == NULL);
      streamer_write_record_start (ob, LTO_imported_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TYPE_DECL:
      streamer_write_record_start (ob, LTO_type_decl_ref);
      lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMESPACE_DECL:
      streamer_write_record_start (ob, LTO_namespace_decl_ref);
      lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case LABEL_DECL:
      streamer_write_record_start (ob, LTO_label_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case RESULT_DECL:
      streamer_write_record_start (ob, LTO_result_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TRANSLATION_UNIT_DECL:
      streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    default:
      /* No other node is indexable, so it should have been handled by
	 lto_output_tree.  */
      gcc_unreachable ();
    }
}
275
276
277 /* Return true if EXPR is a tree node that can be written to disk. */
278
279 static inline bool
280 lto_is_streamable (tree expr)
281 {
282 enum tree_code code = TREE_CODE (expr);
283
284 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
285 name version in lto_output_tree_ref (see output_ssa_names). */
286 return !is_lang_specific (expr)
287 && code != SSA_NAME
288 && code != CALL_EXPR
289 && code != LANG_TYPE
290 && code != MODIFY_EXPR
291 && code != INIT_EXPR
292 && code != TARGET_EXPR
293 && code != BIND_EXPR
294 && code != WITH_CLEANUP_EXPR
295 && code != STATEMENT_LIST
296 && code != OMP_CLAUSE
297 && (code == CASE_LABEL_EXPR
298 || code == DECL_EXPR
299 || TREE_CODE_CLASS (code) != tcc_statement);
300 }
301
302
303 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
304
305 static tree
306 get_symbol_initial_value (struct output_block *ob, tree expr)
307 {
308 gcc_checking_assert (DECL_P (expr)
309 && TREE_CODE (expr) != FUNCTION_DECL
310 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
311
312 /* Handle DECL_INITIAL for symbols. */
313 tree initial = DECL_INITIAL (expr);
314 if (TREE_CODE (expr) == VAR_DECL
315 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
316 && !DECL_IN_CONSTANT_POOL (expr)
317 && initial)
318 {
319 lto_symtab_encoder_t encoder;
320 struct varpool_node *vnode;
321
322 encoder = ob->decl_state->symtab_node_encoder;
323 vnode = varpool_get_node (expr);
324 if (!vnode
325 || !lto_symtab_encoder_encode_initializer_p (encoder,
326 vnode))
327 initial = error_mark_node;
328 }
329
330 return initial;
331 }
332
333
334 /* Write a physical representation of tree node EXPR to output block
335 OB. If REF_P is true, the leaves of EXPR are emitted as references
336 via lto_output_tree_ref. IX is the index into the streamer cache
337 where EXPR is stored. */
338
339 static void
340 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
341 {
342 /* Pack all the non-pointer fields in EXPR into a bitpack and write
343 the resulting bitpack. */
344 bitpack_d bp = bitpack_create (ob->main_stream);
345 streamer_pack_tree_bitfields (ob, &bp, expr);
346 streamer_write_bitpack (&bp);
347
348 /* Write all the pointer fields in EXPR. */
349 streamer_write_tree_body (ob, expr, ref_p);
350
351 /* Write any LTO-specific data to OB. */
352 if (DECL_P (expr)
353 && TREE_CODE (expr) != FUNCTION_DECL
354 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
355 {
356 /* Handle DECL_INITIAL for symbols. */
357 tree initial = get_symbol_initial_value (ob, expr);
358 stream_write_tree (ob, initial, ref_p);
359 }
360 }
361
362 /* Write a physical representation of tree node EXPR to output block
363 OB. If REF_P is true, the leaves of EXPR are emitted as references
364 via lto_output_tree_ref. IX is the index into the streamer cache
365 where EXPR is stored. */
366
static void
lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
{
  /* Refuse nodes that must never appear in an LTO stream (front-end
     specific trees, statements, SSA names, ...).  */
  if (!lto_is_streamable (expr))
    internal_error ("tree code %qs is not supported in LTO streams",
		    get_tree_code_name (TREE_CODE (expr)));

  /* Write the header, containing everything needed to materialize
     EXPR on the reading side.  */
  streamer_write_tree_header (ob, expr);

  /* Then the bitfields, pointer fields and LTO-specific data.  */
  lto_write_tree_1 (ob, expr, ref_p);

  /* Mark the end of EXPR.  */
  streamer_write_zero (ob);
}
383
384 /* Emit the physical representation of tree node EXPR to output block
385 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
386 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
387
static void
lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
		   bool ref_p, bool this_ref_p)
{
  unsigned ix;

  /* EXPR must be a real node, and if THIS_REF_P is set it must not be
     an indexable one (those go through the reference path instead).  */
  gcc_checking_assert (expr != NULL_TREE
		       && !(this_ref_p && tree_is_indexable (expr)));

  /* Record EXPR in the writer cache with its precomputed HASH; it
     must not have been streamed before.  */
  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
					      expr, hash, &ix);
  gcc_assert (!exists_p);
  if (streamer_handle_as_builtin_p (expr))
    {
      /* MD and NORMAL builtins do not need to be written out
	 completely as they are always instantiated by the
	 compiler on startup.  The only builtins that need to
	 be written out are BUILT_IN_FRONTEND.  For all other
	 builtins, we simply write the class and code.  */
      streamer_write_builtin (ob, expr);
    }
  else if (TREE_CODE (expr) == INTEGER_CST
	   && !TREE_OVERFLOW (expr))
    {
      /* Shared INTEGER_CST nodes are special because they need their
	 original type to be materialized by the reader (to implement
	 TYPE_CACHED_VALUES).  */
      streamer_write_integer_cst (ob, expr, ref_p);
    }
  else
    {
      /* This is the first time we see EXPR, write its fields
	 to OB.  */
      lto_write_tree (ob, expr, ref_p);
    }
}
424
/* Per-node state for the SCC-finding DFS walk: the node's pre-order
   number and the smallest dfsnum reachable from it (its low-link).  */
struct sccs
{
  unsigned int dfsnum;
  unsigned int low;
};

/* A tree together with its streaming hash, as kept on the SCC stack.  */
struct scc_entry
{
  tree t;
  hashval_t hash;
};

/* Next pre-order number to hand out during the DFS walk.  */
static unsigned int next_dfs_num;
/* Stack of nodes belonging to SCCs still being collected.  */
static vec<scc_entry> sccstack;
/* Per-walk map; presumably from tree to its sccs state — used by
   DFS_write_tree (definition not in view), verify there.  */
static struct pointer_map_t *sccstate;
/* Obstack backing the allocations referenced from SCCSTATE.  */
static struct obstack sccstate_obstack;

static void
DFS_write_tree (struct output_block *ob, sccs *from_state,
		tree expr, bool ref_p, bool this_ref_p);
445
446 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
447 DFS recurse for all tree edges originating from it. */
448
static void
DFS_write_tree_body (struct output_block *ob,
		     tree expr, sccs *expr_state, bool ref_p)
{
/* Recurse into one outgoing tree edge of EXPR.  The edge set walked
   here must mirror what streamer_write_tree_body emits, otherwise the
   SCC computation and the streamed bodies get out of sync.  */
#define DFS_follow_tree_edge(DEST) \
  DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)

  enum tree_code code;

  code = TREE_CODE (expr);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (TREE_CODE (expr) != IDENTIFIER_NODE)
	DFS_follow_tree_edge (TREE_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
	DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      DFS_follow_tree_edge (TREE_REALPART (expr));
      DFS_follow_tree_edge (TREE_IMAGPART (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (expr)
	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
	  && ANON_AGGRNAME_P (DECL_NAME (expr)))
	;
      else
	DFS_follow_tree_edge (DECL_NAME (expr));
      DFS_follow_tree_edge (DECL_CONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      DFS_follow_tree_edge (DECL_SIZE (expr));
      DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));

      /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
	 special handling in LTO, it must be handled by streamer hooks.  */

      DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));

      /* Do not follow DECL_ABSTRACT_ORIGIN.  We cannot handle debug
	 information for early inlining so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      if ((TREE_CODE (expr) == VAR_DECL
	   || TREE_CODE (expr) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (expr))
	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
      if (TREE_CODE (expr) == VAR_DECL)
	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (TREE_CODE (expr) == TYPE_DECL)
	DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
      DFS_follow_tree_edge (DECL_VINDEX (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (expr))
	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
      DFS_follow_tree_edge (DECL_SECTION_NAME (expr));
      DFS_follow_tree_edge (DECL_COMDAT_GROUP (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
      DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
      DFS_follow_tree_edge (DECL_FCONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      DFS_follow_tree_edge (TYPE_SIZE (expr));
      DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
      DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
      DFS_follow_tree_edge (TYPE_NAME (expr));
      /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
	 reconstructed during fixup.  */
      /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
	 during fixup.  */
      DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
      DFS_follow_tree_edge (TYPE_CONTEXT (expr));
      /* TYPE_CANONICAL is re-computed during type merging, so no need
	 to follow it here.  */
      DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	DFS_follow_tree_edge (TYPE_VALUES (expr));
      else if (TREE_CODE (expr) == ARRAY_TYPE)
	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
      else if (RECORD_OR_UNION_TYPE_P (expr))
	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
	  DFS_follow_tree_edge (t);
      else if (TREE_CODE (expr) == FUNCTION_TYPE
	       || TREE_CODE (expr) == METHOD_TYPE)
	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));

      if (!POINTER_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_MINVAL (expr));
      DFS_follow_tree_edge (TYPE_MAXVAL (expr));
      if (RECORD_OR_UNION_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_BINFO (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      DFS_follow_tree_edge (TREE_PURPOSE (expr));
      DFS_follow_tree_edge (TREE_VALUE (expr));
      DFS_follow_tree_edge (TREE_CHAIN (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    {
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
      DFS_follow_tree_edge (TREE_BLOCK (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    {
      for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
	/* ??? FIXME.  See also streamer_write_chain.  */
	if (!(VAR_OR_FUNCTION_DECL_P (t)
	      && DECL_EXTERNAL (t)))
	  DFS_follow_tree_edge (t);

      DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));

      /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
	 handle - those that represent inlined function scopes.
	 For the rest, drop them on the floor instead of ICEing
	 in dwarf2out.c.  */
      if (inlined_function_outer_scope_p (expr))
	{
	  tree ultimate_origin = block_ultimate_origin (expr);
	  DFS_follow_tree_edge (ultimate_origin);
	}
      /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
	 information for early inlined BLOCKs so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
	 streaming time.  */

      /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;

      /* Note that the number of BINFO slots has already been emitted in
	 EXPR's header (see streamer_write_tree_header) because this length
	 is needed to build the empty BINFO node on the reader side.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
	DFS_follow_tree_edge (t);
      DFS_follow_tree_edge (BINFO_OFFSET (expr));
      DFS_follow_tree_edge (BINFO_VTABLE (expr));
      DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));

      /* The number of BINFO_BASE_ACCESSES has already been emitted in
	 EXPR's bitfield section.  */
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
	DFS_follow_tree_edge (t);

      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
	{
	  DFS_follow_tree_edge (index);
	  DFS_follow_tree_edge (value);
	}
    }

#undef DFS_follow_tree_edge
}
668
669 /* Return a hash value for the tree T. */
670
static hashval_t
hash_tree (struct streamer_tree_cache_d *cache, tree t)
{
/* Mix the already-computed hash of SIBLING (looked up in CACHE) into V.
   Siblings not yet in the cache contribute nothing; see the pointer
   special-case under TS_TYPED for the one place we recurse instead.  */
#define visit(SIBLING) \
  do { \
    unsigned ix; \
    if (SIBLING && streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
      v = iterative_hash_hashval_t (streamer_tree_cache_get_hash (cache, ix), v); \
  } while (0)

  /* Hash TS_BASE.  */
  enum tree_code code = TREE_CODE (t);
  hashval_t v = iterative_hash_host_wide_int (code, 0);
  if (!TYPE_P (t))
    {
      v = iterative_hash_host_wide_int (TREE_SIDE_EFFECTS (t)
					| (TREE_CONSTANT (t) << 1)
					| (TREE_READONLY (t) << 2)
					| (TREE_PUBLIC (t) << 3), v);
    }
  v = iterative_hash_host_wide_int (TREE_ADDRESSABLE (t)
				    | (TREE_THIS_VOLATILE (t) << 1), v);
  if (DECL_P (t))
    v = iterative_hash_host_wide_int (DECL_UNSIGNED (t), v);
  else if (TYPE_P (t))
    v = iterative_hash_host_wide_int (TYPE_UNSIGNED (t), v);
  if (TYPE_P (t))
    v = iterative_hash_host_wide_int (TYPE_ARTIFICIAL (t), v);
  else
    v = iterative_hash_host_wide_int (TREE_NO_WARNING (t), v);
  v = iterative_hash_host_wide_int (TREE_NOTHROW (t)
				    | (TREE_STATIC (t) << 1)
				    | (TREE_PROTECTED (t) << 2)
				    | (TREE_DEPRECATED (t) << 3), v);
  if (code != TREE_BINFO)
    v = iterative_hash_host_wide_int (TREE_PRIVATE (t), v);
  if (TYPE_P (t))
    v = iterative_hash_host_wide_int (TYPE_SATURATING (t)
				      | (TYPE_ADDR_SPACE (t) << 1), v);
  else if (code == SSA_NAME)
    v = iterative_hash_host_wide_int (SSA_NAME_IS_DEFAULT_DEF (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
    {
      int i;
      v = iterative_hash_host_wide_int (TREE_INT_CST_NUNITS (t), v);
      v = iterative_hash_host_wide_int (TREE_INT_CST_EXT_NUNITS (t), v);
      for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
	v = iterative_hash_host_wide_int (TREE_INT_CST_ELT (t, i), v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
    {
      REAL_VALUE_TYPE r = TREE_REAL_CST (t);
      v = iterative_hash_host_wide_int (r.cl, v);
      v = iterative_hash_host_wide_int (r.decimal
					| (r.sign << 1)
					| (r.signalling << 2)
					| (r.canonical << 3), v);
      v = iterative_hash_host_wide_int (r.uexp, v);
      for (unsigned i = 0; i < SIGSZ; ++i)
	v = iterative_hash_host_wide_int (r.sig[i], v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
    {
      FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
      v = iterative_hash_host_wide_int (f.mode, v);
      v = iterative_hash_host_wide_int (f.data.low, v);
      v = iterative_hash_host_wide_int (f.data.high, v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      v = iterative_hash_host_wide_int (DECL_MODE (t), v);
      v = iterative_hash_host_wide_int (DECL_NONLOCAL (t)
					| (DECL_VIRTUAL_P (t) << 1)
					| (DECL_IGNORED_P (t) << 2)
					| (DECL_ABSTRACT (t) << 3)
					| (DECL_ARTIFICIAL (t) << 4)
					| (DECL_USER_ALIGN (t) << 5)
					| (DECL_PRESERVE_P (t) << 6)
					| (DECL_EXTERNAL (t) << 7)
					| (DECL_GIMPLE_REG_P (t) << 8), v);
      v = iterative_hash_host_wide_int (DECL_ALIGN (t), v);
      if (code == LABEL_DECL)
	{
	  v = iterative_hash_host_wide_int (EH_LANDING_PAD_NR (t), v);
	  v = iterative_hash_host_wide_int (LABEL_DECL_UID (t), v);
	}
      else if (code == FIELD_DECL)
	{
	  v = iterative_hash_host_wide_int (DECL_PACKED (t)
					    | (DECL_NONADDRESSABLE_P (t) << 1),
					    v);
	  v = iterative_hash_host_wide_int (DECL_OFFSET_ALIGN (t), v);
	}
      else if (code == VAR_DECL)
	{
	  v = iterative_hash_host_wide_int (DECL_HAS_DEBUG_EXPR_P (t)
					    | (DECL_NONLOCAL_FRAME (t) << 1),
					    v);
	}
      if (code == RESULT_DECL
	  || code == PARM_DECL
	  || code == VAR_DECL)
	{
	  v = iterative_hash_host_wide_int (DECL_BY_REFERENCE (t), v);
	  if (code == VAR_DECL
	      || code == PARM_DECL)
	    v = iterative_hash_host_wide_int (DECL_HAS_VALUE_EXPR_P (t), v);
	}
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
    v = iterative_hash_host_wide_int (DECL_REGISTER (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      v = iterative_hash_host_wide_int ((DECL_COMMON (t))
					| (DECL_DLLIMPORT_P (t) << 1)
					| (DECL_WEAK (t) << 2)
					| (DECL_SEEN_IN_BIND_EXPR_P (t) << 3)
					| (DECL_COMDAT (t) << 4)
					| (DECL_VISIBILITY_SPECIFIED (t) << 6),
					v);
      v = iterative_hash_host_wide_int (DECL_VISIBILITY (t), v);
      if (code == VAR_DECL)
	{
	  /* DECL_IN_TEXT_SECTION is set during final asm output only.  */
	  v = iterative_hash_host_wide_int (DECL_HARD_REGISTER (t)
					    | (DECL_IN_CONSTANT_POOL (t) << 1),
					    v);
	  v = iterative_hash_host_wide_int (DECL_TLS_MODEL (t), v);
	}
      if (TREE_CODE (t) == FUNCTION_DECL)
	v = iterative_hash_host_wide_int (DECL_FINAL_P (t)
					  | (DECL_CXX_CONSTRUCTOR_P (t) << 1)
					  | (DECL_CXX_DESTRUCTOR_P (t) << 2),
					  v);
      if (VAR_OR_FUNCTION_DECL_P (t))
	v = iterative_hash_host_wide_int (DECL_INIT_PRIORITY (t), v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      v = iterative_hash_host_wide_int (DECL_BUILT_IN_CLASS (t), v);
      v = iterative_hash_host_wide_int (DECL_STATIC_CONSTRUCTOR (t)
					| (DECL_STATIC_DESTRUCTOR (t) << 1)
					| (DECL_UNINLINABLE (t) << 2)
					| (DECL_POSSIBLY_INLINED (t) << 3)
					| (DECL_IS_NOVOPS (t) << 4)
					| (DECL_IS_RETURNS_TWICE (t) << 5)
					| (DECL_IS_MALLOC (t) << 6)
					| (DECL_IS_OPERATOR_NEW (t) << 7)
					| (DECL_DECLARED_INLINE_P (t) << 8)
					| (DECL_STATIC_CHAIN (t) << 9)
					| (DECL_NO_INLINE_WARNING_P (t) << 10)
					| (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t) << 11)
					| (DECL_NO_LIMIT_STACK (t) << 12)
					| (DECL_DISREGARD_INLINE_LIMITS (t) << 13)
					| (DECL_PURE_P (t) << 14)
					| (DECL_LOOPING_CONST_OR_PURE_P (t) << 15), v);
      if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
	v = iterative_hash_host_wide_int (DECL_FUNCTION_CODE (t), v);
      if (DECL_STATIC_DESTRUCTOR (t))
	v = iterative_hash_host_wide_int (DECL_FINI_PRIORITY (t), v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      v = iterative_hash_host_wide_int (TYPE_MODE (t), v);
      v = iterative_hash_host_wide_int (TYPE_STRING_FLAG (t)
					| (TYPE_NO_FORCE_BLK (t) << 1)
					| (TYPE_NEEDS_CONSTRUCTING (t) << 2)
					| (TYPE_PACKED (t) << 3)
					| (TYPE_RESTRICT (t) << 4)
					| (TYPE_USER_ALIGN (t) << 5)
					| (TYPE_READONLY (t) << 6), v);
      if (RECORD_OR_UNION_TYPE_P (t))
	{
	  v = iterative_hash_host_wide_int (TYPE_TRANSPARENT_AGGR (t)
					    | (TYPE_FINAL_P (t) << 1), v);
	}
      else if (code == ARRAY_TYPE)
	v = iterative_hash_host_wide_int (TYPE_NONALIASED_COMPONENT (t), v);
      v = iterative_hash_host_wide_int (TYPE_PRECISION (t), v);
      v = iterative_hash_host_wide_int (TYPE_ALIGN (t), v);
      /* Only distinguish "alias set zero" from "anything else": the
	 concrete alias set numbering is not stable across runs.  */
      v = iterative_hash_host_wide_int ((TYPE_ALIAS_SET (t) == 0
					 || (!in_lto_p
					     && get_alias_set (t) == 0))
					? 0 : -1, v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
    v = iterative_hash (TRANSLATION_UNIT_LANGUAGE (t),
			strlen (TRANSLATION_UNIT_LANGUAGE (t)), v);

  if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
    v = iterative_hash (t, sizeof (struct cl_target_option), v);

  if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
    v = iterative_hash (t, sizeof (struct cl_optimization), v);

  if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
    v = iterative_hash_host_wide_int (IDENTIFIER_HASH_VALUE (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_STRING))
    v = iterative_hash (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (POINTER_TYPE_P (t))
	{
	  /* For pointers factor in the pointed-to type recursively as
	     we cannot recurse through only pointers.
	     ??? We can generalize this by keeping track of the
	     in-SCC edges for each tree (or arbitrarily the first
	     such edge) and hashing that in in a second stage
	     (instead of the quadratic mixing of the SCC we do now).  */
	  hashval_t x;
	  unsigned ix;
	  if (streamer_tree_cache_lookup (cache, TREE_TYPE (t), &ix))
	    x = streamer_tree_cache_get_hash (cache, ix);
	  else
	    x = hash_tree (cache, TREE_TYPE (t));
	  v = iterative_hash_hashval_t (x, v);
	}
      else if (code != IDENTIFIER_NODE)
	visit (TREE_TYPE (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
      visit (VECTOR_CST_ELT (t, i));

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      visit (TREE_REALPART (t));
      visit (TREE_IMAGPART (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (t)
	  && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
	  && ANON_AGGRNAME_P (DECL_NAME (t)))
	;
      else
	visit (DECL_NAME (t));
      if (DECL_FILE_SCOPE_P (t))
	;
      else
	visit (DECL_CONTEXT (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      visit (DECL_SIZE (t));
      visit (DECL_SIZE_UNIT (t));
      visit (DECL_ATTRIBUTES (t));
      if ((code == VAR_DECL
	   || code == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (t))
	visit (DECL_VALUE_EXPR (t));
      if (code == VAR_DECL
	  && DECL_HAS_DEBUG_EXPR_P (t))
	visit (DECL_DEBUG_EXPR (t));
      /* ??? Hash DECL_INITIAL as streamed.  Needs the output-block to
	 be able to call get_symbol_initial_value.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (code == TYPE_DECL)
	visit (DECL_ORIGINAL_TYPE (t));
      visit (DECL_VINDEX (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      if (DECL_ASSEMBLER_NAME_SET_P (t))
	visit (DECL_ASSEMBLER_NAME (t));
      visit (DECL_SECTION_NAME (t));
      visit (DECL_COMDAT_GROUP (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      visit (DECL_FIELD_OFFSET (t));
      visit (DECL_BIT_FIELD_TYPE (t));
      visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
      visit (DECL_FIELD_BIT_OFFSET (t));
      visit (DECL_FCONTEXT (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      visit (DECL_FUNCTION_PERSONALITY (t));
      visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
      visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      visit (TYPE_SIZE (t));
      visit (TYPE_SIZE_UNIT (t));
      visit (TYPE_ATTRIBUTES (t));
      visit (TYPE_NAME (t));
      visit (TYPE_MAIN_VARIANT (t));
      if (TYPE_FILE_SCOPE_P (t))
	;
      else
	visit (TYPE_CONTEXT (t));
      visit (TYPE_STUB_DECL (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (code == ENUMERAL_TYPE)
	visit (TYPE_VALUES (t));
      else if (code == ARRAY_TYPE)
	visit (TYPE_DOMAIN (t));
      else if (RECORD_OR_UNION_TYPE_P (t))
	for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
	  visit (f);
      else if (code == FUNCTION_TYPE
	       || code == METHOD_TYPE)
	visit (TYPE_ARG_TYPES (t));
      if (!POINTER_TYPE_P (t))
	visit (TYPE_MINVAL (t));
      visit (TYPE_MAXVAL (t));
      if (RECORD_OR_UNION_TYPE_P (t))
	visit (TYPE_BINFO (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      visit (TREE_PURPOSE (t));
      visit (TREE_VALUE (t));
      visit (TREE_CHAIN (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
      visit (TREE_VEC_ELT (t, i));

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      v = iterative_hash_host_wide_int (TREE_OPERAND_LENGTH (t), v);
      for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
	visit (TREE_OPERAND (t, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree b;
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
	visit (b);
      visit (BINFO_OFFSET (t));
      visit (BINFO_VTABLE (t));
      visit (BINFO_VPTR_FIELD (t));
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
	visit (b);
      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;
      v = iterative_hash_host_wide_int (CONSTRUCTOR_NELTS (t), v);
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
	{
	  visit (index);
	  visit (value);
	}
    }

  return v;

#undef visit
}
1057
1058 /* Compare two SCC entries by their hash value for qsorting them. */
1059
1060 static int
1061 scc_entry_compare (const void *p1_, const void *p2_)
1062 {
1063 const scc_entry *p1 = (const scc_entry *) p1_;
1064 const scc_entry *p2 = (const scc_entry *) p2_;
1065 if (p1->hash < p2->hash)
1066 return -1;
1067 else if (p1->hash > p2->hash)
1068 return 1;
1069 return 0;
1070 }
1071
/* Return a hash value for the SCC on the SCC stack from FIRST with
   size SIZE.  CACHE is the streamer cache consulted by hash_tree.
   The result is made independent of the order in which the SCC
   members were visited.  */

static hashval_t
hash_scc (struct streamer_tree_cache_d *cache, unsigned first, unsigned size)
{
  /* Compute hash values for the SCC members.  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash = hash_tree (cache, sccstack[first+i].t);

  /* A singleton SCC needs no mixing.  */
  if (size == 1)
    return sccstack[first].hash;

  /* Sort the SCC of type, hash pairs so that when we mix in
     all members of the SCC the hash value becomes independent on
     the order we visited the SCC.  Disregard hashes equal to
     the hash of the tree we mix into because we cannot guarantee
     a stable sort for those across different TUs.  */
  qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
  hashval_t *tem = XALLOCAVEC (hashval_t, size);
  for (unsigned i = 0; i < size; ++i)
    {
      hashval_t hash = sccstack[first+i].hash;
      hashval_t orig_hash = hash;
      unsigned j;
      /* Skip same hashes.  */
      for (j = i + 1;
	   j < size && sccstack[first+j].hash == orig_hash; ++j)
	;
      /* Mix in all members whose hash differs from ours, first those
	 after the run of equal hashes, then wrapping to the front.  */
      for (; j < size; ++j)
	hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
      for (j = 0; sccstack[first+j].hash != orig_hash; ++j)
	hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
      tem[i] = hash;
    }
  /* Commit the mixed per-member hashes back to the stack entries and
     combine them into the overall SCC hash.  */
  hashval_t scc_hash = 0;
  for (unsigned i = 0; i < size; ++i)
    {
      sccstack[first+i].hash = tem[i];
      scc_hash = iterative_hash_hashval_t (tem[i], scc_hash);
    }
  return scc_hash;
}
1115
/* DFS walk EXPR and stream SCCs of tree bodies if they are not
   already in the streamer cache.  Main routine called for
   each visit of EXPR.  OB is the output block; FROM_STATE is the
   DFS state of the tree EXPR was reached from (NULL at the walk
   root).  REF_P is used for streaming siblings of EXPR, THIS_REF_P
   says whether EXPR itself may be emitted as a reference.
   Implements Tarjan's SCC algorithm: each maximal SCC of trees is
   popped off sccstack and streamed as one LTO_tree_scc record.  */

static void
DFS_write_tree (struct output_block *ob, sccs *from_state,
		tree expr, bool ref_p, bool this_ref_p)
{
  unsigned ix;
  sccs **slot;

  /* Handle special cases.  */
  if (expr == NULL_TREE)
    return;

  /* Do not DFS walk into indexable trees.  */
  if (this_ref_p && tree_is_indexable (expr))
    return;

  /* Check if we already streamed EXPR.  */
  if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
    return;

  slot = (sccs **)pointer_map_insert (sccstate, expr);
  sccs *cstate = *slot;
  if (!cstate)
    {
      scc_entry e = { expr, 0 };
      /* Not yet visited.  DFS recurse and push it onto the stack.  */
      *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
      sccstack.safe_push (e);
      cstate->dfsnum = next_dfs_num++;
      cstate->low = cstate->dfsnum;

      /* Builtins are streamed by reference and need no walk;
	 non-overflowed INTEGER_CSTs only need their type walked.
	 Everything else gets a full body walk.  */
      if (streamer_handle_as_builtin_p (expr))
	;
      else if (TREE_CODE (expr) == INTEGER_CST
	       && !TREE_OVERFLOW (expr))
	DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
      else
	{
	  DFS_write_tree_body (ob, expr, cstate, ref_p);

	  /* Walk any LTO-specific edges.  */
	  if (DECL_P (expr)
	      && TREE_CODE (expr) != FUNCTION_DECL
	      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
	    {
	      /* Handle DECL_INITIAL for symbols.  */
	      tree initial = get_symbol_initial_value (ob, expr);
	      DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
	    }
	}

      /* See if we found an SCC.  EXPR is an SCC root iff its low
	 link never reached back above its own DFS number.  */
      if (cstate->low == cstate->dfsnum)
	{
	  unsigned first, size;
	  tree x;

	  /* Pop the SCC and compute its size.  */
	  first = sccstack.length ();
	  do
	    {
	      x = sccstack[--first].t;
	    }
	  while (x != expr);
	  size = sccstack.length () - first;

	  /* No need to compute hashes for LTRANS units, we don't perform
	     any merging there.  */
	  hashval_t scc_hash = 0;
	  unsigned scc_entry_len = 0;
	  if (!flag_wpa)
	    {
	      scc_hash = hash_scc (ob->writer_cache, first, size);

	      /* Put the entries with the least number of collisions first.  */
	      unsigned entry_start = 0;
	      scc_entry_len = size + 1;
	      for (unsigned i = 0; i < size;)
		{
		  /* Find the run of entries sharing a hash value and
		     remember the shortest such run seen so far.  */
		  unsigned from = i;
		  for (i = i + 1; i < size
		       && (sccstack[first + i].hash
			   == sccstack[first + from].hash); ++i)
		    ;
		  if (i - from < scc_entry_len)
		    {
		      scc_entry_len = i - from;
		      entry_start = from;
		    }
		}
	      /* Rotate the shortest run to the front of the SCC.  */
	      for (unsigned i = 0; i < scc_entry_len; ++i)
		{
		  scc_entry tem = sccstack[first + i];
		  sccstack[first + i] = sccstack[first + entry_start + i];
		  sccstack[first + entry_start + i] = tem;
		}
	    }

	  /* Write LTO_tree_scc.  */
	  streamer_write_record_start (ob, LTO_tree_scc);
	  streamer_write_uhwi (ob, size);
	  streamer_write_uhwi (ob, scc_hash);

	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
	     All INTEGER_CSTs need to be handled this way as we need
	     their type to materialize them.  Also builtins are handled
	     this way.
	     ??? We still wrap these in LTO_tree_scc so at the
	     input side we can properly identify the tree we want
	     to ultimatively return.  */
	  size_t old_len = ob->writer_cache->nodes.length ();
	  if (size == 1)
	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
	  else
	    {
	      /* Write the size of the SCC entry candidates.  */
	      streamer_write_uhwi (ob, scc_entry_len);

	      /* Write all headers and populate the streamer cache.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  hashval_t hash = sccstack[first+i].hash;
		  tree t = sccstack[first+i].t;
		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
							      t, hash, &ix);
		  gcc_assert (!exists_p);

		  if (!lto_is_streamable (t))
		    internal_error ("tree code %qs is not supported "
				    "in LTO streams",
				    get_tree_code_name (TREE_CODE (t)));

		  gcc_checking_assert (!streamer_handle_as_builtin_p (t));

		  /* Write the header, containing everything needed to
		     materialize EXPR on the reading side.  */
		  streamer_write_tree_header (ob, t);
		}

	      /* Write the bitpacks and tree references.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

		  /* Mark the end of the tree.  */
		  streamer_write_zero (ob);
		}
	    }
	  /* All SCC members must have ended up in the cache.  */
	  gcc_assert (old_len + size == ob->writer_cache->nodes.length ());

	  /* Finally truncate the vector.  */
	  sccstack.truncate (first);

	  if (from_state)
	    from_state->low = MIN (from_state->low, cstate->low);
	  return;
	}

      if (from_state)
	from_state->low = MIN (from_state->low, cstate->low);
    }
  /* Back-edge or cross-edge to a node still on the stack: propagate
     its DFS number into the caller's low link.  */
  gcc_checking_assert (from_state);
  if (cstate->dfsnum < from_state->dfsnum)
    from_state->low = MIN (cstate->dfsnum, from_state->low);
}
1284
1285
/* Emit the physical representation of tree node EXPR to output block
   OB.  If THIS_REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.
   After the DFS walk has streamed all reachable SCCs, a pickle
   reference to EXPR itself is appended.  */

void
lto_output_tree (struct output_block *ob, tree expr,
		 bool ref_p, bool this_ref_p)
{
  unsigned ix;
  bool existed_p;

  if (expr == NULL_TREE)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  /* Indexable trees are emitted as references into per-decl streams.  */
  if (this_ref_p && tree_is_indexable (expr))
    {
      lto_output_tree_ref (ob, expr);
      return;
    }

  existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
  if (existed_p)
    {
      /* If a node has already been streamed out, make sure that
	 we don't write it more than once.  Otherwise, the reader
	 will instantiate two different nodes for the same object.  */
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
  else
    {
      /* This is the first time we see EXPR, write all reachable
	 trees to OB.  */
      static bool in_dfs_walk;

      /* Protect against recursion which means disconnect between
	 what tree edges we walk in the DFS walk and what edges
	 we stream out.  */
      gcc_assert (!in_dfs_walk);

      /* Start the DFS walk, setting up the per-walk SCC state and
	 tearing it down again afterwards.  */
      in_dfs_walk = true;
      sccstate = pointer_map_create ();
      gcc_obstack_init (&sccstate_obstack);
      next_dfs_num = 1;
      DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
      sccstack.release ();
      pointer_map_destroy (sccstate);
      obstack_free (&sccstate_obstack, NULL);
      in_dfs_walk = false;

      /* Finally append a reference to the tree we were writing.
	 ??? If expr ended up as a singleton we could have
	 inlined it here and avoid outputting a reference.  */
      existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
      gcc_assert (existed_p);
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
}
1357
1358
1359 /* Output to OB a list of try/catch handlers starting with FIRST. */
1360
1361 static void
1362 output_eh_try_list (struct output_block *ob, eh_catch first)
1363 {
1364 eh_catch n;
1365
1366 for (n = first; n; n = n->next_catch)
1367 {
1368 streamer_write_record_start (ob, LTO_eh_catch);
1369 stream_write_tree (ob, n->type_list, true);
1370 stream_write_tree (ob, n->filter_list, true);
1371 stream_write_tree (ob, n->label, true);
1372 }
1373
1374 streamer_write_record_start (ob, LTO_null);
1375 }
1376
1377
1378 /* Output EH region R in function FN to OB. CURR_RN is the slot index
1379 that is being emitted in FN->EH->REGION_ARRAY. This is used to
1380 detect EH region sharing. */
1381
1382 static void
1383 output_eh_region (struct output_block *ob, eh_region r)
1384 {
1385 enum LTO_tags tag;
1386
1387 if (r == NULL)
1388 {
1389 streamer_write_record_start (ob, LTO_null);
1390 return;
1391 }
1392
1393 if (r->type == ERT_CLEANUP)
1394 tag = LTO_ert_cleanup;
1395 else if (r->type == ERT_TRY)
1396 tag = LTO_ert_try;
1397 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1398 tag = LTO_ert_allowed_exceptions;
1399 else if (r->type == ERT_MUST_NOT_THROW)
1400 tag = LTO_ert_must_not_throw;
1401 else
1402 gcc_unreachable ();
1403
1404 streamer_write_record_start (ob, tag);
1405 streamer_write_hwi (ob, r->index);
1406
1407 if (r->outer)
1408 streamer_write_hwi (ob, r->outer->index);
1409 else
1410 streamer_write_zero (ob);
1411
1412 if (r->inner)
1413 streamer_write_hwi (ob, r->inner->index);
1414 else
1415 streamer_write_zero (ob);
1416
1417 if (r->next_peer)
1418 streamer_write_hwi (ob, r->next_peer->index);
1419 else
1420 streamer_write_zero (ob);
1421
1422 if (r->type == ERT_TRY)
1423 {
1424 output_eh_try_list (ob, r->u.eh_try.first_catch);
1425 }
1426 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1427 {
1428 stream_write_tree (ob, r->u.allowed.type_list, true);
1429 stream_write_tree (ob, r->u.allowed.label, true);
1430 streamer_write_uhwi (ob, r->u.allowed.filter);
1431 }
1432 else if (r->type == ERT_MUST_NOT_THROW)
1433 {
1434 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1435 bitpack_d bp = bitpack_create (ob->main_stream);
1436 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1437 streamer_write_bitpack (&bp);
1438 }
1439
1440 if (r->landing_pads)
1441 streamer_write_hwi (ob, r->landing_pads->index);
1442 else
1443 streamer_write_zero (ob);
1444 }
1445
1446
/* Output landing pad LP to OB.  Streams LTO_null when LP is NULL;
   otherwise the pad's index, the next pad's index (zero if none),
   the owning region's index (zero if none) and the post-landing-pad
   label.  */

static void
output_eh_lp (struct output_block *ob, eh_landing_pad lp)
{
  if (lp == NULL)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  streamer_write_record_start (ob, LTO_eh_landing_pad);
  streamer_write_hwi (ob, lp->index);
  if (lp->next_lp)
    streamer_write_hwi (ob, lp->next_lp->index);
  else
    streamer_write_zero (ob);

  if (lp->region)
    streamer_write_hwi (ob, lp->region->index);
  else
    streamer_write_zero (ob);

  stream_write_tree (ob, lp->post_landing_pad, true);
}
1472
1473
/* Output the existing eh_table of function FN to OB: the EH region
   tree (by region-array index), all landing pads, the runtime type
   table and the action-chain data.  When FN has no EH regions only a
   single LTO_null record is written.  */

static void
output_eh_regions (struct output_block *ob, struct function *fn)
{
  if (fn->eh && fn->eh->region_tree)
    {
      unsigned i;
      eh_region eh;
      eh_landing_pad lp;
      tree ttype;

      streamer_write_record_start (ob, LTO_eh_table);

      /* Emit the index of the root of the EH region tree.  */
      streamer_write_hwi (ob, fn->eh->region_tree->index);

      /* Emit all the EH regions in the region array.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
	output_eh_region (ob, eh);

      /* Emit all landing pads.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
	output_eh_lp (ob, lp);

      /* Emit all the runtime type data.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
	stream_write_tree (ob, ttype, true);

      /* Emit the table of action chains.  Its representation differs
	 between the ARM EABI unwinder (trees) and others (raw bytes).  */
      if (targetm.arm_eabi_unwinder)
	{
	  tree t;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
	    stream_write_tree (ob, t, true);
	}
      else
	{
	  uchar c;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
	    streamer_write_char_stream (ob->main_stream, c);
	}
    }

  /* The LTO_null either terminates the record or indicates that there
     are no eh_records at all.  */
  streamer_write_record_start (ob, LTO_null);
}
1527
1528
1529 /* Output all of the active ssa names to the ssa_names stream. */
1530
1531 static void
1532 output_ssa_names (struct output_block *ob, struct function *fn)
1533 {
1534 unsigned int i, len;
1535
1536 len = vec_safe_length (SSANAMES (fn));
1537 streamer_write_uhwi (ob, len);
1538
1539 for (i = 1; i < len; i++)
1540 {
1541 tree ptr = (*SSANAMES (fn))[i];
1542
1543 if (ptr == NULL_TREE
1544 || SSA_NAME_IN_FREE_LIST (ptr)
1545 || virtual_operand_p (ptr))
1546 continue;
1547
1548 streamer_write_uhwi (ob, i);
1549 streamer_write_char_stream (ob->main_stream,
1550 SSA_NAME_IS_DEFAULT_DEF (ptr));
1551 if (SSA_NAME_VAR (ptr))
1552 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1553 else
1554 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1555 stream_write_tree (ob, TREE_TYPE (ptr), true);
1556 }
1557
1558 streamer_write_zero (ob);
1559 }
1560
1561
/* Output the cfg of function FN to OB: the profile status, basic
   blocks with their successor edges, the next_bb chain and the loop
   tree.  Everything goes to OB's separate cfg stream.  */

static void
output_cfg (struct output_block *ob, struct function *fn)
{
  struct lto_output_stream *tmp_stream = ob->main_stream;
  basic_block bb;

  /* Temporarily redirect the main stream so all streamer helpers
     below write into the CFG section; restored at the end.  */
  ob->main_stream = ob->cfg_stream;

  streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
		       profile_status_for_function (fn));

  /* Output the number of the highest basic block.  */
  streamer_write_uhwi (ob, last_basic_block_for_function (fn));

  FOR_ALL_BB_FN (bb, fn)
    {
      edge_iterator ei;
      edge e;

      streamer_write_hwi (ob, bb->index);

      /* Output the successors and the edge flags.  */
      streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  streamer_write_uhwi (ob, e->dest->index);
	  streamer_write_hwi (ob, e->probability);
	  streamer_write_gcov_count (ob, e->count);
	  streamer_write_uhwi (ob, e->flags);
	}
    }

  /* -1 terminates the list of blocks.  */
  streamer_write_hwi (ob, -1);

  /* Stream the block layout via the next_bb chain, also terminated
     by -1.  */
  bb = ENTRY_BLOCK_PTR;
  while (bb->next_bb)
    {
      streamer_write_hwi (ob, bb->next_bb->index);
      bb = bb->next_bb;
    }

  streamer_write_hwi (ob, -1);

  /* ??? The cfgloop interface is tied to cfun.  */
  gcc_assert (cfun == fn);

  /* Output the number of loops.  */
  streamer_write_uhwi (ob, number_of_loops (fn));

  /* Output each loop, skipping the tree root which has number zero.  */
  for (unsigned i = 1; i < number_of_loops (fn); ++i)
    {
      struct loop *loop = get_loop (fn, i);

      /* Write the index of the loop header.  That's enough to rebuild
	 the loop tree on the reader side.  Stream -1 for an unused
	 loop entry.  */
      if (!loop)
	{
	  streamer_write_hwi (ob, -1);
	  continue;
	}
      else
	streamer_write_hwi (ob, loop->header->index);

      /* Write everything copy_loop_info copies.  */
      streamer_write_enum (ob->main_stream,
			   loop_estimation, EST_LAST, loop->estimate_state);
      streamer_write_hwi (ob, loop->any_upper_bound);
      if (loop->any_upper_bound)
	{
	  /* Iteration bounds are wide ints; stream precision, length
	     and the individual elements.  */
	  int len = loop->nb_iterations_upper_bound.get_len ();
	  int i;

	  streamer_write_uhwi (ob, loop->nb_iterations_upper_bound.get_precision ());
	  streamer_write_uhwi (ob, len);
	  for (i = 0; i < len; i++)
	    streamer_write_hwi (ob, loop->nb_iterations_upper_bound.elt (i));
	}
      streamer_write_hwi (ob, loop->any_estimate);
      if (loop->any_estimate)
	{
	  int len = loop->nb_iterations_estimate.get_len ();
	  int i;

	  streamer_write_uhwi (ob, loop->nb_iterations_estimate.get_precision ());
	  streamer_write_uhwi (ob, len);
	  for (i = 0; i < len; i++)
	    streamer_write_hwi (ob, loop->nb_iterations_estimate.elt (i));
	}
    }

  /* Restore the main stream.  */
  ob->main_stream = tmp_stream;
}
1658
1659
/* Create the header in the file using OB.  If the section type is for
   a function, set FN to the decl for that function.  After the header
   the CFG stream (function sections only), main stream and string
   table are written out and the section is closed.  */

void
produce_asm (struct output_block *ob, tree fn)
{
  enum lto_section_type section_type = ob->section_type;
  struct lto_function_header header;
  char *section_name;
  struct lto_output_stream *header_stream;

  /* Function body sections are named after the function's assembler
     name; all other sections get a generic name.  */
  if (section_type == LTO_section_function_body)
    {
      const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
      section_name = lto_get_section_name (section_type, name, NULL);
    }
  else
    section_name = lto_get_section_name (section_type, NULL, NULL);

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header is stream computed here.  */
  memset (&header, 0, sizeof (struct lto_function_header));

  /* Write the header.  */
  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  header.compressed_size = 0;

  if (section_type == LTO_section_function_body)
    header.cfg_size = ob->cfg_stream->total_size;
  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  header_stream = XCNEW (struct lto_output_stream);
  lto_output_data_stream (header_stream, &header, sizeof header);
  lto_write_stream (header_stream);
  free (header_stream);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  if (section_type == LTO_section_function_body)
    lto_write_stream (ob->cfg_stream);
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();
}
1710
1711
/* Output the base body of struct function FN using output block OB:
   the static chain, non-local goto save area, local decls, IL
   properties, a bitpack of flags and the start/end loci.  The write
   order here is the wire format and must match the reading side.  */

static void
output_struct_function_base (struct output_block *ob, struct function *fn)
{
  struct bitpack_d bp;
  unsigned i;
  tree t;

  /* Output the static chain and non-local goto save area.  */
  stream_write_tree (ob, fn->static_chain_decl, true);
  stream_write_tree (ob, fn->nonlocal_goto_save_area, true);

  /* Output all the local variables in the function.  */
  streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
  FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
    stream_write_tree (ob, t, true);

  /* Output current IL state of the function.  */
  streamer_write_uhwi (ob, fn->curr_properties);

  /* Write all the attributes for FN.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, fn->is_thunk, 1);
  bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
  bp_pack_value (&bp, fn->returns_pcc_struct, 1);
  bp_pack_value (&bp, fn->returns_struct, 1);
  bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
  bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
  bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
  bp_pack_value (&bp, fn->after_inlining, 1);
  bp_pack_value (&bp, fn->stdarg, 1);
  bp_pack_value (&bp, fn->has_nonlocal_label, 1);
  bp_pack_value (&bp, fn->calls_alloca, 1);
  bp_pack_value (&bp, fn->calls_setjmp, 1);
  bp_pack_value (&bp, fn->va_list_fpr_size, 8);
  bp_pack_value (&bp, fn->va_list_gpr_size, 8);

  /* Output the function start and end loci (packed into the same
     bitpack as the flags above).  */
  stream_output_location (ob, &bp, fn->function_start_locus);
  stream_output_location (ob, &bp, fn->function_end_locus);

  streamer_write_bitpack (&bp);
}
1756
1757
/* Output the body of function NODE->DECL to its own function-body
   section.  Streams the result/argument decls and lexical scope tree
   always; when a gimple body is present also the struct function
   base data, SSA names, EH regions, the renumbered statements and
   the CFG.  Finishes by producing the section via produce_asm.  */

static void
output_function (struct cgraph_node *node)
{
  tree function;
  struct function *fn;
  basic_block bb;
  struct output_block *ob;

  function = node->decl;
  fn = DECL_STRUCT_FUNCTION (function);
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->cgraph_node = node;

  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  /* Set current_function_decl and cfun.  */
  push_cfun (fn);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  streamer_write_record_start (ob, LTO_function);

  /* Output decls for parameters and args.  */
  stream_write_tree (ob, DECL_RESULT (function), true);
  streamer_write_chain (ob, DECL_ARGUMENTS (function), true);

  /* Output DECL_INITIAL for the function, which contains the tree of
     lexical scopes.  */
  stream_write_tree (ob, DECL_INITIAL (function), true);

  /* We also stream abstract functions where we stream only stuff needed for
     debug info.  A leading 1/0 tells the reader whether a body follows.  */
  if (gimple_has_body_p (function))
    {
      streamer_write_uhwi (ob, 1);
      output_struct_function_base (ob, fn);

      /* Output all the SSA names used in the function.  */
      output_ssa_names (ob, fn);

      /* Output any exception handling regions.  */
      output_eh_regions (ob, fn);


      /* We will renumber the statements.  The code that does this uses
	 the same ordering that we use for serializing them so we can use
	 the same code on the other end and not have to write out the
	 statement numbers.  We do not assign UIDs to PHIs here because
	 virtual PHIs get re-computed on-the-fly which would make numbers
	 inconsistent.  */
      set_gimple_stmt_max_uid (cfun, 0);
      FOR_ALL_BB (bb)
	{
	  gimple_stmt_iterator gsi;
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);

	      /* Virtual PHIs are not going to be streamed.  */
	      if (!virtual_operand_p (gimple_phi_result (stmt)))
		gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}
      /* To avoid keeping duplicate gimple IDs in the statements, renumber
	 virtual phis now.  */
      FOR_ALL_BB (bb)
	{
	  gimple_stmt_iterator gsi;
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      if (virtual_operand_p (gimple_phi_result (stmt)))
		gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}

      /* Output the code for the function.  */
      FOR_ALL_BB_FN (bb, fn)
	output_bb (ob, bb, fn);

      /* The terminator for this function.  */
      streamer_write_record_start (ob, LTO_null);

      output_cfg (ob, fn);

      pop_cfun ();
    }
  else
    streamer_write_uhwi (ob, 0);

  /* Create a section to hold the pickled output of this function.  */
  produce_asm (ob, function);

  destroy_output_block (ob);
}
1863
1864
/* Emit toplevel asms.  Streams every asm_node's string and ordering
   position into an LTO_section_asm section; does nothing when there
   are no toplevel asms.  */

void
lto_output_toplevel_asms (void)
{
  struct output_block *ob;
  struct asm_node *can;
  char *section_name;
  struct lto_output_stream *header_stream;
  struct lto_asm_header header;

  if (! asm_nodes)
    return;

  ob = create_output_block (LTO_section_asm);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  for (can = asm_nodes; can; can = can->next)
    {
      streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
      streamer_write_hwi (ob, can->order);
    }

  /* A NULL string terminates the list of asm statements.  */
  streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);

  section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header stream is computed here.  */
  memset (&header, 0, sizeof (header));

  /* Write the header.  */
  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  header_stream = XCNEW (struct lto_output_stream);
  lto_output_data_stream (header_stream, &header, sizeof (header));
  lto_write_stream (header_stream);
  free (header_stream);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  destroy_output_block (ob);
}
1920
1921
/* Copy the function body of NODE without deserializing: do a raw byte
   copy of its section data from the input file into the output
   section, and duplicate the input decl-state reference vectors into
   the out state so the copied indices stay valid.  */

static void
copy_function (struct cgraph_node *node)
{
  tree function = node->decl;
  struct lto_file_decl_data *file_data = node->lto_file_data;
  struct lto_output_stream *output_stream = XCNEW (struct lto_output_stream);
  const char *data;
  size_t len;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
  char *section_name =
    lto_get_section_name (LTO_section_function_body, name, NULL);
  size_t i, j;
  struct lto_in_decl_state *in_state;
  struct lto_out_decl_state *out_state = lto_get_out_decl_state ();

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* We may have renamed the declaration, e.g., a static function.  */
  name = lto_get_decl_name_mapping (file_data, name);

  data = lto_get_section_data (file_data, LTO_section_function_body,
                               name, &len);
  gcc_assert (data);

  /* Do a bit copy of the function body.  */
  lto_output_data_stream (output_stream, data, len);
  lto_write_stream (output_stream);

  /* Copy decls. */
  in_state =
    lto_get_function_in_decl_state (node->lto_file_data, function);
  gcc_assert (in_state);

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      size_t n = in_state->streams[i].size;
      tree *trees = in_state->streams[i].trees;
      struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);

      /* The out state must have the same indices and the in state.
	 So just copy the vector.  All the encoders in the in state
	 must be empty where we reach here.  */
      gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
      encoder->trees.reserve_exact (n);
      for (j = 0; j < n; j++)
	encoder->trees.safe_push (trees[j]);
    }

  lto_free_section_data (file_data, LTO_section_function_body, name,
			 data, len);
  free (output_stream);
  lto_end_section ();
}
1978
1979
/* Main entry point from the pass manager.  Streams the bodies of all
   functions encoded with a body: either serializing them
   (output_function) or, when no gimple body is present at LTRANS
   time, raw-copying the original section (copy_function).  Finally
   emits the symbol table.  */

static void
lto_output (void)
{
  struct lto_out_decl_state *decl_state;
#ifdef ENABLE_CHECKING
  bitmap output = lto_bitmap_alloc ();
#endif
  int i, n_nodes;
  lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* Initialize the streamer.  */
  lto_streamer_init ();

  n_nodes = lto_symtab_encoder_size (encoder);
  /* Process only the functions with bodies.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *node = dyn_cast <cgraph_node> (snode);
      if (node
	  && lto_symtab_encoder_encode_body_p (encoder, node)
	  && !node->alias)
	{
#ifdef ENABLE_CHECKING
	  /* Each function body must be emitted exactly once.  */
	  gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	  bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	  /* Each body gets its own decl state, recorded afterwards so
	     the decl references can be emitted with the body.  */
	  decl_state = lto_new_out_decl_state ();
	  lto_push_out_decl_state (decl_state);
	  if (gimple_has_body_p (node->decl) || !flag_wpa)
	    output_function (node);
	  else
	    copy_function (node);
	  gcc_assert (lto_get_out_decl_state () == decl_state);
	  lto_pop_out_decl_state ();
	  lto_record_function_out_decl_state (node->decl, decl_state);
	}
    }

  /* Emit the callgraph after emitting function bodies.  This needs to
     be done now to make sure that all the statements in every function
     have been renumbered so that edges can be associated with call
     statements using the statement UIDs.  */
  output_symtab ();

#ifdef ENABLE_CHECKING
  lto_bitmap_free (output);
#endif
}
2031
namespace {

/* Pass descriptor for the IPA pass that streams out GIMPLE function
   bodies; the actual work is done by lto_output above.  */
const pass_data pass_data_ipa_lto_gimple_out =
{
  IPA_PASS, /* type */
  "lto_gimple_out", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  false, /* has_execute */
  TV_IPA_LTO_GIMPLE_OUT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* IPA pass wrapper.  Note that lto_output serves as both the
   write_summary and the write_optimization_summary hook; all other
   IPA hooks are unused.  */
class pass_ipa_lto_gimple_out : public ipa_opt_pass_d
{
public:
  pass_ipa_lto_gimple_out (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_lto_gimple_out, ctxt,
		      NULL, /* generate_summary */
		      lto_output, /* write_summary */
		      NULL, /* read_summary */
		      lto_output, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate () { return gate_lto_out (); }

}; // class pass_ipa_lto_gimple_out

} // anon namespace
2071
2072 ipa_opt_pass_d *
2073 make_pass_ipa_lto_gimple_out (gcc::context *ctxt)
2074 {
2075 return new pass_ipa_lto_gimple_out (ctxt);
2076 }
2077
2078
2079 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2080 from it and required for correct representation of its semantics.
2081 Each node in ENCODER must be a global declaration or a type. A node
2082 is written only once, even if it appears multiple times in the
2083 vector. Certain transitively-reachable nodes, such as those
2084 representing expressions, may be duplicated, but such nodes
2085 must not appear in ENCODER itself. */
2086
2087 static void
2088 write_global_stream (struct output_block *ob,
2089 struct lto_tree_ref_encoder *encoder)
2090 {
2091 tree t;
2092 size_t index;
2093 const size_t size = lto_tree_ref_encoder_size (encoder);
2094
2095 for (index = 0; index < size; index++)
2096 {
2097 t = lto_tree_ref_encoder_get_tree (encoder, index);
2098 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2099 stream_write_tree (ob, t, false);
2100 }
2101 }
2102
2103
2104 /* Write a sequence of indices into the globals vector corresponding
2105 to the trees in ENCODER. These are used by the reader to map the
2106 indices used to refer to global entities within function bodies to
2107 their referents. */
2108
2109 static void
2110 write_global_references (struct output_block *ob,
2111 struct lto_output_stream *ref_stream,
2112 struct lto_tree_ref_encoder *encoder)
2113 {
2114 tree t;
2115 uint32_t index;
2116 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2117
2118 /* Write size as 32-bit unsigned. */
2119 lto_output_data_stream (ref_stream, &size, sizeof (int32_t));
2120
2121 for (index = 0; index < size; index++)
2122 {
2123 uint32_t slot_num;
2124
2125 t = lto_tree_ref_encoder_get_tree (encoder, index);
2126 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2127 gcc_assert (slot_num != (unsigned)-1);
2128 lto_output_data_stream (ref_stream, &slot_num, sizeof slot_num);
2129 }
2130 }
2131
2132
2133 /* Write all the streams in an lto_out_decl_state STATE using
2134 output block OB and output stream OUT_STREAM. */
2135
2136 void
2137 lto_output_decl_state_streams (struct output_block *ob,
2138 struct lto_out_decl_state *state)
2139 {
2140 int i;
2141
2142 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2143 write_global_stream (ob, &state->streams[i]);
2144 }
2145
2146
2147 /* Write all the references in an lto_out_decl_state STATE using
2148 output block OB and output stream OUT_STREAM. */
2149
2150 void
2151 lto_output_decl_state_refs (struct output_block *ob,
2152 struct lto_output_stream *out_stream,
2153 struct lto_out_decl_state *state)
2154 {
2155 unsigned i;
2156 uint32_t ref;
2157 tree decl;
2158
2159 /* Write reference to FUNCTION_DECL. If there is not function,
2160 write reference to void_type_node. */
2161 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2162 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2163 gcc_assert (ref != (unsigned)-1);
2164 lto_output_data_stream (out_stream, &ref, sizeof (uint32_t));
2165
2166 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2167 write_global_references (ob, out_stream, &state->streams[i]);
2168 }
2169
2170
2171 /* Return the written size of STATE. */
2172
2173 static size_t
2174 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2175 {
2176 int i;
2177 size_t size;
2178
2179 size = sizeof (int32_t); /* fn_ref. */
2180 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2181 {
2182 size += sizeof (int32_t); /* vector size. */
2183 size += (lto_tree_ref_encoder_size (&state->streams[i])
2184 * sizeof (int32_t));
2185 }
2186 return size;
2187 }
2188
2189
/* Write symbol T into STREAM in CACHE.  SEEN specifies symbols we wrote
   so far (keyed by assembler name); a symbol already in SEEN is not
   written again.  ALIAS is true when T is being written on behalf of
   an alias, which relaxes the "definition has a symtab node" checks.
   The record emitted here is consumed by the linker plugin.  */

static void
write_symbol (struct streamer_tree_cache_d *cache,
	      struct lto_output_stream *stream,
	      tree t, struct pointer_set_t *seen, bool alias)
{
  const char *name;
  enum gcc_plugin_symbol_kind kind;
  enum gcc_plugin_symbol_visibility visibility;
  unsigned slot_num;
  unsigned HOST_WIDEST_INT size;
  const char *comdat;
  unsigned char c;

  /* None of the following kinds of symbols are needed in the
     symbol table.  */
  if (!TREE_PUBLIC (t)
      || is_builtin_fn (t)
      || DECL_ABSTRACT (t)
      || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
    return;
  gcc_assert (TREE_CODE (t) != RESULT_DECL);

  gcc_assert (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == FUNCTION_DECL);

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));

  /* This behaves like assemble_name_raw in varasm.c, performing the
     same name manipulations that ASM_OUTPUT_LABELREF does. */
  name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));

  /* Deduplicate by the mangled assembler name, not by the decl.  */
  if (pointer_set_contains (seen, name))
    return;
  pointer_set_insert (seen, name);

  streamer_tree_cache_lookup (cache, t, &slot_num);
  gcc_assert (slot_num != (unsigned)-1);

  /* Classify the symbol: undefined (weak or strong) for externals,
     otherwise weak/common/plain definition.  */
  if (DECL_EXTERNAL (t))
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKUNDEF;
      else
	kind = GCCPK_UNDEF;
    }
  else
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKDEF;
      else if (DECL_COMMON (t))
	kind = GCCPK_COMMON;
      else
	kind = GCCPK_DEF;

      /* When something is defined, it should have node attached.  */
      gcc_assert (alias || TREE_CODE (t) != VAR_DECL
		  || varpool_get_node (t)->definition);
      gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
		  || (cgraph_get_node (t)
		      && cgraph_get_node (t)->definition));
    }

  /* Imitate what default_elf_asm_output_external do.
     When symbol is external, we need to output it with DEFAULT visibility
     when compiling with -fvisibility=default, while with HIDDEN visibility
     when symbol has attribute (visibility("hidden")) specified.
     targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
     right.  */

  if (DECL_EXTERNAL (t)
      && !targetm.binds_local_p (t))
    visibility = GCCPV_DEFAULT;
  else
    switch (DECL_VISIBILITY (t))
      {
      case VISIBILITY_DEFAULT:
	visibility = GCCPV_DEFAULT;
	break;
      case VISIBILITY_PROTECTED:
	visibility = GCCPV_PROTECTED;
	break;
      case VISIBILITY_HIDDEN:
	visibility = GCCPV_HIDDEN;
	break;
      case VISIBILITY_INTERNAL:
	visibility = GCCPV_INTERNAL;
	break;
      }

  /* Only common symbols carry a size; it lets the linker pick the
     largest common block.  */
  if (kind == GCCPK_COMMON
      && DECL_SIZE_UNIT (t)
      && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
    size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
  else
    size = 0;

  if (DECL_ONE_ONLY (t))
    comdat = IDENTIFIER_POINTER (DECL_COMDAT_GROUP (t));
  else
    comdat = "";

  /* Record layout: NUL-terminated name, NUL-terminated comdat group,
     kind byte, visibility byte, 8-byte size, 4-byte cache slot.
     NOTE(review): the fixed 8- and 4-byte widths and host byte order
     are presumably what the plugin-side reader expects -- confirm
     against the lto-plugin symbol table parser.  */
  lto_output_data_stream (stream, name, strlen (name) + 1);
  lto_output_data_stream (stream, comdat, strlen (comdat) + 1);
  c = (unsigned char) kind;
  lto_output_data_stream (stream, &c, 1);
  c = (unsigned char) visibility;
  lto_output_data_stream (stream, &c, 1);
  lto_output_data_stream (stream, &size, 8);
  lto_output_data_stream (stream, &slot_num, 4);
}
2303
2304 /* Return true if NODE should appear in the plugin symbol table. */
2305
2306 bool
2307 output_symbol_p (symtab_node *node)
2308 {
2309 struct cgraph_node *cnode;
2310 if (!symtab_real_symbol_p (node))
2311 return false;
2312 /* We keep external functions in symtab for sake of inlining
2313 and devirtualization. We do not want to see them in symbol table as
2314 references unless they are really used. */
2315 cnode = dyn_cast <cgraph_node> (node);
2316 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2317 && cnode->callers)
2318 return true;
2319
2320 /* Ignore all references from external vars initializers - they are not really
2321 part of the compilation unit until they are used by folding. Some symbols,
2322 like references to external construction vtables can not be referred to at all.
2323 We decide this at can_refer_decl_in_current_unit_p. */
2324 if (!node->definition || DECL_EXTERNAL (node->decl))
2325 {
2326 int i;
2327 struct ipa_ref *ref;
2328 for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list,
2329 i, ref); i++)
2330 {
2331 if (ref->use == IPA_REF_ALIAS)
2332 continue;
2333 if (is_a <cgraph_node> (ref->referring))
2334 return true;
2335 if (!DECL_EXTERNAL (ref->referring->decl))
2336 return true;
2337 }
2338 return false;
2339 }
2340 return true;
2341 }
2342
2343
2344 /* Write an IL symbol table to OB.
2345 SET and VSET are cgraph/varpool node sets we are outputting. */
2346
2347 static void
2348 produce_symtab (struct output_block *ob)
2349 {
2350 struct streamer_tree_cache_d *cache = ob->writer_cache;
2351 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2352 struct pointer_set_t *seen;
2353 struct lto_output_stream stream;
2354 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2355 lto_symtab_encoder_iterator lsei;
2356
2357 lto_begin_section (section_name, false);
2358 free (section_name);
2359
2360 seen = pointer_set_create ();
2361 memset (&stream, 0, sizeof (stream));
2362
2363 /* Write the symbol table.
2364 First write everything defined and then all declarations.
2365 This is necessary to handle cases where we have duplicated symbols. */
2366 for (lsei = lsei_start (encoder);
2367 !lsei_end_p (lsei); lsei_next (&lsei))
2368 {
2369 symtab_node *node = lsei_node (lsei);
2370
2371 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2372 continue;
2373 write_symbol (cache, &stream, node->decl, seen, false);
2374 }
2375 for (lsei = lsei_start (encoder);
2376 !lsei_end_p (lsei); lsei_next (&lsei))
2377 {
2378 symtab_node *node = lsei_node (lsei);
2379
2380 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2381 continue;
2382 write_symbol (cache, &stream, node->decl, seen, false);
2383 }
2384
2385 lto_write_stream (&stream);
2386 pointer_set_destroy (seen);
2387
2388 lto_end_section ();
2389 }
2390
2391
/* This pass is run after all of the functions are serialized and all
   of the IPA passes have written their serialized forms.  This pass
   causes the vector of all of the global decls and types used from
   this file to be written in to a section that can then be read in to
   recover these on other side.

   Section layout as written here: decl header, decl-state count and
   per-state references, the main tree stream, then the string stream.
   NOTE(review): this ordering presumably must match the decl-section
   reader exactly -- do not reorder the writes below.  */

static void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  struct lto_output_stream *header_stream, *decl_state_stream;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);
  ob->global = true;

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  gcc_assert (!alias_pairs);

  /* Write the global symbols: first the trees of the global decl
     state, then those of each per-function decl state.  This fills
     ob's main stream and the writer cache slot numbers used below.  */
  out_state = lto_get_out_decl_state ();
  num_fns = lto_function_decl_states.length ();
  lto_output_decl_state_streams (ob, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended.  */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states. */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  /* Emit the header first so the reader knows the sizes of the
     sections that follow.  */
  header_stream = XCNEW (struct lto_output_stream);
  lto_output_data_stream (header_stream, &header, sizeof header);
  lto_write_stream (header_stream);
  free (header_stream);

  /* Write the main out-decl state, followed by out-decl states of
     functions. */
  decl_state_stream = XCNEW (struct lto_output_stream);
  num_decl_states = num_fns + 1;
  lto_output_data_stream (decl_state_stream, &num_decl_states,
			  sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, decl_state_stream, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, decl_state_stream, fn_out_state);
    }
  lto_write_stream (decl_state_stream);
  free (decl_state_stream);

  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by linker to determine dependencies
     and thus we can skip it for WPA.  */
  if (!flag_wpa)
    produce_symtab (ob);

  /* Write command line opts.  */
  lto_write_options ();

  /* Deallocate memory and clean up.  */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
}
2502
2503
namespace {

/* Pass descriptor for the IPA pass that writes the global decl/type
   section (and plugin symbol table) after all function bodies and IPA
   summaries are out; see produce_asm_for_decls above.  */
const pass_data pass_data_ipa_lto_finish_out =
{
  IPA_PASS, /* type */
  "lto_decls_out", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  false, /* has_execute */
  TV_IPA_LTO_DECL_OUT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* IPA pass wrapper.  produce_asm_for_decls serves as both the
   write_summary and the write_optimization_summary hook; all other
   IPA hooks are unused.  */
class pass_ipa_lto_finish_out : public ipa_opt_pass_d
{
public:
  pass_ipa_lto_finish_out (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_lto_finish_out, ctxt,
		      NULL, /* generate_summary */
		      produce_asm_for_decls, /* write_summary */
		      NULL, /* read_summary */
		      produce_asm_for_decls, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate () { return gate_lto_out (); }

}; // class pass_ipa_lto_finish_out

} // anon namespace
2543
2544 ipa_opt_pass_d *
2545 make_pass_ipa_lto_finish_out (gcc::context *ctxt)
2546 {
2547 return new pass_ipa_lto_finish_out (ctxt);
2548 }