/* Scraped from the git.ipfire.org mirror of thirdparty/gcc.git,
   blob gcc/lto-streamer-out.c; commit subject: "Make
   TREE_INT_CST_NUNITS have its original meaning and add".  */
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2013 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "expr.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "hashtab.h"
33 #include "basic-block.h"
34 #include "tree-ssa.h"
35 #include "tree-pass.h"
36 #include "cgraph.h"
37 #include "function.h"
38 #include "ggc.h"
39 #include "diagnostic-core.h"
40 #include "except.h"
41 #include "vec.h"
42 #include "lto-symtab.h"
43 #include "lto-streamer.h"
44 #include "data-streamer.h"
45 #include "gimple-streamer.h"
46 #include "tree-streamer.h"
47 #include "streamer-hooks.h"
48 #include "cfgloop.h"
49
50
/* Reset the cached location fields in output block OB so that the
   next location streamed is emitted in full rather than as a delta
   against a stale previous location.  */

static void
clear_line_info (struct output_block *ob)
{
  ob->current_file = NULL;
  ob->current_line = 0;
  ob->current_col = 0;
}
60
61
62 /* Create the output block and return it. SECTION_TYPE is
63 LTO_section_function_body or LTO_static_initializer. */
64
65 struct output_block *
66 create_output_block (enum lto_section_type section_type)
67 {
68 struct output_block *ob = XCNEW (struct output_block);
69
70 ob->section_type = section_type;
71 ob->decl_state = lto_get_out_decl_state ();
72 ob->main_stream = XCNEW (struct lto_output_stream);
73 ob->string_stream = XCNEW (struct lto_output_stream);
74 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true);
75
76 if (section_type == LTO_section_function_body)
77 ob->cfg_stream = XCNEW (struct lto_output_stream);
78
79 clear_line_info (ob);
80
81 ob->string_hash_table.create (37);
82 gcc_obstack_init (&ob->obstack);
83
84 return ob;
85 }
86
87
88 /* Destroy the output block OB. */
89
90 void
91 destroy_output_block (struct output_block *ob)
92 {
93 enum lto_section_type section_type = ob->section_type;
94
95 ob->string_hash_table.dispose ();
96
97 free (ob->main_stream);
98 free (ob->string_stream);
99 if (section_type == LTO_section_function_body)
100 free (ob->cfg_stream);
101
102 streamer_tree_cache_delete (ob->writer_cache);
103 obstack_free (&ob->obstack, NULL);
104
105 free (ob);
106 }
107
108
/* Look up NODE in the type table and write the index for it to OB.  */

static void
output_type_ref (struct output_block *ob, tree node)
{
  /* The record tag must precede the index so the reader knows how to
     interpret the uleb that follows.  */
  streamer_write_record_start (ob, LTO_type_ref);
  lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
}
117
118
119 /* Return true if tree node T is written to various tables. For these
120 nodes, we sometimes want to write their phyiscal representation
121 (via lto_output_tree), and sometimes we need to emit an index
122 reference into a table (via lto_output_tree_ref). */
123
124 static bool
125 tree_is_indexable (tree t)
126 {
127 /* Parameters and return values of functions of variably modified types
128 must go to global stream, because they may be used in the type
129 definition. */
130 if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
131 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
132 else if (TREE_CODE (t) == VAR_DECL && decl_function_context (t)
133 && !TREE_STATIC (t))
134 return false;
135 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
136 return false;
137 /* Variably modified types need to be streamed alongside function
138 bodies because they can refer to local entities. Together with
139 them we have to localize their members as well.
140 ??? In theory that includes non-FIELD_DECLs as well. */
141 else if (TYPE_P (t)
142 && variably_modified_type_p (t, NULL_TREE))
143 return false;
144 else if (TREE_CODE (t) == FIELD_DECL
145 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
146 return false;
147 else
148 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
149 }
150
151
/* Output info about new location into bitpack BP.
   After outputting bitpack, lto_output_location_data has
   to be done to output actual data.  */

void
lto_output_location (struct output_block *ob, struct bitpack_d *bp,
                     location_t loc)
{
  expanded_location xloc;

  /* Strip any ad-hoc wrapper; only the pure locus is streamed.  */
  loc = LOCATION_LOCUS (loc);
  bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
  if (loc == UNKNOWN_LOCATION)
    return;

  xloc = expand_location (loc);

  /* Locations are delta-encoded against the previously streamed one:
     one bit per field flags whether it changed, and only changed
     fields are written out below.  */
  bp_pack_value (bp, ob->current_file != xloc.file, 1);
  bp_pack_value (bp, ob->current_line != xloc.line, 1);
  bp_pack_value (bp, ob->current_col != xloc.column, 1);

  if (ob->current_file != xloc.file)
    bp_pack_var_len_unsigned (bp,
                              streamer_string_index (ob, xloc.file,
                                                     strlen (xloc.file) + 1,
                                                     true));
  ob->current_file = xloc.file;

  if (ob->current_line != xloc.line)
    bp_pack_var_len_unsigned (bp, xloc.line);
  ob->current_line = xloc.line;

  if (ob->current_col != xloc.column)
    bp_pack_var_len_unsigned (bp, xloc.column);
  ob->current_col = xloc.column;
}
188
189
/* If EXPR is an indexable tree node, output a reference to it to
   output block OB.  Otherwise, output the physical representation of
   EXPR to OB.  */

static void
lto_output_tree_ref (struct output_block *ob, tree expr)
{
  enum tree_code code;

  /* All types are referenced through the type table.  */
  if (TYPE_P (expr))
    {
      output_type_ref (ob, expr);
      return;
    }

  /* Everything else gets a record tag followed by an index into the
     table that stores nodes of its kind.  */
  code = TREE_CODE (expr);
  switch (code)
    {
    case SSA_NAME:
      streamer_write_record_start (ob, LTO_ssa_name_ref);
      streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
      break;

    case FIELD_DECL:
      streamer_write_record_start (ob, LTO_field_decl_ref);
      lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case FUNCTION_DECL:
      streamer_write_record_start (ob, LTO_function_decl_ref);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case VAR_DECL:
    case DEBUG_EXPR_DECL:
      gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
      /* Fall through: these share the global decl table with PARM_DECL.  */
    case PARM_DECL:
      streamer_write_record_start (ob, LTO_global_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case CONST_DECL:
      streamer_write_record_start (ob, LTO_const_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case IMPORTED_DECL:
      gcc_assert (decl_function_context (expr) == NULL);
      streamer_write_record_start (ob, LTO_imported_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TYPE_DECL:
      streamer_write_record_start (ob, LTO_type_decl_ref);
      lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMESPACE_DECL:
      streamer_write_record_start (ob, LTO_namespace_decl_ref);
      lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case LABEL_DECL:
      streamer_write_record_start (ob, LTO_label_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case RESULT_DECL:
      streamer_write_record_start (ob, LTO_result_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TRANSLATION_UNIT_DECL:
      streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    default:
      /* No other node is indexable, so it should have been handled by
	 lto_output_tree.  */
      gcc_unreachable ();
    }
}
273
274
275 /* Return true if EXPR is a tree node that can be written to disk. */
276
277 static inline bool
278 lto_is_streamable (tree expr)
279 {
280 enum tree_code code = TREE_CODE (expr);
281
282 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
283 name version in lto_output_tree_ref (see output_ssa_names). */
284 return !is_lang_specific (expr)
285 && code != SSA_NAME
286 && code != CALL_EXPR
287 && code != LANG_TYPE
288 && code != MODIFY_EXPR
289 && code != INIT_EXPR
290 && code != TARGET_EXPR
291 && code != BIND_EXPR
292 && code != WITH_CLEANUP_EXPR
293 && code != STATEMENT_LIST
294 && code != OMP_CLAUSE
295 && (code == CASE_LABEL_EXPR
296 || code == DECL_EXPR
297 || TREE_CODE_CLASS (code) != tcc_statement);
298 }
299
300
301 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
302
303 static tree
304 get_symbol_initial_value (struct output_block *ob, tree expr)
305 {
306 gcc_checking_assert (DECL_P (expr)
307 && TREE_CODE (expr) != FUNCTION_DECL
308 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
309
310 /* Handle DECL_INITIAL for symbols. */
311 tree initial = DECL_INITIAL (expr);
312 if (TREE_CODE (expr) == VAR_DECL
313 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
314 && !DECL_IN_CONSTANT_POOL (expr)
315 && initial)
316 {
317 lto_symtab_encoder_t encoder;
318 struct varpool_node *vnode;
319
320 encoder = ob->decl_state->symtab_node_encoder;
321 vnode = varpool_get_node (expr);
322 if (!vnode
323 || !lto_symtab_encoder_encode_initializer_p (encoder,
324 vnode))
325 initial = error_mark_node;
326 }
327
328 return initial;
329 }
330
331
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  The header for EXPR has already been
   written by the caller (see lto_write_tree).  */

static void
lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
{
  /* Pack all the non-pointer fields in EXPR into a bitpack and write
     the resulting bitpack.  */
  bitpack_d bp = bitpack_create (ob->main_stream);
  streamer_pack_tree_bitfields (ob, &bp, expr);
  streamer_write_bitpack (&bp);

  /* Write all the pointer fields in EXPR.  */
  streamer_write_tree_body (ob, expr, ref_p);

  /* Write any LTO-specific data to OB.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    {
      /* Handle DECL_INITIAL for symbols; the initializer may be
	 replaced by error_mark_node if it is not streamed here.  */
      tree initial = get_symbol_initial_value (ob, expr);
      stream_write_tree (ob, initial, ref_p);
    }
}
359
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  Emits the header, the body, and a
   terminating zero marker.  */

static void
lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
{
  if (!lto_is_streamable (expr))
    internal_error ("tree code %qs is not supported in LTO streams",
		    get_tree_code_name (TREE_CODE (expr)));

  /* Write the header, containing everything needed to materialize
     EXPR on the reading side.  */
  streamer_write_tree_header (ob, expr);

  lto_write_tree_1 (ob, expr, ref_p);

  /* Mark the end of EXPR.  */
  streamer_write_zero (ob);
}
381
/* Emit the physical representation of tree node EXPR to output block
   OB.  HASH is the precomputed streaming hash of EXPR.  If THIS_REF_P
   is true, the leaves of EXPR are emitted as references via
   lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.  */

static void
lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
		   bool ref_p, bool this_ref_p)
{
  unsigned ix;

  gcc_checking_assert (expr != NULL_TREE
		       && !(this_ref_p && tree_is_indexable (expr)));

  /* EXPR must not have been streamed before; record it in the writer
     cache so later occurrences become back-references.  */
  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
					      expr, hash, &ix);
  gcc_assert (!exists_p);
  if (streamer_handle_as_builtin_p (expr))
    {
      /* MD and NORMAL builtins do not need to be written out
	 completely as they are always instantiated by the
	 compiler on startup.  The only builtins that need to
	 be written out are BUILT_IN_FRONTEND.  For all other
	 builtins, we simply write the class and code.  */
      streamer_write_builtin (ob, expr);
    }
  else if (TREE_CODE (expr) == INTEGER_CST
	   && !TREE_OVERFLOW (expr))
    {
      /* Shared INTEGER_CST nodes are special because they need their
	 original type to be materialized by the reader (to implement
	 TYPE_CACHED_VALUES).  */
      streamer_write_integer_cst (ob, expr, ref_p);
    }
  else
    {
      /* This is the first time we see EXPR, write its fields
	 to OB.  */
      lto_write_tree (ob, expr, ref_p);
    }
}
422
/* Per-node state used during the SCC discovery walk.  DFSNUM is the
   node's preorder number; LOW is the smallest DFSNUM reachable from
   it (presumably standard Tarjan SCC bookkeeping — the walk itself is
   in DFS_write_tree, defined later in this file).  */

struct sccs
{
  unsigned int dfsnum;
  unsigned int low;
};

/* A tree together with its streaming hash, as kept on the SCC stack.  */

struct scc_entry
{
  tree t;
  hashval_t hash;
};

/* Next DFS preorder number to hand out.  */
static unsigned int next_dfs_num;
/* Stack of nodes belonging to SCCs still being discovered.  */
static vec<scc_entry> sccstack;
/* Map from tree node to its struct sccs state.  */
static struct pointer_map_t *sccstate;
/* Obstack backing the sccs allocations — TODO confirm against
   DFS_write_tree, which is outside this view.  */
static struct obstack sccstate_obstack;

static void
DFS_write_tree (struct output_block *ob, sccs *from_state,
		tree expr, bool ref_p, bool this_ref_p);
443
/* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
   DFS recurse for all tree edges originating from it.  The set of
   edges followed here must mirror what streamer_write_tree_body
   emits, so that reader and writer agree on the SCC shape.  */

static void
DFS_write_tree_body (struct output_block *ob,
		     tree expr, sccs *expr_state, bool ref_p)
{
#define DFS_follow_tree_edge(DEST) \
  DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)

  enum tree_code code;

  code = TREE_CODE (expr);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (TREE_CODE (expr) != IDENTIFIER_NODE)
	DFS_follow_tree_edge (TREE_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
	DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      DFS_follow_tree_edge (TREE_REALPART (expr));
      DFS_follow_tree_edge (TREE_IMAGPART (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (expr)
	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
	  && ANON_AGGRNAME_P (DECL_NAME (expr)))
	;
      else
	DFS_follow_tree_edge (DECL_NAME (expr));
      DFS_follow_tree_edge (DECL_CONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      DFS_follow_tree_edge (DECL_SIZE (expr));
      DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));

      /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
	 special handling in LTO, it must be handled by streamer hooks.  */

      DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));

      /* Do not follow DECL_ABSTRACT_ORIGIN.  We cannot handle debug information
	 for early inlining so drop it on the floor instead of ICEing in
	 dwarf2out.c.  */

      if ((TREE_CODE (expr) == VAR_DECL
	   || TREE_CODE (expr) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (expr))
	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
      if (TREE_CODE (expr) == VAR_DECL)
	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (TREE_CODE (expr) == TYPE_DECL)
	DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
      DFS_follow_tree_edge (DECL_VINDEX (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (expr))
	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
      DFS_follow_tree_edge (DECL_SECTION_NAME (expr));
      DFS_follow_tree_edge (DECL_COMDAT_GROUP (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
      DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
      DFS_follow_tree_edge (DECL_FCONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      DFS_follow_tree_edge (TYPE_SIZE (expr));
      DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
      DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
      DFS_follow_tree_edge (TYPE_NAME (expr));
      /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
	 reconstructed during fixup.  */
      /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
	 during fixup.  */
      DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
      DFS_follow_tree_edge (TYPE_CONTEXT (expr));
      /* TYPE_CANONICAL is re-computed during type merging, so no need
	 to follow it here.  */
      DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	DFS_follow_tree_edge (TYPE_VALUES (expr));
      else if (TREE_CODE (expr) == ARRAY_TYPE)
	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
      else if (RECORD_OR_UNION_TYPE_P (expr))
	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
	  DFS_follow_tree_edge (t);
      else if (TREE_CODE (expr) == FUNCTION_TYPE
	       || TREE_CODE (expr) == METHOD_TYPE)
	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));

      if (!POINTER_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_MINVAL (expr));
      DFS_follow_tree_edge (TYPE_MAXVAL (expr));
      if (RECORD_OR_UNION_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_BINFO (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      DFS_follow_tree_edge (TREE_PURPOSE (expr));
      DFS_follow_tree_edge (TREE_VALUE (expr));
      DFS_follow_tree_edge (TREE_CHAIN (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    {
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
      DFS_follow_tree_edge (TREE_BLOCK (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    {
      for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
	/* ??? FIXME.  See also streamer_write_chain.  */
	if (!(VAR_OR_FUNCTION_DECL_P (t)
	      && DECL_EXTERNAL (t)))
	  DFS_follow_tree_edge (t);

      DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));

      /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
	 handle - those that represent inlined function scopes.
	 For the rest, drop them on the floor instead of ICEing
	 in dwarf2out.c.  */
      if (inlined_function_outer_scope_p (expr))
	{
	  tree ultimate_origin = block_ultimate_origin (expr);
	  DFS_follow_tree_edge (ultimate_origin);
	}
      /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
	 information for early inlined BLOCKs so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
	 streaming time.  */

      /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;

      /* Note that the number of BINFO slots has already been emitted in
	 EXPR's header (see streamer_write_tree_header) because this length
	 is needed to build the empty BINFO node on the reader side.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
	DFS_follow_tree_edge (t);
      DFS_follow_tree_edge (BINFO_OFFSET (expr));
      DFS_follow_tree_edge (BINFO_VTABLE (expr));
      DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));

      /* The number of BINFO_BASE_ACCESSES has already been emitted in
	 EXPR's bitfield section.  */
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
	DFS_follow_tree_edge (t);

      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
	{
	  DFS_follow_tree_edge (index);
	  DFS_follow_tree_edge (value);
	}
    }

#undef DFS_follow_tree_edge
}
666
/* Return a hash value for the tree T.  CACHE supplies the hashes of
   already-visited siblings (see the visit macro); nodes not yet in
   the cache simply do not contribute, except for pointed-to types,
   which are hashed recursively.  The fields mixed in here mirror the
   edges followed by DFS_write_tree_body above.  */

static hashval_t
hash_tree (struct streamer_tree_cache_d *cache, tree t)
{
#define visit(SIBLING) \
  do { \
    unsigned ix; \
    if (SIBLING && streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
      v = iterative_hash_hashval_t (streamer_tree_cache_get_hash (cache, ix), v); \
  } while (0)

  /* Hash TS_BASE.  */
  enum tree_code code = TREE_CODE (t);
  hashval_t v = iterative_hash_host_wide_int (code, 0);
  if (!TYPE_P (t))
    {
      v = iterative_hash_host_wide_int (TREE_SIDE_EFFECTS (t)
					| (TREE_CONSTANT (t) << 1)
					| (TREE_READONLY (t) << 2)
					| (TREE_PUBLIC (t) << 3), v);
    }
  v = iterative_hash_host_wide_int (TREE_ADDRESSABLE (t)
				    | (TREE_THIS_VOLATILE (t) << 1), v);
  if (DECL_P (t))
    v = iterative_hash_host_wide_int (DECL_UNSIGNED (t), v);
  else if (TYPE_P (t))
    v = iterative_hash_host_wide_int (TYPE_UNSIGNED (t), v);
  if (TYPE_P (t))
    v = iterative_hash_host_wide_int (TYPE_ARTIFICIAL (t), v);
  else
    v = iterative_hash_host_wide_int (TREE_NO_WARNING (t), v);
  v = iterative_hash_host_wide_int (TREE_NOTHROW (t)
				    | (TREE_STATIC (t) << 1)
				    | (TREE_PROTECTED (t) << 2)
				    | (TREE_DEPRECATED (t) << 3), v);
  if (code != TREE_BINFO)
    v = iterative_hash_host_wide_int (TREE_PRIVATE (t), v);
  if (TYPE_P (t))
    v = iterative_hash_host_wide_int (TYPE_SATURATING (t)
				      | (TYPE_ADDR_SPACE (t) << 1), v);
  else if (code == SSA_NAME)
    v = iterative_hash_host_wide_int (SSA_NAME_IS_DEFAULT_DEF (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
    {
      int i;
      v = iterative_hash_host_wide_int (TREE_INT_CST_NUNITS (t), v);
      v = iterative_hash_host_wide_int (TREE_INT_CST_EXT_NUNITS (t), v);
      for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
	v = iterative_hash_host_wide_int (TREE_INT_CST_ELT (t, i), v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
    {
      REAL_VALUE_TYPE r = TREE_REAL_CST (t);
      v = iterative_hash_host_wide_int (r.cl, v);
      v = iterative_hash_host_wide_int (r.decimal
					| (r.sign << 1)
					| (r.signalling << 2)
					| (r.canonical << 3), v);
      v = iterative_hash_host_wide_int (r.uexp, v);
      for (unsigned i = 0; i < SIGSZ; ++i)
	v = iterative_hash_host_wide_int (r.sig[i], v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
    {
      FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
      v = iterative_hash_host_wide_int (f.mode, v);
      v = iterative_hash_host_wide_int (f.data.low, v);
      v = iterative_hash_host_wide_int (f.data.high, v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      v = iterative_hash_host_wide_int (DECL_MODE (t), v);
      v = iterative_hash_host_wide_int (DECL_NONLOCAL (t)
					| (DECL_VIRTUAL_P (t) << 1)
					| (DECL_IGNORED_P (t) << 2)
					| (DECL_ABSTRACT (t) << 3)
					| (DECL_ARTIFICIAL (t) << 4)
					| (DECL_USER_ALIGN (t) << 5)
					| (DECL_PRESERVE_P (t) << 6)
					| (DECL_EXTERNAL (t) << 7)
					| (DECL_GIMPLE_REG_P (t) << 8), v);
      v = iterative_hash_host_wide_int (DECL_ALIGN (t), v);
      if (code == LABEL_DECL)
	{
	  v = iterative_hash_host_wide_int (EH_LANDING_PAD_NR (t), v);
	  v = iterative_hash_host_wide_int (LABEL_DECL_UID (t), v);
	}
      else if (code == FIELD_DECL)
	{
	  v = iterative_hash_host_wide_int (DECL_PACKED (t)
					    | (DECL_NONADDRESSABLE_P (t) << 1),
					    v);
	  v = iterative_hash_host_wide_int (DECL_OFFSET_ALIGN (t), v);
	}
      else if (code == VAR_DECL)
	{
	  v = iterative_hash_host_wide_int (DECL_HAS_DEBUG_EXPR_P (t)
					    | (DECL_NONLOCAL_FRAME (t) << 1),
					    v);
	}
      if (code == RESULT_DECL
	  || code == PARM_DECL
	  || code == VAR_DECL)
	{
	  v = iterative_hash_host_wide_int (DECL_BY_REFERENCE (t), v);
	  if (code == VAR_DECL
	      || code == PARM_DECL)
	    v = iterative_hash_host_wide_int (DECL_HAS_VALUE_EXPR_P (t), v);
	}
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
    v = iterative_hash_host_wide_int (DECL_REGISTER (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      v = iterative_hash_host_wide_int ((DECL_COMMON (t))
					| (DECL_DLLIMPORT_P (t) << 1)
					| (DECL_WEAK (t) << 2)
					| (DECL_SEEN_IN_BIND_EXPR_P (t) << 3)
					| (DECL_COMDAT (t) << 4)
					| (DECL_VISIBILITY_SPECIFIED (t) << 6),
					v);
      v = iterative_hash_host_wide_int (DECL_VISIBILITY (t), v);
      if (code == VAR_DECL)
	{
	  /* DECL_IN_TEXT_SECTION is set during final asm output only.  */
	  v = iterative_hash_host_wide_int (DECL_HARD_REGISTER (t)
					    | (DECL_IN_CONSTANT_POOL (t) << 1),
					    v);
	  v = iterative_hash_host_wide_int (DECL_TLS_MODEL (t), v);
	}
      if (TREE_CODE (t) == FUNCTION_DECL)
	v = iterative_hash_host_wide_int (DECL_FINAL_P (t)
					  | (DECL_CXX_CONSTRUCTOR_P (t) << 1)
					  | (DECL_CXX_DESTRUCTOR_P (t) << 2),
					  v);
      if (VAR_OR_FUNCTION_DECL_P (t))
	v = iterative_hash_host_wide_int (DECL_INIT_PRIORITY (t), v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      v = iterative_hash_host_wide_int (DECL_BUILT_IN_CLASS (t), v);
      v = iterative_hash_host_wide_int (DECL_STATIC_CONSTRUCTOR (t)
					| (DECL_STATIC_DESTRUCTOR (t) << 1)
					| (DECL_UNINLINABLE (t) << 2)
					| (DECL_POSSIBLY_INLINED (t) << 3)
					| (DECL_IS_NOVOPS (t) << 4)
					| (DECL_IS_RETURNS_TWICE (t) << 5)
					| (DECL_IS_MALLOC (t) << 6)
					| (DECL_IS_OPERATOR_NEW (t) << 7)
					| (DECL_DECLARED_INLINE_P (t) << 8)
					| (DECL_STATIC_CHAIN (t) << 9)
					| (DECL_NO_INLINE_WARNING_P (t) << 10)
					| (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t) << 11)
					| (DECL_NO_LIMIT_STACK (t) << 12)
					| (DECL_DISREGARD_INLINE_LIMITS (t) << 13)
					| (DECL_PURE_P (t) << 14)
					| (DECL_LOOPING_CONST_OR_PURE_P (t) << 15), v);
      if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
	v = iterative_hash_host_wide_int (DECL_FUNCTION_CODE (t), v);
      if (DECL_STATIC_DESTRUCTOR (t))
	v = iterative_hash_host_wide_int (DECL_FINI_PRIORITY (t), v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      v = iterative_hash_host_wide_int (TYPE_MODE (t), v);
      v = iterative_hash_host_wide_int (TYPE_STRING_FLAG (t)
					| (TYPE_NO_FORCE_BLK (t) << 1)
					| (TYPE_NEEDS_CONSTRUCTING (t) << 2)
					| (TYPE_PACKED (t) << 3)
					| (TYPE_RESTRICT (t) << 4)
					| (TYPE_USER_ALIGN (t) << 5)
					| (TYPE_READONLY (t) << 6), v);
      if (RECORD_OR_UNION_TYPE_P (t))
	{
	  v = iterative_hash_host_wide_int (TYPE_TRANSPARENT_AGGR (t)
					    | (TYPE_FINAL_P (t) << 1), v);
	}
      else if (code == ARRAY_TYPE)
	v = iterative_hash_host_wide_int (TYPE_NONALIASED_COMPONENT (t), v);
      v = iterative_hash_host_wide_int (TYPE_PRECISION (t), v);
      v = iterative_hash_host_wide_int (TYPE_ALIGN (t), v);
      /* Only distinguish "alias set zero" from "some alias set"; the
	 concrete set numbers are not stable across compilations.  */
      v = iterative_hash_host_wide_int ((TYPE_ALIAS_SET (t) == 0
					 || (!in_lto_p
					     && get_alias_set (t) == 0))
					? 0 : -1, v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
    v = iterative_hash (TRANSLATION_UNIT_LANGUAGE (t),
			strlen (TRANSLATION_UNIT_LANGUAGE (t)), v);

  if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
    v = iterative_hash (t, sizeof (struct cl_target_option), v);

  if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
    v = iterative_hash (t, sizeof (struct cl_optimization), v);

  if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
    v = iterative_hash_host_wide_int (IDENTIFIER_HASH_VALUE (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_STRING))
    v = iterative_hash (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (POINTER_TYPE_P (t))
	{
	  /* For pointers factor in the pointed-to type recursively as
	     we cannot recurse through only pointers.
	     ??? We can generalize this by keeping track of the
	     in-SCC edges for each tree (or arbitrarily the first
	     such edge) and hashing that in in a second stage
	     (instead of the quadratic mixing of the SCC we do now).  */
	  hashval_t x;
	  unsigned ix;
	  if (streamer_tree_cache_lookup (cache, TREE_TYPE (t), &ix))
	    x = streamer_tree_cache_get_hash (cache, ix);
	  else
	    x = hash_tree (cache, TREE_TYPE (t));
	  v = iterative_hash_hashval_t (x, v);
	}
      else if (code != IDENTIFIER_NODE)
	visit (TREE_TYPE (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
      visit (VECTOR_CST_ELT (t, i));

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      visit (TREE_REALPART (t));
      visit (TREE_IMAGPART (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (t)
	  && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
	  && ANON_AGGRNAME_P (DECL_NAME (t)))
	;
      else
	visit (DECL_NAME (t));
      if (DECL_FILE_SCOPE_P (t))
	;
      else
	visit (DECL_CONTEXT (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      visit (DECL_SIZE (t));
      visit (DECL_SIZE_UNIT (t));
      visit (DECL_ATTRIBUTES (t));
      if ((code == VAR_DECL
	   || code == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (t))
	visit (DECL_VALUE_EXPR (t));
      if (code == VAR_DECL
	  && DECL_HAS_DEBUG_EXPR_P (t))
	visit (DECL_DEBUG_EXPR (t));
      /* ??? Hash DECL_INITIAL as streamed.  Needs the output-block to
	 be able to call get_symbol_initial_value.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (code == TYPE_DECL)
	visit (DECL_ORIGINAL_TYPE (t));
      visit (DECL_VINDEX (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      if (DECL_ASSEMBLER_NAME_SET_P (t))
	visit (DECL_ASSEMBLER_NAME (t));
      visit (DECL_SECTION_NAME (t));
      visit (DECL_COMDAT_GROUP (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      visit (DECL_FIELD_OFFSET (t));
      visit (DECL_BIT_FIELD_TYPE (t));
      visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
      visit (DECL_FIELD_BIT_OFFSET (t));
      visit (DECL_FCONTEXT (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      visit (DECL_FUNCTION_PERSONALITY (t));
      visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
      visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      visit (TYPE_SIZE (t));
      visit (TYPE_SIZE_UNIT (t));
      visit (TYPE_ATTRIBUTES (t));
      visit (TYPE_NAME (t));
      visit (TYPE_MAIN_VARIANT (t));
      if (TYPE_FILE_SCOPE_P (t))
	;
      else
	visit (TYPE_CONTEXT (t));
      visit (TYPE_STUB_DECL (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (code == ENUMERAL_TYPE)
	visit (TYPE_VALUES (t));
      else if (code == ARRAY_TYPE)
	visit (TYPE_DOMAIN (t));
      else if (RECORD_OR_UNION_TYPE_P (t))
	for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
	  visit (f);
      else if (code == FUNCTION_TYPE
	       || code == METHOD_TYPE)
	visit (TYPE_ARG_TYPES (t));
      if (!POINTER_TYPE_P (t))
	visit (TYPE_MINVAL (t));
      visit (TYPE_MAXVAL (t));
      if (RECORD_OR_UNION_TYPE_P (t))
	visit (TYPE_BINFO (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      visit (TREE_PURPOSE (t));
      visit (TREE_VALUE (t));
      visit (TREE_CHAIN (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
      visit (TREE_VEC_ELT (t, i));

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      v = iterative_hash_host_wide_int (TREE_OPERAND_LENGTH (t), v);
      for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
	visit (TREE_OPERAND (t, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree b;
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
	visit (b);
      visit (BINFO_OFFSET (t));
      visit (BINFO_VTABLE (t));
      visit (BINFO_VPTR_FIELD (t));
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
	visit (b);
      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;
      v = iterative_hash_host_wide_int (CONSTRUCTOR_NELTS (t), v);
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
	{
	  visit (index);
	  visit (value);
	}
    }

  return v;

#undef visit
}
1055
1056 /* Compare two SCC entries by their hash value for qsorting them. */
1057
1058 static int
1059 scc_entry_compare (const void *p1_, const void *p2_)
1060 {
1061 const scc_entry *p1 = (const scc_entry *) p1_;
1062 const scc_entry *p2 = (const scc_entry *) p2_;
1063 if (p1->hash < p2->hash)
1064 return -1;
1065 else if (p1->hash > p2->hash)
1066 return 1;
1067 return 0;
1068 }
1069
/* Return a hash value for the SCC on the SCC stack from FIRST with
   size SIZE.  The result is independent of the order in which the
   SCC members were visited, so equal SCCs streamed from different
   translation units hash equally.  */

static hashval_t
hash_scc (struct streamer_tree_cache_d *cache, unsigned first, unsigned size)
{
  /* Compute hash values for the SCC members.  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash = hash_tree (cache, sccstack[first+i].t);

  /* A singleton SCC needs no order-independence machinery.  */
  if (size == 1)
    return sccstack[first].hash;

  /* Sort the SCC of type, hash pairs so that when we mix in
     all members of the SCC the hash value becomes independent of
     the order we visited the SCC.  Disregard hashes equal to
     the hash of the tree we mix into because we cannot guarantee
     a stable sort for those across different TUs.  */
  qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
  hashval_t *tem = XALLOCAVEC (hashval_t, size);
  for (unsigned i = 0; i < size; ++i)
    {
      hashval_t hash = sccstack[first+i].hash;
      hashval_t orig_hash = hash;
      unsigned j;
      /* Skip same hashes.  */
      for (j = i + 1;
	   j < size && sccstack[first+j].hash == orig_hash; ++j)
	;
      /* Mix in the other members' hashes in sorted order, wrapping
	 around past the end, so each member sees the same sequence
	 regardless of its stack position.  */
      for (; j < size; ++j)
	hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
      for (j = 0; sccstack[first+j].hash != orig_hash; ++j)
	hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
      tem[i] = hash;
    }
  /* Install the mixed per-member hashes and combine them into the
     final SCC hash.  */
  hashval_t scc_hash = 0;
  for (unsigned i = 0; i < size; ++i)
    {
      sccstack[first+i].hash = tem[i];
      scc_hash = iterative_hash_hashval_t (tem[i], scc_hash);
    }
  return scc_hash;
}
1113
/* DFS walk EXPR and stream SCCs of tree bodies if they are not
   already in the streamer cache.  Main routine called for
   each visit of EXPR.  This implements Tarjan's SCC algorithm:
   FROM_STATE is the visit state of the caller (NULL at the root),
   and the dfsnum/low fields detect when EXPR roots an SCC.  */

static void
DFS_write_tree (struct output_block *ob, sccs *from_state,
		tree expr, bool ref_p, bool this_ref_p)
{
  unsigned ix;
  sccs **slot;

  /* Handle special cases.  */
  if (expr == NULL_TREE)
    return;

  /* Do not DFS walk into indexable trees.  */
  if (this_ref_p && tree_is_indexable (expr))
    return;

  /* Check if we already streamed EXPR.  */
  if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
    return;

  slot = (sccs **)pointer_map_insert (sccstate, expr);
  sccs *cstate = *slot;
  if (!cstate)
    {
      scc_entry e = { expr, 0 };
      /* Not yet visited.  DFS recurse and push it onto the stack.  */
      *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
      sccstack.safe_push (e);
      cstate->dfsnum = next_dfs_num++;
      cstate->low = cstate->dfsnum;

      /* Builtins get no body walk; INTEGER_CSTs only need their
	 type edge walked.  Everything else gets a full body walk.  */
      if (streamer_handle_as_builtin_p (expr))
	;
      else if (TREE_CODE (expr) == INTEGER_CST
	       && !TREE_OVERFLOW (expr))
	DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
      else
	{
	  DFS_write_tree_body (ob, expr, cstate, ref_p);

	  /* Walk any LTO-specific edges.  */
	  if (DECL_P (expr)
	      && TREE_CODE (expr) != FUNCTION_DECL
	      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
	    {
	      /* Handle DECL_INITIAL for symbols.  */
	      tree initial = get_symbol_initial_value (ob, expr);
	      DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
	    }
	}

      /* See if we found an SCC.  EXPR is an SCC root exactly when no
	 back-edge from its subtree reached an earlier dfsnum.  */
      if (cstate->low == cstate->dfsnum)
	{
	  unsigned first, size;
	  tree x;

	  /* Pop the SCC and compute its size.  */
	  first = sccstack.length ();
	  do
	    {
	      x = sccstack[--first].t;
	    }
	  while (x != expr);
	  size = sccstack.length () - first;

	  /* No need to compute hashes for LTRANS units, we don't perform
	     any merging there.  */
	  hashval_t scc_hash = 0;
	  unsigned scc_entry_len = 0;
	  if (!flag_wpa)
	    {
	      scc_hash = hash_scc (ob->writer_cache, first, size);

	      /* Put the entries with the least number of collisions first.
		 Scan runs of equal hashes and rotate the shortest run to
		 the front so the reader has the fewest merge candidates
		 to try.  */
	      unsigned entry_start = 0;
	      scc_entry_len = size + 1;
	      for (unsigned i = 0; i < size;)
		{
		  unsigned from = i;
		  for (i = i + 1; i < size
		       && (sccstack[first + i].hash
			   == sccstack[first + from].hash); ++i)
		    ;
		  if (i - from < scc_entry_len)
		    {
		      scc_entry_len = i - from;
		      entry_start = from;
		    }
		}
	      for (unsigned i = 0; i < scc_entry_len; ++i)
		{
		  scc_entry tem = sccstack[first + i];
		  sccstack[first + i] = sccstack[first + entry_start + i];
		  sccstack[first + entry_start + i] = tem;
		}
	    }

	  /* Write LTO_tree_scc.  */
	  streamer_write_record_start (ob, LTO_tree_scc);
	  streamer_write_uhwi (ob, size);
	  streamer_write_uhwi (ob, scc_hash);

	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
	     All INTEGER_CSTs need to be handled this way as we need
	     their type to materialize them.  Also builtins are handled
	     this way.
	     ??? We still wrap these in LTO_tree_scc so at the
	     input side we can properly identify the tree we want
	     to ultimately return.  */
	  size_t old_len = ob->writer_cache->nodes.length ();
	  if (size == 1)
	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
	  else
	    {
	      /* Write the size of the SCC entry candidates.  */
	      streamer_write_uhwi (ob, scc_entry_len);

	      /* Write all headers and populate the streamer cache.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  hashval_t hash = sccstack[first+i].hash;
		  tree t = sccstack[first+i].t;
		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
							      t, hash, &ix);
		  gcc_assert (!exists_p);

		  if (!lto_is_streamable (t))
		    internal_error ("tree code %qs is not supported "
				    "in LTO streams",
				    get_tree_code_name (TREE_CODE (t)));

		  gcc_checking_assert (!streamer_handle_as_builtin_p (t));

		  /* Write the header, containing everything needed to
		     materialize EXPR on the reading side.  */
		  streamer_write_tree_header (ob, t);
		}

	      /* Write the bitpacks and tree references.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

		  /* Mark the end of the tree.  */
		  streamer_write_zero (ob);
		}
	    }
	  /* Every SCC member must have entered the cache exactly once.  */
	  gcc_assert (old_len + size == ob->writer_cache->nodes.length ());

	  /* Finally truncate the vector.  */
	  sccstack.truncate (first);

	  if (from_state)
	    from_state->low = MIN (from_state->low, cstate->low);
	  return;
	}

      /* Not an SCC root; propagate the low-link to the caller.  */
      if (from_state)
	from_state->low = MIN (from_state->low, cstate->low);
    }
  /* Back-edge to a node still on the stack: update the caller's
     low-link with the target's dfsnum.  */
  gcc_checking_assert (from_state);
  if (cstate->dfsnum < from_state->dfsnum)
    from_state->low = MIN (cstate->dfsnum, from_state->low);
}
1282
1283
/* Emit the physical representation of tree node EXPR to output block
   OB.  If THIS_REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.
   EXPR itself is always terminated by either an LTO_null record or an
   LTO_tree_pickle_reference record naming its cache slot.  */

void
lto_output_tree (struct output_block *ob, tree expr,
		 bool ref_p, bool this_ref_p)
{
  unsigned ix;
  bool existed_p;

  if (expr == NULL_TREE)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  /* Indexable trees are emitted as a reference into a decl stream.  */
  if (this_ref_p && tree_is_indexable (expr))
    {
      lto_output_tree_ref (ob, expr);
      return;
    }

  existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
  if (existed_p)
    {
      /* If a node has already been streamed out, make sure that
	 we don't write it more than once.  Otherwise, the reader
	 will instantiate two different nodes for the same object.  */
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
  else
    {
      /* This is the first time we see EXPR, write all reachable
	 trees to OB.  */
      static bool in_dfs_walk;

      /* Protect against recursion which means disconnect between
	 what tree edges we walk in the DFS walk and what edges
	 we stream out.  */
      gcc_assert (!in_dfs_walk);

      /* Start the DFS walk.  */
      /* Save ob state ... */
      /* let's see ... */
      in_dfs_walk = true;
      /* The DFS state (sccstate map, obstack, dfs counter) lives only
	 for the duration of this walk.  */
      sccstate = pointer_map_create ();
      gcc_obstack_init (&sccstate_obstack);
      next_dfs_num = 1;
      DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
      sccstack.release ();
      pointer_map_destroy (sccstate);
      obstack_free (&sccstate_obstack, NULL);
      in_dfs_walk = false;

      /* Finally append a reference to the tree we were writing.
	 ??? If expr ended up as a singleton we could have
	 inlined it here and avoid outputting a reference.
	 The DFS walk must have entered EXPR into the writer cache.  */
      existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
      gcc_assert (existed_p);
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
}
1355
1356
1357 /* Output to OB a list of try/catch handlers starting with FIRST. */
1358
1359 static void
1360 output_eh_try_list (struct output_block *ob, eh_catch first)
1361 {
1362 eh_catch n;
1363
1364 for (n = first; n; n = n->next_catch)
1365 {
1366 streamer_write_record_start (ob, LTO_eh_catch);
1367 stream_write_tree (ob, n->type_list, true);
1368 stream_write_tree (ob, n->filter_list, true);
1369 stream_write_tree (ob, n->label, true);
1370 }
1371
1372 streamer_write_record_start (ob, LTO_null);
1373 }
1374
1375
1376 /* Output EH region R in function FN to OB. CURR_RN is the slot index
1377 that is being emitted in FN->EH->REGION_ARRAY. This is used to
1378 detect EH region sharing. */
1379
1380 static void
1381 output_eh_region (struct output_block *ob, eh_region r)
1382 {
1383 enum LTO_tags tag;
1384
1385 if (r == NULL)
1386 {
1387 streamer_write_record_start (ob, LTO_null);
1388 return;
1389 }
1390
1391 if (r->type == ERT_CLEANUP)
1392 tag = LTO_ert_cleanup;
1393 else if (r->type == ERT_TRY)
1394 tag = LTO_ert_try;
1395 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1396 tag = LTO_ert_allowed_exceptions;
1397 else if (r->type == ERT_MUST_NOT_THROW)
1398 tag = LTO_ert_must_not_throw;
1399 else
1400 gcc_unreachable ();
1401
1402 streamer_write_record_start (ob, tag);
1403 streamer_write_hwi (ob, r->index);
1404
1405 if (r->outer)
1406 streamer_write_hwi (ob, r->outer->index);
1407 else
1408 streamer_write_zero (ob);
1409
1410 if (r->inner)
1411 streamer_write_hwi (ob, r->inner->index);
1412 else
1413 streamer_write_zero (ob);
1414
1415 if (r->next_peer)
1416 streamer_write_hwi (ob, r->next_peer->index);
1417 else
1418 streamer_write_zero (ob);
1419
1420 if (r->type == ERT_TRY)
1421 {
1422 output_eh_try_list (ob, r->u.eh_try.first_catch);
1423 }
1424 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1425 {
1426 stream_write_tree (ob, r->u.allowed.type_list, true);
1427 stream_write_tree (ob, r->u.allowed.label, true);
1428 streamer_write_uhwi (ob, r->u.allowed.filter);
1429 }
1430 else if (r->type == ERT_MUST_NOT_THROW)
1431 {
1432 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1433 bitpack_d bp = bitpack_create (ob->main_stream);
1434 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1435 streamer_write_bitpack (&bp);
1436 }
1437
1438 if (r->landing_pads)
1439 streamer_write_hwi (ob, r->landing_pads->index);
1440 else
1441 streamer_write_zero (ob);
1442 }
1443
1444
/* Output landing pad LP to OB.  A NULL landing pad is streamed as a
   single LTO_null record.  */

static void
output_eh_lp (struct output_block *ob, eh_landing_pad lp)
{
  if (lp == NULL)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  streamer_write_record_start (ob, LTO_eh_landing_pad);
  streamer_write_hwi (ob, lp->index);
  /* Index of the next landing pad in the chain, zero record if this
     is the last one.  */
  if (lp->next_lp)
    streamer_write_hwi (ob, lp->next_lp->index);
  else
    streamer_write_zero (ob);

  /* Index of the EH region this landing pad belongs to, zero record
     for none.  */
  if (lp->region)
    streamer_write_hwi (ob, lp->region->index);
  else
    streamer_write_zero (ob);

  stream_write_tree (ob, lp->post_landing_pad, true);
}
1470
1471
1472 /* Output the existing eh_table to OB. */
1473
1474 static void
1475 output_eh_regions (struct output_block *ob, struct function *fn)
1476 {
1477 if (fn->eh && fn->eh->region_tree)
1478 {
1479 unsigned i;
1480 eh_region eh;
1481 eh_landing_pad lp;
1482 tree ttype;
1483
1484 streamer_write_record_start (ob, LTO_eh_table);
1485
1486 /* Emit the index of the root of the EH region tree. */
1487 streamer_write_hwi (ob, fn->eh->region_tree->index);
1488
1489 /* Emit all the EH regions in the region array. */
1490 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1491 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1492 output_eh_region (ob, eh);
1493
1494 /* Emit all landing pads. */
1495 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1496 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1497 output_eh_lp (ob, lp);
1498
1499 /* Emit all the runtime type data. */
1500 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1501 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1502 stream_write_tree (ob, ttype, true);
1503
1504 /* Emit the table of action chains. */
1505 if (targetm.arm_eabi_unwinder)
1506 {
1507 tree t;
1508 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1509 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1510 stream_write_tree (ob, t, true);
1511 }
1512 else
1513 {
1514 uchar c;
1515 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1516 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1517 streamer_write_char_stream (ob->main_stream, c);
1518 }
1519 }
1520
1521 /* The LTO_null either terminates the record or indicates that there
1522 are no eh_records at all. */
1523 streamer_write_record_start (ob, LTO_null);
1524 }
1525
1526
1527 /* Output all of the active ssa names to the ssa_names stream. */
1528
1529 static void
1530 output_ssa_names (struct output_block *ob, struct function *fn)
1531 {
1532 unsigned int i, len;
1533
1534 len = vec_safe_length (SSANAMES (fn));
1535 streamer_write_uhwi (ob, len);
1536
1537 for (i = 1; i < len; i++)
1538 {
1539 tree ptr = (*SSANAMES (fn))[i];
1540
1541 if (ptr == NULL_TREE
1542 || SSA_NAME_IN_FREE_LIST (ptr)
1543 || virtual_operand_p (ptr))
1544 continue;
1545
1546 streamer_write_uhwi (ob, i);
1547 streamer_write_char_stream (ob->main_stream,
1548 SSA_NAME_IS_DEFAULT_DEF (ptr));
1549 if (SSA_NAME_VAR (ptr))
1550 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1551 else
1552 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1553 stream_write_tree (ob, TREE_TYPE (ptr), true);
1554 }
1555
1556 streamer_write_zero (ob);
1557 }
1558
1559
/* Output the cfg of function FN to OB.  The CFG is written to the
   dedicated cfg stream; OB->main_stream is temporarily redirected to
   it for the duration of this function so the streamer helpers can be
   reused unchanged.  */

static void
output_cfg (struct output_block *ob, struct function *fn)
{
  /* Redirect the main stream to the cfg stream; restored at the end.  */
  struct lto_output_stream *tmp_stream = ob->main_stream;
  basic_block bb;

  ob->main_stream = ob->cfg_stream;

  streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
		       profile_status_for_function (fn));

  /* Output the number of the highest basic block.  */
  streamer_write_uhwi (ob, last_basic_block_for_function (fn));

  /* For every block, write its index followed by its outgoing edges
     (destination index, probability, count, flags).  */
  FOR_ALL_BB_FN (bb, fn)
    {
      edge_iterator ei;
      edge e;

      streamer_write_hwi (ob, bb->index);

      /* Output the successors and the edge flags.  */
      streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  streamer_write_uhwi (ob, e->dest->index);
	  streamer_write_hwi (ob, e->probability);
	  streamer_write_gcov_count (ob, e->count);
	  streamer_write_uhwi (ob, e->flags);
	}
    }

  /* -1 terminates the per-block records.  */
  streamer_write_hwi (ob, -1);

  /* Emit the block chain as a sequence of next_bb indices starting
     from the entry block, again terminated by -1.  */
  bb = ENTRY_BLOCK_PTR;
  while (bb->next_bb)
    {
      streamer_write_hwi (ob, bb->next_bb->index);
      bb = bb->next_bb;
    }

  streamer_write_hwi (ob, -1);

  /* ??? The cfgloop interface is tied to cfun.  */
  gcc_assert (cfun == fn);

  /* Output the number of loops.  */
  streamer_write_uhwi (ob, number_of_loops (fn));

  /* Output each loop, skipping the tree root which has number zero.  */
  for (unsigned i = 1; i < number_of_loops (fn); ++i)
    {
      struct loop *loop = get_loop (fn, i);

      /* Write the index of the loop header.  That's enough to rebuild
	 the loop tree on the reader side.  Stream -1 for an unused
	 loop entry.  */
      if (!loop)
	{
	  streamer_write_hwi (ob, -1);
	  continue;
	}
      else
	streamer_write_hwi (ob, loop->header->index);

      /* Write everything copy_loop_info copies.  */
      streamer_write_enum (ob->main_stream,
			   loop_estimation, EST_LAST, loop->estimate_state);
      streamer_write_hwi (ob, loop->any_upper_bound);
      if (loop->any_upper_bound)
	{
	  /* Stream the wide-int bound as precision, length and the
	     individual elements.  */
	  int len = loop->nb_iterations_upper_bound.get_len ();
	  int i;

	  streamer_write_uhwi (ob, loop->nb_iterations_upper_bound.get_precision ());
	  streamer_write_uhwi (ob, len);
	  for (i = 0; i < len; i++)
	    streamer_write_hwi (ob, loop->nb_iterations_upper_bound.elt (i));
	}
      streamer_write_hwi (ob, loop->any_estimate);
      if (loop->any_estimate)
	{
	  /* Same representation as the upper bound above.  */
	  int len = loop->nb_iterations_estimate.get_len ();
	  int i;

	  streamer_write_uhwi (ob, loop->nb_iterations_estimate.get_precision ());
	  streamer_write_uhwi (ob, len);
	  for (i = 0; i < len; i++)
	    streamer_write_hwi (ob, loop->nb_iterations_estimate.elt (i));
	}
    }

  /* Restore the redirected main stream.  */
  ob->main_stream = tmp_stream;
}
1656
1657
1658 /* Create the header in the file using OB. If the section type is for
1659 a function, set FN to the decl for that function. */
1660
1661 void
1662 produce_asm (struct output_block *ob, tree fn)
1663 {
1664 enum lto_section_type section_type = ob->section_type;
1665 struct lto_function_header header;
1666 char *section_name;
1667 struct lto_output_stream *header_stream;
1668
1669 if (section_type == LTO_section_function_body)
1670 {
1671 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1672 section_name = lto_get_section_name (section_type, name, NULL);
1673 }
1674 else
1675 section_name = lto_get_section_name (section_type, NULL, NULL);
1676
1677 lto_begin_section (section_name, !flag_wpa);
1678 free (section_name);
1679
1680 /* The entire header is stream computed here. */
1681 memset (&header, 0, sizeof (struct lto_function_header));
1682
1683 /* Write the header. */
1684 header.lto_header.major_version = LTO_major_version;
1685 header.lto_header.minor_version = LTO_minor_version;
1686
1687 header.compressed_size = 0;
1688
1689 if (section_type == LTO_section_function_body)
1690 header.cfg_size = ob->cfg_stream->total_size;
1691 header.main_size = ob->main_stream->total_size;
1692 header.string_size = ob->string_stream->total_size;
1693
1694 header_stream = XCNEW (struct lto_output_stream);
1695 lto_output_data_stream (header_stream, &header, sizeof header);
1696 lto_write_stream (header_stream);
1697 free (header_stream);
1698
1699 /* Put all of the gimple and the string table out the asm file as a
1700 block of text. */
1701 if (section_type == LTO_section_function_body)
1702 lto_write_stream (ob->cfg_stream);
1703 lto_write_stream (ob->main_stream);
1704 lto_write_stream (ob->string_stream);
1705
1706 lto_end_section ();
1707 }
1708
1709
/* Output the base body of struct function FN using output block OB.
   NOTE(review): the order of writes here is the wire format; it
   presumably must mirror the reader side exactly — confirm against
   the corresponding input routine before reordering anything.  */

static void
output_struct_function_base (struct output_block *ob, struct function *fn)
{
  struct bitpack_d bp;
  unsigned i;
  tree t;

  /* Output the static chain and non-local goto save area.  */
  stream_write_tree (ob, fn->static_chain_decl, true);
  stream_write_tree (ob, fn->nonlocal_goto_save_area, true);

  /* Output all the local variables in the function.  */
  streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
  FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
    stream_write_tree (ob, t, true);

  /* Output current IL state of the function.  */
  streamer_write_uhwi (ob, fn->curr_properties);

  /* Write all the attributes for FN.  The bits are packed in the
     order listed; the two 8-bit va_list sizes follow the flags.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, fn->is_thunk, 1);
  bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
  bp_pack_value (&bp, fn->returns_pcc_struct, 1);
  bp_pack_value (&bp, fn->returns_struct, 1);
  bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
  bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
  bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
  bp_pack_value (&bp, fn->after_inlining, 1);
  bp_pack_value (&bp, fn->stdarg, 1);
  bp_pack_value (&bp, fn->has_nonlocal_label, 1);
  bp_pack_value (&bp, fn->calls_alloca, 1);
  bp_pack_value (&bp, fn->calls_setjmp, 1);
  bp_pack_value (&bp, fn->va_list_fpr_size, 8);
  bp_pack_value (&bp, fn->va_list_gpr_size, 8);

  /* Output the function start and end loci.  */
  stream_output_location (ob, &bp, fn->function_start_locus);
  stream_output_location (ob, &bp, fn->function_end_locus);

  streamer_write_bitpack (&bp);
}
1754
1755
/* Output the body of function NODE->DECL to its own
   LTO_section_function_body section.  For abstract functions (no
   gimple body) only the declaration parts needed for debug info are
   streamed.  */

static void
output_function (struct cgraph_node *node)
{
  tree function;
  struct function *fn;
  basic_block bb;
  struct output_block *ob;

  function = node->symbol.decl;
  fn = DECL_STRUCT_FUNCTION (function);
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->cgraph_node = node;

  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  /* Set current_function_decl and cfun.  */
  push_cfun (fn);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  streamer_write_record_start (ob, LTO_function);

  /* Output decls for parameters and args.  */
  stream_write_tree (ob, DECL_RESULT (function), true);
  streamer_write_chain (ob, DECL_ARGUMENTS (function), true);

  /* Output DECL_INITIAL for the function, which contains the tree of
     lexical scopes.  */
  stream_write_tree (ob, DECL_INITIAL (function), true);

  /* We also stream abstract functions where we stream only stuff needed for
     debug info.  A leading uhwi 1/0 tells the reader whether a body
     follows.  */
  if (gimple_has_body_p (function))
    {
      streamer_write_uhwi (ob, 1);
      output_struct_function_base (ob, fn);

      /* Output all the SSA names used in the function.  */
      output_ssa_names (ob, fn);

      /* Output any exception handling regions.  */
      output_eh_regions (ob, fn);


      /* We will renumber the statements.  The code that does this uses
	 the same ordering that we use for serializing them so we can use
	 the same code on the other end and not have to write out the
	 statement numbers.  We do not assign UIDs to PHIs here because
	 virtual PHIs get re-computed on-the-fly which would make numbers
	 inconsistent.  */
      set_gimple_stmt_max_uid (cfun, 0);
      FOR_ALL_BB (bb)
	{
	  gimple_stmt_iterator gsi;
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);

	      /* Virtual PHIs are not going to be streamed.  */
	      if (!virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}
      /* To avoid keeping duplicate gimple IDs in the statements, renumber
	 virtual phis now.  */
      FOR_ALL_BB (bb)
	{
	  gimple_stmt_iterator gsi;
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      if (virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}

      /* Output the code for the function.  */
      FOR_ALL_BB_FN (bb, fn)
	output_bb (ob, bb, fn);

      /* The terminator for this function.  */
      streamer_write_record_start (ob, LTO_null);

      output_cfg (ob, fn);

      pop_cfun ();
   }
  else
    streamer_write_uhwi (ob, 0);

  /* Create a section to hold the pickled output of this function.  */
  produce_asm (ob, function);

  destroy_output_block (ob);
}
1861
1862
/* Emit toplevel asms to their own LTO_section_asm section.  Each asm
   node is streamed as (string, order); a NULL string terminates the
   list.  Does nothing when there are no asm nodes.  */

void
lto_output_toplevel_asms (void)
{
  struct output_block *ob;
  struct asm_node *can;
  char *section_name;
  struct lto_output_stream *header_stream;
  struct lto_asm_header header;

  if (! asm_nodes)
    return;

  ob = create_output_block (LTO_section_asm);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  for (can = asm_nodes; can; can = can->next)
    {
      streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
      streamer_write_hwi (ob, can->order);
    }

  /* NULL string terminates the asm list.  */
  streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);

  section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header stream is computed here.  */
  memset (&header, 0, sizeof (header));

  /* Write the header.  */
  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  header_stream = XCNEW (struct lto_output_stream);
  lto_output_data_stream (header_stream, &header, sizeof (header));
  lto_write_stream (header_stream);
  free (header_stream);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  destroy_output_block (ob);
}
1918
1919
/* Copy the function body of NODE without deserializing: the section
   bytes from the input object file are written to the output verbatim,
   and the per-function decl streams are copied over so the tree
   references stay valid.  */

static void
copy_function (struct cgraph_node *node)
{
  tree function = node->symbol.decl;
  struct lto_file_decl_data *file_data = node->symbol.lto_file_data;
  struct lto_output_stream *output_stream = XCNEW (struct lto_output_stream);
  const char *data;
  size_t len;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
  char *section_name =
    lto_get_section_name (LTO_section_function_body, name, NULL);
  size_t i, j;
  struct lto_in_decl_state *in_state;
  struct lto_out_decl_state *out_state = lto_get_out_decl_state ();

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* We may have renamed the declaration, e.g., a static function.  */
  name = lto_get_decl_name_mapping (file_data, name);

  data = lto_get_section_data (file_data, LTO_section_function_body,
                               name, &len);
  gcc_assert (data);

  /* Do a bit copy of the function body.  */
  lto_output_data_stream (output_stream, data, len);
  lto_write_stream (output_stream);

  /* Copy decls.  */
  in_state =
    lto_get_function_in_decl_state (node->symbol.lto_file_data, function);
  gcc_assert (in_state);

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      size_t n = in_state->streams[i].size;
      tree *trees = in_state->streams[i].trees;
      struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);

      /* The out state must have the same indices as the in state.
	 So just copy the vector.  All the encoders in the in state
	 must be empty where we reach here.  */
      gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
      encoder->trees.reserve_exact (n);
      for (j = 0; j < n; j++)
	encoder->trees.safe_push (trees[j]);
    }

  /* Release the mapped section data and the temporary stream.  */
  lto_free_section_data (file_data, LTO_section_function_body, name,
			 data, len);
  free (output_stream);
  lto_end_section ();
}
1976
1977
/* Main entry point from the pass manager.  Streams out the body of
   every encoded function that has one (or bit-copies it when no
   gimple body is in memory and this is not WPA), then emits the
   symbol table.  */

static void
lto_output (void)
{
  struct lto_out_decl_state *decl_state;
#ifdef ENABLE_CHECKING
  /* Bitmap of DECL_UIDs already emitted, to catch double output.  */
  bitmap output = lto_bitmap_alloc ();
#endif
  int i, n_nodes;
  lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* Initialize the streamer.  */
  lto_streamer_init ();

  n_nodes = lto_symtab_encoder_size (encoder);
  /* Process only the functions with bodies.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node snode = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *node = dyn_cast <cgraph_node> (snode);
      if (node
	  && lto_symtab_encoder_encode_body_p (encoder, node)
	  && !node->symbol.alias)
	{
#ifdef ENABLE_CHECKING
	  gcc_assert (!bitmap_bit_p (output, DECL_UID (node->symbol.decl)));
	  bitmap_set_bit (output, DECL_UID (node->symbol.decl));
#endif
	  /* Each function body gets its own out-decl-state, recorded
	     afterwards so the decl streams can be found again.  */
	  decl_state = lto_new_out_decl_state ();
	  lto_push_out_decl_state (decl_state);
	  if (gimple_has_body_p (node->symbol.decl) || !flag_wpa)
	    output_function (node);
	  else
	    copy_function (node);
	  gcc_assert (lto_get_out_decl_state () == decl_state);
	  lto_pop_out_decl_state ();
	  lto_record_function_out_decl_state (node->symbol.decl, decl_state);
	}
    }

  /* Emit the callgraph after emitting function bodies.  This needs to
     be done now to make sure that all the statements in every function
     have been renumbered so that edges can be associated with call
     statements using the statement UIDs.  */
  output_symtab ();

#ifdef ENABLE_CHECKING
  lto_bitmap_free (output);
#endif
}
2029
namespace {

/* Pass descriptor for the GIMPLE body streaming pass.  Note there is
   no execute function; all output happens through the write_summary
   hooks wired into the constructor below.  */

const pass_data pass_data_ipa_lto_gimple_out =
{
  IPA_PASS, /* type */
  "lto_gimple_out", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  false, /* has_execute */
  TV_IPA_LTO_GIMPLE_OUT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* IPA pass that streams GIMPLE function bodies to the LTO sections.
   lto_output serves both as the regular and the optimization summary
   writer, so bodies are emitted in LGEN and in WPA mode alike.  */

class pass_ipa_lto_gimple_out : public ipa_opt_pass_d
{
public:
  pass_ipa_lto_gimple_out (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_lto_gimple_out, ctxt,
		      NULL, /* generate_summary */
		      lto_output, /* write_summary */
		      NULL, /* read_summary */
		      lto_output, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate () { return gate_lto_out (); }

}; // class pass_ipa_lto_gimple_out

} // anon namespace
2069
2070 ipa_opt_pass_d *
2071 make_pass_ipa_lto_gimple_out (gcc::context *ctxt)
2072 {
2073 return new pass_ipa_lto_gimple_out (ctxt);
2074 }
2075
2076
2077 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2078 from it and required for correct representation of its semantics.
2079 Each node in ENCODER must be a global declaration or a type. A node
2080 is written only once, even if it appears multiple times in the
2081 vector. Certain transitively-reachable nodes, such as those
2082 representing expressions, may be duplicated, but such nodes
2083 must not appear in ENCODER itself. */
2084
2085 static void
2086 write_global_stream (struct output_block *ob,
2087 struct lto_tree_ref_encoder *encoder)
2088 {
2089 tree t;
2090 size_t index;
2091 const size_t size = lto_tree_ref_encoder_size (encoder);
2092
2093 for (index = 0; index < size; index++)
2094 {
2095 t = lto_tree_ref_encoder_get_tree (encoder, index);
2096 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2097 stream_write_tree (ob, t, false);
2098 }
2099 }
2100
2101
2102 /* Write a sequence of indices into the globals vector corresponding
2103 to the trees in ENCODER. These are used by the reader to map the
2104 indices used to refer to global entities within function bodies to
2105 their referents. */
2106
2107 static void
2108 write_global_references (struct output_block *ob,
2109 struct lto_output_stream *ref_stream,
2110 struct lto_tree_ref_encoder *encoder)
2111 {
2112 tree t;
2113 uint32_t index;
2114 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2115
2116 /* Write size as 32-bit unsigned. */
2117 lto_output_data_stream (ref_stream, &size, sizeof (int32_t));
2118
2119 for (index = 0; index < size; index++)
2120 {
2121 uint32_t slot_num;
2122
2123 t = lto_tree_ref_encoder_get_tree (encoder, index);
2124 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2125 gcc_assert (slot_num != (unsigned)-1);
2126 lto_output_data_stream (ref_stream, &slot_num, sizeof slot_num);
2127 }
2128 }
2129
2130
2131 /* Write all the streams in an lto_out_decl_state STATE using
2132 output block OB and output stream OUT_STREAM. */
2133
2134 void
2135 lto_output_decl_state_streams (struct output_block *ob,
2136 struct lto_out_decl_state *state)
2137 {
2138 int i;
2139
2140 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2141 write_global_stream (ob, &state->streams[i]);
2142 }
2143
2144
2145 /* Write all the references in an lto_out_decl_state STATE using
2146 output block OB and output stream OUT_STREAM. */
2147
2148 void
2149 lto_output_decl_state_refs (struct output_block *ob,
2150 struct lto_output_stream *out_stream,
2151 struct lto_out_decl_state *state)
2152 {
2153 unsigned i;
2154 uint32_t ref;
2155 tree decl;
2156
2157 /* Write reference to FUNCTION_DECL. If there is not function,
2158 write reference to void_type_node. */
2159 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2160 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2161 gcc_assert (ref != (unsigned)-1);
2162 lto_output_data_stream (out_stream, &ref, sizeof (uint32_t));
2163
2164 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2165 write_global_references (ob, out_stream, &state->streams[i]);
2166 }
2167
2168
2169 /* Return the written size of STATE. */
2170
2171 static size_t
2172 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2173 {
2174 int i;
2175 size_t size;
2176
2177 size = sizeof (int32_t); /* fn_ref. */
2178 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2179 {
2180 size += sizeof (int32_t); /* vector size. */
2181 size += (lto_tree_ref_encoder_size (&state->streams[i])
2182 * sizeof (int32_t));
2183 }
2184 return size;
2185 }
2186
2187
/* Write symbol T into STREAM in CACHE.  SEEN specifies symbols we wrote
   so far (keyed by mangled assembler name), used to suppress duplicate
   entries.  ALIAS is true when T is being written on behalf of an alias,
   which relaxes the definition sanity checks below.  The record format
   consumed by the linker plugin is: NUL-terminated name, NUL-terminated
   comdat group, 1-byte kind, 1-byte visibility, 8-byte size, 4-byte
   slot number.  */

static void
write_symbol (struct streamer_tree_cache_d *cache,
	      struct lto_output_stream *stream,
	      tree t, struct pointer_set_t *seen, bool alias)
{
  const char *name;
  enum gcc_plugin_symbol_kind kind;
  enum gcc_plugin_symbol_visibility visibility;
  unsigned slot_num;
  unsigned HOST_WIDEST_INT size;
  const char *comdat;
  unsigned char c;

  /* None of the following kinds of symbols are needed in the
     symbol table.  */
  if (!TREE_PUBLIC (t)
      || is_builtin_fn (t)
      || DECL_ABSTRACT (t)
      || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
    return;
  gcc_assert (TREE_CODE (t) != RESULT_DECL);

  /* Only variables and functions can appear in the plugin table.  */
  gcc_assert (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == FUNCTION_DECL);

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));

  /* This behaves like assemble_name_raw in varasm.c, performing the
     same name manipulations that ASM_OUTPUT_LABELREF does. */
  name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));

  /* Deduplicate on the mangled name, not the decl: distinct decls may
     share an assembler name.  */
  if (pointer_set_contains (seen, name))
    return;
  pointer_set_insert (seen, name);

  streamer_tree_cache_lookup (cache, t, &slot_num);
  gcc_assert (slot_num != (unsigned)-1);

  /* Classify the symbol for the plugin: undefined (possibly weak)
     for externals, otherwise weak/common/plain definition.  */
  if (DECL_EXTERNAL (t))
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKUNDEF;
      else
	kind = GCCPK_UNDEF;
    }
  else
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKDEF;
      else if (DECL_COMMON (t))
	kind = GCCPK_COMMON;
      else
	kind = GCCPK_DEF;

      /* When something is defined, it should have node attached.  */
      gcc_assert (alias || TREE_CODE (t) != VAR_DECL
		  || varpool_get_node (t)->symbol.definition);
      gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
		  || (cgraph_get_node (t)
		      && cgraph_get_node (t)->symbol.definition));
    }

  /* Imitate what default_elf_asm_output_external do.
     When symbol is external, we need to output it with DEFAULT visibility
     when compiling with -fvisibility=default, while with HIDDEN visibility
     when symbol has attribute (visibility("hidden")) specified.
     targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
     right. */

  if (DECL_EXTERNAL (t)
      && !targetm.binds_local_p (t))
    visibility = GCCPV_DEFAULT;
  else
    switch (DECL_VISIBILITY (t))
      {
      case VISIBILITY_DEFAULT:
	visibility = GCCPV_DEFAULT;
	break;
      case VISIBILITY_PROTECTED:
	visibility = GCCPV_PROTECTED;
	break;
      case VISIBILITY_HIDDEN:
	visibility = GCCPV_HIDDEN;
	break;
      case VISIBILITY_INTERNAL:
	visibility = GCCPV_INTERNAL;
	break;
      }

  /* Only common symbols carry a size; the linker uses it to pick the
     largest common block.  Everything else streams 0.  */
  if (kind == GCCPK_COMMON
      && DECL_SIZE_UNIT (t)
      && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
    size = tree_to_hwi (DECL_SIZE_UNIT (t));
  else
    size = 0;

  if (DECL_ONE_ONLY (t))
    comdat = IDENTIFIER_POINTER (DECL_COMDAT_GROUP (t));
  else
    comdat = "";

  /* Emit the fixed-layout record.  The 8- and 4-byte widths below are
     part of the plugin ABI, hence the literal sizes rather than
     sizeof on host types.  */
  lto_output_data_stream (stream, name, strlen (name) + 1);
  lto_output_data_stream (stream, comdat, strlen (comdat) + 1);
  c = (unsigned char) kind;
  lto_output_data_stream (stream, &c, 1);
  c = (unsigned char) visibility;
  lto_output_data_stream (stream, &c, 1);
  lto_output_data_stream (stream, &size, 8);
  lto_output_data_stream (stream, &slot_num, 4);
}
2301
2302 /* Return true if NODE should appear in the plugin symbol table. */
2303
2304 bool
2305 output_symbol_p (symtab_node node)
2306 {
2307 struct cgraph_node *cnode;
2308 if (!symtab_real_symbol_p (node))
2309 return false;
2310 /* We keep external functions in symtab for sake of inlining
2311 and devirtualization. We do not want to see them in symbol table as
2312 references unless they are really used. */
2313 cnode = dyn_cast <cgraph_node> (node);
2314 if (cnode && (!node->symbol.definition || DECL_EXTERNAL (cnode->symbol.decl))
2315 && cnode->callers)
2316 return true;
2317
2318 /* Ignore all references from external vars initializers - they are not really
2319 part of the compilation unit until they are used by folding. Some symbols,
2320 like references to external construction vtables can not be referred to at all.
2321 We decide this at can_refer_decl_in_current_unit_p. */
2322 if (!node->symbol.definition || DECL_EXTERNAL (node->symbol.decl))
2323 {
2324 int i;
2325 struct ipa_ref *ref;
2326 for (i = 0; ipa_ref_list_referring_iterate (&node->symbol.ref_list,
2327 i, ref); i++)
2328 {
2329 if (ref->use == IPA_REF_ALIAS)
2330 continue;
2331 if (is_a <cgraph_node> (ref->referring))
2332 return true;
2333 if (!DECL_EXTERNAL (ref->referring->symbol.decl))
2334 return true;
2335 }
2336 return false;
2337 }
2338 return true;
2339 }
2340
2341
2342 /* Write an IL symbol table to OB.
2343 SET and VSET are cgraph/varpool node sets we are outputting. */
2344
2345 static void
2346 produce_symtab (struct output_block *ob)
2347 {
2348 struct streamer_tree_cache_d *cache = ob->writer_cache;
2349 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2350 struct pointer_set_t *seen;
2351 struct lto_output_stream stream;
2352 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2353 lto_symtab_encoder_iterator lsei;
2354
2355 lto_begin_section (section_name, false);
2356 free (section_name);
2357
2358 seen = pointer_set_create ();
2359 memset (&stream, 0, sizeof (stream));
2360
2361 /* Write the symbol table.
2362 First write everything defined and then all declarations.
2363 This is necessary to handle cases where we have duplicated symbols. */
2364 for (lsei = lsei_start (encoder);
2365 !lsei_end_p (lsei); lsei_next (&lsei))
2366 {
2367 symtab_node node = lsei_node (lsei);
2368
2369 if (!output_symbol_p (node) || DECL_EXTERNAL (node->symbol.decl))
2370 continue;
2371 write_symbol (cache, &stream, node->symbol.decl, seen, false);
2372 }
2373 for (lsei = lsei_start (encoder);
2374 !lsei_end_p (lsei); lsei_next (&lsei))
2375 {
2376 symtab_node node = lsei_node (lsei);
2377
2378 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->symbol.decl))
2379 continue;
2380 write_symbol (cache, &stream, node->symbol.decl, seen, false);
2381 }
2382
2383 lto_write_stream (&stream);
2384 pointer_set_destroy (seen);
2385
2386 lto_end_section ();
2387 }
2388
2389
/* This pass is run after all of the functions are serialized and all
   of the IPA passes have written their serialized forms.  This pass
   causes the vector of all of the global decls and types used from
   this file to be written in to a section that can then be read in to
   recover these on other side.  The section layout is: header,
   decl-state count and records, main tree stream, string stream.  */

static void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  struct lto_output_stream *header_stream, *decl_state_stream;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);
  ob->global = true;

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string. */
  streamer_write_char_stream (ob->string_stream, 0);

  /* All alias pairs must have been resolved by now.  */
  gcc_assert (!alias_pairs);

  /* Write the global symbols.  The global decl state comes first,
     then the per-function decl states recorded while streaming
     bodies earlier.  */
  out_state = lto_get_out_decl_state ();
  num_fns = lto_function_decl_states.length ();
  lto_output_decl_state_streams (ob, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended.  */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states.  One int32_t for
     the state count, plus the written size of each state.  */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  /* Stream the header out through a temporary stream.  */
  header_stream = XCNEW (struct lto_output_stream);
  lto_output_data_stream (header_stream, &header, sizeof header);
  lto_write_stream (header_stream);
  free (header_stream);

  /* Write the main out-decl state, followed by out-decl states of
     functions. */
  decl_state_stream = XCNEW (struct lto_output_stream);
  /* NUM_DECL_STATES counts the global state plus one per function.  */
  num_decl_states = num_fns + 1;
  lto_output_data_stream (decl_state_stream, &num_decl_states,
			  sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, decl_state_stream, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, decl_state_stream, fn_out_state);
    }
  lto_write_stream (decl_state_stream);
  free (decl_state_stream);

  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table. It is used by linker to determine dependencies
     and thus we can skip it for WPA. */
  if (!flag_wpa)
    produce_symtab (ob);

  /* Write command line opts. */
  lto_write_options ();

  /* Deallocate memory and clean up. */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
}
2500
2501
namespace {

/* Pass descriptor for the final decl-streaming pass.  Like the GIMPLE
   out pass above, it has no execute function; the work happens in the
   summary-writing hooks.  */

const pass_data pass_data_ipa_lto_finish_out =
{
  IPA_PASS, /* type */
  "lto_decls_out", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  false, /* has_execute */
  TV_IPA_LTO_DECL_OUT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* IPA pass that emits the global decl/type section after all function
   bodies and IPA summaries have been streamed; runs
   produce_asm_for_decls for both summary kinds.  */

class pass_ipa_lto_finish_out : public ipa_opt_pass_d
{
public:
  pass_ipa_lto_finish_out (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_lto_finish_out, ctxt,
		      NULL, /* generate_summary */
		      produce_asm_for_decls, /* write_summary */
		      NULL, /* read_summary */
		      produce_asm_for_decls, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate () { return gate_lto_out (); }

}; // class pass_ipa_lto_finish_out

} // anon namespace
2541
2542 ipa_opt_pass_d *
2543 make_pass_ipa_lto_finish_out (gcc::context *ctxt)
2544 {
2545 return new pass_ipa_lto_finish_out (ctxt);
2546 }