1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2013 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "expr.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "hashtab.h"
33 #include "basic-block.h"
34 #include "tree-flow.h"
35 #include "tree-pass.h"
36 #include "cgraph.h"
37 #include "function.h"
38 #include "ggc.h"
39 #include "diagnostic-core.h"
40 #include "except.h"
41 #include "vec.h"
42 #include "lto-symtab.h"
43 #include "lto-streamer.h"
44 #include "data-streamer.h"
45 #include "gimple-streamer.h"
46 #include "tree-streamer.h"
47 #include "streamer-hooks.h"
48 #include "cfgloop.h"
49
50
51 /* Clear the line info stored in OB.  */
52
53 static void
54 clear_line_info (struct output_block *ob)
55 {
56 ob->current_file = NULL;
57 ob->current_line = 0;
58 ob->current_col = 0;
59 }
60
61
62 /* Create the output block and return it. SECTION_TYPE is
63    LTO_section_function_body or LTO_section_static_initializer.  */
64
65 struct output_block *
66 create_output_block (enum lto_section_type section_type)
67 {
68 struct output_block *ob = XCNEW (struct output_block);
69
70 ob->section_type = section_type;
71 ob->decl_state = lto_get_out_decl_state ();
72 ob->main_stream = XCNEW (struct lto_output_stream);
73 ob->string_stream = XCNEW (struct lto_output_stream);
74 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true);
75
76 if (section_type == LTO_section_function_body)
77 ob->cfg_stream = XCNEW (struct lto_output_stream);
78
79 clear_line_info (ob);
80
81 ob->string_hash_table.create (37);
82 gcc_obstack_init (&ob->obstack);
83
84 return ob;
85 }
86
87
88 /* Destroy the output block OB. */
89
90 void
91 destroy_output_block (struct output_block *ob)
92 {
93 enum lto_section_type section_type = ob->section_type;
94
95 ob->string_hash_table.dispose ();
96
97 free (ob->main_stream);
98 free (ob->string_stream);
99 if (section_type == LTO_section_function_body)
100 free (ob->cfg_stream);
101
102 streamer_tree_cache_delete (ob->writer_cache);
103 obstack_free (&ob->obstack, NULL);
104
105 free (ob);
106 }
107
108
109 /* Look up NODE in the type table and write the index for it to OB. */
110
111 static void
112 output_type_ref (struct output_block *ob, tree node)
113 {
114 streamer_write_record_start (ob, LTO_type_ref);
115 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
116 }
117
118
119 /* Return true if tree node T is written to various tables. For these
120    nodes, we sometimes want to write their physical representation
121 (via lto_output_tree), and sometimes we need to emit an index
122 reference into a table (via lto_output_tree_ref). */
123
124 static bool
125 tree_is_indexable (tree t)
126 {
127 if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
128 return false;
129 else if (TREE_CODE (t) == VAR_DECL && decl_function_context (t)
130 && !TREE_STATIC (t))
131 return false;
132 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
133 return false;
134 /* Variably modified types need to be streamed alongside function
135 bodies because they can refer to local entities. Together with
136 them we have to localize their members as well.
137 ??? In theory that includes non-FIELD_DECLs as well. */
138 else if (TYPE_P (t)
139 && variably_modified_type_p (t, NULL_TREE))
140 return false;
141 else if (TREE_CODE (t) == FIELD_DECL
142 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
143 return false;
144 else
145 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
146 }
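
/* For example, a PARM_DECL or a non-static local VAR_DECL is not
   indexable and is streamed as part of the function body that uses it,
   whereas a FUNCTION_DECL, a file-scope VAR_DECL or a (non variably
   modified) type is indexable and is emitted as an index into the
   per-file decl/type tables.  */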
147
148
149 /* Output information about location LOC into bitpack BP.  The file,
150    line and column are streamed only when they differ from the last
151    location written to OB.  */
152
153 void
154 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
155 location_t loc)
156 {
157 expanded_location xloc;
158
159 loc = LOCATION_LOCUS (loc);
160 bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
161 if (loc == UNKNOWN_LOCATION)
162 return;
163
164 xloc = expand_location (loc);
165
166 bp_pack_value (bp, ob->current_file != xloc.file, 1);
167 bp_pack_value (bp, ob->current_line != xloc.line, 1);
168 bp_pack_value (bp, ob->current_col != xloc.column, 1);
169
170 if (ob->current_file != xloc.file)
171 bp_pack_var_len_unsigned (bp,
172 streamer_string_index (ob, xloc.file,
173 strlen (xloc.file) + 1,
174 true));
175 ob->current_file = xloc.file;
176
177 if (ob->current_line != xloc.line)
178 bp_pack_var_len_unsigned (bp, xloc.line);
179 ob->current_line = xloc.line;
180
181 if (ob->current_col != xloc.column)
182 bp_pack_var_len_unsigned (bp, xloc.column);
183 ob->current_col = xloc.column;
184 }
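
/* For example, if the location last streamed to OB was foo.c:10:3 and
   LOC expands to foo.c:12:3, the bitpack receives the bits 0 (known
   location), 0 (same file), 1 (line changed), 0 (same column), followed
   by the new line number 12 as a variable-length unsigned value; the
   file name and column are not re-emitted.

   A minimal usage sketch (illustrative only; it mirrors callers in this
   file such as output_eh_region):

     bitpack_d bp = bitpack_create (ob->main_stream);
     lto_output_location (ob, &bp, loc);
     streamer_write_bitpack (&bp);  */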
185
186
187 /* If EXPR is an indexable tree node, output a reference to it to
188 output block OB. Otherwise, output the physical representation of
189 EXPR to OB. */
190
191 static void
192 lto_output_tree_ref (struct output_block *ob, tree expr)
193 {
194 enum tree_code code;
195
196 if (TYPE_P (expr))
197 {
198 output_type_ref (ob, expr);
199 return;
200 }
201
202 code = TREE_CODE (expr);
203 switch (code)
204 {
205 case SSA_NAME:
206 streamer_write_record_start (ob, LTO_ssa_name_ref);
207 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
208 break;
209
210 case FIELD_DECL:
211 streamer_write_record_start (ob, LTO_field_decl_ref);
212 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
213 break;
214
215 case FUNCTION_DECL:
216 streamer_write_record_start (ob, LTO_function_decl_ref);
217 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
218 break;
219
220 case VAR_DECL:
221 case DEBUG_EXPR_DECL:
222 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
223 case PARM_DECL:
224 streamer_write_record_start (ob, LTO_global_decl_ref);
225 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
226 break;
227
228 case CONST_DECL:
229 streamer_write_record_start (ob, LTO_const_decl_ref);
230 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
231 break;
232
233 case IMPORTED_DECL:
234 gcc_assert (decl_function_context (expr) == NULL);
235 streamer_write_record_start (ob, LTO_imported_decl_ref);
236 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
237 break;
238
239 case TYPE_DECL:
240 streamer_write_record_start (ob, LTO_type_decl_ref);
241 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
242 break;
243
244 case NAMESPACE_DECL:
245 streamer_write_record_start (ob, LTO_namespace_decl_ref);
246 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
247 break;
248
249 case LABEL_DECL:
250 streamer_write_record_start (ob, LTO_label_decl_ref);
251 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
252 break;
253
254 case RESULT_DECL:
255 streamer_write_record_start (ob, LTO_result_decl_ref);
256 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
257 break;
258
259 case TRANSLATION_UNIT_DECL:
260 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
261 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
262 break;
263
264 default:
265 /* No other node is indexable, so it should have been handled by
266 lto_output_tree. */
267 gcc_unreachable ();
268 }
269 }
270
271
272 /* Return true if EXPR is a tree node that can be written to disk. */
273
274 static inline bool
275 lto_is_streamable (tree expr)
276 {
277 enum tree_code code = TREE_CODE (expr);
278
279 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
280 name version in lto_output_tree_ref (see output_ssa_names). */
281 return !is_lang_specific (expr)
282 && code != SSA_NAME
283 && code != CALL_EXPR
284 && code != LANG_TYPE
285 && code != MODIFY_EXPR
286 && code != INIT_EXPR
287 && code != TARGET_EXPR
288 && code != BIND_EXPR
289 && code != WITH_CLEANUP_EXPR
290 && code != STATEMENT_LIST
291 && code != OMP_CLAUSE
292 && (code == CASE_LABEL_EXPR
293 || code == DECL_EXPR
294 || TREE_CODE_CLASS (code) != tcc_statement);
295 }
296
297
298 /* Look up and return what we want to stream to OB as DECL_INITIAL for EXPR. */
299
300 static tree
301 get_symbol_initial_value (struct output_block *ob, tree expr)
302 {
303 gcc_checking_assert (DECL_P (expr)
304 && TREE_CODE (expr) != FUNCTION_DECL
305 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
306
307 /* Handle DECL_INITIAL for symbols. */
308 tree initial = DECL_INITIAL (expr);
309 if (TREE_CODE (expr) == VAR_DECL
310 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
311 && !DECL_IN_CONSTANT_POOL (expr)
312 && initial)
313 {
314 lto_symtab_encoder_t encoder;
315 struct varpool_node *vnode;
316
317 encoder = ob->decl_state->symtab_node_encoder;
318 vnode = varpool_get_node (expr);
319 if (!vnode
320 || !lto_symtab_encoder_encode_initializer_p (encoder,
321 vnode))
322 initial = error_mark_node;
323 }
324
325 return initial;
326 }
327
328
329 /* Write a physical representation of tree node EXPR to output block
330    OB, without its header or trailing end marker.  If REF_P is true,
331    the leaves of EXPR are emitted as references via
332    lto_output_tree_ref.  */
333
334 static void
335 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
336 {
337 /* Pack all the non-pointer fields in EXPR into a bitpack and write
338 the resulting bitpack. */
339 bitpack_d bp = bitpack_create (ob->main_stream);
340 streamer_pack_tree_bitfields (ob, &bp, expr);
341 streamer_write_bitpack (&bp);
342
343 /* Write all the pointer fields in EXPR. */
344 streamer_write_tree_body (ob, expr, ref_p);
345
346 /* Write any LTO-specific data to OB. */
347 if (DECL_P (expr)
348 && TREE_CODE (expr) != FUNCTION_DECL
349 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
350 {
351 /* Handle DECL_INITIAL for symbols. */
352 tree initial = get_symbol_initial_value (ob, expr);
353 stream_write_tree (ob, initial, ref_p);
354 }
355 }
356
357 /* Write a physical representation of tree node EXPR to output block
358    OB.  If REF_P is true, the leaves of EXPR are emitted as references
359    via lto_output_tree_ref.  The representation is terminated by a
360    zero that marks the end of EXPR.  */
361
362 static void
363 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
364 {
365 if (!lto_is_streamable (expr))
366 internal_error ("tree code %qs is not supported in LTO streams",
367 tree_code_name[TREE_CODE (expr)]);
368
369 /* Write the header, containing everything needed to materialize
370 EXPR on the reading side. */
371 streamer_write_tree_header (ob, expr);
372
373 lto_write_tree_1 (ob, expr, ref_p);
374
375 /* Mark the end of EXPR. */
376 streamer_write_zero (ob);
377 }
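
/* A tree streamed by lto_write_tree therefore appears in the output as:
   a header record (streamer_write_tree_header), a bitpack of the
   non-pointer fields, the pointer fields (streamed inline or as
   references depending on REF_P), an optional DECL_INITIAL for variable
   symbols, and a terminating zero.  */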
378
379 /* Emit the physical representation of tree node EXPR to output block
380    OB, entering it into the writer cache with hash value HASH.  REF_P
381    and THIS_REF_P are as for lto_output_tree.  */
382
383 static void
384 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
385 bool ref_p, bool this_ref_p)
386 {
387 unsigned ix;
388
389 gcc_checking_assert (expr != NULL_TREE
390 && !(this_ref_p && tree_is_indexable (expr)));
391
392 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
393 expr, hash, &ix);
394 gcc_assert (!exists_p);
395 if (streamer_handle_as_builtin_p (expr))
396 {
397 /* MD and NORMAL builtins do not need to be written out
398 completely as they are always instantiated by the
399 compiler on startup. The only builtins that need to
400 be written out are BUILT_IN_FRONTEND. For all other
401 builtins, we simply write the class and code. */
402 streamer_write_builtin (ob, expr);
403 }
404 else if (TREE_CODE (expr) == INTEGER_CST
405 && !TREE_OVERFLOW (expr))
406 {
407 /* Shared INTEGER_CST nodes are special because they need their
408 original type to be materialized by the reader (to implement
409 TYPE_CACHED_VALUES). */
410 streamer_write_integer_cst (ob, expr, ref_p);
411 }
412 else
413 {
414 /* This is the first time we see EXPR, write its fields
415 to OB. */
416 lto_write_tree (ob, expr, ref_p);
417 }
418 }
419
420 struct sccs
421 {
422 unsigned int dfsnum;
423 unsigned int low;
424 };
425
426 struct scc_entry
427 {
428 tree t;
429 hashval_t hash;
430 };
431
432 static unsigned int next_dfs_num;
433 static vec<scc_entry> sccstack;
434 static struct pointer_map_t *sccstate;
435 static struct obstack sccstate_obstack;
436
437 static void
438 DFS_write_tree (struct output_block *ob, sccs *from_state,
439 tree expr, bool ref_p, bool this_ref_p);
440
441 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
442 DFS recurse for all tree edges originating from it. */
443
444 static void
445 DFS_write_tree_body (struct output_block *ob,
446 tree expr, sccs *expr_state, bool ref_p)
447 {
448 #define DFS_follow_tree_edge(DEST) \
449 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
450
451 enum tree_code code;
452
453 code = TREE_CODE (expr);
454
455 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
456 {
457 if (TREE_CODE (expr) != IDENTIFIER_NODE)
458 DFS_follow_tree_edge (TREE_TYPE (expr));
459 }
460
461 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
462 {
463 for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
464 DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
465 }
466
467 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
468 {
469 DFS_follow_tree_edge (TREE_REALPART (expr));
470 DFS_follow_tree_edge (TREE_IMAGPART (expr));
471 }
472
473 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
474 {
475 /* Drop names that were created for anonymous entities. */
476 if (DECL_NAME (expr)
477 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
478 && ANON_AGGRNAME_P (DECL_NAME (expr)))
479 ;
480 else
481 DFS_follow_tree_edge (DECL_NAME (expr));
482 DFS_follow_tree_edge (DECL_CONTEXT (expr));
483 }
484
485 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
486 {
487 DFS_follow_tree_edge (DECL_SIZE (expr));
488 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
489
490 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
491 special handling in LTO, it must be handled by streamer hooks. */
492
493 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
494
495 /* Do not follow DECL_ABSTRACT_ORIGIN. We cannot handle debug information
496 for early inlining so drop it on the floor instead of ICEing in
497 dwarf2out.c. */
498
499 if ((TREE_CODE (expr) == VAR_DECL
500 || TREE_CODE (expr) == PARM_DECL)
501 && DECL_HAS_VALUE_EXPR_P (expr))
502 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
503 if (TREE_CODE (expr) == VAR_DECL)
504 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
505 }
506
507 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
508 {
509 if (TREE_CODE (expr) == TYPE_DECL)
510 DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
511 DFS_follow_tree_edge (DECL_VINDEX (expr));
512 }
513
514 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
515 {
516 /* Make sure we don't inadvertently set the assembler name. */
517 if (DECL_ASSEMBLER_NAME_SET_P (expr))
518 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
519 DFS_follow_tree_edge (DECL_SECTION_NAME (expr));
520 DFS_follow_tree_edge (DECL_COMDAT_GROUP (expr));
521 }
522
523 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
524 {
525 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
526 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
527 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
528 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
529 DFS_follow_tree_edge (DECL_FCONTEXT (expr));
530 }
531
532 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
533 {
534 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
535 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
536 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
537 }
538
539 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
540 {
541 DFS_follow_tree_edge (TYPE_SIZE (expr));
542 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
543 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
544 DFS_follow_tree_edge (TYPE_NAME (expr));
545 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
546 reconstructed during fixup. */
547 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
548 during fixup. */
549 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
550 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
551 /* TYPE_CANONICAL is re-computed during type merging, so no need
552 to follow it here. */
553 DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
554 }
555
556 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
557 {
558 if (TREE_CODE (expr) == ENUMERAL_TYPE)
559 DFS_follow_tree_edge (TYPE_VALUES (expr));
560 else if (TREE_CODE (expr) == ARRAY_TYPE)
561 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
562 else if (RECORD_OR_UNION_TYPE_P (expr))
563 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
564 DFS_follow_tree_edge (t);
565 else if (TREE_CODE (expr) == FUNCTION_TYPE
566 || TREE_CODE (expr) == METHOD_TYPE)
567 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
568
569 if (!POINTER_TYPE_P (expr))
570 DFS_follow_tree_edge (TYPE_MINVAL (expr));
571 DFS_follow_tree_edge (TYPE_MAXVAL (expr));
572 if (RECORD_OR_UNION_TYPE_P (expr))
573 DFS_follow_tree_edge (TYPE_BINFO (expr));
574 }
575
576 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
577 {
578 DFS_follow_tree_edge (TREE_PURPOSE (expr));
579 DFS_follow_tree_edge (TREE_VALUE (expr));
580 DFS_follow_tree_edge (TREE_CHAIN (expr));
581 }
582
583 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
584 {
585 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
586 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
587 }
588
589 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
590 {
591 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
592 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
593 DFS_follow_tree_edge (TREE_BLOCK (expr));
594 }
595
596 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
597 {
598 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
599 /* ??? FIXME. See also streamer_write_chain. */
600 if (!(VAR_OR_FUNCTION_DECL_P (t)
601 && DECL_EXTERNAL (t)))
602 DFS_follow_tree_edge (t);
603
604 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
605
606 /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
607 handle - those that represent inlined function scopes.
608    For the rest, drop them on the floor instead of ICEing in
609 in dwarf2out.c. */
610 if (inlined_function_outer_scope_p (expr))
611 {
612 tree ultimate_origin = block_ultimate_origin (expr);
613 DFS_follow_tree_edge (ultimate_origin);
614 }
615 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
616 information for early inlined BLOCKs so drop it on the floor instead
617 of ICEing in dwarf2out.c. */
618
619    /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN are not live at LTO
620 streaming time. */
621
622    /* Do not output BLOCK_SUBBLOCKS.  Instead, on streaming-in this
623 list is re-constructed from BLOCK_SUPERCONTEXT. */
624 }
625
626 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
627 {
628 unsigned i;
629 tree t;
630
631 /* Note that the number of BINFO slots has already been emitted in
632 EXPR's header (see streamer_write_tree_header) because this length
633 is needed to build the empty BINFO node on the reader side. */
634 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
635 DFS_follow_tree_edge (t);
636 DFS_follow_tree_edge (BINFO_OFFSET (expr));
637 DFS_follow_tree_edge (BINFO_VTABLE (expr));
638 DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));
639
640 /* The number of BINFO_BASE_ACCESSES has already been emitted in
641 EXPR's bitfield section. */
642 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
643 DFS_follow_tree_edge (t);
644
645 DFS_follow_tree_edge (BINFO_INHERITANCE_CHAIN (expr));
646 DFS_follow_tree_edge (BINFO_SUBVTT_INDEX (expr));
647 DFS_follow_tree_edge (BINFO_VPTR_INDEX (expr));
648 }
649
650 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
651 {
652 unsigned i;
653 tree index, value;
654
655 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
656 {
657 DFS_follow_tree_edge (index);
658 DFS_follow_tree_edge (value);
659 }
660 }
661
662 #undef DFS_follow_tree_edge
663 }
664
665 /* Return a hash value for the tree T. */
666
667 static hashval_t
668 hash_tree (struct streamer_tree_cache_d *cache, tree t)
669 {
670 #define visit(SIBLING) \
671 do { \
672 unsigned ix; \
673 if (SIBLING && streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
674 v = iterative_hash_hashval_t (streamer_tree_cache_get_hash (cache, ix), v); \
675 } while (0)
676
677 /* Hash TS_BASE. */
678 enum tree_code code = TREE_CODE (t);
679 hashval_t v = iterative_hash_host_wide_int (code, 0);
680 if (!TYPE_P (t))
681 {
682 v = iterative_hash_host_wide_int (TREE_SIDE_EFFECTS (t)
683 | (TREE_CONSTANT (t) << 1)
684 | (TREE_READONLY (t) << 2)
685 | (TREE_PUBLIC (t) << 3), v);
686 }
687 v = iterative_hash_host_wide_int (TREE_ADDRESSABLE (t)
688 | (TREE_THIS_VOLATILE (t) << 1), v);
689 if (DECL_P (t))
690 v = iterative_hash_host_wide_int (DECL_UNSIGNED (t), v);
691 else if (TYPE_P (t))
692 v = iterative_hash_host_wide_int (TYPE_UNSIGNED (t), v);
693 if (TYPE_P (t))
694 v = iterative_hash_host_wide_int (TYPE_ARTIFICIAL (t), v);
695 else
696 v = iterative_hash_host_wide_int (TREE_NO_WARNING (t), v);
697 v = iterative_hash_host_wide_int (TREE_NOTHROW (t)
698 | (TREE_STATIC (t) << 1)
699 | (TREE_PROTECTED (t) << 2)
700 | (TREE_DEPRECATED (t) << 3), v);
701 if (code != TREE_BINFO)
702 v = iterative_hash_host_wide_int (TREE_PRIVATE (t), v);
703 if (TYPE_P (t))
704 v = iterative_hash_host_wide_int (TYPE_SATURATING (t)
705 | (TYPE_ADDR_SPACE (t) << 1), v);
706 else if (code == SSA_NAME)
707 v = iterative_hash_host_wide_int (SSA_NAME_IS_DEFAULT_DEF (t), v);
708
709 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
710 {
711 v = iterative_hash_host_wide_int (TREE_INT_CST_LOW (t), v);
712 v = iterative_hash_host_wide_int (TREE_INT_CST_HIGH (t), v);
713 }
714
715 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
716 {
717 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
718 v = iterative_hash_host_wide_int (r.cl, v);
719 v = iterative_hash_host_wide_int (r.decimal
720 | (r.sign << 1)
721 | (r.signalling << 2)
722 | (r.canonical << 3), v);
723 v = iterative_hash_host_wide_int (r.uexp, v);
724 for (unsigned i = 0; i < SIGSZ; ++i)
725 v = iterative_hash_host_wide_int (r.sig[i], v);
726 }
727
728 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
729 {
730 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
731 v = iterative_hash_host_wide_int (f.mode, v);
732 v = iterative_hash_host_wide_int (f.data.low, v);
733 v = iterative_hash_host_wide_int (f.data.high, v);
734 }
735
736 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
737 {
738 v = iterative_hash_host_wide_int (DECL_MODE (t), v);
739 v = iterative_hash_host_wide_int (DECL_NONLOCAL (t)
740 | (DECL_VIRTUAL_P (t) << 1)
741 | (DECL_IGNORED_P (t) << 2)
742 | (DECL_ABSTRACT (t) << 3)
743 | (DECL_ARTIFICIAL (t) << 4)
744 | (DECL_USER_ALIGN (t) << 5)
745 | (DECL_PRESERVE_P (t) << 6)
746 | (DECL_EXTERNAL (t) << 7)
747 | (DECL_GIMPLE_REG_P (t) << 8), v);
748 v = iterative_hash_host_wide_int (DECL_ALIGN (t), v);
749 if (code == LABEL_DECL)
750 {
751 v = iterative_hash_host_wide_int (DECL_ERROR_ISSUED (t), v);
752 v = iterative_hash_host_wide_int (EH_LANDING_PAD_NR (t), v);
753 v = iterative_hash_host_wide_int (LABEL_DECL_UID (t), v);
754 }
755 else if (code == FIELD_DECL)
756 {
757 v = iterative_hash_host_wide_int (DECL_PACKED (t)
758 | (DECL_NONADDRESSABLE_P (t) << 1),
759 v);
760 v = iterative_hash_host_wide_int (DECL_OFFSET_ALIGN (t), v);
761 }
762 else if (code == VAR_DECL)
763 {
764 v = iterative_hash_host_wide_int (DECL_HAS_DEBUG_EXPR_P (t)
765 | (DECL_NONLOCAL_FRAME (t) << 1),
766 v);
767 }
768 if (code == RESULT_DECL
769 || code == PARM_DECL
770 || code == VAR_DECL)
771 {
772 v = iterative_hash_host_wide_int (DECL_BY_REFERENCE (t), v);
773 if (code == VAR_DECL
774 || code == PARM_DECL)
775 v = iterative_hash_host_wide_int (DECL_HAS_VALUE_EXPR_P (t), v);
776 }
777 }
778
779 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
780 v = iterative_hash_host_wide_int (DECL_REGISTER (t), v);
781
782 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
783 {
784 v = iterative_hash_host_wide_int (DECL_DEFER_OUTPUT (t)
785 | (DECL_COMMON (t) << 1)
786 | (DECL_DLLIMPORT_P (t) << 2)
787 | (DECL_WEAK (t) << 3)
788 | (DECL_SEEN_IN_BIND_EXPR_P (t) << 4)
789 | (DECL_COMDAT (t) << 5)
790 | (DECL_VISIBILITY_SPECIFIED (t) << 6),
791 v);
792 v = iterative_hash_host_wide_int (DECL_VISIBILITY (t), v);
793 if (code == VAR_DECL)
794 {
795 v = iterative_hash_host_wide_int (DECL_HARD_REGISTER (t)
796 | (DECL_IN_TEXT_SECTION (t) << 1)
797 | (DECL_IN_CONSTANT_POOL (t) << 2),
798 v);
799 v = iterative_hash_host_wide_int (DECL_TLS_MODEL (t), v);
800 }
801 if (VAR_OR_FUNCTION_DECL_P (t))
802 v = iterative_hash_host_wide_int (DECL_INIT_PRIORITY (t), v);
803 }
804
805 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
806 {
807 v = iterative_hash_host_wide_int (DECL_BUILT_IN_CLASS (t), v);
808 v = iterative_hash_host_wide_int (DECL_STATIC_CONSTRUCTOR (t)
809 | (DECL_STATIC_DESTRUCTOR (t) << 1)
810 | (DECL_UNINLINABLE (t) << 2)
811 | (DECL_POSSIBLY_INLINED (t) << 3)
812 | (DECL_IS_NOVOPS (t) << 4)
813 | (DECL_IS_RETURNS_TWICE (t) << 5)
814 | (DECL_IS_MALLOC (t) << 6)
815 | (DECL_IS_OPERATOR_NEW (t) << 7)
816 | (DECL_DECLARED_INLINE_P (t) << 8)
817 | (DECL_STATIC_CHAIN (t) << 9)
818 | (DECL_NO_INLINE_WARNING_P (t) << 10)
819 | (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t) << 11)
820 | (DECL_NO_LIMIT_STACK (t) << 12)
821 | (DECL_DISREGARD_INLINE_LIMITS (t) << 13)
822 | (DECL_PURE_P (t) << 14)
823 | (DECL_LOOPING_CONST_OR_PURE_P (t) << 15), v);
824 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
825 v = iterative_hash_host_wide_int (DECL_FUNCTION_CODE (t), v);
826 if (DECL_STATIC_DESTRUCTOR (t))
827 v = iterative_hash_host_wide_int (DECL_FINI_PRIORITY (t), v);
828 }
829
830 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
831 {
832 v = iterative_hash_host_wide_int (TYPE_MODE (t), v);
833 v = iterative_hash_host_wide_int (TYPE_STRING_FLAG (t)
834 | (TYPE_NO_FORCE_BLK (t) << 1)
835 | (TYPE_NEEDS_CONSTRUCTING (t) << 2)
836 | (TYPE_PACKED (t) << 3)
837 | (TYPE_RESTRICT (t) << 4)
838 | (TYPE_USER_ALIGN (t) << 5)
839 | (TYPE_READONLY (t) << 6), v);
840 if (RECORD_OR_UNION_TYPE_P (t))
841 v = iterative_hash_host_wide_int (TYPE_TRANSPARENT_AGGR (t), v);
842 else if (code == ARRAY_TYPE)
843 v = iterative_hash_host_wide_int (TYPE_NONALIASED_COMPONENT (t), v);
844 v = iterative_hash_host_wide_int (TYPE_PRECISION (t), v);
845 v = iterative_hash_host_wide_int (TYPE_ALIGN (t), v);
846 v = iterative_hash_host_wide_int ((TYPE_ALIAS_SET (t) == 0
847 || (!in_lto_p
848 && get_alias_set (t) == 0))
849 ? 0 : -1, v);
850 }
851
852 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
853 v = iterative_hash (TRANSLATION_UNIT_LANGUAGE (t),
854 strlen (TRANSLATION_UNIT_LANGUAGE (t)), v);
855
856 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
857 v = iterative_hash (t, sizeof (struct cl_target_option), v);
858
859 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
860 v = iterative_hash (t, sizeof (struct cl_optimization), v);
861
862 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
863 v = iterative_hash_host_wide_int (IDENTIFIER_HASH_VALUE (t), v);
864
865 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
866 v = iterative_hash (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t), v);
867
868 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
869 {
870 if (POINTER_TYPE_P (t))
871 {
872 /* For pointers factor in the pointed-to type recursively as
873 we cannot recurse through only pointers.
874 ??? We can generalize this by keeping track of the
875 in-SCC edges for each tree (or arbitrarily the first
876 such edge) and hashing that in in a second stage
877 (instead of the quadratic mixing of the SCC we do now). */
878 hashval_t x;
879 unsigned ix;
880 if (streamer_tree_cache_lookup (cache, TREE_TYPE (t), &ix))
881 x = streamer_tree_cache_get_hash (cache, ix);
882 else
883 x = hash_tree (cache, TREE_TYPE (t));
884 v = iterative_hash_hashval_t (x, v);
885 }
886 else if (code != IDENTIFIER_NODE)
887 visit (TREE_TYPE (t));
888 }
889
890 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
891 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
892 visit (VECTOR_CST_ELT (t, i));
893
894 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
895 {
896 visit (TREE_REALPART (t));
897 visit (TREE_IMAGPART (t));
898 }
899
900 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
901 {
902 /* Drop names that were created for anonymous entities. */
903 if (DECL_NAME (t)
904 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
905 && ANON_AGGRNAME_P (DECL_NAME (t)))
906 ;
907 else
908 visit (DECL_NAME (t));
909 if (DECL_FILE_SCOPE_P (t))
910 ;
911 else
912 visit (DECL_CONTEXT (t));
913 }
914
915 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
916 {
917 visit (DECL_SIZE (t));
918 visit (DECL_SIZE_UNIT (t));
919 visit (DECL_ATTRIBUTES (t));
920 if ((code == VAR_DECL
921 || code == PARM_DECL)
922 && DECL_HAS_VALUE_EXPR_P (t))
923 visit (DECL_VALUE_EXPR (t));
924 if (code == VAR_DECL
925 && DECL_HAS_DEBUG_EXPR_P (t))
926 visit (DECL_DEBUG_EXPR (t));
927 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
928 be able to call get_symbol_initial_value. */
929 }
930
931 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
932 {
933 if (code == TYPE_DECL)
934 visit (DECL_ORIGINAL_TYPE (t));
935 visit (DECL_VINDEX (t));
936 }
937
938 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
939 {
940 if (DECL_ASSEMBLER_NAME_SET_P (t))
941 visit (DECL_ASSEMBLER_NAME (t));
942 visit (DECL_SECTION_NAME (t));
943 visit (DECL_COMDAT_GROUP (t));
944 }
945
946 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
947 {
948 visit (DECL_FIELD_OFFSET (t));
949 visit (DECL_BIT_FIELD_TYPE (t));
950 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
951 visit (DECL_FIELD_BIT_OFFSET (t));
952 visit (DECL_FCONTEXT (t));
953 }
954
955 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
956 {
957 visit (DECL_FUNCTION_PERSONALITY (t));
958 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
959 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
960 }
961
962 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
963 {
964 visit (TYPE_SIZE (t));
965 visit (TYPE_SIZE_UNIT (t));
966 visit (TYPE_ATTRIBUTES (t));
967 visit (TYPE_NAME (t));
968 visit (TYPE_MAIN_VARIANT (t));
969 if (TYPE_FILE_SCOPE_P (t))
970 ;
971 else
972 visit (TYPE_CONTEXT (t));
973 visit (TYPE_STUB_DECL (t));
974 }
975
976 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
977 {
978 if (code == ENUMERAL_TYPE)
979 visit (TYPE_VALUES (t));
980 else if (code == ARRAY_TYPE)
981 visit (TYPE_DOMAIN (t));
982 else if (RECORD_OR_UNION_TYPE_P (t))
983 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
984 visit (f);
985 else if (code == FUNCTION_TYPE
986 || code == METHOD_TYPE)
987 visit (TYPE_ARG_TYPES (t));
988 if (!POINTER_TYPE_P (t))
989 visit (TYPE_MINVAL (t));
990 visit (TYPE_MAXVAL (t));
991 if (RECORD_OR_UNION_TYPE_P (t))
992 visit (TYPE_BINFO (t));
993 }
994
995 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
996 {
997 visit (TREE_PURPOSE (t));
998 visit (TREE_VALUE (t));
999 visit (TREE_CHAIN (t));
1000 }
1001
1002 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1003 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1004 visit (TREE_VEC_ELT (t, i));
1005
1006 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1007 {
1008 v = iterative_hash_host_wide_int (TREE_OPERAND_LENGTH (t), v);
1009 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1010 visit (TREE_OPERAND (t, i));
1011 }
1012
1013 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1014 {
1015 unsigned i;
1016 tree b;
1017 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1018 visit (b);
1019 visit (BINFO_OFFSET (t));
1020 visit (BINFO_VTABLE (t));
1021 visit (BINFO_VPTR_FIELD (t));
1022 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1023 visit (b);
1024 visit (BINFO_INHERITANCE_CHAIN (t));
1025 visit (BINFO_SUBVTT_INDEX (t));
1026 visit (BINFO_VPTR_INDEX (t));
1027 }
1028
1029 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1030 {
1031 unsigned i;
1032 tree index, value;
1033 v = iterative_hash_host_wide_int (CONSTRUCTOR_NELTS (t), v);
1034 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1035 {
1036 visit (index);
1037 visit (value);
1038 }
1039 }
1040
1041 return v;
1042
1043 #undef visit
1044 }
1045
1046 /* Compare two SCC entries by their hash value for qsorting them. */
1047
1048 static int
1049 scc_entry_compare (const void *p1_, const void *p2_)
1050 {
1051 const scc_entry *p1 = (const scc_entry *) p1_;
1052 const scc_entry *p2 = (const scc_entry *) p2_;
1053 if (p1->hash < p2->hash)
1054 return -1;
1055 else if (p1->hash > p2->hash)
1056 return 1;
1057 return 0;
1058 }
1059
1060 /* Return a hash value for the SCC on the SCC stack from FIRST with
1061 size SIZE. */
1062
1063 static hashval_t
1064 hash_scc (struct streamer_tree_cache_d *cache, unsigned first, unsigned size)
1065 {
1066 /* Compute hash values for the SCC members. */
1067 for (unsigned i = 0; i < size; ++i)
1068 sccstack[first+i].hash = hash_tree (cache, sccstack[first+i].t);
1069
1070 if (size == 1)
1071 return sccstack[first].hash;
1072
1073   /* Sort the SCC's (tree, hash) pairs so that when we mix in
1074      all members of the SCC the hash value becomes independent of
1075      the order in which we visited the SCC.  Disregard hashes equal to
1076 the hash of the tree we mix into because we cannot guarantee
1077 a stable sort for those across different TUs. */
1078 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1079 hashval_t *tem = XALLOCAVEC (hashval_t, size);
1080 for (unsigned i = 0; i < size; ++i)
1081 {
1082 hashval_t hash = sccstack[first+i].hash;
1083 hashval_t orig_hash = hash;
1084 unsigned j;
1085 /* Skip same hashes. */
1086 for (j = i + 1;
1087 j < size && sccstack[first+j].hash == orig_hash; ++j)
1088 ;
1089 for (; j < size; ++j)
1090 hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
1091 for (j = 0; sccstack[first+j].hash != orig_hash; ++j)
1092 hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
1093 tem[i] = hash;
1094 }
1095 hashval_t scc_hash = 0;
1096 for (unsigned i = 0; i < size; ++i)
1097 {
1098 sccstack[first+i].hash = tem[i];
1099 scc_hash = iterative_hash_hashval_t (tem[i], scc_hash);
1100 }
1101 return scc_hash;
1102 }
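
/* For example, for an SCC whose sorted member hashes are A, B, B, C,
   the member with hash A mixes in B, B and C; each member with hash B
   mixes in C and then A (starting past its own run of equal hashes and
   wrapping around); and the member with hash C mixes in A, B and B.
   A member never mixes in hashes equal to its own, so the result does
   not depend on the DFS order in which the members entered the stack.  */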
1103
1104 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1105 already in the streamer cache. Main routine called for
1106 each visit of EXPR. */
1107
1108 static void
1109 DFS_write_tree (struct output_block *ob, sccs *from_state,
1110 tree expr, bool ref_p, bool this_ref_p)
1111 {
1112 unsigned ix;
1113 sccs **slot;
1114
1115 /* Handle special cases. */
1116 if (expr == NULL_TREE)
1117 return;
1118
1119 /* Do not DFS walk into indexable trees. */
1120 if (this_ref_p && tree_is_indexable (expr))
1121 return;
1122
1123 /* Check if we already streamed EXPR. */
1124 if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
1125 return;
1126
1127 slot = (sccs **)pointer_map_insert (sccstate, expr);
1128 sccs *cstate = *slot;
1129 if (!cstate)
1130 {
1131 scc_entry e = { expr, 0 };
1132 /* Not yet visited. DFS recurse and push it onto the stack. */
1133 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
1134 sccstack.safe_push (e);
1135 cstate->dfsnum = next_dfs_num++;
1136 cstate->low = cstate->dfsnum;
1137
1138 if (streamer_handle_as_builtin_p (expr))
1139 ;
1140 else if (TREE_CODE (expr) == INTEGER_CST
1141 && !TREE_OVERFLOW (expr))
1142 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
1143 else
1144 {
1145 DFS_write_tree_body (ob, expr, cstate, ref_p);
1146
1147 /* Walk any LTO-specific edges. */
1148 if (DECL_P (expr)
1149 && TREE_CODE (expr) != FUNCTION_DECL
1150 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
1151 {
1152 /* Handle DECL_INITIAL for symbols. */
1153 tree initial = get_symbol_initial_value (ob, expr);
1154 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
1155 }
1156 }
1157
1158 /* See if we found an SCC. */
1159 if (cstate->low == cstate->dfsnum)
1160 {
1161 unsigned first, size;
1162 tree x;
1163
1164 /* Pop the SCC and compute its size. */
1165 first = sccstack.length ();
1166 do
1167 {
1168 x = sccstack[--first].t;
1169 }
1170 while (x != expr);
1171 size = sccstack.length () - first;
1172
1173 /* No need to compute hashes for LTRANS units, we don't perform
1174 any merging there. */
1175 hashval_t scc_hash = 0;
1176 unsigned scc_entry_len = 0;
1177 if (!flag_wpa)
1178 {
1179 scc_hash = hash_scc (ob->writer_cache, first, size);
1180
1181 /* Put the entries with the least number of collisions first. */
1182 unsigned entry_start = 0;
1183 scc_entry_len = size + 1;
1184 for (unsigned i = 0; i < size;)
1185 {
1186 unsigned from = i;
1187 for (i = i + 1; i < size
1188 && (sccstack[first + i].hash
1189 == sccstack[first + from].hash); ++i)
1190 ;
1191 if (i - from < scc_entry_len)
1192 {
1193 scc_entry_len = i - from;
1194 entry_start = from;
1195 }
1196 }
1197 for (unsigned i = 0; i < scc_entry_len; ++i)
1198 {
1199 scc_entry tem = sccstack[first + i];
1200 sccstack[first + i] = sccstack[first + entry_start + i];
1201 sccstack[first + entry_start + i] = tem;
1202 }
1203 }
1204
1205 /* Write LTO_tree_scc. */
1206 streamer_write_record_start (ob, LTO_tree_scc);
1207 streamer_write_uhwi (ob, size);
1208 streamer_write_uhwi (ob, scc_hash);
1209
1210 /* Write size-1 SCCs without wrapping them inside SCC bundles.
1211 All INTEGER_CSTs need to be handled this way as we need
1212 their type to materialize them. Also builtins are handled
1213 this way.
1214 ??? We still wrap these in LTO_tree_scc so at the
1215 input side we can properly identify the tree we want
1216          to ultimately return.  */
1217 size_t old_len = ob->writer_cache->nodes.length ();
1218 if (size == 1)
1219 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
1220 else
1221 {
1222 /* Write the size of the SCC entry candidates. */
1223 streamer_write_uhwi (ob, scc_entry_len);
1224
1225 /* Write all headers and populate the streamer cache. */
1226 for (unsigned i = 0; i < size; ++i)
1227 {
1228 hashval_t hash = sccstack[first+i].hash;
1229 tree t = sccstack[first+i].t;
1230 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
1231 t, hash, &ix);
1232 gcc_assert (!exists_p);
1233
1234 if (!lto_is_streamable (t))
1235 internal_error ("tree code %qs is not supported "
1236 "in LTO streams",
1237 tree_code_name[TREE_CODE (t)]);
1238
1239 gcc_checking_assert (!streamer_handle_as_builtin_p (t));
1240
1241 /* Write the header, containing everything needed to
1242 materialize EXPR on the reading side. */
1243 streamer_write_tree_header (ob, t);
1244 }
1245
1246 /* Write the bitpacks and tree references. */
1247 for (unsigned i = 0; i < size; ++i)
1248 {
1249 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
1250
1251 /* Mark the end of the tree. */
1252 streamer_write_zero (ob);
1253 }
1254 }
1255 gcc_assert (old_len + size == ob->writer_cache->nodes.length ());
1256
1257 /* Finally truncate the vector. */
1258 sccstack.truncate (first);
1259
1260 if (from_state)
1261 from_state->low = MIN (from_state->low, cstate->low);
1262 return;
1263 }
1264
1265 if (from_state)
1266 from_state->low = MIN (from_state->low, cstate->low);
1267 }
1268 gcc_checking_assert (from_state);
1269 if (cstate->dfsnum < from_state->dfsnum)
1270 from_state->low = MIN (cstate->dfsnum, from_state->low);
1271 }
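
/* On disk an SCC produced by this walk looks like: an LTO_tree_scc
   record, the number of members and the SCC hash.  A singleton SCC
   (including INTEGER_CSTs and builtins) then streams its single member
   via lto_output_tree_1; larger SCCs stream the number of entry
   candidates, the headers of all members and finally their bodies,
   each body terminated by a zero.  */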
1272
1273
1274 /* Emit the physical representation of tree node EXPR to output block
1275 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
1276 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1277
1278 void
1279 lto_output_tree (struct output_block *ob, tree expr,
1280 bool ref_p, bool this_ref_p)
1281 {
1282 unsigned ix;
1283 bool existed_p;
1284
1285 if (expr == NULL_TREE)
1286 {
1287 streamer_write_record_start (ob, LTO_null);
1288 return;
1289 }
1290
1291 if (this_ref_p && tree_is_indexable (expr))
1292 {
1293 lto_output_tree_ref (ob, expr);
1294 return;
1295 }
1296
1297 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1298 if (existed_p)
1299 {
1300 /* If a node has already been streamed out, make sure that
1301 we don't write it more than once. Otherwise, the reader
1302 will instantiate two different nodes for the same object. */
1303 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1304 streamer_write_uhwi (ob, ix);
1305 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1306 lto_tree_code_to_tag (TREE_CODE (expr)));
1307 lto_stats.num_pickle_refs_output++;
1308 }
1309 else
1310 {
1311 /* This is the first time we see EXPR, write all reachable
1312 trees to OB. */
1313 static bool in_dfs_walk;
1314
1315       /* Protect against recursion, which would mean a disconnect between
1316 what tree edges we walk in the DFS walk and what edges
1317 we stream out. */
1318 gcc_assert (!in_dfs_walk);
1319
1320       /* Start the DFS walk.  */
1323 in_dfs_walk = true;
1324 sccstate = pointer_map_create ();
1325 gcc_obstack_init (&sccstate_obstack);
1326 next_dfs_num = 1;
1327 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
1328 sccstack.release ();
1329 pointer_map_destroy (sccstate);
1330 obstack_free (&sccstate_obstack, NULL);
1331 in_dfs_walk = false;
1332
1333 /* Finally append a reference to the tree we were writing.
1334 ??? If expr ended up as a singleton we could have
1335          inlined it here and avoided outputting a reference.  */
1336 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1337 gcc_assert (existed_p);
1338 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1339 streamer_write_uhwi (ob, ix);
1340 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1341 lto_tree_code_to_tag (TREE_CODE (expr)));
1342 lto_stats.num_pickle_refs_output++;
1343 }
1344 }
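
/* A caller of lto_output_tree (usually reached via stream_write_tree)
   thus produces one of four encodings for EXPR: an LTO_null record for
   a NULL tree, a *_ref record for an indexable tree, an
   LTO_tree_pickle_reference for a tree already in the writer cache, or
   one or more LTO_tree_scc records emitted by the DFS walk followed by
   an LTO_tree_pickle_reference to the cache slot EXPR ended up in.  */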
1345
1346
1347 /* Output to OB a list of try/catch handlers starting with FIRST. */
1348
1349 static void
1350 output_eh_try_list (struct output_block *ob, eh_catch first)
1351 {
1352 eh_catch n;
1353
1354 for (n = first; n; n = n->next_catch)
1355 {
1356 streamer_write_record_start (ob, LTO_eh_catch);
1357 stream_write_tree (ob, n->type_list, true);
1358 stream_write_tree (ob, n->filter_list, true);
1359 stream_write_tree (ob, n->label, true);
1360 }
1361
1362 streamer_write_record_start (ob, LTO_null);
1363 }
1364
1365
1366 /* Output EH region R to OB.  If R is NULL, an LTO_null record is
1367    emitted; otherwise the region's type, index and links to its outer,
1368    inner and next-peer regions are streamed by index.  */
1369
1370 static void
1371 output_eh_region (struct output_block *ob, eh_region r)
1372 {
1373 enum LTO_tags tag;
1374
1375 if (r == NULL)
1376 {
1377 streamer_write_record_start (ob, LTO_null);
1378 return;
1379 }
1380
1381 if (r->type == ERT_CLEANUP)
1382 tag = LTO_ert_cleanup;
1383 else if (r->type == ERT_TRY)
1384 tag = LTO_ert_try;
1385 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1386 tag = LTO_ert_allowed_exceptions;
1387 else if (r->type == ERT_MUST_NOT_THROW)
1388 tag = LTO_ert_must_not_throw;
1389 else
1390 gcc_unreachable ();
1391
1392 streamer_write_record_start (ob, tag);
1393 streamer_write_hwi (ob, r->index);
1394
1395 if (r->outer)
1396 streamer_write_hwi (ob, r->outer->index);
1397 else
1398 streamer_write_zero (ob);
1399
1400 if (r->inner)
1401 streamer_write_hwi (ob, r->inner->index);
1402 else
1403 streamer_write_zero (ob);
1404
1405 if (r->next_peer)
1406 streamer_write_hwi (ob, r->next_peer->index);
1407 else
1408 streamer_write_zero (ob);
1409
1410 if (r->type == ERT_TRY)
1411 {
1412 output_eh_try_list (ob, r->u.eh_try.first_catch);
1413 }
1414 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1415 {
1416 stream_write_tree (ob, r->u.allowed.type_list, true);
1417 stream_write_tree (ob, r->u.allowed.label, true);
1418 streamer_write_uhwi (ob, r->u.allowed.filter);
1419 }
1420 else if (r->type == ERT_MUST_NOT_THROW)
1421 {
1422 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1423 bitpack_d bp = bitpack_create (ob->main_stream);
1424 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1425 streamer_write_bitpack (&bp);
1426 }
1427
1428 if (r->landing_pads)
1429 streamer_write_hwi (ob, r->landing_pads->index);
1430 else
1431 streamer_write_zero (ob);
1432 }
1433
1434
1435 /* Output landing pad LP to OB. */
1436
1437 static void
1438 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1439 {
1440 if (lp == NULL)
1441 {
1442 streamer_write_record_start (ob, LTO_null);
1443 return;
1444 }
1445
1446 streamer_write_record_start (ob, LTO_eh_landing_pad);
1447 streamer_write_hwi (ob, lp->index);
1448 if (lp->next_lp)
1449 streamer_write_hwi (ob, lp->next_lp->index);
1450 else
1451 streamer_write_zero (ob);
1452
1453 if (lp->region)
1454 streamer_write_hwi (ob, lp->region->index);
1455 else
1456 streamer_write_zero (ob);
1457
1458 stream_write_tree (ob, lp->post_landing_pad, true);
1459 }
1460
1461
1462 /* Output the existing eh_table to OB. */
1463
1464 static void
1465 output_eh_regions (struct output_block *ob, struct function *fn)
1466 {
1467 if (fn->eh && fn->eh->region_tree)
1468 {
1469 unsigned i;
1470 eh_region eh;
1471 eh_landing_pad lp;
1472 tree ttype;
1473
1474 streamer_write_record_start (ob, LTO_eh_table);
1475
1476 /* Emit the index of the root of the EH region tree. */
1477 streamer_write_hwi (ob, fn->eh->region_tree->index);
1478
1479 /* Emit all the EH regions in the region array. */
1480 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1481 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1482 output_eh_region (ob, eh);
1483
1484 /* Emit all landing pads. */
1485 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1486 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1487 output_eh_lp (ob, lp);
1488
1489 /* Emit all the runtime type data. */
1490 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1491 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1492 stream_write_tree (ob, ttype, true);
1493
1494 /* Emit the table of action chains. */
1495 if (targetm.arm_eabi_unwinder)
1496 {
1497 tree t;
1498 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1499 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1500 stream_write_tree (ob, t, true);
1501 }
1502 else
1503 {
1504 uchar c;
1505 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1506 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1507 streamer_write_char_stream (ob->main_stream, c);
1508 }
1509 }
1510
1511 /* The LTO_null either terminates the record or indicates that there
1512 are no eh_records at all. */
1513 streamer_write_record_start (ob, LTO_null);
1514 }
1515
1516
1517 /* Output all of the active ssa names to the ssa_names stream. */
1518
1519 static void
1520 output_ssa_names (struct output_block *ob, struct function *fn)
1521 {
1522 unsigned int i, len;
1523
1524 len = vec_safe_length (SSANAMES (fn));
1525 streamer_write_uhwi (ob, len);
1526
1527 for (i = 1; i < len; i++)
1528 {
1529 tree ptr = (*SSANAMES (fn))[i];
1530
1531 if (ptr == NULL_TREE
1532 || SSA_NAME_IN_FREE_LIST (ptr)
1533 || virtual_operand_p (ptr))
1534 continue;
1535
1536 streamer_write_uhwi (ob, i);
1537 streamer_write_char_stream (ob->main_stream,
1538 SSA_NAME_IS_DEFAULT_DEF (ptr));
1539 if (SSA_NAME_VAR (ptr))
1540 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1541 else
1542 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1543 stream_write_tree (ob, TREE_TYPE (ptr), true);
1544 }
1545
1546 streamer_write_zero (ob);
1547 }
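
/* The SSA name stream therefore consists of the length of the
   function's SSANAMES vector followed by, for every name that is live,
   non-virtual and not in the free list, its version number, its
   default-definition flag and either SSA_NAME_VAR or, lacking that,
   its type; a zero version number terminates the list.  */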
1548
1549
1550 /* Output the cfg. */
1551
1552 static void
1553 output_cfg (struct output_block *ob, struct function *fn)
1554 {
1555 struct lto_output_stream *tmp_stream = ob->main_stream;
1556 basic_block bb;
1557
1558 ob->main_stream = ob->cfg_stream;
1559
1560 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1561 profile_status_for_function (fn));
1562
1563 /* Output the number of the highest basic block. */
1564 streamer_write_uhwi (ob, last_basic_block_for_function (fn));
1565
1566 FOR_ALL_BB_FN (bb, fn)
1567 {
1568 edge_iterator ei;
1569 edge e;
1570
1571 streamer_write_hwi (ob, bb->index);
1572
1573 /* Output the successors and the edge flags. */
1574 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1575 FOR_EACH_EDGE (e, ei, bb->succs)
1576 {
1577 streamer_write_uhwi (ob, e->dest->index);
1578 streamer_write_hwi (ob, e->probability);
1579 streamer_write_gcov_count (ob, e->count);
1580 streamer_write_uhwi (ob, e->flags);
1581 }
1582 }
1583
1584 streamer_write_hwi (ob, -1);
1585
1586 bb = ENTRY_BLOCK_PTR;
1587 while (bb->next_bb)
1588 {
1589 streamer_write_hwi (ob, bb->next_bb->index);
1590 bb = bb->next_bb;
1591 }
1592
1593 streamer_write_hwi (ob, -1);
1594
1595 /* ??? The cfgloop interface is tied to cfun. */
1596 gcc_assert (cfun == fn);
1597
1598 /* Output the number of loops. */
1599 streamer_write_uhwi (ob, number_of_loops (fn));
1600
1601 /* Output each loop, skipping the tree root which has number zero. */
1602 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1603 {
1604 struct loop *loop = get_loop (fn, i);
1605
1606 /* Write the index of the loop header. That's enough to rebuild
1607 the loop tree on the reader side. Stream -1 for an unused
1608 loop entry. */
1609 if (!loop)
1610 {
1611 streamer_write_hwi (ob, -1);
1612 continue;
1613 }
1614 else
1615 streamer_write_hwi (ob, loop->header->index);
1616
1617 /* Write everything copy_loop_info copies. */
1618 streamer_write_enum (ob->main_stream,
1619 loop_estimation, EST_LAST, loop->estimate_state);
1620 streamer_write_hwi (ob, loop->any_upper_bound);
1621 if (loop->any_upper_bound)
1622 {
1623 streamer_write_uhwi (ob, loop->nb_iterations_upper_bound.low);
1624 streamer_write_hwi (ob, loop->nb_iterations_upper_bound.high);
1625 }
1626 streamer_write_hwi (ob, loop->any_estimate);
1627 if (loop->any_estimate)
1628 {
1629 streamer_write_uhwi (ob, loop->nb_iterations_estimate.low);
1630 streamer_write_hwi (ob, loop->nb_iterations_estimate.high);
1631 }
1632 }
1633
1634 ob->main_stream = tmp_stream;
1635 }
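
/* The CFG stream written above contains: the profile status, the
   highest basic block number, then for each basic block its index, its
   outgoing edge count and a (destination, probability, count, flags)
   tuple per edge, a -1 terminator, the chain of block indices in
   layout order followed by another -1, and finally the loop tree as
   the number of loops plus, per loop, the header block index (or -1
   for an unused slot) and the fields copied by copy_loop_info.  */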
1636
1637
1638 /* Write the section described by OB to the output file: its header,
1639    then the accumulated streams.  For a function body, FN is its decl.  */
1640
1641 void
1642 produce_asm (struct output_block *ob, tree fn)
1643 {
1644 enum lto_section_type section_type = ob->section_type;
1645 struct lto_function_header header;
1646 char *section_name;
1647 struct lto_output_stream *header_stream;
1648
1649 if (section_type == LTO_section_function_body)
1650 {
1651 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1652 section_name = lto_get_section_name (section_type, name, NULL);
1653 }
1654 else
1655 section_name = lto_get_section_name (section_type, NULL, NULL);
1656
1657 lto_begin_section (section_name, !flag_wpa);
1658 free (section_name);
1659
1660   /* The entire header stream is computed here.  */
1661 memset (&header, 0, sizeof (struct lto_function_header));
1662
1663 /* Write the header. */
1664 header.lto_header.major_version = LTO_major_version;
1665 header.lto_header.minor_version = LTO_minor_version;
1666
1667 header.compressed_size = 0;
1668
1669 if (section_type == LTO_section_function_body)
1670 header.cfg_size = ob->cfg_stream->total_size;
1671 header.main_size = ob->main_stream->total_size;
1672 header.string_size = ob->string_stream->total_size;
1673
1674 header_stream = XCNEW (struct lto_output_stream);
1675 lto_output_data_stream (header_stream, &header, sizeof header);
1676 lto_write_stream (header_stream);
1677 free (header_stream);
1678
1679   /* Put all of the gimple and the string table out to the asm file as a
1680 block of text. */
1681 if (section_type == LTO_section_function_body)
1682 lto_write_stream (ob->cfg_stream);
1683 lto_write_stream (ob->main_stream);
1684 lto_write_stream (ob->string_stream);
1685
1686 lto_end_section ();
1687 }
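
/* Every section written this way therefore starts with a
   struct lto_function_header carrying the LTO version numbers and the
   sizes of the CFG, main and string streams, followed by those streams
   in that order (the CFG stream only for function body sections).  */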
1688
1689
1690 /* Output the base body of struct function FN using output block OB. */
1691
1692 static void
1693 output_struct_function_base (struct output_block *ob, struct function *fn)
1694 {
1695 struct bitpack_d bp;
1696 unsigned i;
1697 tree t;
1698
1699 /* Output the static chain and non-local goto save area. */
1700 stream_write_tree (ob, fn->static_chain_decl, true);
1701 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
1702
1703 /* Output all the local variables in the function. */
1704 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
1705 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
1706 stream_write_tree (ob, t, true);
1707
1708 /* Output current IL state of the function. */
1709 streamer_write_uhwi (ob, fn->curr_properties);
1710
1711 /* Write all the attributes for FN. */
1712 bp = bitpack_create (ob->main_stream);
1713 bp_pack_value (&bp, fn->is_thunk, 1);
1714 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
1715 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
1716 bp_pack_value (&bp, fn->returns_struct, 1);
1717 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
1718 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
1719 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
1720 bp_pack_value (&bp, fn->after_inlining, 1);
1721 bp_pack_value (&bp, fn->stdarg, 1);
1722 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
1723 bp_pack_value (&bp, fn->calls_alloca, 1);
1724 bp_pack_value (&bp, fn->calls_setjmp, 1);
1725 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
1726 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
1727
1728 /* Output the function start and end loci. */
1729 stream_output_location (ob, &bp, fn->function_start_locus);
1730 stream_output_location (ob, &bp, fn->function_end_locus);
1731
1732 streamer_write_bitpack (&bp);
1733 }
1734
1735
1736 /* Output the body of function NODE->DECL. */
1737
1738 static void
1739 output_function (struct cgraph_node *node)
1740 {
1741 tree function;
1742 struct function *fn;
1743 basic_block bb;
1744 struct output_block *ob;
1745
1746 function = node->symbol.decl;
1747 fn = DECL_STRUCT_FUNCTION (function);
1748 ob = create_output_block (LTO_section_function_body);
1749
1750 clear_line_info (ob);
1751 ob->cgraph_node = node;
1752
1753 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
1754
1755 /* Set current_function_decl and cfun. */
1756 push_cfun (fn);
1757
1758 /* Make string 0 be a NULL string. */
1759 streamer_write_char_stream (ob->string_stream, 0);
1760
1761 streamer_write_record_start (ob, LTO_function);
1762
1763   /* Output the decls for the return value and the arguments.  */
1764 stream_write_tree (ob, DECL_RESULT (function), true);
1765 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
1766
1767 /* Output DECL_INITIAL for the function, which contains the tree of
1768 lexical scopes. */
1769 stream_write_tree (ob, DECL_INITIAL (function), true);
1770
1771   /* We also stream abstract functions, for which only what is needed for
1772      debug info is streamed.  */
1773 if (gimple_has_body_p (function))
1774 {
1775 streamer_write_uhwi (ob, 1);
1776 output_struct_function_base (ob, fn);
1777
1778 /* Output all the SSA names used in the function. */
1779 output_ssa_names (ob, fn);
1780
1781 /* Output any exception handling regions. */
1782 output_eh_regions (ob, fn);
1783
1784
1785 /* We will renumber the statements. The code that does this uses
1786 the same ordering that we use for serializing them so we can use
1787 the same code on the other end and not have to write out the
1788 statement numbers. We do not assign UIDs to PHIs here because
1789 virtual PHIs get re-computed on-the-fly which would make numbers
1790 inconsistent. */
1791 set_gimple_stmt_max_uid (cfun, 0);
1792 FOR_ALL_BB (bb)
1793 {
1794 gimple_stmt_iterator gsi;
1795 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1796 {
1797 gimple stmt = gsi_stmt (gsi);
1798 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1799 }
1800 }
1801
1802 /* Output the code for the function. */
1803 FOR_ALL_BB_FN (bb, fn)
1804 output_bb (ob, bb, fn);
1805
1806 /* The terminator for this function. */
1807 streamer_write_record_start (ob, LTO_null);
1808
1809 output_cfg (ob, fn);
1810
1811 pop_cfun ();
1812 }
1813 else
1814 streamer_write_uhwi (ob, 0);
1815
1816 /* Create a section to hold the pickled output of this function. */
1817 produce_asm (ob, function);
1818
1819 destroy_output_block (ob);
1820 }
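/* For reference, a summary of what the code above emits for a function with
   a body, in order: the LTO_function record marker, DECL_RESULT, the
   DECL_ARGUMENTS chain, DECL_INITIAL, a uhwi flag of 1, the struct function
   bits, the SSA names, the EH regions, each basic block, an LTO_null
   terminator and finally the CFG.  An abstract function emits only a uhwi
   flag of 0 after DECL_INITIAL.  This is a descriptive note on the code
   above, not a format specification.  */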
1821
1822
1823 /* Emit toplevel asms. */
1824
1825 void
1826 lto_output_toplevel_asms (void)
1827 {
1828 struct output_block *ob;
1829 struct asm_node *can;
1830 char *section_name;
1831 struct lto_output_stream *header_stream;
1832 struct lto_asm_header header;
1833
1834 if (! asm_nodes)
1835 return;
1836
1837 ob = create_output_block (LTO_section_asm);
1838
1839 /* Make string 0 be a NULL string. */
1840 streamer_write_char_stream (ob->string_stream, 0);
1841
1842 for (can = asm_nodes; can; can = can->next)
1843 {
1844 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
1845 streamer_write_hwi (ob, can->order);
1846 }
1847
1848 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
1849
1850 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
1851 lto_begin_section (section_name, !flag_wpa);
1852 free (section_name);
1853
1854 /* The entire header stream is computed here. */
1855 memset (&header, 0, sizeof (header));
1856
1857 /* Write the header. */
1858 header.lto_header.major_version = LTO_major_version;
1859 header.lto_header.minor_version = LTO_minor_version;
1860
1861 header.main_size = ob->main_stream->total_size;
1862 header.string_size = ob->string_stream->total_size;
1863
1864 header_stream = XCNEW (struct lto_output_stream);
1865 lto_output_data_stream (header_stream, &header, sizeof (header));
1866 lto_write_stream (header_stream);
1867 free (header_stream);
1868
1869 /* Put all of the gimple and the string table out to the asm file as a
1870 block of text. */
1871 lto_write_stream (ob->main_stream);
1872 lto_write_stream (ob->string_stream);
1873
1874 lto_end_section ();
1875
1876 destroy_output_block (ob);
1877 }
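/* A rough picture of the section written above, based only on the code in
   this function: an lto_asm_header with main_size and string_size filled in,
   then the main stream holding one (asm string, order) pair per asm_node and
   terminated by a NULL string constant, then the string stream.  */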
1878
1879
1880 /* Copy the function body of NODE without deserializing. */
1881
1882 static void
1883 copy_function (struct cgraph_node *node)
1884 {
1885 tree function = node->symbol.decl;
1886 struct lto_file_decl_data *file_data = node->symbol.lto_file_data;
1887 struct lto_output_stream *output_stream = XCNEW (struct lto_output_stream);
1888 const char *data;
1889 size_t len;
1890 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
1891 char *section_name =
1892 lto_get_section_name (LTO_section_function_body, name, NULL);
1893 size_t i, j;
1894 struct lto_in_decl_state *in_state;
1895 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
1896
1897 lto_begin_section (section_name, !flag_wpa);
1898 free (section_name);
1899
1900 /* We may have renamed the declaration, e.g., a static function. */
1901 name = lto_get_decl_name_mapping (file_data, name);
1902
1903 data = lto_get_section_data (file_data, LTO_section_function_body,
1904 name, &len);
1905 gcc_assert (data);
1906
1907 /* Do a bit copy of the function body. */
1908 lto_output_data_stream (output_stream, data, len);
1909 lto_write_stream (output_stream);
1910
1911 /* Copy decls. */
1912 in_state =
1913 lto_get_function_in_decl_state (node->symbol.lto_file_data, function);
1914 gcc_assert (in_state);
1915
1916 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
1917 {
1918 size_t n = in_state->streams[i].size;
1919 tree *trees = in_state->streams[i].trees;
1920 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
1921
1922 /* The out state must have the same indices as the in state.
1923 So just copy the vector. All the encoders in the out state
1924 must be empty when we reach here. */
1925 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
1926 encoder->trees.reserve_exact (n);
1927 for (j = 0; j < n; j++)
1928 encoder->trees.safe_push (trees[j]);
1929 }
1930
1931 lto_free_section_data (file_data, LTO_section_function_body, name,
1932 data, len);
1933 free (output_stream);
1934 lto_end_section ();
1935 }
1936
1937
1938 /* Main entry point from the pass manager. */
1939
1940 static void
1941 lto_output (void)
1942 {
1943 struct lto_out_decl_state *decl_state;
1944 #ifdef ENABLE_CHECKING
1945 bitmap output = lto_bitmap_alloc ();
1946 #endif
1947 int i, n_nodes;
1948 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
1949
1950 /* Initialize the streamer. */
1951 lto_streamer_init ();
1952
1953 n_nodes = lto_symtab_encoder_size (encoder);
1954 /* Process only the functions with bodies. */
1955 for (i = 0; i < n_nodes; i++)
1956 {
1957 symtab_node snode = lto_symtab_encoder_deref (encoder, i);
1958 cgraph_node *node = dyn_cast <cgraph_node> (snode);
1959 if (node
1960 && lto_symtab_encoder_encode_body_p (encoder, node)
1961 && !node->symbol.alias
1962 && !node->thunk.thunk_p)
1963 {
1964 #ifdef ENABLE_CHECKING
1965 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->symbol.decl)));
1966 bitmap_set_bit (output, DECL_UID (node->symbol.decl));
1967 #endif
1968 decl_state = lto_new_out_decl_state ();
1969 lto_push_out_decl_state (decl_state);
1970 if (gimple_has_body_p (node->symbol.decl) || !flag_wpa)
1971 output_function (node);
1972 else
1973 copy_function (node);
1974 gcc_assert (lto_get_out_decl_state () == decl_state);
1975 lto_pop_out_decl_state ();
1976 lto_record_function_out_decl_state (node->symbol.decl, decl_state);
1977 }
1978 }
1979
1980 /* Emit the callgraph after emitting function bodies. This needs to
1981 be done now to make sure that all the statements in every function
1982 have been renumbered so that edges can be associated with call
1983 statements using the statement UIDs. */
1984 output_symtab ();
1985
1986 #ifdef ENABLE_CHECKING
1987 lto_bitmap_free (output);
1988 #endif
1989 }
1990
1991 struct ipa_opt_pass_d pass_ipa_lto_gimple_out =
1992 {
1993 {
1994 IPA_PASS,
1995 "lto_gimple_out", /* name */
1996 OPTGROUP_NONE, /* optinfo_flags */
1997 gate_lto_out, /* gate */
1998 NULL, /* execute */
1999 NULL, /* sub */
2000 NULL, /* next */
2001 0, /* static_pass_number */
2002 TV_IPA_LTO_GIMPLE_OUT, /* tv_id */
2003 0, /* properties_required */
2004 0, /* properties_provided */
2005 0, /* properties_destroyed */
2006 0, /* todo_flags_start */
2007 0 /* todo_flags_finish */
2008 },
2009 NULL, /* generate_summary */
2010 lto_output, /* write_summary */
2011 NULL, /* read_summary */
2012 lto_output, /* write_optimization_summary */
2013 NULL, /* read_optimization_summary */
2014 NULL, /* stmt_fixup */
2015 0, /* TODOs */
2016 NULL, /* function_transform */
2017 NULL /* variable_transform */
2018 };
2019
2020
2021 /* Write each node encoded by ENCODER to OB, as well as those reachable
2022 from it and required for correct representation of its semantics.
2023 Each node in ENCODER must be a global declaration or a type. A node
2024 is written only once, even if it appears multiple times in the
2025 vector. Certain transitively-reachable nodes, such as those
2026 representing expressions, may be duplicated, but such nodes
2027 must not appear in ENCODER itself. */
2028
2029 static void
2030 write_global_stream (struct output_block *ob,
2031 struct lto_tree_ref_encoder *encoder)
2032 {
2033 tree t;
2034 size_t index;
2035 const size_t size = lto_tree_ref_encoder_size (encoder);
2036
2037 for (index = 0; index < size; index++)
2038 {
2039 t = lto_tree_ref_encoder_get_tree (encoder, index);
2040 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2041 stream_write_tree (ob, t, false);
2042 }
2043 }
2044
2045
2046 /* Write a sequence of indices into the globals vector corresponding
2047 to the trees in ENCODER. These are used by the reader to map the
2048 indices used to refer to global entities within function bodies to
2049 their referents. */
2050
2051 static void
2052 write_global_references (struct output_block *ob,
2053 struct lto_output_stream *ref_stream,
2054 struct lto_tree_ref_encoder *encoder)
2055 {
2056 tree t;
2057 uint32_t index;
2058 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2059
2060 /* Write size as 32-bit unsigned. */
2061 lto_output_data_stream (ref_stream, &size, sizeof (int32_t));
2062
2063 for (index = 0; index < size; index++)
2064 {
2065 uint32_t slot_num;
2066
2067 t = lto_tree_ref_encoder_get_tree (encoder, index);
2068 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2069 gcc_assert (slot_num != (unsigned)-1);
2070 lto_output_data_stream (ref_stream, &slot_num, sizeof slot_num);
2071 }
2072 }
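/* Worked example, purely illustrative: if ENCODER holds three trees whose
   writer-cache slots are 7, 12 and 4, the bytes appended to REF_STREAM are
   the four 32-bit values 3, 7, 12, 4 (the count followed by one slot number
   per tree), exactly as lto_output_data_stream lays them out.  */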
2073
2074
2075 /* Write all the streams in an lto_out_decl_state STATE using
2076 output block OB. */
2077
2078 void
2079 lto_output_decl_state_streams (struct output_block *ob,
2080 struct lto_out_decl_state *state)
2081 {
2082 int i;
2083
2084 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2085 write_global_stream (ob, &state->streams[i]);
2086 }
2087
2088
2089 /* Write all the references in an lto_out_decl_state STATE using
2090 output block OB and output stream OUT_STREAM. */
2091
2092 void
2093 lto_output_decl_state_refs (struct output_block *ob,
2094 struct lto_output_stream *out_stream,
2095 struct lto_out_decl_state *state)
2096 {
2097 unsigned i;
2098 uint32_t ref;
2099 tree decl;
2100
2101 /* Write a reference to the FUNCTION_DECL. If there is no function,
2102 write a reference to void_type_node instead. */
2103 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2104 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2105 gcc_assert (ref != (unsigned)-1);
2106 lto_output_data_stream (out_stream, &ref, sizeof (uint32_t));
2107
2108 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2109 write_global_references (ob, out_stream, &state->streams[i]);
2110 }
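/* Taken together with write_global_references above, each decl state record
   in OUT_STREAM is: one 32-bit reference to the function decl (or to
   void_type_node for the global state), then for each of the
   LTO_N_DECL_STREAMS streams a 32-bit count followed by that many 32-bit
   slot numbers.  This is the layout that lto_out_decl_state_written_size
   below accounts for.  */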
2111
2112
2113 /* Return the size, in bytes, that STATE occupies when written out. */
2114
2115 static size_t
2116 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2117 {
2118 int i;
2119 size_t size;
2120
2121 size = sizeof (int32_t); /* fn_ref. */
2122 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2123 {
2124 size += sizeof (int32_t); /* vector size. */
2125 size += (lto_tree_ref_encoder_size (&state->streams[i])
2126 * sizeof (int32_t));
2127 }
2128 return size;
2129 }
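/* Small worked example of the arithmetic above: with 32-bit slots, a state
   whose streams hold 2, 5 and 0 trees (all other streams empty) is written
   as 4 bytes (fn_ref) + LTO_N_DECL_STREAMS * 4 bytes (one count per stream)
   + (2 + 5 + 0) * 4 = 28 bytes of slot numbers.  */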
2130
2131
2132 /* Write symbol T into STREAM, looking up its slot number in CACHE.
2133 SEEN is the set of symbol names already written. */
2134
2135 static void
2136 write_symbol (struct streamer_tree_cache_d *cache,
2137 struct lto_output_stream *stream,
2138 tree t, struct pointer_set_t *seen, bool alias)
2139 {
2140 const char *name;
2141 enum gcc_plugin_symbol_kind kind;
2142 enum gcc_plugin_symbol_visibility visibility;
2143 unsigned slot_num;
2144 unsigned HOST_WIDEST_INT size;
2145 const char *comdat;
2146 unsigned char c;
2147
2148 /* None of the following kinds of symbols are needed in the
2149 symbol table. */
2150 if (!TREE_PUBLIC (t)
2151 || is_builtin_fn (t)
2152 || DECL_ABSTRACT (t)
2153 || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
2154 return;
2155 gcc_assert (TREE_CODE (t) != RESULT_DECL);
2156
2157 gcc_assert (TREE_CODE (t) == VAR_DECL
2158 || TREE_CODE (t) == FUNCTION_DECL);
2159
2160 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2161
2162 /* This behaves like assemble_name_raw in varasm.c, performing the
2163 same name manipulations that ASM_OUTPUT_LABELREF does. */
2164 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2165
2166 if (pointer_set_contains (seen, name))
2167 return;
2168 pointer_set_insert (seen, name);
2169
2170 streamer_tree_cache_lookup (cache, t, &slot_num);
2171 gcc_assert (slot_num != (unsigned)-1);
2172
2173 if (DECL_EXTERNAL (t))
2174 {
2175 if (DECL_WEAK (t))
2176 kind = GCCPK_WEAKUNDEF;
2177 else
2178 kind = GCCPK_UNDEF;
2179 }
2180 else
2181 {
2182 if (DECL_WEAK (t))
2183 kind = GCCPK_WEAKDEF;
2184 else if (DECL_COMMON (t))
2185 kind = GCCPK_COMMON;
2186 else
2187 kind = GCCPK_DEF;
2188
2189 /* When something is defined, it should have a node attached. */
2190 gcc_assert (alias || TREE_CODE (t) != VAR_DECL
2191 || varpool_get_node (t)->symbol.definition);
2192 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2193 || (cgraph_get_node (t)
2194 && cgraph_get_node (t)->symbol.definition));
2195 }
2196
2197 /* Imitate what default_elf_asm_output_external does.
2198 When a symbol is external, we need to output it with DEFAULT visibility
2199 when compiling with -fvisibility=default, but with HIDDEN visibility
2200 when the symbol has attribute ((visibility ("hidden"))) specified.
2201 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2202 right. */
2203
2204 if (DECL_EXTERNAL (t)
2205 && !targetm.binds_local_p (t))
2206 visibility = GCCPV_DEFAULT;
2207 else
2208 switch (DECL_VISIBILITY(t))
2209 {
2210 case VISIBILITY_DEFAULT:
2211 visibility = GCCPV_DEFAULT;
2212 break;
2213 case VISIBILITY_PROTECTED:
2214 visibility = GCCPV_PROTECTED;
2215 break;
2216 case VISIBILITY_HIDDEN:
2217 visibility = GCCPV_HIDDEN;
2218 break;
2219 case VISIBILITY_INTERNAL:
2220 visibility = GCCPV_INTERNAL;
2221 break;
2222 }
2223
2224 if (kind == GCCPK_COMMON
2225 && DECL_SIZE_UNIT (t)
2226 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2227 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2228 else
2229 size = 0;
2230
2231 if (DECL_ONE_ONLY (t))
2232 comdat = IDENTIFIER_POINTER (DECL_COMDAT_GROUP (t));
2233 else
2234 comdat = "";
2235
2236 lto_output_data_stream (stream, name, strlen (name) + 1);
2237 lto_output_data_stream (stream, comdat, strlen (comdat) + 1);
2238 c = (unsigned char) kind;
2239 lto_output_data_stream (stream, &c, 1);
2240 c = (unsigned char) visibility;
2241 lto_output_data_stream (stream, &c, 1);
2242 lto_output_data_stream (stream, &size, 8);
2243 lto_output_data_stream (stream, &slot_num, 4);
2244 }
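/* The record appended to STREAM above is, in order: the NUL-terminated
   assembler name, the NUL-terminated comdat group name (empty when the
   symbol is not DECL_ONE_ONLY), one byte for the symbol kind, one byte for
   the visibility, 8 bytes of size and 4 bytes of writer-cache slot number.
   As a purely illustrative example, a defined, default-visibility function
   whose assembler name is "foo" and which belongs to no comdat group yields
   "foo\0" "\0" followed by GCCPK_DEF, GCCPV_DEFAULT, eight zero size bytes
   and its slot number.  */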
2245
2246 /* Return true if NODE should appear in the plugin symbol table. */
2247
2248 bool
2249 output_symbol_p (symtab_node node)
2250 {
2251 struct cgraph_node *cnode;
2252 if (!symtab_real_symbol_p (node))
2253 return false;
2254 /* We keep external functions in symtab for the sake of inlining
2255 and devirtualization. We do not want to see them in the symbol table
2256 as references unless they are really used. */
2257 cnode = dyn_cast <cgraph_node> (node);
2258 if (cnode && (!node->symbol.definition || DECL_EXTERNAL (cnode->symbol.decl))
2259 && cnode->callers)
2260 return true;
2261
2262 /* Ignore all references from initializers of external vars - they are not
2263 really part of the compilation unit until they are used by folding. Some
2264 symbols, like references to external construction vtables, cannot be
2265 referred to at all. We decide this in can_refer_decl_in_current_unit_p. */
2266 if (!node->symbol.definition || DECL_EXTERNAL (node->symbol.decl))
2267 {
2268 int i;
2269 struct ipa_ref *ref;
2270 for (i = 0; ipa_ref_list_referring_iterate (&node->symbol.ref_list,
2271 i, ref); i++)
2272 {
2273 if (ref->use == IPA_REF_ALIAS)
2274 continue;
2275 if (is_a <cgraph_node> (ref->referring))
2276 return true;
2277 if (!DECL_EXTERNAL (ref->referring->symbol.decl))
2278 return true;
2279 }
2280 return false;
2281 }
2282 return true;
2283 }
2284
2285
2286 /* Write an IL symbol table to OB. The symbols written are those
2287 recorded in OB's symtab node encoder. */
2288
2289 static void
2290 produce_symtab (struct output_block *ob)
2291 {
2292 struct streamer_tree_cache_d *cache = ob->writer_cache;
2293 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2294 struct pointer_set_t *seen;
2295 struct lto_output_stream stream;
2296 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2297 lto_symtab_encoder_iterator lsei;
2298
2299 lto_begin_section (section_name, false);
2300 free (section_name);
2301
2302 seen = pointer_set_create ();
2303 memset (&stream, 0, sizeof (stream));
2304
2305 /* Write the symbol table.
2306 First write everything defined and then all declarations.
2307 This is necessary to handle cases where we have duplicated symbols. */
2308 for (lsei = lsei_start (encoder);
2309 !lsei_end_p (lsei); lsei_next (&lsei))
2310 {
2311 symtab_node node = lsei_node (lsei);
2312
2313 if (!output_symbol_p (node) || DECL_EXTERNAL (node->symbol.decl))
2314 continue;
2315 write_symbol (cache, &stream, node->symbol.decl, seen, false);
2316 }
2317 for (lsei = lsei_start (encoder);
2318 !lsei_end_p (lsei); lsei_next (&lsei))
2319 {
2320 symtab_node node = lsei_node (lsei);
2321
2322 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->symbol.decl))
2323 continue;
2324 write_symbol (cache, &stream, node->symbol.decl, seen, false);
2325 }
2326
2327 lto_write_stream (&stream);
2328 pointer_set_destroy (seen);
2329
2330 lto_end_section ();
2331 }
2332
2333
2334 /* This pass is run after all of the functions are serialized and all
2335 of the IPA passes have written their serialized forms. This pass
2336 causes the vector of all of the global decls and types used from
2337 this file to be written into a section that can then be read back
2338 to recover them on the other side. */
2339
2340 static void
2341 produce_asm_for_decls (void)
2342 {
2343 struct lto_out_decl_state *out_state;
2344 struct lto_out_decl_state *fn_out_state;
2345 struct lto_decl_header header;
2346 char *section_name;
2347 struct output_block *ob;
2348 struct lto_output_stream *header_stream, *decl_state_stream;
2349 unsigned idx, num_fns;
2350 size_t decl_state_size;
2351 int32_t num_decl_states;
2352
2353 ob = create_output_block (LTO_section_decls);
2354 ob->global = true;
2355
2356 memset (&header, 0, sizeof (struct lto_decl_header));
2357
2358 section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2359 lto_begin_section (section_name, !flag_wpa);
2360 free (section_name);
2361
2362 /* Make string 0 be a NULL string. */
2363 streamer_write_char_stream (ob->string_stream, 0);
2364
2365 gcc_assert (!alias_pairs);
2366
2367 /* Write the global symbols. */
2368 out_state = lto_get_out_decl_state ();
2369 num_fns = lto_function_decl_states.length ();
2370 lto_output_decl_state_streams (ob, out_state);
2371 for (idx = 0; idx < num_fns; idx++)
2372 {
2373 fn_out_state =
2374 lto_function_decl_states[idx];
2375 lto_output_decl_state_streams (ob, fn_out_state);
2376 }
2377
2378 header.lto_header.major_version = LTO_major_version;
2379 header.lto_header.minor_version = LTO_minor_version;
2380
2381 /* Currently not used. This field would allow us to preallocate
2382 the globals vector, so that it need not be resized as it is extended. */
2383 header.num_nodes = -1;
2384
2385 /* Compute the total size of all decl out states. */
2386 decl_state_size = sizeof (int32_t);
2387 decl_state_size += lto_out_decl_state_written_size (out_state);
2388 for (idx = 0; idx < num_fns; idx++)
2389 {
2390 fn_out_state =
2391 lto_function_decl_states[idx];
2392 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2393 }
2394 header.decl_state_size = decl_state_size;
2395
2396 header.main_size = ob->main_stream->total_size;
2397 header.string_size = ob->string_stream->total_size;
2398
2399 header_stream = XCNEW (struct lto_output_stream);
2400 lto_output_data_stream (header_stream, &header, sizeof header);
2401 lto_write_stream (header_stream);
2402 free (header_stream);
2403
2404 /* Write the main out-decl state, followed by out-decl states of
2405 functions. */
2406 decl_state_stream = XCNEW (struct lto_output_stream);
2407 num_decl_states = num_fns + 1;
2408 lto_output_data_stream (decl_state_stream, &num_decl_states,
2409 sizeof (num_decl_states));
2410 lto_output_decl_state_refs (ob, decl_state_stream, out_state);
2411 for (idx = 0; idx < num_fns; idx++)
2412 {
2413 fn_out_state =
2414 lto_function_decl_states[idx];
2415 lto_output_decl_state_refs (ob, decl_state_stream, fn_out_state);
2416 }
2417 lto_write_stream (decl_state_stream);
2418 free (decl_state_stream);
2419
2420 lto_write_stream (ob->main_stream);
2421 lto_write_stream (ob->string_stream);
2422
2423 lto_end_section ();
2424
2425 /* Write the symbol table. It is used by the linker to determine
2426 dependencies, so we can skip it for WPA. */
2427 if (!flag_wpa)
2428 produce_symtab (ob);
2429
2430 /* Write command line opts. */
2431 lto_write_options ();
2432
2433 /* Deallocate memory and clean up. */
2434 for (idx = 0; idx < num_fns; idx++)
2435 {
2436 fn_out_state =
2437 lto_function_decl_states[idx];
2438 lto_delete_out_decl_state (fn_out_state);
2439 }
2440 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2441 lto_function_decl_states.release ();
2442 destroy_output_block (ob);
2443 }
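/* Sketch of the decls section produced above, as far as this function goes:
   an lto_decl_header, a 32-bit count of decl states (the global state plus
   one per function), the reference records for the global state followed by
   those of each function state, then the main tree stream and the string
   table.  The symbol table and the command-line options go into their own
   sections afterwards.  */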
2444
2445
2446 struct ipa_opt_pass_d pass_ipa_lto_finish_out =
2447 {
2448 {
2449 IPA_PASS,
2450 "lto_decls_out", /* name */
2451 OPTGROUP_NONE, /* optinfo_flags */
2452 gate_lto_out, /* gate */
2453 NULL, /* execute */
2454 NULL, /* sub */
2455 NULL, /* next */
2456 0, /* static_pass_number */
2457 TV_IPA_LTO_DECL_OUT, /* tv_id */
2458 0, /* properties_required */
2459 0, /* properties_provided */
2460 0, /* properties_destroyed */
2461 0, /* todo_flags_start */
2462 0 /* todo_flags_finish */
2463 },
2464 NULL, /* generate_summary */
2465 produce_asm_for_decls, /* write_summary */
2466 NULL, /* read_summary */
2467 produce_asm_for_decls, /* write_optimization_summary */
2468 NULL, /* read_optimization_summary */
2469 NULL, /* stmt_fixup */
2470 0, /* TODOs */
2471 NULL, /* function_transform */
2472 NULL /* variable_transform */
2473 };