]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/lto-streamer-out.c
6f1585a2bf967f9d286781cb42a11d8b3ad775bd
[thirdparty/gcc.git] / gcc / lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2013 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "stor-layout.h"
29 #include "stringpool.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "hashtab.h"
35 #include "basic-block.h"
36 #include "gimple.h"
37 #include "gimple-iterator.h"
38 #include "gimple-ssa.h"
39 #include "tree-ssanames.h"
40 #include "tree-pass.h"
41 #include "function.h"
42 #include "ggc.h"
43 #include "diagnostic-core.h"
44 #include "except.h"
45 #include "vec.h"
46 #include "lto-symtab.h"
47 #include "lto-streamer.h"
48 #include "data-streamer.h"
49 #include "gimple-streamer.h"
50 #include "tree-streamer.h"
51 #include "streamer-hooks.h"
52 #include "cfgloop.h"
53
54
/* Reset the current location info stored in output block OB, so the
   next location streamed is emitted in full rather than as a delta.  */

static void
clear_line_info (struct output_block *ob)
{
  ob->current_file = NULL;
  ob->current_line = 0;
  ob->current_col = 0;
}
64
65
66 /* Create the output block and return it. SECTION_TYPE is
67 LTO_section_function_body or LTO_static_initializer. */
68
69 struct output_block *
70 create_output_block (enum lto_section_type section_type)
71 {
72 struct output_block *ob = XCNEW (struct output_block);
73
74 ob->section_type = section_type;
75 ob->decl_state = lto_get_out_decl_state ();
76 ob->main_stream = XCNEW (struct lto_output_stream);
77 ob->string_stream = XCNEW (struct lto_output_stream);
78 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true);
79
80 if (section_type == LTO_section_function_body)
81 ob->cfg_stream = XCNEW (struct lto_output_stream);
82
83 clear_line_info (ob);
84
85 ob->string_hash_table.create (37);
86 gcc_obstack_init (&ob->obstack);
87
88 return ob;
89 }
90
91
92 /* Destroy the output block OB. */
93
94 void
95 destroy_output_block (struct output_block *ob)
96 {
97 enum lto_section_type section_type = ob->section_type;
98
99 ob->string_hash_table.dispose ();
100
101 free (ob->main_stream);
102 free (ob->string_stream);
103 if (section_type == LTO_section_function_body)
104 free (ob->cfg_stream);
105
106 streamer_tree_cache_delete (ob->writer_cache);
107 obstack_free (&ob->obstack, NULL);
108
109 free (ob);
110 }
111
112
/* Look up NODE in the type table and write the index for it to OB.
   Emits an LTO_type_ref record tag followed by the table index.  */

static void
output_type_ref (struct output_block *ob, tree node)
{
  streamer_write_record_start (ob, LTO_type_ref);
  lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
}
121
122
123 /* Return true if tree node T is written to various tables. For these
124 nodes, we sometimes want to write their phyiscal representation
125 (via lto_output_tree), and sometimes we need to emit an index
126 reference into a table (via lto_output_tree_ref). */
127
128 static bool
129 tree_is_indexable (tree t)
130 {
131 /* Parameters and return values of functions of variably modified types
132 must go to global stream, because they may be used in the type
133 definition. */
134 if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
135 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
136 else if (TREE_CODE (t) == VAR_DECL && decl_function_context (t)
137 && !TREE_STATIC (t))
138 return false;
139 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
140 return false;
141 /* Variably modified types need to be streamed alongside function
142 bodies because they can refer to local entities. Together with
143 them we have to localize their members as well.
144 ??? In theory that includes non-FIELD_DECLs as well. */
145 else if (TYPE_P (t)
146 && variably_modified_type_p (t, NULL_TREE))
147 return false;
148 else if (TREE_CODE (t) == FIELD_DECL
149 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
150 return false;
151 else
152 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
153 }
154
155
/* Output info about new location into bitpack BP.
   After outputting bitpack, lto_output_location_data has
   to be done to output actual data.  */

void
lto_output_location (struct output_block *ob, struct bitpack_d *bp,
		     location_t loc)
{
  expanded_location xloc;

  loc = LOCATION_LOCUS (loc);
  /* Unknown locations are encoded as a single set bit.  */
  bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
  if (loc == UNKNOWN_LOCATION)
    return;

  xloc = expand_location (loc);

  /* Delta-encode against the previous location: one "changed" bit per
     component, so repeated file/line/column cost only three bits.  */
  bp_pack_value (bp, ob->current_file != xloc.file, 1);
  bp_pack_value (bp, ob->current_line != xloc.line, 1);
  bp_pack_value (bp, ob->current_col != xloc.column, 1);

  if (ob->current_file != xloc.file)
    bp_pack_var_len_unsigned (bp,
			      streamer_string_index (ob, xloc.file,
						     strlen (xloc.file) + 1,
						     true));
  ob->current_file = xloc.file;

  if (ob->current_line != xloc.line)
    bp_pack_var_len_unsigned (bp, xloc.line);
  ob->current_line = xloc.line;

  if (ob->current_col != xloc.column)
    bp_pack_var_len_unsigned (bp, xloc.column);
  ob->current_col = xloc.column;
}
192
193
/* If EXPR is an indexable tree node, output a reference to it to
   output block OB.  Otherwise, output the physical representation of
   EXPR to OB.  Each reference is an LTO_*_ref record tag followed by
   the index of EXPR in the corresponding decl/type table (SSA names
   use their version number instead).  */

static void
lto_output_tree_ref (struct output_block *ob, tree expr)
{
  enum tree_code code;

  if (TYPE_P (expr))
    {
      output_type_ref (ob, expr);
      return;
    }

  code = TREE_CODE (expr);
  switch (code)
    {
    case SSA_NAME:
      streamer_write_record_start (ob, LTO_ssa_name_ref);
      streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
      break;

    case FIELD_DECL:
      streamer_write_record_start (ob, LTO_field_decl_ref);
      lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case FUNCTION_DECL:
      streamer_write_record_start (ob, LTO_function_decl_ref);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case VAR_DECL:
    case DEBUG_EXPR_DECL:
      gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
      /* Fall through.  */
    case PARM_DECL:
      streamer_write_record_start (ob, LTO_global_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case CONST_DECL:
      streamer_write_record_start (ob, LTO_const_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case IMPORTED_DECL:
      gcc_assert (decl_function_context (expr) == NULL);
      streamer_write_record_start (ob, LTO_imported_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TYPE_DECL:
      streamer_write_record_start (ob, LTO_type_decl_ref);
      lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMESPACE_DECL:
      streamer_write_record_start (ob, LTO_namespace_decl_ref);
      lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case LABEL_DECL:
      streamer_write_record_start (ob, LTO_label_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case RESULT_DECL:
      streamer_write_record_start (ob, LTO_result_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TRANSLATION_UNIT_DECL:
      streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    default:
      /* No other node is indexable, so it should have been handled by
	 lto_output_tree.  */
      gcc_unreachable ();
    }
}
277
278
279 /* Return true if EXPR is a tree node that can be written to disk. */
280
281 static inline bool
282 lto_is_streamable (tree expr)
283 {
284 enum tree_code code = TREE_CODE (expr);
285
286 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
287 name version in lto_output_tree_ref (see output_ssa_names). */
288 return !is_lang_specific (expr)
289 && code != SSA_NAME
290 && code != CALL_EXPR
291 && code != LANG_TYPE
292 && code != MODIFY_EXPR
293 && code != INIT_EXPR
294 && code != TARGET_EXPR
295 && code != BIND_EXPR
296 && code != WITH_CLEANUP_EXPR
297 && code != STATEMENT_LIST
298 && code != OMP_CLAUSE
299 && (code == CASE_LABEL_EXPR
300 || code == DECL_EXPR
301 || TREE_CODE_CLASS (code) != tcc_statement);
302 }
303
304
305 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
306
307 static tree
308 get_symbol_initial_value (struct output_block *ob, tree expr)
309 {
310 gcc_checking_assert (DECL_P (expr)
311 && TREE_CODE (expr) != FUNCTION_DECL
312 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
313
314 /* Handle DECL_INITIAL for symbols. */
315 tree initial = DECL_INITIAL (expr);
316 if (TREE_CODE (expr) == VAR_DECL
317 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
318 && !DECL_IN_CONSTANT_POOL (expr)
319 && initial)
320 {
321 lto_symtab_encoder_t encoder;
322 struct varpool_node *vnode;
323
324 encoder = ob->decl_state->symtab_node_encoder;
325 vnode = varpool_get_node (expr);
326 if (!vnode
327 || !lto_symtab_encoder_encode_initializer_p (encoder,
328 vnode))
329 initial = error_mark_node;
330 }
331
332 return initial;
333 }
334
335
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  IX is the index into the streamer cache
   where EXPR is stored.  The write order (bitfields, then pointer
   fields, then DECL_INITIAL) must match the reader.  */

static void
lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
{
  /* Pack all the non-pointer fields in EXPR into a bitpack and write
     the resulting bitpack.  */
  bitpack_d bp = bitpack_create (ob->main_stream);
  streamer_pack_tree_bitfields (ob, &bp, expr);
  streamer_write_bitpack (&bp);

  /* Write all the pointer fields in EXPR.  */
  streamer_write_tree_body (ob, expr, ref_p);

  /* Write any LTO-specific data to OB.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    {
      /* Handle DECL_INITIAL for symbols.  */
      tree initial = get_symbol_initial_value (ob, expr);
      stream_write_tree (ob, initial, ref_p);
    }
}
363
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  IX is the index into the streamer cache
   where EXPR is stored.  Aborts with an internal error if EXPR cannot
   be represented in an LTO stream.  */

static void
lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
{
  if (!lto_is_streamable (expr))
    internal_error ("tree code %qs is not supported in LTO streams",
		    get_tree_code_name (TREE_CODE (expr)));

  /* Write the header, containing everything needed to materialize
     EXPR on the reading side.  */
  streamer_write_tree_header (ob, expr);

  lto_write_tree_1 (ob, expr, ref_p);

  /* Mark the end of EXPR.  */
  streamer_write_zero (ob);
}
385
/* Emit the physical representation of tree node EXPR to output block
   OB.  HASH is the streaming hash recorded in the writer cache.
   If THIS_REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.  */

static void
lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
		   bool ref_p, bool this_ref_p)
{
  unsigned ix;

  gcc_checking_assert (expr != NULL_TREE
		       && !(this_ref_p && tree_is_indexable (expr)));

  /* EXPR must not already be in the cache; insert it so later
     occurrences are streamed as back-references.  */
  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
					      expr, hash, &ix);
  gcc_assert (!exists_p);
  if (streamer_handle_as_builtin_p (expr))
    {
      /* MD and NORMAL builtins do not need to be written out
	 completely as they are always instantiated by the
	 compiler on startup.  The only builtins that need to
	 be written out are BUILT_IN_FRONTEND.  For all other
	 builtins, we simply write the class and code.  */
      streamer_write_builtin (ob, expr);
    }
  else if (TREE_CODE (expr) == INTEGER_CST
	   && !TREE_OVERFLOW (expr))
    {
      /* Shared INTEGER_CST nodes are special because they need their
	 original type to be materialized by the reader (to implement
	 TYPE_CACHED_VALUES).  */
      streamer_write_integer_cst (ob, expr, ref_p);
    }
  else
    {
      /* This is the first time we see EXPR, write its fields
	 to OB.  */
      lto_write_tree (ob, expr, ref_p);
    }
}
426
/* Per-tree state for the DFS walk over tree edges (the names suggest a
   Tarjan-style SCC discovery -- dfsnum/low are the classic pre-order
   number and low-link; confirm against DFS_write_tree below).  */

struct sccs
{
  unsigned int dfsnum;	/* DFS pre-order number of this tree.  */
  unsigned int low;	/* Smallest dfsnum reachable from this tree.  */
};

/* A tree together with its streaming hash, as kept on the SCC stack.  */

struct scc_entry
{
  tree t;
  hashval_t hash;
};

/* Next DFS number to assign.  */
static unsigned int next_dfs_num;
/* Stack of trees belonging to not-yet-completed SCCs.  */
static vec<scc_entry> sccstack;
/* Map from tree to its struct sccs state.  */
static struct pointer_map_t *sccstate;
/* Obstack backing the sccs allocations.  */
static struct obstack sccstate_obstack;

static void
DFS_write_tree (struct output_block *ob, sccs *from_state,
		tree expr, bool ref_p, bool this_ref_p);
447
/* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
   DFS recurse for all tree edges originating from it.  The set of
   edges followed here must mirror what streamer_write_tree_body emits,
   so the reader can reconnect them.  */

static void
DFS_write_tree_body (struct output_block *ob,
		     tree expr, sccs *expr_state, bool ref_p)
{
/* Recurse into DEST, threading EXPR's SCC state through the walk.  */
#define DFS_follow_tree_edge(DEST) \
  DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)

  enum tree_code code;

  code = TREE_CODE (expr);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (TREE_CODE (expr) != IDENTIFIER_NODE)
	DFS_follow_tree_edge (TREE_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
	DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      DFS_follow_tree_edge (TREE_REALPART (expr));
      DFS_follow_tree_edge (TREE_IMAGPART (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (expr)
	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
	  && ANON_AGGRNAME_P (DECL_NAME (expr)))
	;
      else
	DFS_follow_tree_edge (DECL_NAME (expr));
      DFS_follow_tree_edge (DECL_CONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      DFS_follow_tree_edge (DECL_SIZE (expr));
      DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));

      /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
	 special handling in LTO, it must be handled by streamer hooks.  */

      DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));

      /* Do not follow DECL_ABSTRACT_ORIGIN.  We cannot handle debug information
	 for early inlining so drop it on the floor instead of ICEing in
	 dwarf2out.c.  */

      if ((TREE_CODE (expr) == VAR_DECL
	   || TREE_CODE (expr) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (expr))
	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
      if (TREE_CODE (expr) == VAR_DECL)
	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (TREE_CODE (expr) == TYPE_DECL)
	DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
      DFS_follow_tree_edge (DECL_VINDEX (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (expr))
	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
      DFS_follow_tree_edge (DECL_SECTION_NAME (expr));
      DFS_follow_tree_edge (DECL_COMDAT_GROUP (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
      DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
      DFS_follow_tree_edge (DECL_FCONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      DFS_follow_tree_edge (TYPE_SIZE (expr));
      DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
      DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
      DFS_follow_tree_edge (TYPE_NAME (expr));
      /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
	 reconstructed during fixup.  */
      /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
	 during fixup.  */
      DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
      DFS_follow_tree_edge (TYPE_CONTEXT (expr));
      /* TYPE_CANONICAL is re-computed during type merging, so no need
	 to follow it here.  */
      DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	DFS_follow_tree_edge (TYPE_VALUES (expr));
      else if (TREE_CODE (expr) == ARRAY_TYPE)
	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
      else if (RECORD_OR_UNION_TYPE_P (expr))
	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
	  DFS_follow_tree_edge (t);
      else if (TREE_CODE (expr) == FUNCTION_TYPE
	       || TREE_CODE (expr) == METHOD_TYPE)
	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));

      if (!POINTER_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_MINVAL (expr));
      DFS_follow_tree_edge (TYPE_MAXVAL (expr));
      if (RECORD_OR_UNION_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_BINFO (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      DFS_follow_tree_edge (TREE_PURPOSE (expr));
      DFS_follow_tree_edge (TREE_VALUE (expr));
      DFS_follow_tree_edge (TREE_CHAIN (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    {
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
      DFS_follow_tree_edge (TREE_BLOCK (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    {
      for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
	/* ??? FIXME.  See also streamer_write_chain.  */
	if (!(VAR_OR_FUNCTION_DECL_P (t)
	      && DECL_EXTERNAL (t)))
	  DFS_follow_tree_edge (t);

      DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));

      /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
	 handle - those that represent inlined function scopes.
	 For the rest, drop them on the floor instead of ICEing
	 in dwarf2out.c.  */
      if (inlined_function_outer_scope_p (expr))
	{
	  tree ultimate_origin = block_ultimate_origin (expr);
	  DFS_follow_tree_edge (ultimate_origin);
	}
      /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
	 information for early inlined BLOCKs so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
	 streaming time.  */

      /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;

      /* Note that the number of BINFO slots has already been emitted in
	 EXPR's header (see streamer_write_tree_header) because this length
	 is needed to build the empty BINFO node on the reader side.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
	DFS_follow_tree_edge (t);
      DFS_follow_tree_edge (BINFO_OFFSET (expr));
      DFS_follow_tree_edge (BINFO_VTABLE (expr));
      DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));

      /* The number of BINFO_BASE_ACCESSES has already been emitted in
	 EXPR's bitfield section.  */
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
	DFS_follow_tree_edge (t);

      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
	{
	  DFS_follow_tree_edge (index);
	  DFS_follow_tree_edge (value);
	}
    }

#undef DFS_follow_tree_edge
}
670
/* Return a hash value for the tree T.  The hash covers the same fields
   that are streamed, so that structurally equal streamed trees hash
   equal.  Order of mixing matters and must stay stable.  */

static hashval_t
hash_tree (struct streamer_tree_cache_d *cache, tree t)
{
/* Mix the cached hash of SIBLING into V.  Siblings not yet in CACHE
   are skipped here (they are presumably part of the current SCC and
   mixed in elsewhere -- confirm against the SCC streaming driver).  */
#define visit(SIBLING) \
  do { \
    unsigned ix; \
    if (SIBLING && streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
      v = iterative_hash_hashval_t (streamer_tree_cache_get_hash (cache, ix), v); \
  } while (0)

  /* Hash TS_BASE.  */
  enum tree_code code = TREE_CODE (t);
  hashval_t v = iterative_hash_host_wide_int (code, 0);
  if (!TYPE_P (t))
    {
      v = iterative_hash_host_wide_int (TREE_SIDE_EFFECTS (t)
					| (TREE_CONSTANT (t) << 1)
					| (TREE_READONLY (t) << 2)
					| (TREE_PUBLIC (t) << 3), v);
    }
  v = iterative_hash_host_wide_int (TREE_ADDRESSABLE (t)
				    | (TREE_THIS_VOLATILE (t) << 1), v);
  if (DECL_P (t))
    v = iterative_hash_host_wide_int (DECL_UNSIGNED (t), v);
  else if (TYPE_P (t))
    v = iterative_hash_host_wide_int (TYPE_UNSIGNED (t), v);
  if (TYPE_P (t))
    v = iterative_hash_host_wide_int (TYPE_ARTIFICIAL (t), v);
  else
    v = iterative_hash_host_wide_int (TREE_NO_WARNING (t), v);
  v = iterative_hash_host_wide_int (TREE_NOTHROW (t)
				    | (TREE_STATIC (t) << 1)
				    | (TREE_PROTECTED (t) << 2)
				    | (TREE_DEPRECATED (t) << 3), v);
  if (code != TREE_BINFO)
    v = iterative_hash_host_wide_int (TREE_PRIVATE (t), v);
  if (TYPE_P (t))
    v = iterative_hash_host_wide_int (TYPE_SATURATING (t)
				      | (TYPE_ADDR_SPACE (t) << 1), v);
  else if (code == SSA_NAME)
    v = iterative_hash_host_wide_int (SSA_NAME_IS_DEFAULT_DEF (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
    {
      v = iterative_hash_host_wide_int (TREE_INT_CST_LOW (t), v);
      v = iterative_hash_host_wide_int (TREE_INT_CST_HIGH (t), v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
    {
      /* Hash the full bit pattern of the real constant.  */
      REAL_VALUE_TYPE r = TREE_REAL_CST (t);
      v = iterative_hash_host_wide_int (r.cl, v);
      v = iterative_hash_host_wide_int (r.decimal
					| (r.sign << 1)
					| (r.signalling << 2)
					| (r.canonical << 3), v);
      v = iterative_hash_host_wide_int (r.uexp, v);
      for (unsigned i = 0; i < SIGSZ; ++i)
	v = iterative_hash_host_wide_int (r.sig[i], v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
    {
      FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
      v = iterative_hash_host_wide_int (f.mode, v);
      v = iterative_hash_host_wide_int (f.data.low, v);
      v = iterative_hash_host_wide_int (f.data.high, v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      v = iterative_hash_host_wide_int (DECL_MODE (t), v);
      v = iterative_hash_host_wide_int (DECL_NONLOCAL (t)
					| (DECL_VIRTUAL_P (t) << 1)
					| (DECL_IGNORED_P (t) << 2)
					| (DECL_ABSTRACT (t) << 3)
					| (DECL_ARTIFICIAL (t) << 4)
					| (DECL_USER_ALIGN (t) << 5)
					| (DECL_PRESERVE_P (t) << 6)
					| (DECL_EXTERNAL (t) << 7)
					| (DECL_GIMPLE_REG_P (t) << 8), v);
      v = iterative_hash_host_wide_int (DECL_ALIGN (t), v);
      if (code == LABEL_DECL)
	{
	  v = iterative_hash_host_wide_int (EH_LANDING_PAD_NR (t), v);
	  v = iterative_hash_host_wide_int (LABEL_DECL_UID (t), v);
	}
      else if (code == FIELD_DECL)
	{
	  v = iterative_hash_host_wide_int (DECL_PACKED (t)
					    | (DECL_NONADDRESSABLE_P (t) << 1),
					    v);
	  v = iterative_hash_host_wide_int (DECL_OFFSET_ALIGN (t), v);
	}
      else if (code == VAR_DECL)
	{
	  v = iterative_hash_host_wide_int (DECL_HAS_DEBUG_EXPR_P (t)
					    | (DECL_NONLOCAL_FRAME (t) << 1),
					    v);
	}
      if (code == RESULT_DECL
	  || code == PARM_DECL
	  || code == VAR_DECL)
	{
	  v = iterative_hash_host_wide_int (DECL_BY_REFERENCE (t), v);
	  if (code == VAR_DECL
	      || code == PARM_DECL)
	    v = iterative_hash_host_wide_int (DECL_HAS_VALUE_EXPR_P (t), v);
	}
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
    v = iterative_hash_host_wide_int (DECL_REGISTER (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      v = iterative_hash_host_wide_int ((DECL_COMMON (t))
					| (DECL_DLLIMPORT_P (t) << 1)
					| (DECL_WEAK (t) << 2)
					| (DECL_SEEN_IN_BIND_EXPR_P (t) << 3)
					| (DECL_COMDAT (t) << 4)
					| (DECL_VISIBILITY_SPECIFIED (t) << 6),
					v);
      v = iterative_hash_host_wide_int (DECL_VISIBILITY (t), v);
      if (code == VAR_DECL)
	{
	  /* DECL_IN_TEXT_SECTION is set during final asm output only.  */
	  v = iterative_hash_host_wide_int (DECL_HARD_REGISTER (t)
					    | (DECL_IN_CONSTANT_POOL (t) << 1),
					    v);
	  v = iterative_hash_host_wide_int (DECL_TLS_MODEL (t), v);
	}
      if (TREE_CODE (t) == FUNCTION_DECL)
	v = iterative_hash_host_wide_int (DECL_FINAL_P (t)
					  | (DECL_CXX_CONSTRUCTOR_P (t) << 1)
					  | (DECL_CXX_DESTRUCTOR_P (t) << 2),
					  v);
      if (VAR_OR_FUNCTION_DECL_P (t))
	v = iterative_hash_host_wide_int (DECL_INIT_PRIORITY (t), v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      v = iterative_hash_host_wide_int (DECL_BUILT_IN_CLASS (t), v);
      v = iterative_hash_host_wide_int (DECL_STATIC_CONSTRUCTOR (t)
					| (DECL_STATIC_DESTRUCTOR (t) << 1)
					| (DECL_UNINLINABLE (t) << 2)
					| (DECL_POSSIBLY_INLINED (t) << 3)
					| (DECL_IS_NOVOPS (t) << 4)
					| (DECL_IS_RETURNS_TWICE (t) << 5)
					| (DECL_IS_MALLOC (t) << 6)
					| (DECL_IS_OPERATOR_NEW (t) << 7)
					| (DECL_DECLARED_INLINE_P (t) << 8)
					| (DECL_STATIC_CHAIN (t) << 9)
					| (DECL_NO_INLINE_WARNING_P (t) << 10)
					| (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t) << 11)
					| (DECL_NO_LIMIT_STACK (t) << 12)
					| (DECL_DISREGARD_INLINE_LIMITS (t) << 13)
					| (DECL_PURE_P (t) << 14)
					| (DECL_LOOPING_CONST_OR_PURE_P (t) << 15), v);
      if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
	v = iterative_hash_host_wide_int (DECL_FUNCTION_CODE (t), v);
      if (DECL_STATIC_DESTRUCTOR (t))
	v = iterative_hash_host_wide_int (DECL_FINI_PRIORITY (t), v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      v = iterative_hash_host_wide_int (TYPE_MODE (t), v);
      v = iterative_hash_host_wide_int (TYPE_STRING_FLAG (t)
					| (TYPE_NO_FORCE_BLK (t) << 1)
					| (TYPE_NEEDS_CONSTRUCTING (t) << 2)
					| (TYPE_PACKED (t) << 3)
					| (TYPE_RESTRICT (t) << 4)
					| (TYPE_USER_ALIGN (t) << 5)
					| (TYPE_READONLY (t) << 6), v);
      if (RECORD_OR_UNION_TYPE_P (t))
	{
	  v = iterative_hash_host_wide_int (TYPE_TRANSPARENT_AGGR (t)
					    | (TYPE_FINAL_P (t) << 1), v);
	}
      else if (code == ARRAY_TYPE)
	v = iterative_hash_host_wide_int (TYPE_NONALIASED_COMPONENT (t), v);
      v = iterative_hash_host_wide_int (TYPE_PRECISION (t), v);
      v = iterative_hash_host_wide_int (TYPE_ALIGN (t), v);
      /* Only distinguish "alias set zero" from "any other alias set";
	 concrete alias set numbers are not stable across runs.  */
      v = iterative_hash_host_wide_int ((TYPE_ALIAS_SET (t) == 0
					 || (!in_lto_p
					     && get_alias_set (t) == 0))
					? 0 : -1, v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
    v = iterative_hash (TRANSLATION_UNIT_LANGUAGE (t),
			strlen (TRANSLATION_UNIT_LANGUAGE (t)), v);

  if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
    v = iterative_hash (t, sizeof (struct cl_target_option), v);

  if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
    v = iterative_hash (t, sizeof (struct cl_optimization), v);

  if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
    v = iterative_hash_host_wide_int (IDENTIFIER_HASH_VALUE (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_STRING))
    v = iterative_hash (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (POINTER_TYPE_P (t))
	{
	  /* For pointers factor in the pointed-to type recursively as
	     we cannot recurse through only pointers.
	     ??? We can generalize this by keeping track of the
	     in-SCC edges for each tree (or arbitrarily the first
	     such edge) and hashing that in in a second stage
	     (instead of the quadratic mixing of the SCC we do now).  */
	  hashval_t x;
	  unsigned ix;
	  if (streamer_tree_cache_lookup (cache, TREE_TYPE (t), &ix))
	    x = streamer_tree_cache_get_hash (cache, ix);
	  else
	    x = hash_tree (cache, TREE_TYPE (t));
	  v = iterative_hash_hashval_t (x, v);
	}
      else if (code != IDENTIFIER_NODE)
	visit (TREE_TYPE (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
      visit (VECTOR_CST_ELT (t, i));

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      visit (TREE_REALPART (t));
      visit (TREE_IMAGPART (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (t)
	  && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
	  && ANON_AGGRNAME_P (DECL_NAME (t)))
	;
      else
	visit (DECL_NAME (t));
      if (DECL_FILE_SCOPE_P (t))
	;
      else
	visit (DECL_CONTEXT (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      visit (DECL_SIZE (t));
      visit (DECL_SIZE_UNIT (t));
      visit (DECL_ATTRIBUTES (t));
      if ((code == VAR_DECL
	   || code == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (t))
	visit (DECL_VALUE_EXPR (t));
      if (code == VAR_DECL
	  && DECL_HAS_DEBUG_EXPR_P (t))
	visit (DECL_DEBUG_EXPR (t));
      /* ??? Hash DECL_INITIAL as streamed.  Needs the output-block to
	 be able to call get_symbol_initial_value.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (code == TYPE_DECL)
	visit (DECL_ORIGINAL_TYPE (t));
      visit (DECL_VINDEX (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      if (DECL_ASSEMBLER_NAME_SET_P (t))
	visit (DECL_ASSEMBLER_NAME (t));
      visit (DECL_SECTION_NAME (t));
      visit (DECL_COMDAT_GROUP (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      visit (DECL_FIELD_OFFSET (t));
      visit (DECL_BIT_FIELD_TYPE (t));
      visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
      visit (DECL_FIELD_BIT_OFFSET (t));
      visit (DECL_FCONTEXT (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      visit (DECL_FUNCTION_PERSONALITY (t));
      visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
      visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      visit (TYPE_SIZE (t));
      visit (TYPE_SIZE_UNIT (t));
      visit (TYPE_ATTRIBUTES (t));
      visit (TYPE_NAME (t));
      visit (TYPE_MAIN_VARIANT (t));
      if (TYPE_FILE_SCOPE_P (t))
	;
      else
	visit (TYPE_CONTEXT (t));
      visit (TYPE_STUB_DECL (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (code == ENUMERAL_TYPE)
	visit (TYPE_VALUES (t));
      else if (code == ARRAY_TYPE)
	visit (TYPE_DOMAIN (t));
      else if (RECORD_OR_UNION_TYPE_P (t))
	for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
	  visit (f);
      else if (code == FUNCTION_TYPE
	       || code == METHOD_TYPE)
	visit (TYPE_ARG_TYPES (t));
      if (!POINTER_TYPE_P (t))
	visit (TYPE_MINVAL (t));
      visit (TYPE_MAXVAL (t));
      if (RECORD_OR_UNION_TYPE_P (t))
	visit (TYPE_BINFO (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      visit (TREE_PURPOSE (t));
      visit (TREE_VALUE (t));
      visit (TREE_CHAIN (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
      visit (TREE_VEC_ELT (t, i));

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      v = iterative_hash_host_wide_int (TREE_OPERAND_LENGTH (t), v);
      for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
	visit (TREE_OPERAND (t, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree b;
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
	visit (b);
      visit (BINFO_OFFSET (t));
      visit (BINFO_VTABLE (t));
      visit (BINFO_VPTR_FIELD (t));
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
	visit (b);
      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;
      v = iterative_hash_host_wide_int (CONSTRUCTOR_NELTS (t), v);
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
	{
	  visit (index);
	  visit (value);
	}
    }

  return v;

#undef visit
}
1056
1057 /* Compare two SCC entries by their hash value for qsorting them. */
1058
1059 static int
1060 scc_entry_compare (const void *p1_, const void *p2_)
1061 {
1062 const scc_entry *p1 = (const scc_entry *) p1_;
1063 const scc_entry *p2 = (const scc_entry *) p2_;
1064 if (p1->hash < p2->hash)
1065 return -1;
1066 else if (p1->hash > p2->hash)
1067 return 1;
1068 return 0;
1069 }
1070
/* Return a hash value for the SCC on the SCC stack from FIRST with
   size SIZE.

   The per-entry hashes are computed here with hash_tree and then
   combined so that the final per-entry and whole-SCC hashes do not
   depend on the order in which the DFS visited the SCC members —
   a requirement for matching SCCs streamed from different TUs.  */

static hashval_t
hash_scc (struct streamer_tree_cache_d *cache, unsigned first, unsigned size)
{
  /* Compute hash values for the SCC members.  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash = hash_tree (cache, sccstack[first+i].t);

  /* A singleton SCC needs no order-independence treatment.  */
  if (size == 1)
    return sccstack[first].hash;

  /* Sort the SCC of type, hash pairs so that when we mix in
     all members of the SCC the hash value becomes independent on
     the order we visited the SCC.  Disregard hashes equal to
     the hash of the tree we mix into because we cannot guarantee
     a stable sort for those across different TUs.  */
  qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
  hashval_t *tem = XALLOCAVEC (hashval_t, size);
  for (unsigned i = 0; i < size; ++i)
    {
      hashval_t hash = sccstack[first+i].hash;
      hashval_t orig_hash = hash;
      unsigned j;
      /* Skip same hashes.  Entries with a hash equal to our own would
	 be mixed in at an unstable position, so leave them out.  */
      for (j = i + 1;
	   j < size && sccstack[first+j].hash == orig_hash; ++j)
	;
      /* Mix in all other entries in sorted order, wrapping around
	 from the end of the array back to the beginning.  */
      for (; j < size; ++j)
	hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
      for (j = 0; sccstack[first+j].hash != orig_hash; ++j)
	hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
      tem[i] = hash;
    }
  /* Install the combined per-entry hashes and fold them into the
     overall SCC hash.  */
  hashval_t scc_hash = 0;
  for (unsigned i = 0; i < size; ++i)
    {
      sccstack[first+i].hash = tem[i];
      scc_hash = iterative_hash_hashval_t (tem[i], scc_hash);
    }
  return scc_hash;
}
1114
/* DFS walk EXPR and stream SCCs of tree bodies if they are not
   already in the streamer cache.  Main routine called for
   each visit of EXPR.

   This implements Tarjan's SCC algorithm: FROM_STATE is the DFS state
   of the caller (NULL at the root), and each node's low-link is
   propagated back so that a completed SCC is detected when a node's
   low-link equals its own DFS number.  Completed SCCs are popped from
   SCCSTACK and emitted as an LTO_tree_scc record.  REF_P / THIS_REF_P
   control whether leaves are streamed as references (see
   lto_output_tree).  */

static void
DFS_write_tree (struct output_block *ob, sccs *from_state,
		tree expr, bool ref_p, bool this_ref_p)
{
  unsigned ix;
  sccs **slot;

  /* Handle special cases.  */
  if (expr == NULL_TREE)
    return;

  /* Do not DFS walk into indexable trees.  */
  if (this_ref_p && tree_is_indexable (expr))
    return;

  /* Check if we already streamed EXPR.  */
  if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
    return;

  slot = (sccs **)pointer_map_insert (sccstate, expr);
  sccs *cstate = *slot;
  if (!cstate)
    {
      scc_entry e = { expr, 0 };
      /* Not yet visited.  DFS recurse and push it onto the stack.  */
      *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
      sccstack.safe_push (e);
      cstate->dfsnum = next_dfs_num++;
      cstate->low = cstate->dfsnum;

      /* Builtins are emitted by reference, so do not walk their
	 bodies.  Non-overflowed INTEGER_CSTs only need their type
	 edge walked; everything else gets a full body walk.  */
      if (streamer_handle_as_builtin_p (expr))
	;
      else if (TREE_CODE (expr) == INTEGER_CST
	       && !TREE_OVERFLOW (expr))
	DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
      else
	{
	  DFS_write_tree_body (ob, expr, cstate, ref_p);

	  /* Walk any LTO-specific edges.  */
	  if (DECL_P (expr)
	      && TREE_CODE (expr) != FUNCTION_DECL
	      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
	    {
	      /* Handle DECL_INITIAL for symbols.  */
	      tree initial = get_symbol_initial_value (ob, expr);
	      DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
	    }
	}

      /* See if we found an SCC.  */
      if (cstate->low == cstate->dfsnum)
	{
	  unsigned first, size;
	  tree x;

	  /* Pop the SCC and compute its size.  EXPR is the root, so
	     everything above it on the stack belongs to this SCC.  */
	  first = sccstack.length ();
	  do
	    {
	      x = sccstack[--first].t;
	    }
	  while (x != expr);
	  size = sccstack.length () - first;

	  /* No need to compute hashes for LTRANS units, we don't perform
	     any merging there.  */
	  hashval_t scc_hash = 0;
	  unsigned scc_entry_len = 0;
	  if (!flag_wpa)
	    {
	      scc_hash = hash_scc (ob->writer_cache, first, size);

	      /* Put the entries with the least number of collisions first.
		 Find the smallest run of equal hashes and rotate it to
		 the front so the reader has the fewest merge candidates
		 to try.  */
	      unsigned entry_start = 0;
	      scc_entry_len = size + 1;
	      for (unsigned i = 0; i < size;)
		{
		  unsigned from = i;
		  for (i = i + 1; i < size
		       && (sccstack[first + i].hash
			   == sccstack[first + from].hash); ++i)
		    ;
		  if (i - from < scc_entry_len)
		    {
		      scc_entry_len = i - from;
		      entry_start = from;
		    }
		}
	      for (unsigned i = 0; i < scc_entry_len; ++i)
		{
		  scc_entry tem = sccstack[first + i];
		  sccstack[first + i] = sccstack[first + entry_start + i];
		  sccstack[first + entry_start + i] = tem;
		}
	    }

	  /* Write LTO_tree_scc.  */
	  streamer_write_record_start (ob, LTO_tree_scc);
	  streamer_write_uhwi (ob, size);
	  streamer_write_uhwi (ob, scc_hash);

	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
	     All INTEGER_CSTs need to be handled this way as we need
	     their type to materialize them.  Also builtins are handled
	     this way.
	     ??? We still wrap these in LTO_tree_scc so at the
	     input side we can properly identify the tree we want
	     to ultimatively return.  */
	  size_t old_len = ob->writer_cache->nodes.length ();
	  if (size == 1)
	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
	  else
	    {
	      /* Write the size of the SCC entry candidates.  */
	      streamer_write_uhwi (ob, scc_entry_len);

	      /* Write all headers and populate the streamer cache.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  hashval_t hash = sccstack[first+i].hash;
		  tree t = sccstack[first+i].t;
		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
							      t, hash, &ix);
		  gcc_assert (!exists_p);

		  if (!lto_is_streamable (t))
		    internal_error ("tree code %qs is not supported "
				    "in LTO streams",
				    get_tree_code_name (TREE_CODE (t)));

		  gcc_checking_assert (!streamer_handle_as_builtin_p (t));

		  /* Write the header, containing everything needed to
		     materialize EXPR on the reading side.  */
		  streamer_write_tree_header (ob, t);
		}

	      /* Write the bitpacks and tree references.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

		  /* Mark the end of the tree.  */
		  streamer_write_zero (ob);
		}
	    }
	  /* All SCC members must have entered the cache exactly once.  */
	  gcc_assert (old_len + size == ob->writer_cache->nodes.length ());

	  /* Finally truncate the vector.  */
	  sccstack.truncate (first);

	  if (from_state)
	    from_state->low = MIN (from_state->low, cstate->low);
	  return;
	}

      /* Propagate our low-link to the caller (tree edge).  */
      if (from_state)
	from_state->low = MIN (from_state->low, cstate->low);
    }
  /* EXPR was already on the DFS stack (back edge): update the caller's
     low-link from our DFS number.  */
  gcc_checking_assert (from_state);
  if (cstate->dfsnum < from_state->dfsnum)
    from_state->low = MIN (cstate->dfsnum, from_state->low);
}
1283
1284
/* Emit the physical representation of tree node EXPR to output block
   OB.  If THIS_REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.

   On the wire, EXPR always ends up as either an LTO_null record, an
   indexable reference, or an LTO_tree_pickle_reference into the writer
   cache — in the last case after first streaming all reachable SCCs
   via DFS_write_tree.  */

void
lto_output_tree (struct output_block *ob, tree expr,
		 bool ref_p, bool this_ref_p)
{
  unsigned ix;
  bool existed_p;

  if (expr == NULL_TREE)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  /* Indexable trees (declarations, etc.) are emitted as references
     into their decl streams instead of being pickled inline.  */
  if (this_ref_p && tree_is_indexable (expr))
    {
      lto_output_tree_ref (ob, expr);
      return;
    }

  existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
  if (existed_p)
    {
      /* If a node has already been streamed out, make sure that
	 we don't write it more than once.  Otherwise, the reader
	 will instantiate two different nodes for the same object.  */
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
  else
    {
      /* This is the first time we see EXPR, write all reachable
	 trees to OB.  */
      static bool in_dfs_walk;

      /* Protect against recursion which means disconnect between
	 what tree edges we walk in the DFS walk and what edges
	 we stream out.  */
      gcc_assert (!in_dfs_walk);

      /* Start the DFS walk.  Set up the global SCC state used by
	 DFS_write_tree and tear it down again afterwards.  */
      in_dfs_walk = true;
      sccstate = pointer_map_create ();
      gcc_obstack_init (&sccstate_obstack);
      next_dfs_num = 1;
      DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
      sccstack.release ();
      pointer_map_destroy (sccstate);
      obstack_free (&sccstate_obstack, NULL);
      in_dfs_walk = false;

      /* Finally append a reference to the tree we were writing.
	 ??? If expr ended up as a singleton we could have
	 inlined it here and avoid outputting a reference.  */
      existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
      gcc_assert (existed_p);
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
}
1356
1357
1358 /* Output to OB a list of try/catch handlers starting with FIRST. */
1359
1360 static void
1361 output_eh_try_list (struct output_block *ob, eh_catch first)
1362 {
1363 eh_catch n;
1364
1365 for (n = first; n; n = n->next_catch)
1366 {
1367 streamer_write_record_start (ob, LTO_eh_catch);
1368 stream_write_tree (ob, n->type_list, true);
1369 stream_write_tree (ob, n->filter_list, true);
1370 stream_write_tree (ob, n->label, true);
1371 }
1372
1373 streamer_write_record_start (ob, LTO_null);
1374 }
1375
1376
1377 /* Output EH region R in function FN to OB. CURR_RN is the slot index
1378 that is being emitted in FN->EH->REGION_ARRAY. This is used to
1379 detect EH region sharing. */
1380
1381 static void
1382 output_eh_region (struct output_block *ob, eh_region r)
1383 {
1384 enum LTO_tags tag;
1385
1386 if (r == NULL)
1387 {
1388 streamer_write_record_start (ob, LTO_null);
1389 return;
1390 }
1391
1392 if (r->type == ERT_CLEANUP)
1393 tag = LTO_ert_cleanup;
1394 else if (r->type == ERT_TRY)
1395 tag = LTO_ert_try;
1396 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1397 tag = LTO_ert_allowed_exceptions;
1398 else if (r->type == ERT_MUST_NOT_THROW)
1399 tag = LTO_ert_must_not_throw;
1400 else
1401 gcc_unreachable ();
1402
1403 streamer_write_record_start (ob, tag);
1404 streamer_write_hwi (ob, r->index);
1405
1406 if (r->outer)
1407 streamer_write_hwi (ob, r->outer->index);
1408 else
1409 streamer_write_zero (ob);
1410
1411 if (r->inner)
1412 streamer_write_hwi (ob, r->inner->index);
1413 else
1414 streamer_write_zero (ob);
1415
1416 if (r->next_peer)
1417 streamer_write_hwi (ob, r->next_peer->index);
1418 else
1419 streamer_write_zero (ob);
1420
1421 if (r->type == ERT_TRY)
1422 {
1423 output_eh_try_list (ob, r->u.eh_try.first_catch);
1424 }
1425 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1426 {
1427 stream_write_tree (ob, r->u.allowed.type_list, true);
1428 stream_write_tree (ob, r->u.allowed.label, true);
1429 streamer_write_uhwi (ob, r->u.allowed.filter);
1430 }
1431 else if (r->type == ERT_MUST_NOT_THROW)
1432 {
1433 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1434 bitpack_d bp = bitpack_create (ob->main_stream);
1435 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1436 streamer_write_bitpack (&bp);
1437 }
1438
1439 if (r->landing_pads)
1440 streamer_write_hwi (ob, r->landing_pads->index);
1441 else
1442 streamer_write_zero (ob);
1443 }
1444
1445
1446 /* Output landing pad LP to OB. */
1447
1448 static void
1449 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1450 {
1451 if (lp == NULL)
1452 {
1453 streamer_write_record_start (ob, LTO_null);
1454 return;
1455 }
1456
1457 streamer_write_record_start (ob, LTO_eh_landing_pad);
1458 streamer_write_hwi (ob, lp->index);
1459 if (lp->next_lp)
1460 streamer_write_hwi (ob, lp->next_lp->index);
1461 else
1462 streamer_write_zero (ob);
1463
1464 if (lp->region)
1465 streamer_write_hwi (ob, lp->region->index);
1466 else
1467 streamer_write_zero (ob);
1468
1469 stream_write_tree (ob, lp->post_landing_pad, true);
1470 }
1471
1472
/* Output the existing eh_table to OB.

   Emits an LTO_eh_table record followed by the region-tree root index,
   the region array, the landing-pad array, the runtime type data and
   the action-chain table.  When FN has no EH data only the terminating
   LTO_null record is written.  */

static void
output_eh_regions (struct output_block *ob, struct function *fn)
{
  if (fn->eh && fn->eh->region_tree)
    {
      unsigned i;
      eh_region eh;
      eh_landing_pad lp;
      tree ttype;

      streamer_write_record_start (ob, LTO_eh_table);

      /* Emit the index of the root of the EH region tree.  */
      streamer_write_hwi (ob, fn->eh->region_tree->index);

      /* Emit all the EH regions in the region array, preceded by
	 their count.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
	output_eh_region (ob, eh);

      /* Emit all landing pads.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
	output_eh_lp (ob, lp);

      /* Emit all the runtime type data.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
	stream_write_tree (ob, ttype, true);

      /* Emit the table of action chains.  The representation differs
	 for the ARM EABI unwinder (trees) vs. everything else (bytes),
	 matching the ehspec_data union in struct eh_status.  */
      if (targetm.arm_eabi_unwinder)
	{
	  tree t;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
	    stream_write_tree (ob, t, true);
	}
      else
	{
	  uchar c;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
	    streamer_write_char_stream (ob->main_stream, c);
	}
    }

  /* The LTO_null either terminates the record or indicates that there
     are no eh_records at all.  */
  streamer_write_record_start (ob, LTO_null);
}
1526
1527
1528 /* Output all of the active ssa names to the ssa_names stream. */
1529
1530 static void
1531 output_ssa_names (struct output_block *ob, struct function *fn)
1532 {
1533 unsigned int i, len;
1534
1535 len = vec_safe_length (SSANAMES (fn));
1536 streamer_write_uhwi (ob, len);
1537
1538 for (i = 1; i < len; i++)
1539 {
1540 tree ptr = (*SSANAMES (fn))[i];
1541
1542 if (ptr == NULL_TREE
1543 || SSA_NAME_IN_FREE_LIST (ptr)
1544 || virtual_operand_p (ptr))
1545 continue;
1546
1547 streamer_write_uhwi (ob, i);
1548 streamer_write_char_stream (ob->main_stream,
1549 SSA_NAME_IS_DEFAULT_DEF (ptr));
1550 if (SSA_NAME_VAR (ptr))
1551 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1552 else
1553 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1554 stream_write_tree (ob, TREE_TYPE (ptr), true);
1555 }
1556
1557 streamer_write_zero (ob);
1558 }
1559
1560
1561 /* Output the cfg. */
1562
1563 static void
1564 output_cfg (struct output_block *ob, struct function *fn)
1565 {
1566 struct lto_output_stream *tmp_stream = ob->main_stream;
1567 basic_block bb;
1568
1569 ob->main_stream = ob->cfg_stream;
1570
1571 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1572 profile_status_for_function (fn));
1573
1574 /* Output the number of the highest basic block. */
1575 streamer_write_uhwi (ob, last_basic_block_for_function (fn));
1576
1577 FOR_ALL_BB_FN (bb, fn)
1578 {
1579 edge_iterator ei;
1580 edge e;
1581
1582 streamer_write_hwi (ob, bb->index);
1583
1584 /* Output the successors and the edge flags. */
1585 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1586 FOR_EACH_EDGE (e, ei, bb->succs)
1587 {
1588 streamer_write_uhwi (ob, e->dest->index);
1589 streamer_write_hwi (ob, e->probability);
1590 streamer_write_gcov_count (ob, e->count);
1591 streamer_write_uhwi (ob, e->flags);
1592 }
1593 }
1594
1595 streamer_write_hwi (ob, -1);
1596
1597 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1598 while (bb->next_bb)
1599 {
1600 streamer_write_hwi (ob, bb->next_bb->index);
1601 bb = bb->next_bb;
1602 }
1603
1604 streamer_write_hwi (ob, -1);
1605
1606 /* ??? The cfgloop interface is tied to cfun. */
1607 gcc_assert (cfun == fn);
1608
1609 /* Output the number of loops. */
1610 streamer_write_uhwi (ob, number_of_loops (fn));
1611
1612 /* Output each loop, skipping the tree root which has number zero. */
1613 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1614 {
1615 struct loop *loop = get_loop (fn, i);
1616
1617 /* Write the index of the loop header. That's enough to rebuild
1618 the loop tree on the reader side. Stream -1 for an unused
1619 loop entry. */
1620 if (!loop)
1621 {
1622 streamer_write_hwi (ob, -1);
1623 continue;
1624 }
1625 else
1626 streamer_write_hwi (ob, loop->header->index);
1627
1628 /* Write everything copy_loop_info copies. */
1629 streamer_write_enum (ob->main_stream,
1630 loop_estimation, EST_LAST, loop->estimate_state);
1631 streamer_write_hwi (ob, loop->any_upper_bound);
1632 if (loop->any_upper_bound)
1633 {
1634 streamer_write_uhwi (ob, loop->nb_iterations_upper_bound.low);
1635 streamer_write_hwi (ob, loop->nb_iterations_upper_bound.high);
1636 }
1637 streamer_write_hwi (ob, loop->any_estimate);
1638 if (loop->any_estimate)
1639 {
1640 streamer_write_uhwi (ob, loop->nb_iterations_estimate.low);
1641 streamer_write_hwi (ob, loop->nb_iterations_estimate.high);
1642 }
1643 }
1644
1645 ob->main_stream = tmp_stream;
1646 }
1647
1648
1649 /* Create the header in the file using OB. If the section type is for
1650 a function, set FN to the decl for that function. */
1651
1652 void
1653 produce_asm (struct output_block *ob, tree fn)
1654 {
1655 enum lto_section_type section_type = ob->section_type;
1656 struct lto_function_header header;
1657 char *section_name;
1658 struct lto_output_stream *header_stream;
1659
1660 if (section_type == LTO_section_function_body)
1661 {
1662 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1663 section_name = lto_get_section_name (section_type, name, NULL);
1664 }
1665 else
1666 section_name = lto_get_section_name (section_type, NULL, NULL);
1667
1668 lto_begin_section (section_name, !flag_wpa);
1669 free (section_name);
1670
1671 /* The entire header is stream computed here. */
1672 memset (&header, 0, sizeof (struct lto_function_header));
1673
1674 /* Write the header. */
1675 header.lto_header.major_version = LTO_major_version;
1676 header.lto_header.minor_version = LTO_minor_version;
1677
1678 header.compressed_size = 0;
1679
1680 if (section_type == LTO_section_function_body)
1681 header.cfg_size = ob->cfg_stream->total_size;
1682 header.main_size = ob->main_stream->total_size;
1683 header.string_size = ob->string_stream->total_size;
1684
1685 header_stream = XCNEW (struct lto_output_stream);
1686 lto_output_data_stream (header_stream, &header, sizeof header);
1687 lto_write_stream (header_stream);
1688 free (header_stream);
1689
1690 /* Put all of the gimple and the string table out the asm file as a
1691 block of text. */
1692 if (section_type == LTO_section_function_body)
1693 lto_write_stream (ob->cfg_stream);
1694 lto_write_stream (ob->main_stream);
1695 lto_write_stream (ob->string_stream);
1696
1697 lto_end_section ();
1698 }
1699
1700
/* Output the base body of struct function FN using output block OB.

   Streams the static chain, the non-local goto save area, the local
   decls, the IL properties and a bitpack of function attributes plus
   the start/end locations.  The field order here is the wire format
   and must match the reader.  */

static void
output_struct_function_base (struct output_block *ob, struct function *fn)
{
  struct bitpack_d bp;
  unsigned i;
  tree t;

  /* Output the static chain and non-local goto save area.  */
  stream_write_tree (ob, fn->static_chain_decl, true);
  stream_write_tree (ob, fn->nonlocal_goto_save_area, true);

  /* Output all the local variables in the function, preceded by
     their count.  */
  streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
  FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
    stream_write_tree (ob, t, true);

  /* Output current IL state of the function.  */
  streamer_write_uhwi (ob, fn->curr_properties);

  /* Write all the attributes for FN.  The bit widths and order here
     must match the unpacking code on the input side.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, fn->is_thunk, 1);
  bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
  bp_pack_value (&bp, fn->returns_pcc_struct, 1);
  bp_pack_value (&bp, fn->returns_struct, 1);
  bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
  bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
  bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
  bp_pack_value (&bp, fn->after_inlining, 1);
  bp_pack_value (&bp, fn->stdarg, 1);
  bp_pack_value (&bp, fn->has_nonlocal_label, 1);
  bp_pack_value (&bp, fn->calls_alloca, 1);
  bp_pack_value (&bp, fn->calls_setjmp, 1);
  bp_pack_value (&bp, fn->va_list_fpr_size, 8);
  bp_pack_value (&bp, fn->va_list_gpr_size, 8);

  /* Output the function start and end loci.  */
  stream_output_location (ob, &bp, fn->function_start_locus);
  stream_output_location (ob, &bp, fn->function_end_locus);

  streamer_write_bitpack (&bp);
}
1745
1746
/* Output the body of function NODE->DECL.

   Creates a fresh output block, streams the function record (result
   decl, arguments, lexical-scope tree, and — when a gimple body is
   present — the function base, SSA names, EH regions, all basic
   blocks and the CFG), and emits it as an LTO section via
   produce_asm.  */

static void
output_function (struct cgraph_node *node)
{
  tree function;
  struct function *fn;
  basic_block bb;
  struct output_block *ob;

  function = node->decl;
  fn = DECL_STRUCT_FUNCTION (function);
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->cgraph_node = node;

  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  /* Set current_function_decl and cfun.  */
  push_cfun (fn);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  streamer_write_record_start (ob, LTO_function);

  /* Output decls for parameters and args.  */
  stream_write_tree (ob, DECL_RESULT (function), true);
  streamer_write_chain (ob, DECL_ARGUMENTS (function), true);

  /* Output DECL_INITIAL for the function, which contains the tree of
     lexical scopes.  */
  stream_write_tree (ob, DECL_INITIAL (function), true);

  /* We also stream abstract functions where we stream only stuff needed for
     debug info.  A uhwi of 1 signals a body follows; 0 signals an
     abstract function with no body.  */
  if (gimple_has_body_p (function))
    {
      streamer_write_uhwi (ob, 1);
      output_struct_function_base (ob, fn);

      /* Output all the SSA names used in the function.  */
      output_ssa_names (ob, fn);

      /* Output any exception handling regions.  */
      output_eh_regions (ob, fn);


      /* We will renumber the statements.  The code that does this uses
	 the same ordering that we use for serializing them so we can use
	 the same code on the other end and not have to write out the
	 statement numbers.  We do not assign UIDs to PHIs here because
	 virtual PHIs get re-computed on-the-fly which would make numbers
	 inconsistent.  */
      set_gimple_stmt_max_uid (cfun, 0);
      FOR_ALL_BB (bb)
	{
	  gimple_stmt_iterator gsi;
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);

	      /* Virtual PHIs are not going to be streamed.  */
	      if (!virtual_operand_p (gimple_phi_result (stmt)))
		gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}
      /* To avoid keeping duplicate gimple IDs in the statements, renumber
	 virtual phis now.  */
      FOR_ALL_BB (bb)
	{
	  gimple_stmt_iterator gsi;
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      if (virtual_operand_p (gimple_phi_result (stmt)))
		gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}

      /* Output the code for the function.  */
      FOR_ALL_BB_FN (bb, fn)
	output_bb (ob, bb, fn);

      /* The terminator for this function.  */
      streamer_write_record_start (ob, LTO_null);

      output_cfg (ob, fn);

      pop_cfun ();
   }
  else
    streamer_write_uhwi (ob, 0);

  /* Create a section to hold the pickled output of this function.  */
  produce_asm (ob, function);

  destroy_output_block (ob);
}
1852
1853
/* Emit toplevel asms.

   Streams each asm node's string and order into an LTO_section_asm
   section, terminated by a NULL_TREE string, followed by the section
   header and the accumulated streams.  Does nothing when there are no
   asm nodes.  */

void
lto_output_toplevel_asms (void)
{
  struct output_block *ob;
  struct asm_node *can;
  char *section_name;
  struct lto_output_stream *header_stream;
  struct lto_asm_header header;

  if (! asm_nodes)
    return;

  ob = create_output_block (LTO_section_asm);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* Emit each toplevel asm statement and its symbol-table order.  */
  for (can = asm_nodes; can; can = can->next)
    {
      streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
      streamer_write_hwi (ob, can->order);
    }

  /* Terminate the list with a NULL string.  */
  streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);

  section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header stream is computed here.  */
  memset (&header, 0, sizeof (header));

  /* Write the header.  */
  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  header_stream = XCNEW (struct lto_output_stream);
  lto_output_data_stream (header_stream, &header, sizeof (header));
  lto_write_stream (header_stream);
  free (header_stream);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  destroy_output_block (ob);
}
1909
1910
/* Copy the function body of NODE without deserializing.

   Used at WPA time for bodies that need no modification: the raw
   section bytes are copied from the input file into the output
   section, and the function's in-state decl streams are mirrored into
   the out-state encoders so that decl references keep the same
   indices.  */

static void
copy_function (struct cgraph_node *node)
{
  tree function = node->decl;
  struct lto_file_decl_data *file_data = node->lto_file_data;
  struct lto_output_stream *output_stream = XCNEW (struct lto_output_stream);
  const char *data;
  size_t len;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
  char *section_name =
    lto_get_section_name (LTO_section_function_body, name, NULL);
  size_t i, j;
  struct lto_in_decl_state *in_state;
  struct lto_out_decl_state *out_state = lto_get_out_decl_state ();

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* We may have renamed the declaration, e.g., a static function.  */
  name = lto_get_decl_name_mapping (file_data, name);

  data = lto_get_section_data (file_data, LTO_section_function_body,
                               name, &len);
  gcc_assert (data);

  /* Do a bit copy of the function body.  */
  lto_output_data_stream (output_stream, data, len);
  lto_write_stream (output_stream);

  /* Copy decls.  */
  in_state =
    lto_get_function_in_decl_state (node->lto_file_data, function);
  gcc_assert (in_state);

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      size_t n = in_state->streams[i].size;
      tree *trees = in_state->streams[i].trees;
      struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);

      /* The out state must have the same indices and the in state.
	 So just copy the vector.  All the encoders in the in state
	 must be empty where we reach here.  */
      gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
      encoder->trees.reserve_exact (n);
      for (j = 0; j < n; j++)
	encoder->trees.safe_push (trees[j]);
    }

  lto_free_section_data (file_data, LTO_section_function_body, name,
			 data, len);
  free (output_stream);
  lto_end_section ();
}
1967
1968
/* Main entry point from the pass manager.

   Streams out the body (or, at WPA time, a raw copy) of every
   non-alias function selected by the symbol-table encoder, each in
   its own out-decl-state, and finally emits the symbol table.  */

void
lto_output (void)
{
  struct lto_out_decl_state *decl_state;
#ifdef ENABLE_CHECKING
  /* Bitmap of DECL_UIDs already emitted, used only to assert that no
     function body is written twice.  */
  bitmap output = lto_bitmap_alloc ();
#endif
  int i, n_nodes;
  lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* Initialize the streamer.  */
  lto_streamer_init ();

  n_nodes = lto_symtab_encoder_size (encoder);
  /* Process only the functions with bodies.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *node = dyn_cast <cgraph_node> (snode);
      if (node
	  && lto_symtab_encoder_encode_body_p (encoder, node)
	  && !node->alias)
	{
#ifdef ENABLE_CHECKING
	  gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	  bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	  /* Each function body is streamed with its own decl state so
	     the per-function decl streams can be recorded separately.  */
	  decl_state = lto_new_out_decl_state ();
	  lto_push_out_decl_state (decl_state);
	  if (gimple_has_body_p (node->decl) || !flag_wpa)
	    output_function (node);
	  else
	    /* At WPA time a body that was never materialized can be
	       copied through byte-for-byte.  */
	    copy_function (node);
	  gcc_assert (lto_get_out_decl_state () == decl_state);
	  lto_pop_out_decl_state ();
	  lto_record_function_out_decl_state (node->decl, decl_state);
	}
    }

  /* Emit the callgraph after emitting function bodies.  This needs to
     be done now to make sure that all the statements in every function
     have been renumbered so that edges can be associated with call
     statements using the statement UIDs.  */
  output_symtab ();

#ifdef ENABLE_CHECKING
  lto_bitmap_free (output);
#endif
}
2020
2021 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2022 from it and required for correct representation of its semantics.
2023 Each node in ENCODER must be a global declaration or a type. A node
2024 is written only once, even if it appears multiple times in the
2025 vector. Certain transitively-reachable nodes, such as those
2026 representing expressions, may be duplicated, but such nodes
2027 must not appear in ENCODER itself. */
2028
2029 static void
2030 write_global_stream (struct output_block *ob,
2031 struct lto_tree_ref_encoder *encoder)
2032 {
2033 tree t;
2034 size_t index;
2035 const size_t size = lto_tree_ref_encoder_size (encoder);
2036
2037 for (index = 0; index < size; index++)
2038 {
2039 t = lto_tree_ref_encoder_get_tree (encoder, index);
2040 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2041 stream_write_tree (ob, t, false);
2042 }
2043 }
2044
2045
2046 /* Write a sequence of indices into the globals vector corresponding
2047 to the trees in ENCODER. These are used by the reader to map the
2048 indices used to refer to global entities within function bodies to
2049 their referents. */
2050
2051 static void
2052 write_global_references (struct output_block *ob,
2053 struct lto_output_stream *ref_stream,
2054 struct lto_tree_ref_encoder *encoder)
2055 {
2056 tree t;
2057 uint32_t index;
2058 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2059
2060 /* Write size as 32-bit unsigned. */
2061 lto_output_data_stream (ref_stream, &size, sizeof (int32_t));
2062
2063 for (index = 0; index < size; index++)
2064 {
2065 uint32_t slot_num;
2066
2067 t = lto_tree_ref_encoder_get_tree (encoder, index);
2068 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2069 gcc_assert (slot_num != (unsigned)-1);
2070 lto_output_data_stream (ref_stream, &slot_num, sizeof slot_num);
2071 }
2072 }
2073
2074
2075 /* Write all the streams in an lto_out_decl_state STATE using
2076 output block OB and output stream OUT_STREAM. */
2077
2078 void
2079 lto_output_decl_state_streams (struct output_block *ob,
2080 struct lto_out_decl_state *state)
2081 {
2082 int i;
2083
2084 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2085 write_global_stream (ob, &state->streams[i]);
2086 }
2087
2088
2089 /* Write all the references in an lto_out_decl_state STATE using
2090 output block OB and output stream OUT_STREAM. */
2091
2092 void
2093 lto_output_decl_state_refs (struct output_block *ob,
2094 struct lto_output_stream *out_stream,
2095 struct lto_out_decl_state *state)
2096 {
2097 unsigned i;
2098 uint32_t ref;
2099 tree decl;
2100
2101 /* Write reference to FUNCTION_DECL. If there is not function,
2102 write reference to void_type_node. */
2103 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2104 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2105 gcc_assert (ref != (unsigned)-1);
2106 lto_output_data_stream (out_stream, &ref, sizeof (uint32_t));
2107
2108 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2109 write_global_references (ob, out_stream, &state->streams[i]);
2110 }
2111
2112
2113 /* Return the written size of STATE. */
2114
2115 static size_t
2116 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2117 {
2118 int i;
2119 size_t size;
2120
2121 size = sizeof (int32_t); /* fn_ref. */
2122 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2123 {
2124 size += sizeof (int32_t); /* vector size. */
2125 size += (lto_tree_ref_encoder_size (&state->streams[i])
2126 * sizeof (int32_t));
2127 }
2128 return size;
2129 }
2130
2131
/* Write symbol T into STREAM in CACHE.  SEEN specifies symbols we wrote
   so far; duplicates are emitted only once.  ALIAS is true when T is
   written on behalf of an alias, which relaxes the definition checks
   below.

   The record layout this emits for the linker plugin is:
     assembler name (nul-terminated), comdat group (nul-terminated),
     symbol kind (1 byte), visibility (1 byte), size (8 bytes),
     writer-cache slot number (4 bytes).  */

static void
write_symbol (struct streamer_tree_cache_d *cache,
	      struct lto_output_stream *stream,
	      tree t, struct pointer_set_t *seen, bool alias)
{
  const char *name;
  enum gcc_plugin_symbol_kind kind;
  enum gcc_plugin_symbol_visibility visibility;
  unsigned slot_num;
  unsigned HOST_WIDEST_INT size;
  const char *comdat;
  unsigned char c;

  /* None of the following kinds of symbols are needed in the
     symbol table.  */
  if (!TREE_PUBLIC (t)
      || is_builtin_fn (t)
      || DECL_ABSTRACT (t)
      || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
    return;
  gcc_assert (TREE_CODE (t) != RESULT_DECL);

  gcc_assert (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == FUNCTION_DECL);

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));

  /* This behaves like assemble_name_raw in varasm.c, performing the
     same name manipulations that ASM_OUTPUT_LABELREF does.  */
  name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));

  /* Emit each assembler name at most once.  */
  if (pointer_set_contains (seen, name))
    return;
  pointer_set_insert (seen, name);

  streamer_tree_cache_lookup (cache, t, &slot_num);
  gcc_assert (slot_num != (unsigned)-1);

  /* Classify the symbol for the plugin: undefined references first,
     then defined weak/common/plain definitions.  */
  if (DECL_EXTERNAL (t))
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKUNDEF;
      else
	kind = GCCPK_UNDEF;
    }
  else
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKDEF;
      else if (DECL_COMMON (t))
	kind = GCCPK_COMMON;
      else
	kind = GCCPK_DEF;

      /* When something is defined, it should have node attached.  */
      gcc_assert (alias || TREE_CODE (t) != VAR_DECL
		  || varpool_get_node (t)->definition);
      gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
		  || (cgraph_get_node (t)
		      && cgraph_get_node (t)->definition));
    }

  /* Imitate what default_elf_asm_output_external do.
     When symbol is external, we need to output it with DEFAULT visibility
     when compiling with -fvisibility=default, while with HIDDEN visibility
     when symbol has attribute (visibility("hidden")) specified.
     targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
     right.  */

  if (DECL_EXTERNAL (t)
      && !targetm.binds_local_p (t))
    visibility = GCCPV_DEFAULT;
  else
    switch (DECL_VISIBILITY (t))
      {
      case VISIBILITY_DEFAULT:
	visibility = GCCPV_DEFAULT;
	break;
      case VISIBILITY_PROTECTED:
	visibility = GCCPV_PROTECTED;
	break;
      case VISIBILITY_HIDDEN:
	visibility = GCCPV_HIDDEN;
	break;
      case VISIBILITY_INTERNAL:
	visibility = GCCPV_INTERNAL;
	break;
      }

  /* Only common symbols carry a size (the linker keeps the largest
     instance); everything else writes 0.  */
  if (kind == GCCPK_COMMON
      && DECL_SIZE_UNIT (t)
      && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
    size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
  else
    size = 0;

  if (DECL_ONE_ONLY (t))
    comdat = IDENTIFIER_POINTER (DECL_COMDAT_GROUP (t));
  else
    comdat = "";

  lto_output_data_stream (stream, name, strlen (name) + 1);
  lto_output_data_stream (stream, comdat, strlen (comdat) + 1);
  c = (unsigned char) kind;
  lto_output_data_stream (stream, &c, 1);
  c = (unsigned char) visibility;
  lto_output_data_stream (stream, &c, 1);
  /* NOTE(review): the hard-coded 8 and 4 byte widths assume the host's
     HOST_WIDEST_INT is 64 bits and `unsigned' is 32 bits, and emit in
     host byte order — confirm for any new host configuration.  */
  lto_output_data_stream (stream, &size, 8);
  lto_output_data_stream (stream, &slot_num, 4);
}
2245
/* Return true if NODE should appear in the plugin symbol table
   written by produce_symtab.  */

bool
output_symbol_p (symtab_node *node)
{
  struct cgraph_node *cnode;

  /* Only real symbols belong in the table.  */
  if (!symtab_real_symbol_p (node))
    return false;
  /* We keep external functions in symtab for sake of inlining
     and devirtualization.  We do not want to see them in symbol table as
     references unless they are really used.  */
  cnode = dyn_cast <cgraph_node> (node);
  if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
      && cnode->callers)
    return true;

  /* Ignore all references from external vars initializers - they are not really
     part of the compilation unit until they are used by folding.  Some symbols,
     like references to external construction vtables can not be referred to at all.
     We decide this at can_refer_decl_in_current_unit_p.  */
  if (!node->definition || DECL_EXTERNAL (node->decl))
    {
      int i;
      struct ipa_ref *ref;
      /* Keep the symbol only if something other than an external
	 variable's initializer refers to it: a function (whose body
	 may use it) or a non-external referrer.  Alias references do
	 not count.  */
      for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list,
						  i, ref); i++)
	{
	  if (ref->use == IPA_REF_ALIAS)
	    continue;
	  if (is_a <cgraph_node> (ref->referring))
	    return true;
	  if (!DECL_EXTERNAL (ref->referring->decl))
	    return true;
	}
      return false;
    }
  /* Defined, non-external symbols are always output.  */
  return true;
}
2284
2285
/* Write an IL symbol table to OB, covering the symbols recorded in
   OB's symtab node encoder.  Defined symbols are written before
   external declarations so the linker plugin resolves duplicated
   symbols in favor of the definition.  */

static void
produce_symtab (struct output_block *ob)
{
  struct streamer_tree_cache_d *cache = ob->writer_cache;
  char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
  struct pointer_set_t *seen;
  struct lto_output_stream stream;
  lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
  lto_symtab_encoder_iterator lsei;

  lto_begin_section (section_name, false);
  free (section_name);

  /* SEEN tracks assembler names already emitted so write_symbol can
     skip duplicates.  */
  seen = pointer_set_create ();
  memset (&stream, 0, sizeof (stream));

  /* Write the symbol table.
     First write everything defined and then all declarations.
     This is necessary to handle cases where we have duplicated symbols.  */
  for (lsei = lsei_start (encoder);
       !lsei_end_p (lsei); lsei_next (&lsei))
    {
      symtab_node *node = lsei_node (lsei);

      /* First pass: defined (non-external) symbols only.  */
      if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
	continue;
      write_symbol (cache, &stream, node->decl, seen, false);
    }
  for (lsei = lsei_start (encoder);
       !lsei_end_p (lsei); lsei_next (&lsei))
    {
      symtab_node *node = lsei_node (lsei);

      /* Second pass: external declarations only.  */
      if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
	continue;
      write_symbol (cache, &stream, node->decl, seen, false);
    }

  lto_write_stream (&stream);
  pointer_set_destroy (seen);

  lto_end_section ();
}
2332
2333
/* This pass is run after all of the functions are serialized and all
   of the IPA passes have written their serialized forms.  This pass
   causes the vector of all of the global decls and types used from
   this file to be written in to a section that can then be read in to
   recover these on other side.

   Section layout is: header, decl-state count and states, main tree
   stream, string stream — the reader depends on exactly this order.  */

void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  struct lto_output_stream *header_stream, *decl_state_stream;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);
  ob->global = true;

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* All aliases must have been handled before reaching this point.  */
  gcc_assert (!alias_pairs);

  /* Write the global symbols: the main decl state first, then each
     per-function state.  Streaming the trees here assigns the writer
     cache slots that the reference vectors below depend on.  */
  out_state = lto_get_out_decl_state ();
  num_fns = lto_function_decl_states.length ();
  lto_output_decl_state_streams (ob, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended.  */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states.  */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  /* Emit the section header.  */
  header_stream = XCNEW (struct lto_output_stream);
  lto_output_data_stream (header_stream, &header, sizeof header);
  lto_write_stream (header_stream);
  free (header_stream);

  /* Write the main out-decl state, followed by out-decl states of
     functions.  */
  decl_state_stream = XCNEW (struct lto_output_stream);
  num_decl_states = num_fns + 1;
  lto_output_data_stream (decl_state_stream, &num_decl_states,
			  sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, decl_state_stream, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, decl_state_stream, fn_out_state);
    }
  lto_write_stream (decl_state_stream);
  free (decl_state_stream);

  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by linker to determine dependencies
     and thus we can skip it for WPA.  */
  if (!flag_wpa)
    produce_symtab (ob);

  /* Write command line opts.  */
  lto_write_options ();

  /* Deallocate memory and clean up.  */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
}