]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/lto-streamer-out.c
Remove all_lto_gen_passes, replace with plain function calls
[thirdparty/gcc.git] / gcc / lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2013 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "expr.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "hashtab.h"
33 #include "basic-block.h"
34 #include "gimple.h"
35 #include "gimple-iterator.h"
36 #include "gimple-ssa.h"
37 #include "tree-ssanames.h"
38 #include "tree-pass.h"
39 #include "function.h"
40 #include "ggc.h"
41 #include "diagnostic-core.h"
42 #include "except.h"
43 #include "vec.h"
44 #include "lto-symtab.h"
45 #include "lto-streamer.h"
46 #include "data-streamer.h"
47 #include "gimple-streamer.h"
48 #include "tree-streamer.h"
49 #include "streamer-hooks.h"
50 #include "cfgloop.h"
51
52
/* Reset the current location tracking (file/line/column) in output
   block OB so the next location streamed is emitted in full rather
   than as a delta against a previous one.  */

static void
clear_line_info (struct output_block *ob)
{
  ob->current_file = NULL;
  ob->current_line = 0;
  ob->current_col = 0;
}
62
63
/* Create the output block and return it.  SECTION_TYPE is
   LTO_section_function_body or LTO_static_initializer.
   The caller owns the result and must release it with
   destroy_output_block.  */

struct output_block *
create_output_block (enum lto_section_type section_type)
{
  struct output_block *ob = XCNEW (struct output_block);

  ob->section_type = section_type;
  ob->decl_state = lto_get_out_decl_state ();
  ob->main_stream = XCNEW (struct lto_output_stream);
  ob->string_stream = XCNEW (struct lto_output_stream);
  /* NOTE(review): first argument presumably toggles hashing of cache
     entries (disabled when writing WPA output) -- confirm against
     streamer_tree_cache_create.  */
  ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true);

  /* Only function bodies carry a CFG stream.  */
  if (section_type == LTO_section_function_body)
    ob->cfg_stream = XCNEW (struct lto_output_stream);

  clear_line_info (ob);

  ob->string_hash_table.create (37);
  gcc_obstack_init (&ob->obstack);

  return ob;
}
88
89
/* Destroy the output block OB, releasing all streams, the writer
   cache and the obstack allocated by create_output_block.  */

void
destroy_output_block (struct output_block *ob)
{
  enum lto_section_type section_type = ob->section_type;

  ob->string_hash_table.dispose ();

  free (ob->main_stream);
  free (ob->string_stream);
  /* The CFG stream is only allocated for function bodies; mirror the
     condition used in create_output_block.  */
  if (section_type == LTO_section_function_body)
    free (ob->cfg_stream);

  streamer_tree_cache_delete (ob->writer_cache);
  obstack_free (&ob->obstack, NULL);

  free (ob);
}
109
110
/* Look up NODE in the type table and write the index for it to OB:
   an LTO_type_ref record tag followed by the index into the decl
   state's type table.  */

static void
output_type_ref (struct output_block *ob, tree node)
{
  streamer_write_record_start (ob, LTO_type_ref);
  lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
}
119
120
121 /* Return true if tree node T is written to various tables. For these
122 nodes, we sometimes want to write their phyiscal representation
123 (via lto_output_tree), and sometimes we need to emit an index
124 reference into a table (via lto_output_tree_ref). */
125
126 static bool
127 tree_is_indexable (tree t)
128 {
129 /* Parameters and return values of functions of variably modified types
130 must go to global stream, because they may be used in the type
131 definition. */
132 if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
133 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
134 else if (TREE_CODE (t) == VAR_DECL && decl_function_context (t)
135 && !TREE_STATIC (t))
136 return false;
137 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
138 return false;
139 /* Variably modified types need to be streamed alongside function
140 bodies because they can refer to local entities. Together with
141 them we have to localize their members as well.
142 ??? In theory that includes non-FIELD_DECLs as well. */
143 else if (TYPE_P (t)
144 && variably_modified_type_p (t, NULL_TREE))
145 return false;
146 else if (TREE_CODE (t) == FIELD_DECL
147 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
148 return false;
149 else
150 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
151 }
152
153
/* Output info about new location into bitpack BP.
   After outputting bitpack, lto_output_location_data has
   to be done to output actual data.

   Wire format: one bit for "location is UNKNOWN_LOCATION"; if not
   unknown, three delta bits (file/line/column changed vs. the state
   cached in OB), each followed -- when set -- by the new value.
   The reader must consume fields in exactly this order.  */

void
lto_output_location (struct output_block *ob, struct bitpack_d *bp,
		     location_t loc)
{
  expanded_location xloc;

  /* Strip any ad-hoc data wrapper from the location first.  */
  loc = LOCATION_LOCUS (loc);
  bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
  if (loc == UNKNOWN_LOCATION)
    return;

  xloc = expand_location (loc);

  bp_pack_value (bp, ob->current_file != xloc.file, 1);
  bp_pack_value (bp, ob->current_line != xloc.line, 1);
  bp_pack_value (bp, ob->current_col != xloc.column, 1);

  /* File names are interned in the string table; stream the index.  */
  if (ob->current_file != xloc.file)
    bp_pack_var_len_unsigned (bp,
	                      streamer_string_index (ob, xloc.file,
						     strlen (xloc.file) + 1,
						     true));
  ob->current_file = xloc.file;

  if (ob->current_line != xloc.line)
    bp_pack_var_len_unsigned (bp, xloc.line);
  ob->current_line = xloc.line;

  if (ob->current_col != xloc.column)
    bp_pack_var_len_unsigned (bp, xloc.column);
  ob->current_col = xloc.column;
}
190
191
/* If EXPR is an indexable tree node, output a reference to it to
   output block OB.  Otherwise, output the physical representation of
   EXPR to OB.  Each reference is a record tag identifying the table
   followed by the index of EXPR in that table.  */

static void
lto_output_tree_ref (struct output_block *ob, tree expr)
{
  enum tree_code code;

  if (TYPE_P (expr))
    {
      output_type_ref (ob, expr);
      return;
    }

  code = TREE_CODE (expr);
  switch (code)
    {
    case SSA_NAME:
      /* SSA names are referenced by version number only; the bodies
	 are streamed separately (see lto_is_streamable).  */
      streamer_write_record_start (ob, LTO_ssa_name_ref);
      streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
      break;

    case FIELD_DECL:
      streamer_write_record_start (ob, LTO_field_decl_ref);
      lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case FUNCTION_DECL:
      streamer_write_record_start (ob, LTO_function_decl_ref);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case VAR_DECL:
    case DEBUG_EXPR_DECL:
      /* Function-local non-static variables must not be indexed
	 (tree_is_indexable agrees); only globals reach here.  */
      gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
      /* Fall through.  */
    case PARM_DECL:
      streamer_write_record_start (ob, LTO_global_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case CONST_DECL:
      streamer_write_record_start (ob, LTO_const_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case IMPORTED_DECL:
      gcc_assert (decl_function_context (expr) == NULL);
      streamer_write_record_start (ob, LTO_imported_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TYPE_DECL:
      streamer_write_record_start (ob, LTO_type_decl_ref);
      lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMESPACE_DECL:
      streamer_write_record_start (ob, LTO_namespace_decl_ref);
      lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case LABEL_DECL:
      streamer_write_record_start (ob, LTO_label_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case RESULT_DECL:
      streamer_write_record_start (ob, LTO_result_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TRANSLATION_UNIT_DECL:
      streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    default:
      /* No other node is indexable, so it should have been handled by
	 lto_output_tree.  */
      gcc_unreachable ();
    }
}
275
276
277 /* Return true if EXPR is a tree node that can be written to disk. */
278
279 static inline bool
280 lto_is_streamable (tree expr)
281 {
282 enum tree_code code = TREE_CODE (expr);
283
284 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
285 name version in lto_output_tree_ref (see output_ssa_names). */
286 return !is_lang_specific (expr)
287 && code != SSA_NAME
288 && code != CALL_EXPR
289 && code != LANG_TYPE
290 && code != MODIFY_EXPR
291 && code != INIT_EXPR
292 && code != TARGET_EXPR
293 && code != BIND_EXPR
294 && code != WITH_CLEANUP_EXPR
295 && code != STATEMENT_LIST
296 && code != OMP_CLAUSE
297 && (code == CASE_LABEL_EXPR
298 || code == DECL_EXPR
299 || TREE_CODE_CLASS (code) != tcc_statement);
300 }
301
302
303 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
304
305 static tree
306 get_symbol_initial_value (struct output_block *ob, tree expr)
307 {
308 gcc_checking_assert (DECL_P (expr)
309 && TREE_CODE (expr) != FUNCTION_DECL
310 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
311
312 /* Handle DECL_INITIAL for symbols. */
313 tree initial = DECL_INITIAL (expr);
314 if (TREE_CODE (expr) == VAR_DECL
315 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
316 && !DECL_IN_CONSTANT_POOL (expr)
317 && initial)
318 {
319 lto_symtab_encoder_t encoder;
320 struct varpool_node *vnode;
321
322 encoder = ob->decl_state->symtab_node_encoder;
323 vnode = varpool_get_node (expr);
324 if (!vnode
325 || !lto_symtab_encoder_encode_initializer_p (encoder,
326 vnode))
327 initial = error_mark_node;
328 }
329
330 return initial;
331 }
332
333
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  Helper for lto_write_tree; assumes the
   tree header has already been written.  */

static void
lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
{
  /* Pack all the non-pointer fields in EXPR into a bitpack and write
     the resulting bitpack.  */
  bitpack_d bp = bitpack_create (ob->main_stream);
  streamer_pack_tree_bitfields (ob, &bp, expr);
  streamer_write_bitpack (&bp);

  /* Write all the pointer fields in EXPR.  */
  streamer_write_tree_body (ob, expr, ref_p);

  /* Write any LTO-specific data to OB.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    {
      /* Handle DECL_INITIAL for symbols: stream either the real
	 initializer or error_mark_node as decided by
	 get_symbol_initial_value.  */
      tree initial = get_symbol_initial_value (ob, expr);
      stream_write_tree (ob, initial, ref_p);
    }
}
361
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  Aborts with an internal error when EXPR
   is a node kind that must never appear in an LTO stream.  */

static void
lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
{
  if (!lto_is_streamable (expr))
    internal_error ("tree code %qs is not supported in LTO streams",
		    get_tree_code_name (TREE_CODE (expr)));

  /* Write the header, containing everything needed to materialize
     EXPR on the reading side.  */
  streamer_write_tree_header (ob, expr);

  lto_write_tree_1 (ob, expr, ref_p);

  /* Mark the end of EXPR.  */
  streamer_write_zero (ob);
}
383
/* Emit the physical representation of tree node EXPR to output block
   OB.  If THIS_REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.
   HASH is the streaming hash recorded for EXPR in the writer cache.
   EXPR must not already be in the cache (asserted below).  */

static void
lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
		   bool ref_p, bool this_ref_p)
{
  unsigned ix;

  gcc_checking_assert (expr != NULL_TREE
		       && !(this_ref_p && tree_is_indexable (expr)));

  /* Record EXPR in the writer cache; a caller must never stream the
     same node twice, hence the assert.  */
  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
					      expr, hash, &ix);
  gcc_assert (!exists_p);
  if (streamer_handle_as_builtin_p (expr))
    {
      /* MD and NORMAL builtins do not need to be written out
	 completely as they are always instantiated by the
	 compiler on startup.  The only builtins that need to
	 be written out are BUILT_IN_FRONTEND.  For all other
	 builtins, we simply write the class and code.  */
      streamer_write_builtin (ob, expr);
    }
  else if (TREE_CODE (expr) == INTEGER_CST
	   && !TREE_OVERFLOW (expr))
    {
      /* Shared INTEGER_CST nodes are special because they need their
	 original type to be materialized by the reader (to implement
	 TYPE_CACHED_VALUES).  */
      streamer_write_integer_cst (ob, expr, ref_p);
    }
  else
    {
      /* This is the first time we see EXPR, write its fields
	 to OB.  */
      lto_write_tree (ob, expr, ref_p);
    }
}
424
/* Per-node DFS state for the SCC discovery walk over trees.
   NOTE(review): field names suggest the usual Tarjan dfsnum/low-link
   scheme -- confirm against DFS_write_tree's definition (not visible
   in this chunk).  */
struct sccs
{
  unsigned int dfsnum;
  unsigned int low;
};

/* A tree paired with its streaming hash, as pushed on the SCC stack.  */
struct scc_entry
{
  tree t;
  hashval_t hash;
};

/* Next DFS number to assign during the walk.  */
static unsigned int next_dfs_num;
/* Stack of nodes belonging to the SCC currently being discovered.  */
static vec<scc_entry> sccstack;
/* Map from tree node to its struct sccs state.  */
static struct pointer_map_t *sccstate;
/* Obstack holding the struct sccs allocations.  */
static struct obstack sccstate_obstack;

static void
DFS_write_tree (struct output_block *ob, sccs *from_state,
		tree expr, bool ref_p, bool this_ref_p);
445
/* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
   DFS recurse for all tree edges originating from it.

   IMPORTANT: the set and order of edges followed here must mirror
   what streamer_write_tree_body emits, so that reader and writer
   agree on which nodes belong to the SCC.  */

static void
DFS_write_tree_body (struct output_block *ob,
		     tree expr, sccs *expr_state, bool ref_p)
{
#define DFS_follow_tree_edge(DEST) \
  DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)

  enum tree_code code;

  code = TREE_CODE (expr);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (TREE_CODE (expr) != IDENTIFIER_NODE)
	DFS_follow_tree_edge (TREE_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
	DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      DFS_follow_tree_edge (TREE_REALPART (expr));
      DFS_follow_tree_edge (TREE_IMAGPART (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (expr)
	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
	  && ANON_AGGRNAME_P (DECL_NAME (expr)))
	;
      else
	DFS_follow_tree_edge (DECL_NAME (expr));
      DFS_follow_tree_edge (DECL_CONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      DFS_follow_tree_edge (DECL_SIZE (expr));
      DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));

      /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
	 special handling in LTO, it must be handled by streamer hooks.  */

      DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));

      /* Do not follow DECL_ABSTRACT_ORIGIN.  We cannot handle debug information
	 for early inlining so drop it on the floor instead of ICEing in
	 dwarf2out.c.  */

      if ((TREE_CODE (expr) == VAR_DECL
	   || TREE_CODE (expr) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (expr))
	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
      if (TREE_CODE (expr) == VAR_DECL)
	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (TREE_CODE (expr) == TYPE_DECL)
	DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
      DFS_follow_tree_edge (DECL_VINDEX (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (expr))
	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
      DFS_follow_tree_edge (DECL_SECTION_NAME (expr));
      DFS_follow_tree_edge (DECL_COMDAT_GROUP (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
      DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
      DFS_follow_tree_edge (DECL_FCONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      DFS_follow_tree_edge (TYPE_SIZE (expr));
      DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
      DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
      DFS_follow_tree_edge (TYPE_NAME (expr));
      /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
	 reconstructed during fixup.  */
      /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
	 during fixup.  */
      DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
      DFS_follow_tree_edge (TYPE_CONTEXT (expr));
      /* TYPE_CANONICAL is re-computed during type merging, so no need
	 to follow it here.  */
      DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	DFS_follow_tree_edge (TYPE_VALUES (expr));
      else if (TREE_CODE (expr) == ARRAY_TYPE)
	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
      else if (RECORD_OR_UNION_TYPE_P (expr))
	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
	  DFS_follow_tree_edge (t);
      else if (TREE_CODE (expr) == FUNCTION_TYPE
	       || TREE_CODE (expr) == METHOD_TYPE)
	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));

      if (!POINTER_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_MINVAL (expr));
      DFS_follow_tree_edge (TYPE_MAXVAL (expr));
      if (RECORD_OR_UNION_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_BINFO (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      DFS_follow_tree_edge (TREE_PURPOSE (expr));
      DFS_follow_tree_edge (TREE_VALUE (expr));
      DFS_follow_tree_edge (TREE_CHAIN (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    {
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
      DFS_follow_tree_edge (TREE_BLOCK (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    {
      for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
	/* ??? FIXME.  See also streamer_write_chain.  */
	if (!(VAR_OR_FUNCTION_DECL_P (t)
	      && DECL_EXTERNAL (t)))
	  DFS_follow_tree_edge (t);

      DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));

      /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
	 handle - those that represent inlined function scopes.
	 For the rest, drop them on the floor instead of ICEing
	 in dwarf2out.c.  */
      if (inlined_function_outer_scope_p (expr))
	{
	  tree ultimate_origin = block_ultimate_origin (expr);
	  DFS_follow_tree_edge (ultimate_origin);
	}
      /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
	 information for early inlined BLOCKs so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
	 streaming time.  */

      /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;

      /* Note that the number of BINFO slots has already been emitted in
	 EXPR's header (see streamer_write_tree_header) because this length
	 is needed to build the empty BINFO node on the reader side.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
	DFS_follow_tree_edge (t);
      DFS_follow_tree_edge (BINFO_OFFSET (expr));
      DFS_follow_tree_edge (BINFO_VTABLE (expr));
      DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));

      /* The number of BINFO_BASE_ACCESSES has already been emitted in
	 EXPR's bitfield section.  */
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
	DFS_follow_tree_edge (t);

      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
	{
	  DFS_follow_tree_edge (index);
	  DFS_follow_tree_edge (value);
	}
    }

#undef DFS_follow_tree_edge
}
668
/* Return a hash value for the tree T.

   Mixes T's code, flags and per-structure fields, then the hashes of
   already-cached sibling nodes (via the visit macro below).  The exact
   sequence of mixing operations is part of the hash definition -- do
   not reorder.  Siblings not yet in CACHE contribute nothing, except
   for pointed-to types which are hashed recursively.  */

static hashval_t
hash_tree (struct streamer_tree_cache_d *cache, tree t)
{
/* Mix the cached hash of SIBLING into V, if SIBLING is non-NULL and
   already present in CACHE; otherwise do nothing.  */
#define visit(SIBLING) \
  do { \
    unsigned ix; \
    if (SIBLING && streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
      v = iterative_hash_hashval_t (streamer_tree_cache_get_hash (cache, ix), v); \
  } while (0)

  /* Hash TS_BASE.  */
  enum tree_code code = TREE_CODE (t);
  hashval_t v = iterative_hash_host_wide_int (code, 0);
  if (!TYPE_P (t))
    {
      v = iterative_hash_host_wide_int (TREE_SIDE_EFFECTS (t)
					| (TREE_CONSTANT (t) << 1)
					| (TREE_READONLY (t) << 2)
					| (TREE_PUBLIC (t) << 3), v);
    }
  v = iterative_hash_host_wide_int (TREE_ADDRESSABLE (t)
				    | (TREE_THIS_VOLATILE (t) << 1), v);
  if (DECL_P (t))
    v = iterative_hash_host_wide_int (DECL_UNSIGNED (t), v);
  else if (TYPE_P (t))
    v = iterative_hash_host_wide_int (TYPE_UNSIGNED (t), v);
  if (TYPE_P (t))
    v = iterative_hash_host_wide_int (TYPE_ARTIFICIAL (t), v);
  else
    v = iterative_hash_host_wide_int (TREE_NO_WARNING (t), v);
  v = iterative_hash_host_wide_int (TREE_NOTHROW (t)
				    | (TREE_STATIC (t) << 1)
				    | (TREE_PROTECTED (t) << 2)
				    | (TREE_DEPRECATED (t) << 3), v);
  if (code != TREE_BINFO)
    v = iterative_hash_host_wide_int (TREE_PRIVATE (t), v);
  if (TYPE_P (t))
    v = iterative_hash_host_wide_int (TYPE_SATURATING (t)
				      | (TYPE_ADDR_SPACE (t) << 1), v);
  else if (code == SSA_NAME)
    v = iterative_hash_host_wide_int (SSA_NAME_IS_DEFAULT_DEF (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
    {
      v = iterative_hash_host_wide_int (TREE_INT_CST_LOW (t), v);
      v = iterative_hash_host_wide_int (TREE_INT_CST_HIGH (t), v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
    {
      REAL_VALUE_TYPE r = TREE_REAL_CST (t);
      v = iterative_hash_host_wide_int (r.cl, v);
      v = iterative_hash_host_wide_int (r.decimal
					| (r.sign << 1)
					| (r.signalling << 2)
					| (r.canonical << 3), v);
      v = iterative_hash_host_wide_int (r.uexp, v);
      for (unsigned i = 0; i < SIGSZ; ++i)
	v = iterative_hash_host_wide_int (r.sig[i], v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
    {
      FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
      v = iterative_hash_host_wide_int (f.mode, v);
      v = iterative_hash_host_wide_int (f.data.low, v);
      v = iterative_hash_host_wide_int (f.data.high, v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      v = iterative_hash_host_wide_int (DECL_MODE (t), v);
      v = iterative_hash_host_wide_int (DECL_NONLOCAL (t)
					| (DECL_VIRTUAL_P (t) << 1)
					| (DECL_IGNORED_P (t) << 2)
					| (DECL_ABSTRACT (t) << 3)
					| (DECL_ARTIFICIAL (t) << 4)
					| (DECL_USER_ALIGN (t) << 5)
					| (DECL_PRESERVE_P (t) << 6)
					| (DECL_EXTERNAL (t) << 7)
					| (DECL_GIMPLE_REG_P (t) << 8), v);
      v = iterative_hash_host_wide_int (DECL_ALIGN (t), v);
      if (code == LABEL_DECL)
	{
	  v = iterative_hash_host_wide_int (EH_LANDING_PAD_NR (t), v);
	  v = iterative_hash_host_wide_int (LABEL_DECL_UID (t), v);
	}
      else if (code == FIELD_DECL)
	{
	  v = iterative_hash_host_wide_int (DECL_PACKED (t)
					    | (DECL_NONADDRESSABLE_P (t) << 1),
					    v);
	  v = iterative_hash_host_wide_int (DECL_OFFSET_ALIGN (t), v);
	}
      else if (code == VAR_DECL)
	{
	  v = iterative_hash_host_wide_int (DECL_HAS_DEBUG_EXPR_P (t)
					    | (DECL_NONLOCAL_FRAME (t) << 1),
					    v);
	}
      if (code == RESULT_DECL
	  || code == PARM_DECL
	  || code == VAR_DECL)
	{
	  v = iterative_hash_host_wide_int (DECL_BY_REFERENCE (t), v);
	  if (code == VAR_DECL
	      || code == PARM_DECL)
	    v = iterative_hash_host_wide_int (DECL_HAS_VALUE_EXPR_P (t), v);
	}
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
    v = iterative_hash_host_wide_int (DECL_REGISTER (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Note bit 5 is unused in this flag word.  */
      v = iterative_hash_host_wide_int ((DECL_COMMON (t))
					| (DECL_DLLIMPORT_P (t) << 1)
					| (DECL_WEAK (t) << 2)
					| (DECL_SEEN_IN_BIND_EXPR_P (t) << 3)
					| (DECL_COMDAT (t) << 4)
					| (DECL_VISIBILITY_SPECIFIED (t) << 6),
					v);
      v = iterative_hash_host_wide_int (DECL_VISIBILITY (t), v);
      if (code == VAR_DECL)
	{
	  /* DECL_IN_TEXT_SECTION is set during final asm output only.  */
	  v = iterative_hash_host_wide_int (DECL_HARD_REGISTER (t)
					    | (DECL_IN_CONSTANT_POOL (t) << 1),
					    v);
	  v = iterative_hash_host_wide_int (DECL_TLS_MODEL (t), v);
	}
      if (TREE_CODE (t) == FUNCTION_DECL)
	v = iterative_hash_host_wide_int (DECL_FINAL_P (t)
					  | (DECL_CXX_CONSTRUCTOR_P (t) << 1)
					  | (DECL_CXX_DESTRUCTOR_P (t) << 2),
					  v);
      if (VAR_OR_FUNCTION_DECL_P (t))
	v = iterative_hash_host_wide_int (DECL_INIT_PRIORITY (t), v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      v = iterative_hash_host_wide_int (DECL_BUILT_IN_CLASS (t), v);
      v = iterative_hash_host_wide_int (DECL_STATIC_CONSTRUCTOR (t)
					| (DECL_STATIC_DESTRUCTOR (t) << 1)
					| (DECL_UNINLINABLE (t) << 2)
					| (DECL_POSSIBLY_INLINED (t) << 3)
					| (DECL_IS_NOVOPS (t) << 4)
					| (DECL_IS_RETURNS_TWICE (t) << 5)
					| (DECL_IS_MALLOC (t) << 6)
					| (DECL_IS_OPERATOR_NEW (t) << 7)
					| (DECL_DECLARED_INLINE_P (t) << 8)
					| (DECL_STATIC_CHAIN (t) << 9)
					| (DECL_NO_INLINE_WARNING_P (t) << 10)
					| (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t) << 11)
					| (DECL_NO_LIMIT_STACK (t) << 12)
					| (DECL_DISREGARD_INLINE_LIMITS (t) << 13)
					| (DECL_PURE_P (t) << 14)
					| (DECL_LOOPING_CONST_OR_PURE_P (t) << 15), v);
      if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
	v = iterative_hash_host_wide_int (DECL_FUNCTION_CODE (t), v);
      if (DECL_STATIC_DESTRUCTOR (t))
	v = iterative_hash_host_wide_int (DECL_FINI_PRIORITY (t), v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      v = iterative_hash_host_wide_int (TYPE_MODE (t), v);
      v = iterative_hash_host_wide_int (TYPE_STRING_FLAG (t)
					| (TYPE_NO_FORCE_BLK (t) << 1)
					| (TYPE_NEEDS_CONSTRUCTING (t) << 2)
					| (TYPE_PACKED (t) << 3)
					| (TYPE_RESTRICT (t) << 4)
					| (TYPE_USER_ALIGN (t) << 5)
					| (TYPE_READONLY (t) << 6), v);
      if (RECORD_OR_UNION_TYPE_P (t))
	{
	  v = iterative_hash_host_wide_int (TYPE_TRANSPARENT_AGGR (t)
					    | (TYPE_FINAL_P (t) << 1), v);
	}
      else if (code == ARRAY_TYPE)
	v = iterative_hash_host_wide_int (TYPE_NONALIASED_COMPONENT (t), v);
      v = iterative_hash_host_wide_int (TYPE_PRECISION (t), v);
      v = iterative_hash_host_wide_int (TYPE_ALIGN (t), v);
      /* Only distinguish "alias set zero" from "some alias set".  */
      v = iterative_hash_host_wide_int ((TYPE_ALIAS_SET (t) == 0
					 || (!in_lto_p
					     && get_alias_set (t) == 0))
					? 0 : -1, v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
    v = iterative_hash (TRANSLATION_UNIT_LANGUAGE (t),
			strlen (TRANSLATION_UNIT_LANGUAGE (t)), v);

  if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
    v = iterative_hash (t, sizeof (struct cl_target_option), v);

  if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
    v = iterative_hash (t, sizeof (struct cl_optimization), v);

  if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
    v = iterative_hash_host_wide_int (IDENTIFIER_HASH_VALUE (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_STRING))
    v = iterative_hash (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (POINTER_TYPE_P (t))
	{
	  /* For pointers factor in the pointed-to type recursively as
	     we cannot recurse through only pointers.
	     ??? We can generalize this by keeping track of the
	     in-SCC edges for each tree (or arbitrarily the first
	     such edge) and hashing that in in a second stage
	     (instead of the quadratic mixing of the SCC we do now).  */
	  hashval_t x;
	  unsigned ix;
	  if (streamer_tree_cache_lookup (cache, TREE_TYPE (t), &ix))
	    x = streamer_tree_cache_get_hash (cache, ix);
	  else
	    x = hash_tree (cache, TREE_TYPE (t));
	  v = iterative_hash_hashval_t (x, v);
	}
      else if (code != IDENTIFIER_NODE)
	visit (TREE_TYPE (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
      visit (VECTOR_CST_ELT (t, i));

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      visit (TREE_REALPART (t));
      visit (TREE_IMAGPART (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (t)
	  && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
	  && ANON_AGGRNAME_P (DECL_NAME (t)))
	;
      else
	visit (DECL_NAME (t));
      if (DECL_FILE_SCOPE_P (t))
	;
      else
	visit (DECL_CONTEXT (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      visit (DECL_SIZE (t));
      visit (DECL_SIZE_UNIT (t));
      visit (DECL_ATTRIBUTES (t));
      if ((code == VAR_DECL
	   || code == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (t))
	visit (DECL_VALUE_EXPR (t));
      if (code == VAR_DECL
	  && DECL_HAS_DEBUG_EXPR_P (t))
	visit (DECL_DEBUG_EXPR (t));
      /* ??? Hash DECL_INITIAL as streamed.  Needs the output-block to
	 be able to call get_symbol_initial_value.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (code == TYPE_DECL)
	visit (DECL_ORIGINAL_TYPE (t));
      visit (DECL_VINDEX (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      if (DECL_ASSEMBLER_NAME_SET_P (t))
	visit (DECL_ASSEMBLER_NAME (t));
      visit (DECL_SECTION_NAME (t));
      visit (DECL_COMDAT_GROUP (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      visit (DECL_FIELD_OFFSET (t));
      visit (DECL_BIT_FIELD_TYPE (t));
      visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
      visit (DECL_FIELD_BIT_OFFSET (t));
      visit (DECL_FCONTEXT (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      visit (DECL_FUNCTION_PERSONALITY (t));
      visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
      visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      visit (TYPE_SIZE (t));
      visit (TYPE_SIZE_UNIT (t));
      visit (TYPE_ATTRIBUTES (t));
      visit (TYPE_NAME (t));
      visit (TYPE_MAIN_VARIANT (t));
      if (TYPE_FILE_SCOPE_P (t))
	;
      else
	visit (TYPE_CONTEXT (t));
      visit (TYPE_STUB_DECL (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (code == ENUMERAL_TYPE)
	visit (TYPE_VALUES (t));
      else if (code == ARRAY_TYPE)
	visit (TYPE_DOMAIN (t));
      else if (RECORD_OR_UNION_TYPE_P (t))
	for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
	  visit (f);
      else if (code == FUNCTION_TYPE
	       || code == METHOD_TYPE)
	visit (TYPE_ARG_TYPES (t));
      if (!POINTER_TYPE_P (t))
	visit (TYPE_MINVAL (t));
      visit (TYPE_MAXVAL (t));
      if (RECORD_OR_UNION_TYPE_P (t))
	visit (TYPE_BINFO (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      visit (TREE_PURPOSE (t));
      visit (TREE_VALUE (t));
      visit (TREE_CHAIN (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
      visit (TREE_VEC_ELT (t, i));

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      v = iterative_hash_host_wide_int (TREE_OPERAND_LENGTH (t), v);
      for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
	visit (TREE_OPERAND (t, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree b;
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
	visit (b);
      visit (BINFO_OFFSET (t));
      visit (BINFO_VTABLE (t));
      visit (BINFO_VPTR_FIELD (t));
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
	visit (b);
      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;
      v = iterative_hash_host_wide_int (CONSTRUCTOR_NELTS (t), v);
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
	{
	  visit (index);
	  visit (value);
	}
    }

  return v;

#undef visit
}
1054
1055 /* Compare two SCC entries by their hash value for qsorting them. */
1056
1057 static int
1058 scc_entry_compare (const void *p1_, const void *p2_)
1059 {
1060 const scc_entry *p1 = (const scc_entry *) p1_;
1061 const scc_entry *p2 = (const scc_entry *) p2_;
1062 if (p1->hash < p2->hash)
1063 return -1;
1064 else if (p1->hash > p2->hash)
1065 return 1;
1066 return 0;
1067 }
1068
1069 /* Return a hash value for the SCC on the SCC stack from FIRST with
1070 size SIZE. */
1071
static hashval_t
hash_scc (struct streamer_tree_cache_d *cache, unsigned first, unsigned size)
{
  /* Compute hash values for the SCC members.  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash = hash_tree (cache, sccstack[first+i].t);

  /* A singleton SCC needs no order-independent mixing.  */
  if (size == 1)
    return sccstack[first].hash;

  /* Sort the SCC of type, hash pairs so that when we mix in
     all members of the SCC the hash value becomes independent on
     the order we visited the SCC.  Disregard hashes equal to
     the hash of the tree we mix into because we cannot guarantee
     a stable sort for those across different TUs.  */
  qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
  hashval_t *tem = XALLOCAVEC (hashval_t, size);
  for (unsigned i = 0; i < size; ++i)
    {
      hashval_t hash = sccstack[first+i].hash;
      hashval_t orig_hash = hash;
      unsigned j;
      /* Skip same hashes.  */
      for (j = i + 1;
           j < size && sccstack[first+j].hash == orig_hash; ++j)
        ;
      /* Mix in the hashes of all other members, walking the sorted
         array circularly starting past the run of hashes equal to our
         own, so members with identical original hashes see the same
         mixing sequence.  */
      for (; j < size; ++j)
        hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
      for (j = 0; sccstack[first+j].hash != orig_hash; ++j)
        hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
      tem[i] = hash;
    }
  /* Commit the refined per-member hashes and fold them all into the
     overall SCC hash.  */
  hashval_t scc_hash = 0;
  for (unsigned i = 0; i < size; ++i)
    {
      sccstack[first+i].hash = tem[i];
      scc_hash = iterative_hash_hashval_t (tem[i], scc_hash);
    }
  return scc_hash;
}
1112
1113 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1114 already in the streamer cache. Main routine called for
1115 each visit of EXPR. */
1116
static void
DFS_write_tree (struct output_block *ob, sccs *from_state,
                tree expr, bool ref_p, bool this_ref_p)
{
  unsigned ix;
  sccs **slot;

  /* Handle special cases.  */
  if (expr == NULL_TREE)
    return;

  /* Do not DFS walk into indexable trees.  */
  if (this_ref_p && tree_is_indexable (expr))
    return;

  /* Check if we already streamed EXPR.  */
  if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
    return;

  /* Look up or create the Tarjan SCC state for EXPR.  */
  slot = (sccs **)pointer_map_insert (sccstate, expr);
  sccs *cstate = *slot;
  if (!cstate)
    {
      scc_entry e = { expr, 0 };
      /* Not yet visited.  DFS recurse and push it onto the stack.  */
      *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
      sccstack.safe_push (e);
      cstate->dfsnum = next_dfs_num++;
      cstate->low = cstate->dfsnum;

      /* Builtins get no body walk, INTEGER_CSTs only need their type
         walked; everything else gets a full body walk plus any
         LTO-specific edges.  */
      if (streamer_handle_as_builtin_p (expr))
        ;
      else if (TREE_CODE (expr) == INTEGER_CST
               && !TREE_OVERFLOW (expr))
        DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
      else
        {
          DFS_write_tree_body (ob, expr, cstate, ref_p);

          /* Walk any LTO-specific edges.  */
          if (DECL_P (expr)
              && TREE_CODE (expr) != FUNCTION_DECL
              && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
            {
              /* Handle DECL_INITIAL for symbols.  */
              tree initial = get_symbol_initial_value (ob, expr);
              DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
            }
        }

      /* See if we found an SCC.  EXPR is an SCC root exactly when no
         edge out of its DFS subtree reaches an earlier-numbered node.  */
      if (cstate->low == cstate->dfsnum)
        {
          unsigned first, size;
          tree x;

          /* Pop the SCC and compute its size.  */
          first = sccstack.length ();
          do
            {
              x = sccstack[--first].t;
            }
          while (x != expr);
          size = sccstack.length () - first;

          /* No need to compute hashes for LTRANS units, we don't perform
             any merging there.  */
          hashval_t scc_hash = 0;
          unsigned scc_entry_len = 0;
          if (!flag_wpa)
            {
              scc_hash = hash_scc (ob->writer_cache, first, size);

              /* Put the entries with the least number of collisions first.
                 hash_scc sorted the members by hash; find the shortest
                 run of equal hashes and rotate it to the front so the
                 reader has the fewest merge candidates to try.  */
              unsigned entry_start = 0;
              scc_entry_len = size + 1;
              for (unsigned i = 0; i < size;)
                {
                  unsigned from = i;
                  for (i = i + 1; i < size
                       && (sccstack[first + i].hash
                           == sccstack[first + from].hash); ++i)
                    ;
                  if (i - from < scc_entry_len)
                    {
                      scc_entry_len = i - from;
                      entry_start = from;
                    }
                }
              for (unsigned i = 0; i < scc_entry_len; ++i)
                {
                  scc_entry tem = sccstack[first + i];
                  sccstack[first + i] = sccstack[first + entry_start + i];
                  sccstack[first + entry_start + i] = tem;
                }
            }

          /* Write LTO_tree_scc.  */
          streamer_write_record_start (ob, LTO_tree_scc);
          streamer_write_uhwi (ob, size);
          streamer_write_uhwi (ob, scc_hash);

          /* Write size-1 SCCs without wrapping them inside SCC bundles.
             All INTEGER_CSTs need to be handled this way as we need
             their type to materialize them.  Also builtins are handled
             this way.
             ??? We still wrap these in LTO_tree_scc so at the
             input side we can properly identify the tree we want
             to ultimatively return.  */
          size_t old_len = ob->writer_cache->nodes.length ();
          if (size == 1)
            lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
          else
            {
              /* Write the size of the SCC entry candidates.  */
              streamer_write_uhwi (ob, scc_entry_len);

              /* Write all headers and populate the streamer cache.  */
              for (unsigned i = 0; i < size; ++i)
                {
                  hashval_t hash = sccstack[first+i].hash;
                  tree t = sccstack[first+i].t;
                  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
                                                              t, hash, &ix);
                  gcc_assert (!exists_p);

                  if (!lto_is_streamable (t))
                    internal_error ("tree code %qs is not supported "
                                    "in LTO streams",
                                    get_tree_code_name (TREE_CODE (t)));

                  gcc_checking_assert (!streamer_handle_as_builtin_p (t));

                  /* Write the header, containing everything needed to
                     materialize EXPR on the reading side.  */
                  streamer_write_tree_header (ob, t);
                }

              /* Write the bitpacks and tree references.  */
              for (unsigned i = 0; i < size; ++i)
                {
                  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

                  /* Mark the end of the tree.  */
                  streamer_write_zero (ob);
                }
            }
          gcc_assert (old_len + size == ob->writer_cache->nodes.length ());

          /* Finally truncate the vector.  */
          sccstack.truncate (first);

          if (from_state)
            from_state->low = MIN (from_state->low, cstate->low);
          return;
        }

      if (from_state)
        from_state->low = MIN (from_state->low, cstate->low);
    }
  /* EXPR was already visited and is still on the stack: a back-edge.
     Propagate its DFS number into the parent's low-link.  */
  gcc_checking_assert (from_state);
  if (cstate->dfsnum < from_state->dfsnum)
    from_state->low = MIN (cstate->dfsnum, from_state->low);
}
1281
1282
1283 /* Emit the physical representation of tree node EXPR to output block
1284 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
1285 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1286
void
lto_output_tree (struct output_block *ob, tree expr,
                 bool ref_p, bool this_ref_p)
{
  unsigned ix;
  bool existed_p;

  /* A NULL tree is streamed as an explicit null record.  */
  if (expr == NULL_TREE)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  /* Indexable trees are emitted as references into the global decl
     streams instead of by value.  */
  if (this_ref_p && tree_is_indexable (expr))
    {
      lto_output_tree_ref (ob, expr);
      return;
    }

  existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
  if (existed_p)
    {
      /* If a node has already been streamed out, make sure that
         we don't write it more than once.  Otherwise, the reader
         will instantiate two different nodes for the same object.  */
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
                           lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
  else
    {
      /* This is the first time we see EXPR, write all reachable
         trees to OB.  */
      static bool in_dfs_walk;

      /* Protect against recursion which means disconnect between
         what tree edges we walk in the DFS walk and what edges
         we stream out.  */
      gcc_assert (!in_dfs_walk);

      /* Start the DFS walk.  Set up the walk-global state used by
         DFS_write_tree -- the SCC state map, its backing obstack and
         the DFS numbering -- and tear it down again afterwards.  */
      in_dfs_walk = true;
      sccstate = pointer_map_create ();
      gcc_obstack_init (&sccstate_obstack);
      next_dfs_num = 1;
      DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
      sccstack.release ();
      pointer_map_destroy (sccstate);
      obstack_free (&sccstate_obstack, NULL);
      in_dfs_walk = false;

      /* Finally append a reference to the tree we were writing.
         ??? If expr ended up as a singleton we could have
         inlined it here and avoid outputting a reference.  */
      existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
      gcc_assert (existed_p);
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
                           lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
}
1354
1355
1356 /* Output to OB a list of try/catch handlers starting with FIRST. */
1357
1358 static void
1359 output_eh_try_list (struct output_block *ob, eh_catch first)
1360 {
1361 eh_catch n;
1362
1363 for (n = first; n; n = n->next_catch)
1364 {
1365 streamer_write_record_start (ob, LTO_eh_catch);
1366 stream_write_tree (ob, n->type_list, true);
1367 stream_write_tree (ob, n->filter_list, true);
1368 stream_write_tree (ob, n->label, true);
1369 }
1370
1371 streamer_write_record_start (ob, LTO_null);
1372 }
1373
1374
1375 /* Output EH region R in function FN to OB. CURR_RN is the slot index
1376 that is being emitted in FN->EH->REGION_ARRAY. This is used to
1377 detect EH region sharing. */
1378
1379 static void
1380 output_eh_region (struct output_block *ob, eh_region r)
1381 {
1382 enum LTO_tags tag;
1383
1384 if (r == NULL)
1385 {
1386 streamer_write_record_start (ob, LTO_null);
1387 return;
1388 }
1389
1390 if (r->type == ERT_CLEANUP)
1391 tag = LTO_ert_cleanup;
1392 else if (r->type == ERT_TRY)
1393 tag = LTO_ert_try;
1394 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1395 tag = LTO_ert_allowed_exceptions;
1396 else if (r->type == ERT_MUST_NOT_THROW)
1397 tag = LTO_ert_must_not_throw;
1398 else
1399 gcc_unreachable ();
1400
1401 streamer_write_record_start (ob, tag);
1402 streamer_write_hwi (ob, r->index);
1403
1404 if (r->outer)
1405 streamer_write_hwi (ob, r->outer->index);
1406 else
1407 streamer_write_zero (ob);
1408
1409 if (r->inner)
1410 streamer_write_hwi (ob, r->inner->index);
1411 else
1412 streamer_write_zero (ob);
1413
1414 if (r->next_peer)
1415 streamer_write_hwi (ob, r->next_peer->index);
1416 else
1417 streamer_write_zero (ob);
1418
1419 if (r->type == ERT_TRY)
1420 {
1421 output_eh_try_list (ob, r->u.eh_try.first_catch);
1422 }
1423 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1424 {
1425 stream_write_tree (ob, r->u.allowed.type_list, true);
1426 stream_write_tree (ob, r->u.allowed.label, true);
1427 streamer_write_uhwi (ob, r->u.allowed.filter);
1428 }
1429 else if (r->type == ERT_MUST_NOT_THROW)
1430 {
1431 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1432 bitpack_d bp = bitpack_create (ob->main_stream);
1433 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1434 streamer_write_bitpack (&bp);
1435 }
1436
1437 if (r->landing_pads)
1438 streamer_write_hwi (ob, r->landing_pads->index);
1439 else
1440 streamer_write_zero (ob);
1441 }
1442
1443
1444 /* Output landing pad LP to OB. */
1445
1446 static void
1447 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1448 {
1449 if (lp == NULL)
1450 {
1451 streamer_write_record_start (ob, LTO_null);
1452 return;
1453 }
1454
1455 streamer_write_record_start (ob, LTO_eh_landing_pad);
1456 streamer_write_hwi (ob, lp->index);
1457 if (lp->next_lp)
1458 streamer_write_hwi (ob, lp->next_lp->index);
1459 else
1460 streamer_write_zero (ob);
1461
1462 if (lp->region)
1463 streamer_write_hwi (ob, lp->region->index);
1464 else
1465 streamer_write_zero (ob);
1466
1467 stream_write_tree (ob, lp->post_landing_pad, true);
1468 }
1469
1470
1471 /* Output the existing eh_table to OB. */
1472
static void
output_eh_regions (struct output_block *ob, struct function *fn)
{
  /* Nothing is emitted (beyond the trailing LTO_null) when FN has no
     EH region tree.  */
  if (fn->eh && fn->eh->region_tree)
    {
      unsigned i;
      eh_region eh;
      eh_landing_pad lp;
      tree ttype;

      streamer_write_record_start (ob, LTO_eh_table);

      /* Emit the index of the root of the EH region tree.  */
      streamer_write_hwi (ob, fn->eh->region_tree->index);

      /* Emit all the EH regions in the region array.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
        output_eh_region (ob, eh);

      /* Emit all landing pads.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
        output_eh_lp (ob, lp);

      /* Emit all the runtime type data.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
        stream_write_tree (ob, ttype, true);

      /* Emit the table of action chains.  Its representation depends
         on the unwinder: trees for the ARM EABI unwinder, raw bytes
         for everything else.  */
      if (targetm.arm_eabi_unwinder)
        {
          tree t;
          streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
          FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
            stream_write_tree (ob, t, true);
        }
      else
        {
          uchar c;
          streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
          FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
            streamer_write_char_stream (ob->main_stream, c);
        }
    }

  /* The LTO_null either terminates the record or indicates that there
     are no eh_records at all.  */
  streamer_write_record_start (ob, LTO_null);
}
1524
1525
1526 /* Output all of the active ssa names to the ssa_names stream. */
1527
1528 static void
1529 output_ssa_names (struct output_block *ob, struct function *fn)
1530 {
1531 unsigned int i, len;
1532
1533 len = vec_safe_length (SSANAMES (fn));
1534 streamer_write_uhwi (ob, len);
1535
1536 for (i = 1; i < len; i++)
1537 {
1538 tree ptr = (*SSANAMES (fn))[i];
1539
1540 if (ptr == NULL_TREE
1541 || SSA_NAME_IN_FREE_LIST (ptr)
1542 || virtual_operand_p (ptr))
1543 continue;
1544
1545 streamer_write_uhwi (ob, i);
1546 streamer_write_char_stream (ob->main_stream,
1547 SSA_NAME_IS_DEFAULT_DEF (ptr));
1548 if (SSA_NAME_VAR (ptr))
1549 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1550 else
1551 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1552 stream_write_tree (ob, TREE_TYPE (ptr), true);
1553 }
1554
1555 streamer_write_zero (ob);
1556 }
1557
1558
1559 /* Output the cfg. */
1560
static void
output_cfg (struct output_block *ob, struct function *fn)
{
  struct lto_output_stream *tmp_stream = ob->main_stream;
  basic_block bb;

  /* The CFG goes into its own stream; temporarily redirect the main
     stream so all the streamer_write_* helpers below hit it.  It is
     restored before returning.  */
  ob->main_stream = ob->cfg_stream;

  streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
                       profile_status_for_function (fn));

  /* Output the number of the highest basic block.  */
  streamer_write_uhwi (ob, last_basic_block_for_function (fn));

  FOR_ALL_BB_FN (bb, fn)
    {
      edge_iterator ei;
      edge e;

      streamer_write_hwi (ob, bb->index);

      /* Output the successors and the edge flags.  */
      streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          streamer_write_uhwi (ob, e->dest->index);
          streamer_write_hwi (ob, e->probability);
          streamer_write_gcov_count (ob, e->count);
          streamer_write_uhwi (ob, e->flags);
        }
    }

  /* -1 terminates the list of blocks.  */
  streamer_write_hwi (ob, -1);

  /* Output the chain of blocks in next_bb order, so the reader can
     reconstruct the original block layout.  */
  bb = ENTRY_BLOCK_PTR;
  while (bb->next_bb)
    {
      streamer_write_hwi (ob, bb->next_bb->index);
      bb = bb->next_bb;
    }

  /* -1 terminates the block chain.  */
  streamer_write_hwi (ob, -1);

  /* ??? The cfgloop interface is tied to cfun.  */
  gcc_assert (cfun == fn);

  /* Output the number of loops.  */
  streamer_write_uhwi (ob, number_of_loops (fn));

  /* Output each loop, skipping the tree root which has number zero.  */
  for (unsigned i = 1; i < number_of_loops (fn); ++i)
    {
      struct loop *loop = get_loop (fn, i);

      /* Write the index of the loop header.  That's enough to rebuild
         the loop tree on the reader side.  Stream -1 for an unused
         loop entry.  */
      if (!loop)
        {
          streamer_write_hwi (ob, -1);
          continue;
        }
      else
        streamer_write_hwi (ob, loop->header->index);

      /* Write everything copy_loop_info copies.  */
      streamer_write_enum (ob->main_stream,
                           loop_estimation, EST_LAST, loop->estimate_state);
      streamer_write_hwi (ob, loop->any_upper_bound);
      if (loop->any_upper_bound)
        {
          streamer_write_uhwi (ob, loop->nb_iterations_upper_bound.low);
          streamer_write_hwi (ob, loop->nb_iterations_upper_bound.high);
        }
      streamer_write_hwi (ob, loop->any_estimate);
      if (loop->any_estimate)
        {
          streamer_write_uhwi (ob, loop->nb_iterations_estimate.low);
          streamer_write_hwi (ob, loop->nb_iterations_estimate.high);
        }
    }

  /* Restore the main stream.  */
  ob->main_stream = tmp_stream;
}
1645
1646
1647 /* Create the header in the file using OB. If the section type is for
1648 a function, set FN to the decl for that function. */
1649
void
produce_asm (struct output_block *ob, tree fn)
{
  enum lto_section_type section_type = ob->section_type;
  struct lto_function_header header;
  char *section_name;
  struct lto_output_stream *header_stream;

  /* Function bodies get a per-function section named after FN's
     assembler name; other section types share a per-kind section.  */
  if (section_type == LTO_section_function_body)
    {
      const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
      section_name = lto_get_section_name (section_type, name, NULL);
    }
  else
    section_name = lto_get_section_name (section_type, NULL, NULL);

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header is stream computed here.  */
  memset (&header, 0, sizeof (struct lto_function_header));

  /* Write the header.  */
  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  header.compressed_size = 0;

  /* Only function-body sections carry a CFG stream.  */
  if (section_type == LTO_section_function_body)
    header.cfg_size = ob->cfg_stream->total_size;
  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  header_stream = XCNEW (struct lto_output_stream);
  lto_output_data_stream (header_stream, &header, sizeof header);
  lto_write_stream (header_stream);
  free (header_stream);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  if (section_type == LTO_section_function_body)
    lto_write_stream (ob->cfg_stream);
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();
}
1697
1698
1699 /* Output the base body of struct function FN using output block OB. */
1700
static void
output_struct_function_base (struct output_block *ob, struct function *fn)
{
  struct bitpack_d bp;
  unsigned i;
  tree t;

  /* Output the static chain and non-local goto save area.  */
  stream_write_tree (ob, fn->static_chain_decl, true);
  stream_write_tree (ob, fn->nonlocal_goto_save_area, true);

  /* Output all the local variables in the function.  */
  streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
  FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
    stream_write_tree (ob, t, true);

  /* Output current IL state of the function.  */
  streamer_write_uhwi (ob, fn->curr_properties);

  /* Write all the attributes for FN.  The reader must unpack these
     bits in exactly this order.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, fn->is_thunk, 1);
  bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
  bp_pack_value (&bp, fn->returns_pcc_struct, 1);
  bp_pack_value (&bp, fn->returns_struct, 1);
  bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
  bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
  bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
  bp_pack_value (&bp, fn->after_inlining, 1);
  bp_pack_value (&bp, fn->stdarg, 1);
  bp_pack_value (&bp, fn->has_nonlocal_label, 1);
  bp_pack_value (&bp, fn->calls_alloca, 1);
  bp_pack_value (&bp, fn->calls_setjmp, 1);
  bp_pack_value (&bp, fn->va_list_fpr_size, 8);
  bp_pack_value (&bp, fn->va_list_gpr_size, 8);

  /* Output the function start and end loci.  */
  stream_output_location (ob, &bp, fn->function_start_locus);
  stream_output_location (ob, &bp, fn->function_end_locus);

  streamer_write_bitpack (&bp);
}
1743
1744
1745 /* Output the body of function NODE->DECL. */
1746
static void
output_function (struct cgraph_node *node)
{
  tree function;
  struct function *fn;
  basic_block bb;
  struct output_block *ob;

  function = node->decl;
  fn = DECL_STRUCT_FUNCTION (function);
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->cgraph_node = node;

  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  /* Set current_function_decl and cfun.  */
  push_cfun (fn);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  streamer_write_record_start (ob, LTO_function);

  /* Output decls for parameters and args.  */
  stream_write_tree (ob, DECL_RESULT (function), true);
  streamer_write_chain (ob, DECL_ARGUMENTS (function), true);

  /* Output DECL_INITIAL for the function, which contains the tree of
     lexical scopes.  */
  stream_write_tree (ob, DECL_INITIAL (function), true);

  /* We also stream abstract functions where we stream only stuff needed for
     debug info.  */
  if (gimple_has_body_p (function))
    {
      /* Flag to the reader that a full body follows.  */
      streamer_write_uhwi (ob, 1);
      output_struct_function_base (ob, fn);

      /* Output all the SSA names used in the function.  */
      output_ssa_names (ob, fn);

      /* Output any exception handling regions.  */
      output_eh_regions (ob, fn);


      /* We will renumber the statements.  The code that does this uses
         the same ordering that we use for serializing them so we can use
         the same code on the other end and not have to write out the
         statement numbers.  We do not assign UIDs to PHIs here because
         virtual PHIs get re-computed on-the-fly which would make numbers
         inconsistent.  */
      set_gimple_stmt_max_uid (cfun, 0);
      FOR_ALL_BB (bb)
        {
          gimple_stmt_iterator gsi;
          for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              gimple stmt = gsi_stmt (gsi);

              /* Virtual PHIs are not going to be streamed.  */
              if (!virtual_operand_p (gimple_phi_result (stmt)))
                gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
            }
          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              gimple stmt = gsi_stmt (gsi);
              gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
            }
        }
      /* To avoid keeping duplicate gimple IDs in the statements, renumber
         virtual phis now.  */
      FOR_ALL_BB (bb)
        {
          gimple_stmt_iterator gsi;
          for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              gimple stmt = gsi_stmt (gsi);
              if (virtual_operand_p (gimple_phi_result (stmt)))
                gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
            }
        }

      /* Output the code for the function.  */
      FOR_ALL_BB_FN (bb, fn)
        output_bb (ob, bb, fn);

      /* The terminator for this function.  */
      streamer_write_record_start (ob, LTO_null);

      /* The CFG is streamed into its own stream (see output_cfg).  */
      output_cfg (ob, fn);

      pop_cfun ();
    }
  else
    /* No body: a zero flag tells the reader to expect debug-only
       information.  */
    streamer_write_uhwi (ob, 0);

  /* Create a section to hold the pickled output of this function.  */
  produce_asm (ob, function);

  destroy_output_block (ob);
}
1850
1851
1852 /* Emit toplevel asms. */
1853
1854 void
1855 lto_output_toplevel_asms (void)
1856 {
1857 struct output_block *ob;
1858 struct asm_node *can;
1859 char *section_name;
1860 struct lto_output_stream *header_stream;
1861 struct lto_asm_header header;
1862
1863 if (! asm_nodes)
1864 return;
1865
1866 ob = create_output_block (LTO_section_asm);
1867
1868 /* Make string 0 be a NULL string. */
1869 streamer_write_char_stream (ob->string_stream, 0);
1870
1871 for (can = asm_nodes; can; can = can->next)
1872 {
1873 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
1874 streamer_write_hwi (ob, can->order);
1875 }
1876
1877 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
1878
1879 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
1880 lto_begin_section (section_name, !flag_wpa);
1881 free (section_name);
1882
1883 /* The entire header stream is computed here. */
1884 memset (&header, 0, sizeof (header));
1885
1886 /* Write the header. */
1887 header.lto_header.major_version = LTO_major_version;
1888 header.lto_header.minor_version = LTO_minor_version;
1889
1890 header.main_size = ob->main_stream->total_size;
1891 header.string_size = ob->string_stream->total_size;
1892
1893 header_stream = XCNEW (struct lto_output_stream);
1894 lto_output_data_stream (header_stream, &header, sizeof (header));
1895 lto_write_stream (header_stream);
1896 free (header_stream);
1897
1898 /* Put all of the gimple and the string table out the asm file as a
1899 block of text. */
1900 lto_write_stream (ob->main_stream);
1901 lto_write_stream (ob->string_stream);
1902
1903 lto_end_section ();
1904
1905 destroy_output_block (ob);
1906 }
1907
1908
1909 /* Copy the function body of NODE without deserializing. */
1910
static void
copy_function (struct cgraph_node *node)
{
  tree function = node->decl;
  struct lto_file_decl_data *file_data = node->lto_file_data;
  struct lto_output_stream *output_stream = XCNEW (struct lto_output_stream);
  const char *data;
  size_t len;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
  char *section_name =
    lto_get_section_name (LTO_section_function_body, name, NULL);
  size_t i, j;
  struct lto_in_decl_state *in_state;
  struct lto_out_decl_state *out_state = lto_get_out_decl_state ();

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* We may have renamed the declaration, e.g., a static function.  */
  name = lto_get_decl_name_mapping (file_data, name);

  /* Fetch the raw section bytes of the body from the input file.  */
  data = lto_get_section_data (file_data, LTO_section_function_body,
                               name, &len);
  gcc_assert (data);

  /* Do a bit copy of the function body.  */
  lto_output_data_stream (output_stream, data, len);
  lto_write_stream (output_stream);

  /* Copy decls.  */
  in_state =
    lto_get_function_in_decl_state (node->lto_file_data, function);
  gcc_assert (in_state);

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      size_t n = in_state->streams[i].size;
      tree *trees = in_state->streams[i].trees;
      struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);

      /* The out state must have the same indices and the in state.
         So just copy the vector.  All the encoders in the in state
         must be empty where we reach here.  */
      gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
      encoder->trees.reserve_exact (n);
      for (j = 0; j < n; j++)
        encoder->trees.safe_push (trees[j]);
    }

  /* Release the input section data and clean up.  */
  lto_free_section_data (file_data, LTO_section_function_body, name,
                         data, len);
  free (output_stream);
  lto_end_section ();
}
1965
1966
1967 /* Main entry point from the pass manager. */
1968
void
lto_output (void)
{
  struct lto_out_decl_state *decl_state;
#ifdef ENABLE_CHECKING
  /* With checking enabled, track emitted bodies so no decl is
     streamed twice.  */
  bitmap output = lto_bitmap_alloc ();
#endif
  int i, n_nodes;
  lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* Initialize the streamer.  */
  lto_streamer_init ();

  n_nodes = lto_symtab_encoder_size (encoder);
  /* Process only the functions with bodies.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *node = dyn_cast <cgraph_node> (snode);
      if (node
          && lto_symtab_encoder_encode_body_p (encoder, node)
          && !node->alias)
        {
#ifdef ENABLE_CHECKING
          gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
          bitmap_set_bit (output, DECL_UID (node->decl));
#endif
          /* Each body is streamed under its own out-decl-state.  */
          decl_state = lto_new_out_decl_state ();
          lto_push_out_decl_state (decl_state);
          /* In WPA mode, a body that was never read in is copied
             verbatim from the input object instead of re-streamed.  */
          if (gimple_has_body_p (node->decl) || !flag_wpa)
            output_function (node);
          else
            copy_function (node);
          gcc_assert (lto_get_out_decl_state () == decl_state);
          lto_pop_out_decl_state ();
          lto_record_function_out_decl_state (node->decl, decl_state);
        }
    }

  /* Emit the callgraph after emitting function bodies.  This needs to
     be done now to make sure that all the statements in every function
     have been renumbered so that edges can be associated with call
     statements using the statement UIDs.  */
  output_symtab ();

#ifdef ENABLE_CHECKING
  lto_bitmap_free (output);
#endif
}
2018
2019 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2020 from it and required for correct representation of its semantics.
2021 Each node in ENCODER must be a global declaration or a type. A node
2022 is written only once, even if it appears multiple times in the
2023 vector. Certain transitively-reachable nodes, such as those
2024 representing expressions, may be duplicated, but such nodes
2025 must not appear in ENCODER itself. */
2026
2027 static void
2028 write_global_stream (struct output_block *ob,
2029 struct lto_tree_ref_encoder *encoder)
2030 {
2031 tree t;
2032 size_t index;
2033 const size_t size = lto_tree_ref_encoder_size (encoder);
2034
2035 for (index = 0; index < size; index++)
2036 {
2037 t = lto_tree_ref_encoder_get_tree (encoder, index);
2038 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2039 stream_write_tree (ob, t, false);
2040 }
2041 }
2042
2043
2044 /* Write a sequence of indices into the globals vector corresponding
2045 to the trees in ENCODER. These are used by the reader to map the
2046 indices used to refer to global entities within function bodies to
2047 their referents. */
2048
2049 static void
2050 write_global_references (struct output_block *ob,
2051 struct lto_output_stream *ref_stream,
2052 struct lto_tree_ref_encoder *encoder)
2053 {
2054 tree t;
2055 uint32_t index;
2056 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2057
2058 /* Write size as 32-bit unsigned. */
2059 lto_output_data_stream (ref_stream, &size, sizeof (int32_t));
2060
2061 for (index = 0; index < size; index++)
2062 {
2063 uint32_t slot_num;
2064
2065 t = lto_tree_ref_encoder_get_tree (encoder, index);
2066 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2067 gcc_assert (slot_num != (unsigned)-1);
2068 lto_output_data_stream (ref_stream, &slot_num, sizeof slot_num);
2069 }
2070 }
2071
2072
2073 /* Write all the streams in an lto_out_decl_state STATE using
2074 output block OB and output stream OUT_STREAM. */
2075
2076 void
2077 lto_output_decl_state_streams (struct output_block *ob,
2078 struct lto_out_decl_state *state)
2079 {
2080 int i;
2081
2082 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2083 write_global_stream (ob, &state->streams[i]);
2084 }
2085
2086
2087 /* Write all the references in an lto_out_decl_state STATE using
2088 output block OB and output stream OUT_STREAM. */
2089
2090 void
2091 lto_output_decl_state_refs (struct output_block *ob,
2092 struct lto_output_stream *out_stream,
2093 struct lto_out_decl_state *state)
2094 {
2095 unsigned i;
2096 uint32_t ref;
2097 tree decl;
2098
2099 /* Write reference to FUNCTION_DECL. If there is not function,
2100 write reference to void_type_node. */
2101 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2102 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2103 gcc_assert (ref != (unsigned)-1);
2104 lto_output_data_stream (out_stream, &ref, sizeof (uint32_t));
2105
2106 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2107 write_global_references (ob, out_stream, &state->streams[i]);
2108 }
2109
2110
2111 /* Return the written size of STATE. */
2112
2113 static size_t
2114 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2115 {
2116 int i;
2117 size_t size;
2118
2119 size = sizeof (int32_t); /* fn_ref. */
2120 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2121 {
2122 size += sizeof (int32_t); /* vector size. */
2123 size += (lto_tree_ref_encoder_size (&state->streams[i])
2124 * sizeof (int32_t));
2125 }
2126 return size;
2127 }
2128
2129
2130 /* Write symbol T into STREAM in CACHE. SEEN specifies symbols we wrote
2131 so far. */
2132
/* Write symbol T into STREAM using the slot numbers recorded in CACHE.
   SEEN is the set of assembler names already written; a name is emitted
   at most once.  ALIAS is true when T is being written on behalf of an
   alias, which relaxes the "definition has a symtab node" assertions
   below.

   The record written for each symbol is, in order:
     NUL-terminated assembler name,
     NUL-terminated comdat group name (empty string if none),
     1 byte symbol kind (enum gcc_plugin_symbol_kind),
     1 byte visibility (enum gcc_plugin_symbol_visibility),
     8 bytes of size (for commons),
     4 bytes of writer-cache slot number.
   This layout is consumed by the linker plugin.  */

static void
write_symbol (struct streamer_tree_cache_d *cache,
	      struct lto_output_stream *stream,
	      tree t, struct pointer_set_t *seen, bool alias)
{
  const char *name;
  enum gcc_plugin_symbol_kind kind;
  enum gcc_plugin_symbol_visibility visibility;
  unsigned slot_num;
  unsigned HOST_WIDEST_INT size;
  const char *comdat;
  unsigned char c;

  /* None of the following kinds of symbols are needed in the
     symbol table: non-public decls, builtins, abstract (debug-only)
     decls, and variables bound to hard registers.  */
  if (!TREE_PUBLIC (t)
      || is_builtin_fn (t)
      || DECL_ABSTRACT (t)
      || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
    return;
  gcc_assert (TREE_CODE (t) != RESULT_DECL);

  /* Only variables and functions carry symbol-table entries.  */
  gcc_assert (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == FUNCTION_DECL);

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));

  /* This behaves like assemble_name_raw in varasm.c, performing the
     same name manipulations that ASM_OUTPUT_LABELREF does.  */
  name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));

  /* Emit each (mangled) name only once.  */
  if (pointer_set_contains (seen, name))
    return;
  pointer_set_insert (seen, name);

  streamer_tree_cache_lookup (cache, t, &slot_num);
  gcc_assert (slot_num != (unsigned)-1);

  /* Classify the symbol for the plugin: undefined vs. defined, with
     weak/common refinements.  */
  if (DECL_EXTERNAL (t))
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKUNDEF;
      else
	kind = GCCPK_UNDEF;
    }
  else
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKDEF;
      else if (DECL_COMMON (t))
	kind = GCCPK_COMMON;
      else
	kind = GCCPK_DEF;

      /* When something is defined, it should have node attached.  */
      gcc_assert (alias || TREE_CODE (t) != VAR_DECL
		  || varpool_get_node (t)->definition);
      gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
		  || (cgraph_get_node (t)
		      && cgraph_get_node (t)->definition));
    }

  /* Imitate what default_elf_asm_output_external do.
     When symbol is external, we need to output it with DEFAULT visibility
     when compiling with -fvisibility=default, while with HIDDEN visibility
     when symbol has attribute (visibility("hidden")) specified.
     targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
     right.  */

  if (DECL_EXTERNAL (t)
      && !targetm.binds_local_p (t))
    visibility = GCCPV_DEFAULT;
  else
    switch (DECL_VISIBILITY (t))
      {
      case VISIBILITY_DEFAULT:
	visibility = GCCPV_DEFAULT;
	break;
      case VISIBILITY_PROTECTED:
	visibility = GCCPV_PROTECTED;
	break;
      case VISIBILITY_HIDDEN:
	visibility = GCCPV_HIDDEN;
	break;
      case VISIBILITY_INTERNAL:
	visibility = GCCPV_INTERNAL;
	break;
      }

  /* Commons carry their size so the linker can pick the largest;
     everything else writes 0.  */
  if (kind == GCCPK_COMMON
      && DECL_SIZE_UNIT (t)
      && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
    size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
  else
    size = 0;

  if (DECL_ONE_ONLY (t))
    comdat = IDENTIFIER_POINTER (DECL_COMDAT_GROUP (t));
  else
    comdat = "";

  /* Emit the fixed-layout record described in the function comment.
     NOTE(review): the 8- and 4-byte widths are hardcoded and the raw
     host representation of SIZE/SLOT_NUM is written — this presumably
     assumes HOST_WIDEST_INT is at least 8 bytes and that reader and
     writer agree on byte order; confirm against the plugin reader.  */
  lto_output_data_stream (stream, name, strlen (name) + 1);
  lto_output_data_stream (stream, comdat, strlen (comdat) + 1);
  c = (unsigned char) kind;
  lto_output_data_stream (stream, &c, 1);
  c = (unsigned char) visibility;
  lto_output_data_stream (stream, &c, 1);
  lto_output_data_stream (stream, &size, 8);
  lto_output_data_stream (stream, &slot_num, 4);
}
2243
2244 /* Return true if NODE should appear in the plugin symbol table. */
2245
2246 bool
2247 output_symbol_p (symtab_node *node)
2248 {
2249 struct cgraph_node *cnode;
2250 if (!symtab_real_symbol_p (node))
2251 return false;
2252 /* We keep external functions in symtab for sake of inlining
2253 and devirtualization. We do not want to see them in symbol table as
2254 references unless they are really used. */
2255 cnode = dyn_cast <cgraph_node> (node);
2256 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2257 && cnode->callers)
2258 return true;
2259
2260 /* Ignore all references from external vars initializers - they are not really
2261 part of the compilation unit until they are used by folding. Some symbols,
2262 like references to external construction vtables can not be referred to at all.
2263 We decide this at can_refer_decl_in_current_unit_p. */
2264 if (!node->definition || DECL_EXTERNAL (node->decl))
2265 {
2266 int i;
2267 struct ipa_ref *ref;
2268 for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list,
2269 i, ref); i++)
2270 {
2271 if (ref->use == IPA_REF_ALIAS)
2272 continue;
2273 if (is_a <cgraph_node> (ref->referring))
2274 return true;
2275 if (!DECL_EXTERNAL (ref->referring->decl))
2276 return true;
2277 }
2278 return false;
2279 }
2280 return true;
2281 }
2282
2283
2284 /* Write an IL symbol table to OB.
2285 SET and VSET are cgraph/varpool node sets we are outputting. */
2286
2287 static void
2288 produce_symtab (struct output_block *ob)
2289 {
2290 struct streamer_tree_cache_d *cache = ob->writer_cache;
2291 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2292 struct pointer_set_t *seen;
2293 struct lto_output_stream stream;
2294 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2295 lto_symtab_encoder_iterator lsei;
2296
2297 lto_begin_section (section_name, false);
2298 free (section_name);
2299
2300 seen = pointer_set_create ();
2301 memset (&stream, 0, sizeof (stream));
2302
2303 /* Write the symbol table.
2304 First write everything defined and then all declarations.
2305 This is necessary to handle cases where we have duplicated symbols. */
2306 for (lsei = lsei_start (encoder);
2307 !lsei_end_p (lsei); lsei_next (&lsei))
2308 {
2309 symtab_node *node = lsei_node (lsei);
2310
2311 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2312 continue;
2313 write_symbol (cache, &stream, node->decl, seen, false);
2314 }
2315 for (lsei = lsei_start (encoder);
2316 !lsei_end_p (lsei); lsei_next (&lsei))
2317 {
2318 symtab_node *node = lsei_node (lsei);
2319
2320 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2321 continue;
2322 write_symbol (cache, &stream, node->decl, seen, false);
2323 }
2324
2325 lto_write_stream (&stream);
2326 pointer_set_destroy (seen);
2327
2328 lto_end_section ();
2329 }
2330
2331
/* This pass is run after all of the functions are serialized and all
   of the IPA passes have written their serialized forms.  This pass
   causes the vector of all of the global decls and types used from
   this file to be written in to a section that can then be read in to
   recover these on other side.

   Section layout written here: decl header, decl-state count and
   per-state references, main stream, string stream — followed (for
   non-WPA output) by the plugin symbol table and the command-line
   options section.  The reader depends on this exact ordering.  */

void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  struct lto_output_stream *header_stream, *decl_state_stream;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);
  ob->global = true;

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* All alias pairs must have been resolved by now.  */
  gcc_assert (!alias_pairs);

  /* Write the global symbols: first the global out-decl state's trees,
     then each recorded per-function state's trees.  Tree bodies land
     in OB's main stream; references are written separately below.  */
  out_state = lto_get_out_decl_state ();
  num_fns = lto_function_decl_states.length ();
  lto_output_decl_state_streams (ob, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended.  */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states: a 32-bit state
     count plus the written size of each state.  */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  /* The header must be flushed before any of the sized payloads it
     describes.  */
  header_stream = XCNEW (struct lto_output_stream);
  lto_output_data_stream (header_stream, &header, sizeof header);
  lto_write_stream (header_stream);
  free (header_stream);

  /* Write the main out-decl state, followed by out-decl states of
     functions.  */
  decl_state_stream = XCNEW (struct lto_output_stream);
  num_decl_states = num_fns + 1;
  lto_output_data_stream (decl_state_stream, &num_decl_states,
			  sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, decl_state_stream, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, decl_state_stream, fn_out_state);
    }
  lto_write_stream (decl_state_stream);
  free (decl_state_stream);

  /* Payload streams come last, in the order promised by the header.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by linker to determine dependencies
     and thus we can skip it for WPA.  */
  if (!flag_wpa)
    produce_symtab (ob);

  /* Write command line opts.  */
  lto_write_options ();

  /* Deallocate memory and clean up.  */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
}