1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2013 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "stor-layout.h"
29 #include "stringpool.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "hashtab.h"
35 #include "basic-block.h"
36 #include "gimple.h"
37 #include "gimple-iterator.h"
38 #include "gimple-ssa.h"
39 #include "tree-ssanames.h"
40 #include "tree-pass.h"
41 #include "function.h"
42 #include "ggc.h"
43 #include "diagnostic-core.h"
44 #include "except.h"
45 #include "vec.h"
46 #include "lto-symtab.h"
47 #include "lto-streamer.h"
48 #include "data-streamer.h"
49 #include "gimple-streamer.h"
50 #include "tree-streamer.h"
51 #include "streamer-hooks.h"
52 #include "cfgloop.h"
53
54
55 /* Clear the line info stored in output block OB.  */
56
57 static void
58 clear_line_info (struct output_block *ob)
59 {
60 ob->current_file = NULL;
61 ob->current_line = 0;
62 ob->current_col = 0;
63 }
64
65
66 /* Create the output block and return it. SECTION_TYPE is
67    LTO_section_function_body or LTO_section_static_initializer.  */
68
69 struct output_block *
70 create_output_block (enum lto_section_type section_type)
71 {
72 struct output_block *ob = XCNEW (struct output_block);
73
74 ob->section_type = section_type;
75 ob->decl_state = lto_get_out_decl_state ();
76 ob->main_stream = XCNEW (struct lto_output_stream);
77 ob->string_stream = XCNEW (struct lto_output_stream);
78 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true);
79
80 if (section_type == LTO_section_function_body)
81 ob->cfg_stream = XCNEW (struct lto_output_stream);
82
83 clear_line_info (ob);
84
85 ob->string_hash_table.create (37);
86 gcc_obstack_init (&ob->obstack);
87
88 return ob;
89 }
90
91
92 /* Destroy the output block OB. */
93
94 void
95 destroy_output_block (struct output_block *ob)
96 {
97 enum lto_section_type section_type = ob->section_type;
98
99 ob->string_hash_table.dispose ();
100
101 free (ob->main_stream);
102 free (ob->string_stream);
103 if (section_type == LTO_section_function_body)
104 free (ob->cfg_stream);
105
106 streamer_tree_cache_delete (ob->writer_cache);
107 obstack_free (&ob->obstack, NULL);
108
109 free (ob);
110 }
111
112
113 /* Look up NODE in the type table and write the index for it to OB. */
114
115 static void
116 output_type_ref (struct output_block *ob, tree node)
117 {
118 streamer_write_record_start (ob, LTO_type_ref);
119 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
120 }
121
122
123 /* Return true if tree node T is written to various tables. For these
124    nodes, we sometimes want to write their physical representation
125 (via lto_output_tree), and sometimes we need to emit an index
126 reference into a table (via lto_output_tree_ref). */
127
128 static bool
129 tree_is_indexable (tree t)
130 {
131 /* Parameters and return values of functions of variably modified types
132 must go to global stream, because they may be used in the type
133 definition. */
134 if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
135 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
136 else if (TREE_CODE (t) == VAR_DECL && decl_function_context (t)
137 && !TREE_STATIC (t))
138 return false;
139 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
140 return false;
141 /* Variably modified types need to be streamed alongside function
142 bodies because they can refer to local entities. Together with
143 them we have to localize their members as well.
144 ??? In theory that includes non-FIELD_DECLs as well. */
145 else if (TYPE_P (t)
146 && variably_modified_type_p (t, NULL_TREE))
147 return false;
148 else if (TREE_CODE (t) == FIELD_DECL
149 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
150 return false;
151 else
152 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
153 }
154
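/* Illustrative examples, derived from the checks above rather than an
   exhaustive rule: a type or a TREE_STATIC VAR_DECL is indexable and
   streamed as a table reference, whereas a non-static function-local
   VAR_DECL, a DEBUG_EXPR_DECL or a variably modified type is not and
   has its body streamed in place.  */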
155
156 /* Pack information about location LOC into bitpack BP, relative to
157    the location that was last streamed through OB.  The caller is
158    responsible for writing out BP once all data has been packed.  */
159
160 void
161 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
162 location_t loc)
163 {
164 expanded_location xloc;
165
166 loc = LOCATION_LOCUS (loc);
167 bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
168 if (loc == UNKNOWN_LOCATION)
169 return;
170
171 xloc = expand_location (loc);
172
173 bp_pack_value (bp, ob->current_file != xloc.file, 1);
174 bp_pack_value (bp, ob->current_line != xloc.line, 1);
175 bp_pack_value (bp, ob->current_col != xloc.column, 1);
176
177 if (ob->current_file != xloc.file)
178 bp_pack_var_len_unsigned (bp,
179 streamer_string_index (ob, xloc.file,
180 strlen (xloc.file) + 1,
181 true));
182 ob->current_file = xloc.file;
183
184 if (ob->current_line != xloc.line)
185 bp_pack_var_len_unsigned (bp, xloc.line);
186 ob->current_line = xloc.line;
187
188 if (ob->current_col != xloc.column)
189 bp_pack_var_len_unsigned (bp, xloc.column);
190 ob->current_col = xloc.column;
191 }
192
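/* Illustrative sketch of the bits packed above for a location that
   differs from the previously streamed one only in its line number:

     [0]          not UNKNOWN_LOCATION
     [0][1][0]    file unchanged, line changed, column unchanged
     [line]       the new line number as a var-len unsigned

   A changed file name is emitted as an index into the string table.
   The reader has to unpack the same fields in the same order to stay
   in sync with the writer.  */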
193
194 /* If EXPR is an indexable tree node, output a reference to it to
195 output block OB. Otherwise, output the physical representation of
196 EXPR to OB. */
197
198 static void
199 lto_output_tree_ref (struct output_block *ob, tree expr)
200 {
201 enum tree_code code;
202
203 if (TYPE_P (expr))
204 {
205 output_type_ref (ob, expr);
206 return;
207 }
208
209 code = TREE_CODE (expr);
210 switch (code)
211 {
212 case SSA_NAME:
213 streamer_write_record_start (ob, LTO_ssa_name_ref);
214 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
215 break;
216
217 case FIELD_DECL:
218 streamer_write_record_start (ob, LTO_field_decl_ref);
219 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
220 break;
221
222 case FUNCTION_DECL:
223 streamer_write_record_start (ob, LTO_function_decl_ref);
224 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
225 break;
226
227 case VAR_DECL:
228 case DEBUG_EXPR_DECL:
229 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
230 case PARM_DECL:
231 streamer_write_record_start (ob, LTO_global_decl_ref);
232 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
233 break;
234
235 case CONST_DECL:
236 streamer_write_record_start (ob, LTO_const_decl_ref);
237 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
238 break;
239
240 case IMPORTED_DECL:
241 gcc_assert (decl_function_context (expr) == NULL);
242 streamer_write_record_start (ob, LTO_imported_decl_ref);
243 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
244 break;
245
246 case TYPE_DECL:
247 streamer_write_record_start (ob, LTO_type_decl_ref);
248 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
249 break;
250
251 case NAMESPACE_DECL:
252 streamer_write_record_start (ob, LTO_namespace_decl_ref);
253 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
254 break;
255
256 case LABEL_DECL:
257 streamer_write_record_start (ob, LTO_label_decl_ref);
258 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
259 break;
260
261 case RESULT_DECL:
262 streamer_write_record_start (ob, LTO_result_decl_ref);
263 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
264 break;
265
266 case TRANSLATION_UNIT_DECL:
267 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
268 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
269 break;
270
271 default:
272 /* No other node is indexable, so it should have been handled by
273 lto_output_tree. */
274 gcc_unreachable ();
275 }
276 }
277
278
279 /* Return true if EXPR is a tree node that can be written to disk. */
280
281 static inline bool
282 lto_is_streamable (tree expr)
283 {
284 enum tree_code code = TREE_CODE (expr);
285
286 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
287 name version in lto_output_tree_ref (see output_ssa_names). */
288 return !is_lang_specific (expr)
289 && code != SSA_NAME
290 && code != CALL_EXPR
291 && code != LANG_TYPE
292 && code != MODIFY_EXPR
293 && code != INIT_EXPR
294 && code != TARGET_EXPR
295 && code != BIND_EXPR
296 && code != WITH_CLEANUP_EXPR
297 && code != STATEMENT_LIST
298 && code != OMP_CLAUSE
299 && (code == CASE_LABEL_EXPR
300 || code == DECL_EXPR
301 || TREE_CODE_CLASS (code) != tcc_statement);
302 }
303
304
305 /* For EXPR, look up and return what we want to stream to OB as DECL_INITIAL.  */
306
307 static tree
308 get_symbol_initial_value (struct output_block *ob, tree expr)
309 {
310 gcc_checking_assert (DECL_P (expr)
311 && TREE_CODE (expr) != FUNCTION_DECL
312 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
313
314 /* Handle DECL_INITIAL for symbols. */
315 tree initial = DECL_INITIAL (expr);
316 if (TREE_CODE (expr) == VAR_DECL
317 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
318 && !DECL_IN_CONSTANT_POOL (expr)
319 && initial)
320 {
321 lto_symtab_encoder_t encoder;
322 struct varpool_node *vnode;
323
324 encoder = ob->decl_state->symtab_node_encoder;
325 vnode = varpool_get_node (expr);
326 if (!vnode
327 || !lto_symtab_encoder_encode_initializer_p (encoder,
328 vnode))
329 initial = error_mark_node;
330 }
331
332 return initial;
333 }
334
335
336 /* Write a physical representation of tree node EXPR to output block
337 OB. If REF_P is true, the leaves of EXPR are emitted as references
338    via lto_output_tree_ref.  The header for EXPR is assumed to have
339    already been written by the caller.  */
340
341 static void
342 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
343 {
344 /* Pack all the non-pointer fields in EXPR into a bitpack and write
345 the resulting bitpack. */
346 bitpack_d bp = bitpack_create (ob->main_stream);
347 streamer_pack_tree_bitfields (ob, &bp, expr);
348 streamer_write_bitpack (&bp);
349
350 /* Write all the pointer fields in EXPR. */
351 streamer_write_tree_body (ob, expr, ref_p);
352
353 /* Write any LTO-specific data to OB. */
354 if (DECL_P (expr)
355 && TREE_CODE (expr) != FUNCTION_DECL
356 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
357 {
358 /* Handle DECL_INITIAL for symbols. */
359 tree initial = get_symbol_initial_value (ob, expr);
360 stream_write_tree (ob, initial, ref_p);
361 }
362 }
363
364 /* Write a physical representation of tree node EXPR to output block
365 OB. If REF_P is true, the leaves of EXPR are emitted as references
366    via lto_output_tree_ref.  The record is terminated by a zero
367    marker.  */
368
369 static void
370 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
371 {
372 if (!lto_is_streamable (expr))
373 internal_error ("tree code %qs is not supported in LTO streams",
374 get_tree_code_name (TREE_CODE (expr)));
375
376 /* Write the header, containing everything needed to materialize
377 EXPR on the reading side. */
378 streamer_write_tree_header (ob, expr);
379
380 lto_write_tree_1 (ob, expr, ref_p);
381
382 /* Mark the end of EXPR. */
383 streamer_write_zero (ob);
384 }
385
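/* Rough picture of what lto_write_tree emits for a single tree,
   pieced together from the two functions above:

     tree header                  (streamer_write_tree_header)
     bitpack of non-pointer fields
     pointer fields of the body   (streamer_write_tree_body)
     DECL_INITIAL                 (decls other than functions and
                                   translation units only)
     0                            (end-of-tree marker)  */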
386 /* Emit the physical representation of tree node EXPR to output block
387    OB.  If THIS_REF_P is true and EXPR is indexable, only a reference to EXPR
388    is emitted; REF_P makes the same choice for trees reachable from EXPR.  */
389
390 static void
391 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
392 bool ref_p, bool this_ref_p)
393 {
394 unsigned ix;
395
396 gcc_checking_assert (expr != NULL_TREE
397 && !(this_ref_p && tree_is_indexable (expr)));
398
399 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
400 expr, hash, &ix);
401 gcc_assert (!exists_p);
402 if (streamer_handle_as_builtin_p (expr))
403 {
404 /* MD and NORMAL builtins do not need to be written out
405 completely as they are always instantiated by the
406 compiler on startup. The only builtins that need to
407 be written out are BUILT_IN_FRONTEND. For all other
408 builtins, we simply write the class and code. */
409 streamer_write_builtin (ob, expr);
410 }
411 else if (TREE_CODE (expr) == INTEGER_CST
412 && !TREE_OVERFLOW (expr))
413 {
414 /* Shared INTEGER_CST nodes are special because they need their
415 original type to be materialized by the reader (to implement
416 TYPE_CACHED_VALUES). */
417 streamer_write_integer_cst (ob, expr, ref_p);
418 }
419 else
420 {
421 /* This is the first time we see EXPR, write its fields
422 to OB. */
423 lto_write_tree (ob, expr, ref_p);
424 }
425 }
426
427 struct sccs
428 {
429 unsigned int dfsnum;
430 unsigned int low;
431 };
432
433 struct scc_entry
434 {
435 tree t;
436 hashval_t hash;
437 };
438
439 static unsigned int next_dfs_num;
440 static vec<scc_entry> sccstack;
441 static struct pointer_map_t *sccstate;
442 static struct obstack sccstate_obstack;
443
444 static void
445 DFS_write_tree (struct output_block *ob, sccs *from_state,
446 tree expr, bool ref_p, bool this_ref_p);
447
448 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
449 DFS recurse for all tree edges originating from it. */
450
451 static void
452 DFS_write_tree_body (struct output_block *ob,
453 tree expr, sccs *expr_state, bool ref_p)
454 {
455 #define DFS_follow_tree_edge(DEST) \
456 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
457
458 enum tree_code code;
459
460 code = TREE_CODE (expr);
461
462 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
463 {
464 if (TREE_CODE (expr) != IDENTIFIER_NODE)
465 DFS_follow_tree_edge (TREE_TYPE (expr));
466 }
467
468 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
469 {
470 for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
471 DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
472 }
473
474 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
475 {
476 DFS_follow_tree_edge (TREE_REALPART (expr));
477 DFS_follow_tree_edge (TREE_IMAGPART (expr));
478 }
479
480 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
481 {
482 /* Drop names that were created for anonymous entities. */
483 if (DECL_NAME (expr)
484 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
485 && ANON_AGGRNAME_P (DECL_NAME (expr)))
486 ;
487 else
488 DFS_follow_tree_edge (DECL_NAME (expr));
489 DFS_follow_tree_edge (DECL_CONTEXT (expr));
490 }
491
492 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
493 {
494 DFS_follow_tree_edge (DECL_SIZE (expr));
495 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
496
497 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
498 special handling in LTO, it must be handled by streamer hooks. */
499
500 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
501
502 /* Do not follow DECL_ABSTRACT_ORIGIN. We cannot handle debug information
503 for early inlining so drop it on the floor instead of ICEing in
504 dwarf2out.c. */
505
506 if ((TREE_CODE (expr) == VAR_DECL
507 || TREE_CODE (expr) == PARM_DECL)
508 && DECL_HAS_VALUE_EXPR_P (expr))
509 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
510 if (TREE_CODE (expr) == VAR_DECL)
511 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
512 }
513
514 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
515 {
516 if (TREE_CODE (expr) == TYPE_DECL)
517 DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
518 DFS_follow_tree_edge (DECL_VINDEX (expr));
519 }
520
521 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
522 {
523 /* Make sure we don't inadvertently set the assembler name. */
524 if (DECL_ASSEMBLER_NAME_SET_P (expr))
525 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
526 DFS_follow_tree_edge (DECL_SECTION_NAME (expr));
527 DFS_follow_tree_edge (DECL_COMDAT_GROUP (expr));
528 }
529
530 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
531 {
532 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
533 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
534 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
535 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
536 DFS_follow_tree_edge (DECL_FCONTEXT (expr));
537 }
538
539 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
540 {
541 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
542 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
543 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
544 }
545
546 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
547 {
548 DFS_follow_tree_edge (TYPE_SIZE (expr));
549 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
550 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
551 DFS_follow_tree_edge (TYPE_NAME (expr));
552 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
553 reconstructed during fixup. */
554 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
555 during fixup. */
556 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
557 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
558 /* TYPE_CANONICAL is re-computed during type merging, so no need
559 to follow it here. */
560 DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
561 }
562
563 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
564 {
565 if (TREE_CODE (expr) == ENUMERAL_TYPE)
566 DFS_follow_tree_edge (TYPE_VALUES (expr));
567 else if (TREE_CODE (expr) == ARRAY_TYPE)
568 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
569 else if (RECORD_OR_UNION_TYPE_P (expr))
570 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
571 DFS_follow_tree_edge (t);
572 else if (TREE_CODE (expr) == FUNCTION_TYPE
573 || TREE_CODE (expr) == METHOD_TYPE)
574 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
575
576 if (!POINTER_TYPE_P (expr))
577 DFS_follow_tree_edge (TYPE_MINVAL (expr));
578 DFS_follow_tree_edge (TYPE_MAXVAL (expr));
579 if (RECORD_OR_UNION_TYPE_P (expr))
580 DFS_follow_tree_edge (TYPE_BINFO (expr));
581 }
582
583 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
584 {
585 DFS_follow_tree_edge (TREE_PURPOSE (expr));
586 DFS_follow_tree_edge (TREE_VALUE (expr));
587 DFS_follow_tree_edge (TREE_CHAIN (expr));
588 }
589
590 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
591 {
592 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
593 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
594 }
595
596 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
597 {
598 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
599 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
600 DFS_follow_tree_edge (TREE_BLOCK (expr));
601 }
602
603 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
604 {
605 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
606 /* ??? FIXME. See also streamer_write_chain. */
607 if (!(VAR_OR_FUNCTION_DECL_P (t)
608 && DECL_EXTERNAL (t)))
609 DFS_follow_tree_edge (t);
610
611 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
612
613 /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
614 handle - those that represent inlined function scopes.
615          For the rest, drop them on the floor instead of ICEing
616 in dwarf2out.c. */
617 if (inlined_function_outer_scope_p (expr))
618 {
619 tree ultimate_origin = block_ultimate_origin (expr);
620 DFS_follow_tree_edge (ultimate_origin);
621 }
622 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
623 information for early inlined BLOCKs so drop it on the floor instead
624 of ICEing in dwarf2out.c. */
625
626 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
627 streaming time. */
628
629 /* Do not output BLOCK_SUBBLOCKS. Instead on streaming-in this
630 list is re-constructed from BLOCK_SUPERCONTEXT. */
631 }
632
633 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
634 {
635 unsigned i;
636 tree t;
637
638 /* Note that the number of BINFO slots has already been emitted in
639 EXPR's header (see streamer_write_tree_header) because this length
640 is needed to build the empty BINFO node on the reader side. */
641 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
642 DFS_follow_tree_edge (t);
643 DFS_follow_tree_edge (BINFO_OFFSET (expr));
644 DFS_follow_tree_edge (BINFO_VTABLE (expr));
645 DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));
646
647 /* The number of BINFO_BASE_ACCESSES has already been emitted in
648 EXPR's bitfield section. */
649 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
650 DFS_follow_tree_edge (t);
651
652 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
653 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
654 }
655
656 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
657 {
658 unsigned i;
659 tree index, value;
660
661 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
662 {
663 DFS_follow_tree_edge (index);
664 DFS_follow_tree_edge (value);
665 }
666 }
667
668 #undef DFS_follow_tree_edge
669 }
670
671 /* Return a hash value for the tree T. */
672
673 static hashval_t
674 hash_tree (struct streamer_tree_cache_d *cache, tree t)
675 {
676 #define visit(SIBLING) \
677 do { \
678 unsigned ix; \
679 if (SIBLING && streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
680 v = iterative_hash_hashval_t (streamer_tree_cache_get_hash (cache, ix), v); \
681 } while (0)
682
683 /* Hash TS_BASE. */
684 enum tree_code code = TREE_CODE (t);
685 hashval_t v = iterative_hash_host_wide_int (code, 0);
686 if (!TYPE_P (t))
687 {
688 v = iterative_hash_host_wide_int (TREE_SIDE_EFFECTS (t)
689 | (TREE_CONSTANT (t) << 1)
690 | (TREE_READONLY (t) << 2)
691 | (TREE_PUBLIC (t) << 3), v);
692 }
693 v = iterative_hash_host_wide_int (TREE_ADDRESSABLE (t)
694 | (TREE_THIS_VOLATILE (t) << 1), v);
695 if (DECL_P (t))
696 v = iterative_hash_host_wide_int (DECL_UNSIGNED (t), v);
697 else if (TYPE_P (t))
698 v = iterative_hash_host_wide_int (TYPE_UNSIGNED (t), v);
699 if (TYPE_P (t))
700 v = iterative_hash_host_wide_int (TYPE_ARTIFICIAL (t), v);
701 else
702 v = iterative_hash_host_wide_int (TREE_NO_WARNING (t), v);
703 v = iterative_hash_host_wide_int (TREE_NOTHROW (t)
704 | (TREE_STATIC (t) << 1)
705 | (TREE_PROTECTED (t) << 2)
706 | (TREE_DEPRECATED (t) << 3), v);
707 if (code != TREE_BINFO)
708 v = iterative_hash_host_wide_int (TREE_PRIVATE (t), v);
709 if (TYPE_P (t))
710 v = iterative_hash_host_wide_int (TYPE_SATURATING (t)
711 | (TYPE_ADDR_SPACE (t) << 1), v);
712 else if (code == SSA_NAME)
713 v = iterative_hash_host_wide_int (SSA_NAME_IS_DEFAULT_DEF (t), v);
714
715 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
716 {
717 int i;
718 v = iterative_hash_host_wide_int (TREE_INT_CST_NUNITS (t), v);
719 v = iterative_hash_host_wide_int (TREE_INT_CST_EXT_NUNITS (t), v);
720 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
721 v = iterative_hash_host_wide_int (TREE_INT_CST_ELT (t, i), v);
722 }
723
724 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
725 {
726 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
727 v = iterative_hash_host_wide_int (r.cl, v);
728 v = iterative_hash_host_wide_int (r.decimal
729 | (r.sign << 1)
730 | (r.signalling << 2)
731 | (r.canonical << 3), v);
732 v = iterative_hash_host_wide_int (r.uexp, v);
733 for (unsigned i = 0; i < SIGSZ; ++i)
734 v = iterative_hash_host_wide_int (r.sig[i], v);
735 }
736
737 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
738 {
739 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
740 v = iterative_hash_host_wide_int (f.mode, v);
741 v = iterative_hash_host_wide_int (f.data.low, v);
742 v = iterative_hash_host_wide_int (f.data.high, v);
743 }
744
745 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
746 {
747 v = iterative_hash_host_wide_int (DECL_MODE (t), v);
748 v = iterative_hash_host_wide_int (DECL_NONLOCAL (t)
749 | (DECL_VIRTUAL_P (t) << 1)
750 | (DECL_IGNORED_P (t) << 2)
751 | (DECL_ABSTRACT (t) << 3)
752 | (DECL_ARTIFICIAL (t) << 4)
753 | (DECL_USER_ALIGN (t) << 5)
754 | (DECL_PRESERVE_P (t) << 6)
755 | (DECL_EXTERNAL (t) << 7)
756 | (DECL_GIMPLE_REG_P (t) << 8), v);
757 v = iterative_hash_host_wide_int (DECL_ALIGN (t), v);
758 if (code == LABEL_DECL)
759 {
760 v = iterative_hash_host_wide_int (EH_LANDING_PAD_NR (t), v);
761 v = iterative_hash_host_wide_int (LABEL_DECL_UID (t), v);
762 }
763 else if (code == FIELD_DECL)
764 {
765 v = iterative_hash_host_wide_int (DECL_PACKED (t)
766 | (DECL_NONADDRESSABLE_P (t) << 1),
767 v);
768 v = iterative_hash_host_wide_int (DECL_OFFSET_ALIGN (t), v);
769 }
770 else if (code == VAR_DECL)
771 {
772 v = iterative_hash_host_wide_int (DECL_HAS_DEBUG_EXPR_P (t)
773 | (DECL_NONLOCAL_FRAME (t) << 1),
774 v);
775 }
776 if (code == RESULT_DECL
777 || code == PARM_DECL
778 || code == VAR_DECL)
779 {
780 v = iterative_hash_host_wide_int (DECL_BY_REFERENCE (t), v);
781 if (code == VAR_DECL
782 || code == PARM_DECL)
783 v = iterative_hash_host_wide_int (DECL_HAS_VALUE_EXPR_P (t), v);
784 }
785 }
786
787 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
788 v = iterative_hash_host_wide_int (DECL_REGISTER (t), v);
789
790 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
791 {
792 v = iterative_hash_host_wide_int ((DECL_COMMON (t))
793 | (DECL_DLLIMPORT_P (t) << 1)
794 | (DECL_WEAK (t) << 2)
795 | (DECL_SEEN_IN_BIND_EXPR_P (t) << 3)
796 | (DECL_COMDAT (t) << 4)
797 | (DECL_VISIBILITY_SPECIFIED (t) << 6),
798 v);
799 v = iterative_hash_host_wide_int (DECL_VISIBILITY (t), v);
800 if (code == VAR_DECL)
801 {
802 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
803 v = iterative_hash_host_wide_int (DECL_HARD_REGISTER (t)
804 | (DECL_IN_CONSTANT_POOL (t) << 1),
805 v);
806 v = iterative_hash_host_wide_int (DECL_TLS_MODEL (t), v);
807 }
808 if (TREE_CODE (t) == FUNCTION_DECL)
809 v = iterative_hash_host_wide_int (DECL_FINAL_P (t)
810 | (DECL_CXX_CONSTRUCTOR_P (t) << 1)
811 | (DECL_CXX_DESTRUCTOR_P (t) << 2),
812 v);
813 if (VAR_OR_FUNCTION_DECL_P (t))
814 v = iterative_hash_host_wide_int (DECL_INIT_PRIORITY (t), v);
815 }
816
817 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
818 {
819 v = iterative_hash_host_wide_int (DECL_BUILT_IN_CLASS (t), v);
820 v = iterative_hash_host_wide_int (DECL_STATIC_CONSTRUCTOR (t)
821 | (DECL_STATIC_DESTRUCTOR (t) << 1)
822 | (DECL_UNINLINABLE (t) << 2)
823 | (DECL_POSSIBLY_INLINED (t) << 3)
824 | (DECL_IS_NOVOPS (t) << 4)
825 | (DECL_IS_RETURNS_TWICE (t) << 5)
826 | (DECL_IS_MALLOC (t) << 6)
827 | (DECL_IS_OPERATOR_NEW (t) << 7)
828 | (DECL_DECLARED_INLINE_P (t) << 8)
829 | (DECL_STATIC_CHAIN (t) << 9)
830 | (DECL_NO_INLINE_WARNING_P (t) << 10)
831 | (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t) << 11)
832 | (DECL_NO_LIMIT_STACK (t) << 12)
833 | (DECL_DISREGARD_INLINE_LIMITS (t) << 13)
834 | (DECL_PURE_P (t) << 14)
835 | (DECL_LOOPING_CONST_OR_PURE_P (t) << 15), v);
836 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
837 v = iterative_hash_host_wide_int (DECL_FUNCTION_CODE (t), v);
838 if (DECL_STATIC_DESTRUCTOR (t))
839 v = iterative_hash_host_wide_int (DECL_FINI_PRIORITY (t), v);
840 }
841
842 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
843 {
844 v = iterative_hash_host_wide_int (TYPE_MODE (t), v);
845 v = iterative_hash_host_wide_int (TYPE_STRING_FLAG (t)
846 | (TYPE_NO_FORCE_BLK (t) << 1)
847 | (TYPE_NEEDS_CONSTRUCTING (t) << 2)
848 | (TYPE_PACKED (t) << 3)
849 | (TYPE_RESTRICT (t) << 4)
850 | (TYPE_USER_ALIGN (t) << 5)
851 | (TYPE_READONLY (t) << 6), v);
852 if (RECORD_OR_UNION_TYPE_P (t))
853 {
854 v = iterative_hash_host_wide_int (TYPE_TRANSPARENT_AGGR (t)
855 | (TYPE_FINAL_P (t) << 1), v);
856 }
857 else if (code == ARRAY_TYPE)
858 v = iterative_hash_host_wide_int (TYPE_NONALIASED_COMPONENT (t), v);
859 v = iterative_hash_host_wide_int (TYPE_PRECISION (t), v);
860 v = iterative_hash_host_wide_int (TYPE_ALIGN (t), v);
861 v = iterative_hash_host_wide_int ((TYPE_ALIAS_SET (t) == 0
862 || (!in_lto_p
863 && get_alias_set (t) == 0))
864 ? 0 : -1, v);
865 }
866
867 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
868 v = iterative_hash (TRANSLATION_UNIT_LANGUAGE (t),
869 strlen (TRANSLATION_UNIT_LANGUAGE (t)), v);
870
871 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
872 v = iterative_hash (t, sizeof (struct cl_target_option), v);
873
874 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
875 v = iterative_hash (t, sizeof (struct cl_optimization), v);
876
877 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
878 v = iterative_hash_host_wide_int (IDENTIFIER_HASH_VALUE (t), v);
879
880 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
881 v = iterative_hash (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t), v);
882
883 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
884 {
885 if (POINTER_TYPE_P (t))
886 {
887 /* For pointers factor in the pointed-to type recursively as
888 we cannot recurse through only pointers.
889 ??? We can generalize this by keeping track of the
890 in-SCC edges for each tree (or arbitrarily the first
891          such edge) and hashing that in during a second stage
892 (instead of the quadratic mixing of the SCC we do now). */
893 hashval_t x;
894 unsigned ix;
895 if (streamer_tree_cache_lookup (cache, TREE_TYPE (t), &ix))
896 x = streamer_tree_cache_get_hash (cache, ix);
897 else
898 x = hash_tree (cache, TREE_TYPE (t));
899 v = iterative_hash_hashval_t (x, v);
900 }
901 else if (code != IDENTIFIER_NODE)
902 visit (TREE_TYPE (t));
903 }
904
905 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
906 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
907 visit (VECTOR_CST_ELT (t, i));
908
909 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
910 {
911 visit (TREE_REALPART (t));
912 visit (TREE_IMAGPART (t));
913 }
914
915 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
916 {
917 /* Drop names that were created for anonymous entities. */
918 if (DECL_NAME (t)
919 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
920 && ANON_AGGRNAME_P (DECL_NAME (t)))
921 ;
922 else
923 visit (DECL_NAME (t));
924 if (DECL_FILE_SCOPE_P (t))
925 ;
926 else
927 visit (DECL_CONTEXT (t));
928 }
929
930 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
931 {
932 visit (DECL_SIZE (t));
933 visit (DECL_SIZE_UNIT (t));
934 visit (DECL_ATTRIBUTES (t));
935 if ((code == VAR_DECL
936 || code == PARM_DECL)
937 && DECL_HAS_VALUE_EXPR_P (t))
938 visit (DECL_VALUE_EXPR (t));
939 if (code == VAR_DECL
940 && DECL_HAS_DEBUG_EXPR_P (t))
941 visit (DECL_DEBUG_EXPR (t));
942 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
943 be able to call get_symbol_initial_value. */
944 }
945
946 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
947 {
948 if (code == TYPE_DECL)
949 visit (DECL_ORIGINAL_TYPE (t));
950 visit (DECL_VINDEX (t));
951 }
952
953 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
954 {
955 if (DECL_ASSEMBLER_NAME_SET_P (t))
956 visit (DECL_ASSEMBLER_NAME (t));
957 visit (DECL_SECTION_NAME (t));
958 visit (DECL_COMDAT_GROUP (t));
959 }
960
961 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
962 {
963 visit (DECL_FIELD_OFFSET (t));
964 visit (DECL_BIT_FIELD_TYPE (t));
965 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
966 visit (DECL_FIELD_BIT_OFFSET (t));
967 visit (DECL_FCONTEXT (t));
968 }
969
970 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
971 {
972 visit (DECL_FUNCTION_PERSONALITY (t));
973 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
974 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
975 }
976
977 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
978 {
979 visit (TYPE_SIZE (t));
980 visit (TYPE_SIZE_UNIT (t));
981 visit (TYPE_ATTRIBUTES (t));
982 visit (TYPE_NAME (t));
983 visit (TYPE_MAIN_VARIANT (t));
984 if (TYPE_FILE_SCOPE_P (t))
985 ;
986 else
987 visit (TYPE_CONTEXT (t));
988 visit (TYPE_STUB_DECL (t));
989 }
990
991 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
992 {
993 if (code == ENUMERAL_TYPE)
994 visit (TYPE_VALUES (t));
995 else if (code == ARRAY_TYPE)
996 visit (TYPE_DOMAIN (t));
997 else if (RECORD_OR_UNION_TYPE_P (t))
998 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
999 visit (f);
1000 else if (code == FUNCTION_TYPE
1001 || code == METHOD_TYPE)
1002 visit (TYPE_ARG_TYPES (t));
1003 if (!POINTER_TYPE_P (t))
1004 visit (TYPE_MINVAL (t));
1005 visit (TYPE_MAXVAL (t));
1006 if (RECORD_OR_UNION_TYPE_P (t))
1007 visit (TYPE_BINFO (t));
1008 }
1009
1010 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1011 {
1012 visit (TREE_PURPOSE (t));
1013 visit (TREE_VALUE (t));
1014 visit (TREE_CHAIN (t));
1015 }
1016
1017 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1018 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1019 visit (TREE_VEC_ELT (t, i));
1020
1021 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1022 {
1023 v = iterative_hash_host_wide_int (TREE_OPERAND_LENGTH (t), v);
1024 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1025 visit (TREE_OPERAND (t, i));
1026 }
1027
1028 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1029 {
1030 unsigned i;
1031 tree b;
1032 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1033 visit (b);
1034 visit (BINFO_OFFSET (t));
1035 visit (BINFO_VTABLE (t));
1036 visit (BINFO_VPTR_FIELD (t));
1037 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1038 visit (b);
1039 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1040 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1041 }
1042
1043 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1044 {
1045 unsigned i;
1046 tree index, value;
1047 v = iterative_hash_host_wide_int (CONSTRUCTOR_NELTS (t), v);
1048 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1049 {
1050 visit (index);
1051 visit (value);
1052 }
1053 }
1054
1055 return v;
1056
1057 #undef visit
1058 }
1059
1060 /* Compare two SCC entries by their hash value for qsorting them. */
1061
1062 static int
1063 scc_entry_compare (const void *p1_, const void *p2_)
1064 {
1065 const scc_entry *p1 = (const scc_entry *) p1_;
1066 const scc_entry *p2 = (const scc_entry *) p2_;
1067 if (p1->hash < p2->hash)
1068 return -1;
1069 else if (p1->hash > p2->hash)
1070 return 1;
1071 return 0;
1072 }
1073
1074 /* Return a hash value for the SCC on the SCC stack from FIRST with
1075 size SIZE. */
1076
1077 static hashval_t
1078 hash_scc (struct streamer_tree_cache_d *cache, unsigned first, unsigned size)
1079 {
1080 /* Compute hash values for the SCC members. */
1081 for (unsigned i = 0; i < size; ++i)
1082 sccstack[first+i].hash = hash_tree (cache, sccstack[first+i].t);
1083
1084 if (size == 1)
1085 return sccstack[first].hash;
1086
1087 /* Sort the SCC of type, hash pairs so that when we mix in
88      all members of the SCC the hash value becomes independent of
1089 the order we visited the SCC. Disregard hashes equal to
1090 the hash of the tree we mix into because we cannot guarantee
1091 a stable sort for those across different TUs. */
1092 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1093 hashval_t *tem = XALLOCAVEC (hashval_t, size);
1094 for (unsigned i = 0; i < size; ++i)
1095 {
1096 hashval_t hash = sccstack[first+i].hash;
1097 hashval_t orig_hash = hash;
1098 unsigned j;
1099 /* Skip same hashes. */
1100 for (j = i + 1;
1101 j < size && sccstack[first+j].hash == orig_hash; ++j)
1102 ;
1103 for (; j < size; ++j)
1104 hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
1105 for (j = 0; sccstack[first+j].hash != orig_hash; ++j)
1106 hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
1107 tem[i] = hash;
1108 }
1109 hashval_t scc_hash = 0;
1110 for (unsigned i = 0; i < size; ++i)
1111 {
1112 sccstack[first+i].hash = tem[i];
1113 scc_hash = iterative_hash_hashval_t (tem[i], scc_hash);
1114 }
1115 return scc_hash;
1116 }
1117
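/* Worked example for hash_scc, assuming three SCC members whose
   sorted hashes h0 < h1 < h2 are all distinct and writing ihh for
   iterative_hash_hashval_t:

     tem[0] = ihh (h2, ihh (h1, h0))
     tem[1] = ihh (h0, ihh (h2, h1))
     tem[2] = ihh (h1, ihh (h0, h2))
     scc_hash = ihh (tem[2], ihh (tem[1], ihh (tem[0], 0)))

   Every member mixes in the hashes of all other members in a fixed
   rotation of the sorted order, so neither the per-member hashes nor
   the SCC hash depend on the order in which the DFS visited the SCC.  */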
1118 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1119 already in the streamer cache. Main routine called for
1120 each visit of EXPR. */
1121
1122 static void
1123 DFS_write_tree (struct output_block *ob, sccs *from_state,
1124 tree expr, bool ref_p, bool this_ref_p)
1125 {
1126 unsigned ix;
1127 sccs **slot;
1128
1129 /* Handle special cases. */
1130 if (expr == NULL_TREE)
1131 return;
1132
1133 /* Do not DFS walk into indexable trees. */
1134 if (this_ref_p && tree_is_indexable (expr))
1135 return;
1136
1137 /* Check if we already streamed EXPR. */
1138 if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
1139 return;
1140
1141 slot = (sccs **)pointer_map_insert (sccstate, expr);
1142 sccs *cstate = *slot;
1143 if (!cstate)
1144 {
1145 scc_entry e = { expr, 0 };
1146 /* Not yet visited. DFS recurse and push it onto the stack. */
1147 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
1148 sccstack.safe_push (e);
1149 cstate->dfsnum = next_dfs_num++;
1150 cstate->low = cstate->dfsnum;
1151
1152 if (streamer_handle_as_builtin_p (expr))
1153 ;
1154 else if (TREE_CODE (expr) == INTEGER_CST
1155 && !TREE_OVERFLOW (expr))
1156 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
1157 else
1158 {
1159 DFS_write_tree_body (ob, expr, cstate, ref_p);
1160
1161 /* Walk any LTO-specific edges. */
1162 if (DECL_P (expr)
1163 && TREE_CODE (expr) != FUNCTION_DECL
1164 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
1165 {
1166 /* Handle DECL_INITIAL for symbols. */
1167 tree initial = get_symbol_initial_value (ob, expr);
1168 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
1169 }
1170 }
1171
1172 /* See if we found an SCC. */
1173 if (cstate->low == cstate->dfsnum)
1174 {
1175 unsigned first, size;
1176 tree x;
1177
1178 /* Pop the SCC and compute its size. */
1179 first = sccstack.length ();
1180 do
1181 {
1182 x = sccstack[--first].t;
1183 }
1184 while (x != expr);
1185 size = sccstack.length () - first;
1186
1187 /* No need to compute hashes for LTRANS units, we don't perform
1188 any merging there. */
1189 hashval_t scc_hash = 0;
1190 unsigned scc_entry_len = 0;
1191 if (!flag_wpa)
1192 {
1193 scc_hash = hash_scc (ob->writer_cache, first, size);
1194
1195 /* Put the entries with the least number of collisions first. */
1196 unsigned entry_start = 0;
1197 scc_entry_len = size + 1;
1198 for (unsigned i = 0; i < size;)
1199 {
1200 unsigned from = i;
1201 for (i = i + 1; i < size
1202 && (sccstack[first + i].hash
1203 == sccstack[first + from].hash); ++i)
1204 ;
1205 if (i - from < scc_entry_len)
1206 {
1207 scc_entry_len = i - from;
1208 entry_start = from;
1209 }
1210 }
1211 for (unsigned i = 0; i < scc_entry_len; ++i)
1212 {
1213 scc_entry tem = sccstack[first + i];
1214 sccstack[first + i] = sccstack[first + entry_start + i];
1215 sccstack[first + entry_start + i] = tem;
1216 }
1217 }
1218
1219 /* Write LTO_tree_scc. */
1220 streamer_write_record_start (ob, LTO_tree_scc);
1221 streamer_write_uhwi (ob, size);
1222 streamer_write_uhwi (ob, scc_hash);
1223
1224 /* Write size-1 SCCs without wrapping them inside SCC bundles.
1225 All INTEGER_CSTs need to be handled this way as we need
1226 their type to materialize them. Also builtins are handled
1227 this way.
1228 ??? We still wrap these in LTO_tree_scc so at the
1229 input side we can properly identify the tree we want
1230          to ultimately return.  */
1231 size_t old_len = ob->writer_cache->nodes.length ();
1232 if (size == 1)
1233 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
1234 else
1235 {
1236 /* Write the size of the SCC entry candidates. */
1237 streamer_write_uhwi (ob, scc_entry_len);
1238
1239 /* Write all headers and populate the streamer cache. */
1240 for (unsigned i = 0; i < size; ++i)
1241 {
1242 hashval_t hash = sccstack[first+i].hash;
1243 tree t = sccstack[first+i].t;
1244 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
1245 t, hash, &ix);
1246 gcc_assert (!exists_p);
1247
1248 if (!lto_is_streamable (t))
1249 internal_error ("tree code %qs is not supported "
1250 "in LTO streams",
1251 get_tree_code_name (TREE_CODE (t)));
1252
1253 gcc_checking_assert (!streamer_handle_as_builtin_p (t));
1254
1255 /* Write the header, containing everything needed to
1256 materialize EXPR on the reading side. */
1257 streamer_write_tree_header (ob, t);
1258 }
1259
1260 /* Write the bitpacks and tree references. */
1261 for (unsigned i = 0; i < size; ++i)
1262 {
1263 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
1264
1265 /* Mark the end of the tree. */
1266 streamer_write_zero (ob);
1267 }
1268 }
1269 gcc_assert (old_len + size == ob->writer_cache->nodes.length ());
1270
1271 /* Finally truncate the vector. */
1272 sccstack.truncate (first);
1273
1274 if (from_state)
1275 from_state->low = MIN (from_state->low, cstate->low);
1276 return;
1277 }
1278
1279 if (from_state)
1280 from_state->low = MIN (from_state->low, cstate->low);
1281 }
1282 gcc_checking_assert (from_state);
1283 if (cstate->dfsnum < from_state->dfsnum)
1284 from_state->low = MIN (cstate->dfsnum, from_state->low);
1285 }
1286
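/* Rough layout of the LTO_tree_scc record written above, pieced
   together from DFS_write_tree rather than a normative description:

     LTO_tree_scc  size  scc_hash
       size == 1:  a single tree via lto_output_tree_1 (possibly just
                   a builtin or INTEGER_CST record)
       size  > 1:  scc_entry_len
                   size tree headers
                   size tree bodies, each terminated by a zero  */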
1287
1288 /* Emit the physical representation of tree node EXPR to output block
1289    OB.  If THIS_REF_P is true and EXPR is indexable, only a reference to EXPR
1290    is emitted; REF_P makes the same choice for trees reachable from EXPR.  */
1291
1292 void
1293 lto_output_tree (struct output_block *ob, tree expr,
1294 bool ref_p, bool this_ref_p)
1295 {
1296 unsigned ix;
1297 bool existed_p;
1298
1299 if (expr == NULL_TREE)
1300 {
1301 streamer_write_record_start (ob, LTO_null);
1302 return;
1303 }
1304
1305 if (this_ref_p && tree_is_indexable (expr))
1306 {
1307 lto_output_tree_ref (ob, expr);
1308 return;
1309 }
1310
1311 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1312 if (existed_p)
1313 {
1314 /* If a node has already been streamed out, make sure that
1315 we don't write it more than once. Otherwise, the reader
1316 will instantiate two different nodes for the same object. */
1317 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1318 streamer_write_uhwi (ob, ix);
1319 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1320 lto_tree_code_to_tag (TREE_CODE (expr)));
1321 lto_stats.num_pickle_refs_output++;
1322 }
1323 else
1324 {
1325 /* This is the first time we see EXPR, write all reachable
1326 trees to OB. */
1327 static bool in_dfs_walk;
1328
1329 /* Protect against recursion which means disconnect between
1330 what tree edges we walk in the DFS walk and what edges
1331 we stream out. */
1332 gcc_assert (!in_dfs_walk);
1333
1334       /* Start the DFS walk, setting up the global SCC walk state
1335          (sccstate, sccstate_obstack, next_dfs_num) used by
1336          DFS_write_tree.  */
1337 in_dfs_walk = true;
1338 sccstate = pointer_map_create ();
1339 gcc_obstack_init (&sccstate_obstack);
1340 next_dfs_num = 1;
1341 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
1342 sccstack.release ();
1343 pointer_map_destroy (sccstate);
1344 obstack_free (&sccstate_obstack, NULL);
1345 in_dfs_walk = false;
1346
1347 /* Finally append a reference to the tree we were writing.
1348 ??? If expr ended up as a singleton we could have
1349 inlined it here and avoid outputting a reference. */
1350 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1351 gcc_assert (existed_p);
1352 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1353 streamer_write_uhwi (ob, ix);
1354 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1355 lto_tree_code_to_tag (TREE_CODE (expr)));
1356 lto_stats.num_pickle_refs_output++;
1357 }
1358 }
1359
1360
1361 /* Output to OB a list of try/catch handlers starting with FIRST. */
1362
1363 static void
1364 output_eh_try_list (struct output_block *ob, eh_catch first)
1365 {
1366 eh_catch n;
1367
1368 for (n = first; n; n = n->next_catch)
1369 {
1370 streamer_write_record_start (ob, LTO_eh_catch);
1371 stream_write_tree (ob, n->type_list, true);
1372 stream_write_tree (ob, n->filter_list, true);
1373 stream_write_tree (ob, n->label, true);
1374 }
1375
1376 streamer_write_record_start (ob, LTO_null);
1377 }
1378
1379
1380 /* Output EH region R to OB.  R may be NULL, in which case only an
1381    LTO_null record is emitted.  Related regions are referenced by their
1382    index into the region array.  */
1383
1384 static void
1385 output_eh_region (struct output_block *ob, eh_region r)
1386 {
1387 enum LTO_tags tag;
1388
1389 if (r == NULL)
1390 {
1391 streamer_write_record_start (ob, LTO_null);
1392 return;
1393 }
1394
1395 if (r->type == ERT_CLEANUP)
1396 tag = LTO_ert_cleanup;
1397 else if (r->type == ERT_TRY)
1398 tag = LTO_ert_try;
1399 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1400 tag = LTO_ert_allowed_exceptions;
1401 else if (r->type == ERT_MUST_NOT_THROW)
1402 tag = LTO_ert_must_not_throw;
1403 else
1404 gcc_unreachable ();
1405
1406 streamer_write_record_start (ob, tag);
1407 streamer_write_hwi (ob, r->index);
1408
1409 if (r->outer)
1410 streamer_write_hwi (ob, r->outer->index);
1411 else
1412 streamer_write_zero (ob);
1413
1414 if (r->inner)
1415 streamer_write_hwi (ob, r->inner->index);
1416 else
1417 streamer_write_zero (ob);
1418
1419 if (r->next_peer)
1420 streamer_write_hwi (ob, r->next_peer->index);
1421 else
1422 streamer_write_zero (ob);
1423
1424 if (r->type == ERT_TRY)
1425 {
1426 output_eh_try_list (ob, r->u.eh_try.first_catch);
1427 }
1428 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1429 {
1430 stream_write_tree (ob, r->u.allowed.type_list, true);
1431 stream_write_tree (ob, r->u.allowed.label, true);
1432 streamer_write_uhwi (ob, r->u.allowed.filter);
1433 }
1434 else if (r->type == ERT_MUST_NOT_THROW)
1435 {
1436 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1437 bitpack_d bp = bitpack_create (ob->main_stream);
1438 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1439 streamer_write_bitpack (&bp);
1440 }
1441
1442 if (r->landing_pads)
1443 streamer_write_hwi (ob, r->landing_pads->index);
1444 else
1445 streamer_write_zero (ob);
1446 }
1447
1448
1449 /* Output landing pad LP to OB. */
1450
1451 static void
1452 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1453 {
1454 if (lp == NULL)
1455 {
1456 streamer_write_record_start (ob, LTO_null);
1457 return;
1458 }
1459
1460 streamer_write_record_start (ob, LTO_eh_landing_pad);
1461 streamer_write_hwi (ob, lp->index);
1462 if (lp->next_lp)
1463 streamer_write_hwi (ob, lp->next_lp->index);
1464 else
1465 streamer_write_zero (ob);
1466
1467 if (lp->region)
1468 streamer_write_hwi (ob, lp->region->index);
1469 else
1470 streamer_write_zero (ob);
1471
1472 stream_write_tree (ob, lp->post_landing_pad, true);
1473 }
1474
1475
1476 /* Output the existing eh_table to OB. */
1477
1478 static void
1479 output_eh_regions (struct output_block *ob, struct function *fn)
1480 {
1481 if (fn->eh && fn->eh->region_tree)
1482 {
1483 unsigned i;
1484 eh_region eh;
1485 eh_landing_pad lp;
1486 tree ttype;
1487
1488 streamer_write_record_start (ob, LTO_eh_table);
1489
1490 /* Emit the index of the root of the EH region tree. */
1491 streamer_write_hwi (ob, fn->eh->region_tree->index);
1492
1493 /* Emit all the EH regions in the region array. */
1494 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1495 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1496 output_eh_region (ob, eh);
1497
1498 /* Emit all landing pads. */
1499 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1500 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1501 output_eh_lp (ob, lp);
1502
1503 /* Emit all the runtime type data. */
1504 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1505 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1506 stream_write_tree (ob, ttype, true);
1507
1508 /* Emit the table of action chains. */
1509 if (targetm.arm_eabi_unwinder)
1510 {
1511 tree t;
1512 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1513 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1514 stream_write_tree (ob, t, true);
1515 }
1516 else
1517 {
1518 uchar c;
1519 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1520 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1521 streamer_write_char_stream (ob->main_stream, c);
1522 }
1523 }
1524
1525 /* The LTO_null either terminates the record or indicates that there
1526 are no eh_records at all. */
1527 streamer_write_record_start (ob, LTO_null);
1528 }
1529
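/* Sketch of the LTO_eh_table record emitted above when the function
   has an EH region tree; regions and landing pads reference each
   other by their array indices:

     LTO_eh_table  root-region-index
     region count,       one record per region array slot
     landing pad count,  one record per landing pad array slot
     ttype count,        the runtime type trees
     ehspec data count,  the action chain data
     LTO_null            (terminator, also emitted when there is no
                          EH table at all)  */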
1530
1531 /* Output all of the active ssa names to the ssa_names stream. */
1532
1533 static void
1534 output_ssa_names (struct output_block *ob, struct function *fn)
1535 {
1536 unsigned int i, len;
1537
1538 len = vec_safe_length (SSANAMES (fn));
1539 streamer_write_uhwi (ob, len);
1540
1541 for (i = 1; i < len; i++)
1542 {
1543 tree ptr = (*SSANAMES (fn))[i];
1544
1545 if (ptr == NULL_TREE
1546 || SSA_NAME_IN_FREE_LIST (ptr)
1547 || virtual_operand_p (ptr))
1548 continue;
1549
1550 streamer_write_uhwi (ob, i);
1551 streamer_write_char_stream (ob->main_stream,
1552 SSA_NAME_IS_DEFAULT_DEF (ptr));
1553 if (SSA_NAME_VAR (ptr))
1554 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1555 else
1556 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1557 stream_write_tree (ob, TREE_TYPE (ptr), true);
1558 }
1559
1560 streamer_write_zero (ob);
1561 }
1562
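/* Sketch of the SSA name stream written above: the length of the SSA
   name vector, then for every name that is live, non-virtual and not
   in the free list

     index  is-default-def byte  SSA_NAME_VAR (or its type)

   terminated by a zero.  */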
1563
1564 /* Output the cfg. */
1565
1566 static void
1567 output_cfg (struct output_block *ob, struct function *fn)
1568 {
1569 struct lto_output_stream *tmp_stream = ob->main_stream;
1570 basic_block bb;
1571
1572 ob->main_stream = ob->cfg_stream;
1573
1574 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1575 profile_status_for_function (fn));
1576
1577 /* Output the number of the highest basic block. */
1578 streamer_write_uhwi (ob, last_basic_block_for_function (fn));
1579
1580 FOR_ALL_BB_FN (bb, fn)
1581 {
1582 edge_iterator ei;
1583 edge e;
1584
1585 streamer_write_hwi (ob, bb->index);
1586
1587 /* Output the successors and the edge flags. */
1588 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1589 FOR_EACH_EDGE (e, ei, bb->succs)
1590 {
1591 streamer_write_uhwi (ob, e->dest->index);
1592 streamer_write_hwi (ob, e->probability);
1593 streamer_write_gcov_count (ob, e->count);
1594 streamer_write_uhwi (ob, e->flags);
1595 }
1596 }
1597
1598 streamer_write_hwi (ob, -1);
1599
1600 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1601 while (bb->next_bb)
1602 {
1603 streamer_write_hwi (ob, bb->next_bb->index);
1604 bb = bb->next_bb;
1605 }
1606
1607 streamer_write_hwi (ob, -1);
1608
1609 /* ??? The cfgloop interface is tied to cfun. */
1610 gcc_assert (cfun == fn);
1611
1612 /* Output the number of loops. */
1613 streamer_write_uhwi (ob, number_of_loops (fn));
1614
1615 /* Output each loop, skipping the tree root which has number zero. */
1616 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1617 {
1618 struct loop *loop = get_loop (fn, i);
1619
1620 /* Write the index of the loop header. That's enough to rebuild
1621 the loop tree on the reader side. Stream -1 for an unused
1622 loop entry. */
1623 if (!loop)
1624 {
1625 streamer_write_hwi (ob, -1);
1626 continue;
1627 }
1628 else
1629 streamer_write_hwi (ob, loop->header->index);
1630
1631 /* Write everything copy_loop_info copies. */
1632 streamer_write_enum (ob->main_stream,
1633 loop_estimation, EST_LAST, loop->estimate_state);
1634 streamer_write_hwi (ob, loop->any_upper_bound);
1635 if (loop->any_upper_bound)
1636 {
1637 int len = loop->nb_iterations_upper_bound.get_len ();
1638 int i;
1639
1640 streamer_write_uhwi (ob, loop->nb_iterations_upper_bound.get_precision ());
1641 streamer_write_uhwi (ob, len);
1642 for (i = 0; i < len; i++)
1643 streamer_write_hwi (ob, loop->nb_iterations_upper_bound.elt (i));
1644 }
1645 streamer_write_hwi (ob, loop->any_estimate);
1646 if (loop->any_estimate)
1647 {
1648 int len = loop->nb_iterations_estimate.get_len ();
1649 int i;
1650
1651 streamer_write_uhwi (ob, loop->nb_iterations_estimate.get_precision ());
1652 streamer_write_uhwi (ob, len);
1653 for (i = 0; i < len; i++)
1654 streamer_write_hwi (ob, loop->nb_iterations_estimate.elt (i));
1655 }
1656 }
1657
1658 ob->main_stream = tmp_stream;
1659 }
1660
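/* Rough layout of the cfg stream written above:

     profile status, highest basic block number
     per basic block:  index, successor count,
                       per edge: dest index, probability, count, flags
     -1
     the next_bb chain as a list of block indices, then -1
     number of loops, then per loop slot: header block index (or -1
     for an unused slot), estimation state and the recorded iteration
     bounds  */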
1661
1662 /* Create the section for OB and write its header and streams.  If the
1663    section type is for a function body, FN is the decl of that function.  */
1664
1665 void
1666 produce_asm (struct output_block *ob, tree fn)
1667 {
1668 enum lto_section_type section_type = ob->section_type;
1669 struct lto_function_header header;
1670 char *section_name;
1671 struct lto_output_stream *header_stream;
1672
1673 if (section_type == LTO_section_function_body)
1674 {
1675 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1676 section_name = lto_get_section_name (section_type, name, NULL);
1677 }
1678 else
1679 section_name = lto_get_section_name (section_type, NULL, NULL);
1680
1681 lto_begin_section (section_name, !flag_wpa);
1682 free (section_name);
1683
1684   /* The entire header stream is computed here.  */
1685 memset (&header, 0, sizeof (struct lto_function_header));
1686
1687 /* Write the header. */
1688 header.lto_header.major_version = LTO_major_version;
1689 header.lto_header.minor_version = LTO_minor_version;
1690
1691 header.compressed_size = 0;
1692
1693 if (section_type == LTO_section_function_body)
1694 header.cfg_size = ob->cfg_stream->total_size;
1695 header.main_size = ob->main_stream->total_size;
1696 header.string_size = ob->string_stream->total_size;
1697
1698 header_stream = XCNEW (struct lto_output_stream);
1699 lto_output_data_stream (header_stream, &header, sizeof header);
1700 lto_write_stream (header_stream);
1701 free (header_stream);
1702
1703   /* Put all of the gimple and the string table out to the asm file as a
1704 block of text. */
1705 if (section_type == LTO_section_function_body)
1706 lto_write_stream (ob->cfg_stream);
1707 lto_write_stream (ob->main_stream);
1708 lto_write_stream (ob->string_stream);
1709
1710 lto_end_section ();
1711 }
1712
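/* A section produced by produce_asm therefore consists, roughly, of:

     struct lto_function_header
     cfg stream       (function body sections only)
     main stream
     string stream

   with the size of each stream recorded in the header.  */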
1713
1714 /* Output the base body of struct function FN using output block OB. */
1715
1716 static void
1717 output_struct_function_base (struct output_block *ob, struct function *fn)
1718 {
1719 struct bitpack_d bp;
1720 unsigned i;
1721 tree t;
1722
1723 /* Output the static chain and non-local goto save area. */
1724 stream_write_tree (ob, fn->static_chain_decl, true);
1725 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
1726
1727 /* Output all the local variables in the function. */
1728 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
1729 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
1730 stream_write_tree (ob, t, true);
1731
1732 /* Output current IL state of the function. */
1733 streamer_write_uhwi (ob, fn->curr_properties);
1734
1735 /* Write all the attributes for FN. */
1736 bp = bitpack_create (ob->main_stream);
1737 bp_pack_value (&bp, fn->is_thunk, 1);
1738 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
1739 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
1740 bp_pack_value (&bp, fn->returns_struct, 1);
1741 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
1742 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
1743 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
1744 bp_pack_value (&bp, fn->after_inlining, 1);
1745 bp_pack_value (&bp, fn->stdarg, 1);
1746 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
1747 bp_pack_value (&bp, fn->calls_alloca, 1);
1748 bp_pack_value (&bp, fn->calls_setjmp, 1);
1749 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
1750 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
1751
1752 /* Output the function start and end loci. */
1753 stream_output_location (ob, &bp, fn->function_start_locus);
1754 stream_output_location (ob, &bp, fn->function_end_locus);
1755
1756 streamer_write_bitpack (&bp);
1757 }
1758
1759
1760 /* Output the body of function NODE->DECL. */
1761
1762 static void
1763 output_function (struct cgraph_node *node)
1764 {
1765 tree function;
1766 struct function *fn;
1767 basic_block bb;
1768 struct output_block *ob;
1769
1770 function = node->decl;
1771 fn = DECL_STRUCT_FUNCTION (function);
1772 ob = create_output_block (LTO_section_function_body);
1773
1774 clear_line_info (ob);
1775 ob->cgraph_node = node;
1776
1777 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
1778
1779 /* Set current_function_decl and cfun. */
1780 push_cfun (fn);
1781
1782 /* Make string 0 be a NULL string. */
1783 streamer_write_char_stream (ob->string_stream, 0);
1784
1785 streamer_write_record_start (ob, LTO_function);
1786
1787   /* Output the decls for the return value and the arguments.  */
1788 stream_write_tree (ob, DECL_RESULT (function), true);
1789 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
1790
1791 /* Output DECL_INITIAL for the function, which contains the tree of
1792 lexical scopes. */
1793 stream_write_tree (ob, DECL_INITIAL (function), true);
1794
1795   /* We also stream abstract functions, for which we stream only what is
1796      needed for debug info.  */
1797 if (gimple_has_body_p (function))
1798 {
1799 streamer_write_uhwi (ob, 1);
1800 output_struct_function_base (ob, fn);
1801
1802 /* Output all the SSA names used in the function. */
1803 output_ssa_names (ob, fn);
1804
1805 /* Output any exception handling regions. */
1806 output_eh_regions (ob, fn);
1807
1808
1809 /* We will renumber the statements. The code that does this uses
1810 the same ordering that we use for serializing them, so we can use
1811 the same code on the other end and not have to write out the
1812 statement numbers. We do not assign UIDs to virtual PHIs here
1813 because they get recomputed on the fly, which would make the
1814 numbers inconsistent. */
1815 set_gimple_stmt_max_uid (cfun, 0);
1816 FOR_ALL_BB (bb)
1817 {
1818 gimple_stmt_iterator gsi;
1819 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1820 {
1821 gimple stmt = gsi_stmt (gsi);
1822
1823 /* Virtual PHIs are not going to be streamed. */
1824 if (!virtual_operand_p (gimple_phi_result (stmt)))
1825 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1826 }
1827 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1828 {
1829 gimple stmt = gsi_stmt (gsi);
1830 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1831 }
1832 }
1833 /* To avoid duplicate gimple UIDs among the statements, renumber the
1834 virtual PHIs now. */
1835 FOR_ALL_BB (bb)
1836 {
1837 gimple_stmt_iterator gsi;
1838 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1839 {
1840 gimple stmt = gsi_stmt (gsi);
1841 if (virtual_operand_p (gimple_phi_result (stmt)))
1842 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1843 }
1844 }
1845
1846 /* Output the code for the function. */
1847 FOR_ALL_BB_FN (bb, fn)
1848 output_bb (ob, bb, fn);
1849
1850 /* The terminator for this function. */
1851 streamer_write_record_start (ob, LTO_null);
1852
1853 output_cfg (ob, fn);
1854
1855 pop_cfun ();
1856 }
1857 else
1858 streamer_write_uhwi (ob, 0);
1859
1860 /* Create a section to hold the pickled output of this function. */
1861 produce_asm (ob, function);
1862
1863 destroy_output_block (ob);
1864 }
1865
1866
1867 /* Emit toplevel asms. */
1868
1869 void
1870 lto_output_toplevel_asms (void)
1871 {
1872 struct output_block *ob;
1873 struct asm_node *can;
1874 char *section_name;
1875 struct lto_output_stream *header_stream;
1876 struct lto_asm_header header;
1877
1878 if (! asm_nodes)
1879 return;
1880
1881 ob = create_output_block (LTO_section_asm);
1882
1883 /* Make string 0 be a NULL string. */
1884 streamer_write_char_stream (ob->string_stream, 0);
1885
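 /* Each top-level asm is written as its string constant followed by its
 order; the list is terminated below by writing a NULL string constant. */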
1886 for (can = asm_nodes; can; can = can->next)
1887 {
1888 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
1889 streamer_write_hwi (ob, can->order);
1890 }
1891
1892 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
1893
1894 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
1895 lto_begin_section (section_name, !flag_wpa);
1896 free (section_name);
1897
1898 /* The entire header stream is computed here. */
1899 memset (&header, 0, sizeof (header));
1900
1901 /* Write the header. */
1902 header.lto_header.major_version = LTO_major_version;
1903 header.lto_header.minor_version = LTO_minor_version;
1904
1905 header.main_size = ob->main_stream->total_size;
1906 header.string_size = ob->string_stream->total_size;
1907
1908 header_stream = XCNEW (struct lto_output_stream);
1909 lto_output_data_stream (header_stream, &header, sizeof (header));
1910 lto_write_stream (header_stream);
1911 free (header_stream);
1912
1913 /* Put the streamed data and the string table out to the asm file as a
1914 block of text. */
1915 lto_write_stream (ob->main_stream);
1916 lto_write_stream (ob->string_stream);
1917
1918 lto_end_section ();
1919
1920 destroy_output_block (ob);
1921 }
1922
1923
1924 /* Copy the function body of NODE without deserializing. */
1925
1926 static void
1927 copy_function (struct cgraph_node *node)
1928 {
1929 tree function = node->decl;
1930 struct lto_file_decl_data *file_data = node->lto_file_data;
1931 struct lto_output_stream *output_stream = XCNEW (struct lto_output_stream);
1932 const char *data;
1933 size_t len;
1934 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
1935 char *section_name =
1936 lto_get_section_name (LTO_section_function_body, name, NULL);
1937 size_t i, j;
1938 struct lto_in_decl_state *in_state;
1939 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
1940
1941 lto_begin_section (section_name, !flag_wpa);
1942 free (section_name);
1943
1944 /* We may have renamed the declaration, e.g., a static function. */
1945 name = lto_get_decl_name_mapping (file_data, name);
1946
1947 data = lto_get_section_data (file_data, LTO_section_function_body,
1948 name, &len);
1949 gcc_assert (data);
1950
1951 /* Do a bit copy of the function body. */
1952 lto_output_data_stream (output_stream, data, len);
1953 lto_write_stream (output_stream);
1954
1955 /* Copy decls. */
1956 in_state =
1957 lto_get_function_in_decl_state (node->lto_file_data, function);
1958 gcc_assert (in_state);
1959
1960 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
1961 {
1962 size_t n = in_state->streams[i].size;
1963 tree *trees = in_state->streams[i].trees;
1964 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
1965
1966 /* The out state must have the same indices as the in state,
1967 so just copy the vector. All the encoders in the out state
1968 must be empty when we reach here. */
1969 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
1970 encoder->trees.reserve_exact (n);
1971 for (j = 0; j < n; j++)
1972 encoder->trees.safe_push (trees[j]);
1973 }
1974
1975 lto_free_section_data (file_data, LTO_section_function_body, name,
1976 data, len);
1977 free (output_stream);
1978 lto_end_section ();
1979 }
1980
1981
1982 /* Main entry point from the pass manager. */
1983
1984 void
1985 lto_output (void)
1986 {
1987 struct lto_out_decl_state *decl_state;
1988 #ifdef ENABLE_CHECKING
1989 bitmap output = lto_bitmap_alloc ();
1990 #endif
1991 int i, n_nodes;
1992 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
1993
1994 /* Initialize the streamer. */
1995 lto_streamer_init ();
1996
1997 n_nodes = lto_symtab_encoder_size (encoder);
1998 /* Process only the functions with bodies. */
1999 for (i = 0; i < n_nodes; i++)
2000 {
2001 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2002 cgraph_node *node = dyn_cast <cgraph_node> (snode);
2003 if (node
2004 && lto_symtab_encoder_encode_body_p (encoder, node)
2005 && !node->alias)
2006 {
2007 #ifdef ENABLE_CHECKING
2008 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2009 bitmap_set_bit (output, DECL_UID (node->decl));
2010 #endif
2011 decl_state = lto_new_out_decl_state ();
2012 lto_push_out_decl_state (decl_state);
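 /* At WPA time a function whose GIMPLE body was never read in is
 bit-copied from the input file; otherwise it is re-streamed from
 the in-memory representation. */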
2013 if (gimple_has_body_p (node->decl) || !flag_wpa)
2014 output_function (node);
2015 else
2016 copy_function (node);
2017 gcc_assert (lto_get_out_decl_state () == decl_state);
2018 lto_pop_out_decl_state ();
2019 lto_record_function_out_decl_state (node->decl, decl_state);
2020 }
2021 }
2022
2023 /* Emit the callgraph after emitting function bodies. This needs to
2024 be done now to make sure that all the statements in every function
2025 have been renumbered so that edges can be associated with call
2026 statements using the statement UIDs. */
2027 output_symtab ();
2028
2029 #ifdef ENABLE_CHECKING
2030 lto_bitmap_free (output);
2031 #endif
2032 }
2033
2034 /* Write each node encoded by ENCODER to OB, as well as those reachable
2035 from it and required for correct representation of its semantics.
2036 Each node in ENCODER must be a global declaration or a type. A node
2037 is written only once, even if it appears multiple times in the
2038 vector. Certain transitively-reachable nodes, such as those
2039 representing expressions, may be duplicated, but such nodes
2040 must not appear in ENCODER itself. */
2041
2042 static void
2043 write_global_stream (struct output_block *ob,
2044 struct lto_tree_ref_encoder *encoder)
2045 {
2046 tree t;
2047 size_t index;
2048 const size_t size = lto_tree_ref_encoder_size (encoder);
2049
2050 for (index = 0; index < size; index++)
2051 {
2052 t = lto_tree_ref_encoder_get_tree (encoder, index);
2053 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2054 stream_write_tree (ob, t, false);
2055 }
2056 }
2057
2058
2059 /* Write a sequence of indices into the globals vector corresponding
2060 to the trees in ENCODER. These are used by the reader to map the
2061 indices used to refer to global entities within function bodies to
2062 their referents. */
2063
2064 static void
2065 write_global_references (struct output_block *ob,
2066 struct lto_output_stream *ref_stream,
2067 struct lto_tree_ref_encoder *encoder)
2068 {
2069 tree t;
2070 uint32_t index;
2071 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2072
2073 /* Write size as 32-bit unsigned. */
2074 lto_output_data_stream (ref_stream, &size, sizeof (int32_t));
2075
2076 for (index = 0; index < size; index++)
2077 {
2078 uint32_t slot_num;
2079
2080 t = lto_tree_ref_encoder_get_tree (encoder, index);
2081 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2082 gcc_assert (slot_num != (unsigned)-1);
2083 lto_output_data_stream (ref_stream, &slot_num, sizeof slot_num);
2084 }
2085 }
2086
2087
2088 /* Write all the streams in an lto_out_decl_state STATE using
2089 output block OB. */
2090
2091 void
2092 lto_output_decl_state_streams (struct output_block *ob,
2093 struct lto_out_decl_state *state)
2094 {
2095 int i;
2096
2097 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2098 write_global_stream (ob, &state->streams[i]);
2099 }
2100
2101
2102 /* Write all the references in an lto_out_decl_state STATE using
2103 output block OB and output stream OUT_STREAM. */
2104
2105 void
2106 lto_output_decl_state_refs (struct output_block *ob,
2107 struct lto_output_stream *out_stream,
2108 struct lto_out_decl_state *state)
2109 {
2110 unsigned i;
2111 uint32_t ref;
2112 tree decl;
2113
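 /* The layout written here has to match what lto_out_decl_state_written_size
 computes below: one 32-bit function reference, then for each decl stream
 a 32-bit length followed by that many 32-bit slot indices. */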
2114 /* Write a reference to the FUNCTION_DECL. If there is no function,
2115 write a reference to void_type_node. */
2116 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2117 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2118 gcc_assert (ref != (unsigned)-1);
2119 lto_output_data_stream (out_stream, &ref, sizeof (uint32_t));
2120
2121 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2122 write_global_references (ob, out_stream, &state->streams[i]);
2123 }
2124
2125
2126 /* Return the written size of STATE. */
2127
2128 static size_t
2129 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2130 {
2131 int i;
2132 size_t size;
2133
2134 size = sizeof (int32_t); /* fn_ref. */
2135 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2136 {
2137 size += sizeof (int32_t); /* vector size. */
2138 size += (lto_tree_ref_encoder_size (&state->streams[i])
2139 * sizeof (int32_t));
2140 }
2141 return size;
2142 }
2143
2144
2145 /* Write symbol T to STREAM, using CACHE to look up its slot number.
2146 SEEN holds the symbols written so far. */
2147
2148 static void
2149 write_symbol (struct streamer_tree_cache_d *cache,
2150 struct lto_output_stream *stream,
2151 tree t, struct pointer_set_t *seen, bool alias)
2152 {
2153 const char *name;
2154 enum gcc_plugin_symbol_kind kind;
2155 enum gcc_plugin_symbol_visibility visibility;
2156 unsigned slot_num;
2157 unsigned HOST_WIDEST_INT size;
2158 const char *comdat;
2159 unsigned char c;
2160
2161 /* None of the following kinds of symbols are needed in the
2162 symbol table. */
2163 if (!TREE_PUBLIC (t)
2164 || is_builtin_fn (t)
2165 || DECL_ABSTRACT (t)
2166 || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
2167 return;
2168 gcc_assert (TREE_CODE (t) != RESULT_DECL);
2169
2170 gcc_assert (TREE_CODE (t) == VAR_DECL
2171 || TREE_CODE (t) == FUNCTION_DECL);
2172
2173 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2174
2175 /* This behaves like assemble_name_raw in varasm.c, performing the
2176 same name manipulations that ASM_OUTPUT_LABELREF does. */
2177 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2178
2179 if (pointer_set_contains (seen, name))
2180 return;
2181 pointer_set_insert (seen, name);
2182
2183 streamer_tree_cache_lookup (cache, t, &slot_num);
2184 gcc_assert (slot_num != (unsigned)-1);
2185
2186 if (DECL_EXTERNAL (t))
2187 {
2188 if (DECL_WEAK (t))
2189 kind = GCCPK_WEAKUNDEF;
2190 else
2191 kind = GCCPK_UNDEF;
2192 }
2193 else
2194 {
2195 if (DECL_WEAK (t))
2196 kind = GCCPK_WEAKDEF;
2197 else if (DECL_COMMON (t))
2198 kind = GCCPK_COMMON;
2199 else
2200 kind = GCCPK_DEF;
2201
2202 /* When something is defined, it should have a node attached. */
2203 gcc_assert (alias || TREE_CODE (t) != VAR_DECL
2204 || varpool_get_node (t)->definition);
2205 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2206 || (cgraph_get_node (t)
2207 && cgraph_get_node (t)->definition));
2208 }
2209
2210 /* Imitate what default_elf_asm_output_external does.
2211 When a symbol is external, we need to output it with DEFAULT visibility
2212 when compiling with -fvisibility=default, but with HIDDEN visibility
2213 when the symbol has attribute (visibility("hidden")) specified.
2214 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2215 right. */
2216
2217 if (DECL_EXTERNAL (t)
2218 && !targetm.binds_local_p (t))
2219 visibility = GCCPV_DEFAULT;
2220 else
2221 switch (DECL_VISIBILITY (t))
2222 {
2223 case VISIBILITY_DEFAULT:
2224 visibility = GCCPV_DEFAULT;
2225 break;
2226 case VISIBILITY_PROTECTED:
2227 visibility = GCCPV_PROTECTED;
2228 break;
2229 case VISIBILITY_HIDDEN:
2230 visibility = GCCPV_HIDDEN;
2231 break;
2232 case VISIBILITY_INTERNAL:
2233 visibility = GCCPV_INTERNAL;
2234 break;
2235 }
2236
2237 if (kind == GCCPK_COMMON
2238 && DECL_SIZE_UNIT (t)
2239 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2240 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2241 else
2242 size = 0;
2243
2244 if (DECL_ONE_ONLY (t))
2245 comdat = IDENTIFIER_POINTER (DECL_COMDAT_GROUP (t));
2246 else
2247 comdat = "";
2248
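 /* The plugin symbol table entry emitted below consists of the
 NUL-terminated assembler name, the NUL-terminated comdat group name,
 one byte each for the symbol kind and visibility, an 8-byte size and
 a 4-byte slot number. */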
2249 lto_output_data_stream (stream, name, strlen (name) + 1);
2250 lto_output_data_stream (stream, comdat, strlen (comdat) + 1);
2251 c = (unsigned char) kind;
2252 lto_output_data_stream (stream, &c, 1);
2253 c = (unsigned char) visibility;
2254 lto_output_data_stream (stream, &c, 1);
2255 lto_output_data_stream (stream, &size, 8);
2256 lto_output_data_stream (stream, &slot_num, 4);
2257 }
2258
2259 /* Return true if NODE should appear in the plugin symbol table. */
2260
2261 bool
2262 output_symbol_p (symtab_node *node)
2263 {
2264 struct cgraph_node *cnode;
2265 if (!symtab_real_symbol_p (node))
2266 return false;
2267 /* We keep external functions in the symtab for the sake of inlining
2268 and devirtualization. We do not want to see them in the symbol table
2269 as references unless they are really used. */
2270 cnode = dyn_cast <cgraph_node> (node);
2271 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2272 && cnode->callers)
2273 return true;
2274
2275 /* Ignore all references from the initializers of external variables - they
2276 are not really part of the compilation unit until they are used by folding.
2277 Some symbols, like references to external construction vtables, cannot be
2278 referred to at all. We decide this in can_refer_decl_in_current_unit_p. */
2279 if (!node->definition || DECL_EXTERNAL (node->decl))
2280 {
2281 int i;
2282 struct ipa_ref *ref;
2283 for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list,
2284 i, ref); i++)
2285 {
2286 if (ref->use == IPA_REF_ALIAS)
2287 continue;
2288 if (is_a <cgraph_node> (ref->referring))
2289 return true;
2290 if (!DECL_EXTERNAL (ref->referring->decl))
2291 return true;
2292 }
2293 return false;
2294 }
2295 return true;
2296 }
2297
2298
2299 /* Write an IL symbol table to OB, covering the symbols recorded in
2300 OB's symtab node encoder. */
2301
2302 static void
2303 produce_symtab (struct output_block *ob)
2304 {
2305 struct streamer_tree_cache_d *cache = ob->writer_cache;
2306 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2307 struct pointer_set_t *seen;
2308 struct lto_output_stream stream;
2309 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2310 lto_symtab_encoder_iterator lsei;
2311
2312 lto_begin_section (section_name, false);
2313 free (section_name);
2314
2315 seen = pointer_set_create ();
2316 memset (&stream, 0, sizeof (stream));
2317
2318 /* Write the symbol table.
2319 First write all defined symbols and then all external declarations.
2320 This is necessary to handle cases where we have duplicated symbols. */
2321 for (lsei = lsei_start (encoder);
2322 !lsei_end_p (lsei); lsei_next (&lsei))
2323 {
2324 symtab_node *node = lsei_node (lsei);
2325
2326 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2327 continue;
2328 write_symbol (cache, &stream, node->decl, seen, false);
2329 }
2330 for (lsei = lsei_start (encoder);
2331 !lsei_end_p (lsei); lsei_next (&lsei))
2332 {
2333 symtab_node *node = lsei_node (lsei);
2334
2335 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2336 continue;
2337 write_symbol (cache, &stream, node->decl, seen, false);
2338 }
2339
2340 lto_write_stream (&stream);
2341 pointer_set_destroy (seen);
2342
2343 lto_end_section ();
2344 }
2345
2346
2347 /* This pass is run after all of the functions are serialized and all
2348 of the IPA passes have written their serialized forms. This pass
2349 causes the vector of all of the global decls and types used from
2350 this file to be written into a section that can then be read back
2351 to recover them on the other side. */
2352
2353 void
2354 produce_asm_for_decls (void)
2355 {
2356 struct lto_out_decl_state *out_state;
2357 struct lto_out_decl_state *fn_out_state;
2358 struct lto_decl_header header;
2359 char *section_name;
2360 struct output_block *ob;
2361 struct lto_output_stream *header_stream, *decl_state_stream;
2362 unsigned idx, num_fns;
2363 size_t decl_state_size;
2364 int32_t num_decl_states;
2365
2366 ob = create_output_block (LTO_section_decls);
2367 ob->global = true;
2368
2369 memset (&header, 0, sizeof (struct lto_decl_header));
2370
2371 section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2372 lto_begin_section (section_name, !flag_wpa);
2373 free (section_name);
2374
2375 /* Make string 0 be a NULL string. */
2376 streamer_write_char_stream (ob->string_stream, 0);
2377
2378 gcc_assert (!alias_pairs);
2379
2380 /* Write the global symbols. */
2381 out_state = lto_get_out_decl_state ();
2382 num_fns = lto_function_decl_states.length ();
2383 lto_output_decl_state_streams (ob, out_state);
2384 for (idx = 0; idx < num_fns; idx++)
2385 {
2386 fn_out_state =
2387 lto_function_decl_states[idx];
2388 lto_output_decl_state_streams (ob, fn_out_state);
2389 }
2390
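 /* The decls section emitted below consists of this header, a 32-bit
 count of decl states followed by the reference block of each decl
 state, the main tree stream, and finally the string table. */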
2391 header.lto_header.major_version = LTO_major_version;
2392 header.lto_header.minor_version = LTO_minor_version;
2393
2394 /* Currently not used. This field would allow us to preallocate
2395 the globals vector, so that it need not be resized as it is extended. */
2396 header.num_nodes = -1;
2397
2398 /* Compute the total size of all decl out states. */
2399 decl_state_size = sizeof (int32_t);
2400 decl_state_size += lto_out_decl_state_written_size (out_state);
2401 for (idx = 0; idx < num_fns; idx++)
2402 {
2403 fn_out_state =
2404 lto_function_decl_states[idx];
2405 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2406 }
2407 header.decl_state_size = decl_state_size;
2408
2409 header.main_size = ob->main_stream->total_size;
2410 header.string_size = ob->string_stream->total_size;
2411
2412 header_stream = XCNEW (struct lto_output_stream);
2413 lto_output_data_stream (header_stream, &header, sizeof header);
2414 lto_write_stream (header_stream);
2415 free (header_stream);
2416
2417 /* Write the main out-decl state, followed by out-decl states of
2418 functions. */
2419 decl_state_stream = XCNEW (struct lto_output_stream);
2420 num_decl_states = num_fns + 1;
2421 lto_output_data_stream (decl_state_stream, &num_decl_states,
2422 sizeof (num_decl_states));
2423 lto_output_decl_state_refs (ob, decl_state_stream, out_state);
2424 for (idx = 0; idx < num_fns; idx++)
2425 {
2426 fn_out_state =
2427 lto_function_decl_states[idx];
2428 lto_output_decl_state_refs (ob, decl_state_stream, fn_out_state);
2429 }
2430 lto_write_stream (decl_state_stream);
2431 free (decl_state_stream);
2432
2433 lto_write_stream (ob->main_stream);
2434 lto_write_stream (ob->string_stream);
2435
2436 lto_end_section ();
2437
2438 /* Write the symbol table. It is used by the linker to determine
2439 dependencies, and thus we can skip it for WPA. */
2440 if (!flag_wpa)
2441 produce_symtab (ob);
2442
2443 /* Write command line opts. */
2444 lto_write_options ();
2445
2446 /* Deallocate memory and clean up. */
2447 for (idx = 0; idx < num_fns; idx++)
2448 {
2449 fn_out_state =
2450 lto_function_decl_states[idx];
2451 lto_delete_out_decl_state (fn_out_state);
2452 }
2453 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2454 lto_function_decl_states.release ();
2455 destroy_output_block (ob);
2456 }