1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2013 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "expr.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "hashtab.h"
33 #include "basic-block.h"
34 #include "gimple.h"
35 #include "gimple-ssa.h"
36 #include "tree-ssanames.h"
37 #include "tree-pass.h"
38 #include "function.h"
39 #include "ggc.h"
40 #include "diagnostic-core.h"
41 #include "except.h"
42 #include "vec.h"
43 #include "lto-symtab.h"
44 #include "lto-streamer.h"
45 #include "data-streamer.h"
46 #include "gimple-streamer.h"
47 #include "tree-streamer.h"
48 #include "streamer-hooks.h"
49 #include "cfgloop.h"
50
51
52 /* Clear the line info stored in OB. */
53
54 static void
55 clear_line_info (struct output_block *ob)
56 {
57 ob->current_file = NULL;
58 ob->current_line = 0;
59 ob->current_col = 0;
60 }
61
62
63 /* Create the output block and return it. SECTION_TYPE is
64    LTO_section_function_body or LTO_section_static_initializer. */
65
66 struct output_block *
67 create_output_block (enum lto_section_type section_type)
68 {
69 struct output_block *ob = XCNEW (struct output_block);
70
71 ob->section_type = section_type;
72 ob->decl_state = lto_get_out_decl_state ();
73 ob->main_stream = XCNEW (struct lto_output_stream);
74 ob->string_stream = XCNEW (struct lto_output_stream);
75 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true);
76
77 if (section_type == LTO_section_function_body)
78 ob->cfg_stream = XCNEW (struct lto_output_stream);
79
80 clear_line_info (ob);
81
82 ob->string_hash_table.create (37);
83 gcc_obstack_init (&ob->obstack);
84
85 return ob;
86 }
87
88
89 /* Destroy the output block OB. */
90
91 void
92 destroy_output_block (struct output_block *ob)
93 {
94 enum lto_section_type section_type = ob->section_type;
95
96 ob->string_hash_table.dispose ();
97
98 free (ob->main_stream);
99 free (ob->string_stream);
100 if (section_type == LTO_section_function_body)
101 free (ob->cfg_stream);
102
103 streamer_tree_cache_delete (ob->writer_cache);
104 obstack_free (&ob->obstack, NULL);
105
106 free (ob);
107 }
108
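/* Illustrative sketch, not part of GCC and kept under "#if 0" so it does
   not affect the build: the typical lifecycle of an output block as used
   by the writers in this file.  The records written here are made up for
   illustration only.  */
#if 0
static void
example_output_block_lifecycle (void)
{
  /* Allocate the block together with its main, string and cfg streams.  */
  struct output_block *ob = create_output_block (LTO_section_function_body);

  /* Emit a couple of records into the main stream.  */
  streamer_write_record_start (ob, LTO_null);
  streamer_write_uhwi (ob, 42);

  /* A real writer would now call produce_asm to wrap the streams into an
     LTO section; here we simply tear the block down again.  */
  destroy_output_block (ob);
}
#endif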
109
110 /* Look up NODE in the type table and write the index for it to OB. */
111
112 static void
113 output_type_ref (struct output_block *ob, tree node)
114 {
115 streamer_write_record_start (ob, LTO_type_ref);
116 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
117 }
118
119
120 /* Return true if tree node T is written to various tables. For these
121    nodes, we sometimes want to write their physical representation
122 (via lto_output_tree), and sometimes we need to emit an index
123 reference into a table (via lto_output_tree_ref). */
124
125 static bool
126 tree_is_indexable (tree t)
127 {
128 /* Parameters and return values of functions of variably modified types
129    must go to the global stream, because they may be used in the type
130 definition. */
131 if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
132 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
133 else if (TREE_CODE (t) == VAR_DECL && decl_function_context (t)
134 && !TREE_STATIC (t))
135 return false;
136 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
137 return false;
138 /* Variably modified types need to be streamed alongside function
139 bodies because they can refer to local entities. Together with
140 them we have to localize their members as well.
141 ??? In theory that includes non-FIELD_DECLs as well. */
142 else if (TYPE_P (t)
143 && variably_modified_type_p (t, NULL_TREE))
144 return false;
145 else if (TREE_CODE (t) == FIELD_DECL
146 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
147 return false;
148 else
149 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
150 }
151
152
153 /* Output info about the new location LOC into bitpack BP.
154    After outputting the bitpack, lto_output_location_data has
155    to be called to output the actual data.  */
156
157 void
158 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
159 location_t loc)
160 {
161 expanded_location xloc;
162
163 loc = LOCATION_LOCUS (loc);
164 bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
165 if (loc == UNKNOWN_LOCATION)
166 return;
167
168 xloc = expand_location (loc);
169
170 bp_pack_value (bp, ob->current_file != xloc.file, 1);
171 bp_pack_value (bp, ob->current_line != xloc.line, 1);
172 bp_pack_value (bp, ob->current_col != xloc.column, 1);
173
174 if (ob->current_file != xloc.file)
175 bp_pack_var_len_unsigned (bp,
176 streamer_string_index (ob, xloc.file,
177 strlen (xloc.file) + 1,
178 true));
179 ob->current_file = xloc.file;
180
181 if (ob->current_line != xloc.line)
182 bp_pack_var_len_unsigned (bp, xloc.line);
183 ob->current_line = xloc.line;
184
185 if (ob->current_col != xloc.column)
186 bp_pack_var_len_unsigned (bp, xloc.column);
187 ob->current_col = xloc.column;
188 }
189
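/* Illustrative sketch, not part of GCC and kept under "#if 0": how a
   writer can emit a location with the function above.  The caller owns
   the bitpack into which lto_output_location packs its data;
   output_eh_region and output_struct_function_base below follow the same
   pattern through the stream_output_location hook.  */
#if 0
static void
example_write_location (struct output_block *ob, location_t loc)
{
  struct bitpack_d bp = bitpack_create (ob->main_stream);
  lto_output_location (ob, &bp, loc);
  streamer_write_bitpack (&bp);
}
#endif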
190
191 /* If EXPR is an indexable tree node, output a reference to it to
192 output block OB. Otherwise, output the physical representation of
193 EXPR to OB. */
194
195 static void
196 lto_output_tree_ref (struct output_block *ob, tree expr)
197 {
198 enum tree_code code;
199
200 if (TYPE_P (expr))
201 {
202 output_type_ref (ob, expr);
203 return;
204 }
205
206 code = TREE_CODE (expr);
207 switch (code)
208 {
209 case SSA_NAME:
210 streamer_write_record_start (ob, LTO_ssa_name_ref);
211 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
212 break;
213
214 case FIELD_DECL:
215 streamer_write_record_start (ob, LTO_field_decl_ref);
216 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
217 break;
218
219 case FUNCTION_DECL:
220 streamer_write_record_start (ob, LTO_function_decl_ref);
221 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
222 break;
223
224 case VAR_DECL:
225 case DEBUG_EXPR_DECL:
226 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
227 case PARM_DECL:
228 streamer_write_record_start (ob, LTO_global_decl_ref);
229 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
230 break;
231
232 case CONST_DECL:
233 streamer_write_record_start (ob, LTO_const_decl_ref);
234 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
235 break;
236
237 case IMPORTED_DECL:
238 gcc_assert (decl_function_context (expr) == NULL);
239 streamer_write_record_start (ob, LTO_imported_decl_ref);
240 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
241 break;
242
243 case TYPE_DECL:
244 streamer_write_record_start (ob, LTO_type_decl_ref);
245 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
246 break;
247
248 case NAMESPACE_DECL:
249 streamer_write_record_start (ob, LTO_namespace_decl_ref);
250 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
251 break;
252
253 case LABEL_DECL:
254 streamer_write_record_start (ob, LTO_label_decl_ref);
255 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
256 break;
257
258 case RESULT_DECL:
259 streamer_write_record_start (ob, LTO_result_decl_ref);
260 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
261 break;
262
263 case TRANSLATION_UNIT_DECL:
264 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
265 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
266 break;
267
268 default:
269 /* No other node is indexable, so it should have been handled by
270 lto_output_tree. */
271 gcc_unreachable ();
272 }
273 }
274
275
276 /* Return true if EXPR is a tree node that can be written to disk. */
277
278 static inline bool
279 lto_is_streamable (tree expr)
280 {
281 enum tree_code code = TREE_CODE (expr);
282
283 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
284 name version in lto_output_tree_ref (see output_ssa_names). */
285 return !is_lang_specific (expr)
286 && code != SSA_NAME
287 && code != CALL_EXPR
288 && code != LANG_TYPE
289 && code != MODIFY_EXPR
290 && code != INIT_EXPR
291 && code != TARGET_EXPR
292 && code != BIND_EXPR
293 && code != WITH_CLEANUP_EXPR
294 && code != STATEMENT_LIST
295 && code != OMP_CLAUSE
296 && (code == CASE_LABEL_EXPR
297 || code == DECL_EXPR
298 || TREE_CODE_CLASS (code) != tcc_statement);
299 }
300
301
302 /* For EXPR, look up and return what we want to stream to OB as DECL_INITIAL. */
303
304 static tree
305 get_symbol_initial_value (struct output_block *ob, tree expr)
306 {
307 gcc_checking_assert (DECL_P (expr)
308 && TREE_CODE (expr) != FUNCTION_DECL
309 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
310
311 /* Handle DECL_INITIAL for symbols. */
312 tree initial = DECL_INITIAL (expr);
313 if (TREE_CODE (expr) == VAR_DECL
314 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
315 && !DECL_IN_CONSTANT_POOL (expr)
316 && initial)
317 {
318 lto_symtab_encoder_t encoder;
319 struct varpool_node *vnode;
320
321 encoder = ob->decl_state->symtab_node_encoder;
322 vnode = varpool_get_node (expr);
323 if (!vnode
324 || !lto_symtab_encoder_encode_initializer_p (encoder,
325 vnode))
326 initial = error_mark_node;
327 }
328
329 return initial;
330 }
331
332
333 /* Write a physical representation of tree node EXPR to output block
334 OB. If REF_P is true, the leaves of EXPR are emitted as references
335    via lto_output_tree_ref.  */
337
338 static void
339 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
340 {
341 /* Pack all the non-pointer fields in EXPR into a bitpack and write
342 the resulting bitpack. */
343 bitpack_d bp = bitpack_create (ob->main_stream);
344 streamer_pack_tree_bitfields (ob, &bp, expr);
345 streamer_write_bitpack (&bp);
346
347 /* Write all the pointer fields in EXPR. */
348 streamer_write_tree_body (ob, expr, ref_p);
349
350 /* Write any LTO-specific data to OB. */
351 if (DECL_P (expr)
352 && TREE_CODE (expr) != FUNCTION_DECL
353 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
354 {
355 /* Handle DECL_INITIAL for symbols. */
356 tree initial = get_symbol_initial_value (ob, expr);
357 stream_write_tree (ob, initial, ref_p);
358 }
359 }
360
361 /* Write a physical representation of tree node EXPR to output block
362 OB. If REF_P is true, the leaves of EXPR are emitted as references
363    via lto_output_tree_ref.  */
365
366 static void
367 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
368 {
369 if (!lto_is_streamable (expr))
370 internal_error ("tree code %qs is not supported in LTO streams",
371 get_tree_code_name (TREE_CODE (expr)));
372
373 /* Write the header, containing everything needed to materialize
374 EXPR on the reading side. */
375 streamer_write_tree_header (ob, expr);
376
377 lto_write_tree_1 (ob, expr, ref_p);
378
379 /* Mark the end of EXPR. */
380 streamer_write_zero (ob);
381 }
382
383 /* Emit the physical representation of tree node EXPR to output block
384 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
385 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
386
387 static void
388 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
389 bool ref_p, bool this_ref_p)
390 {
391 unsigned ix;
392
393 gcc_checking_assert (expr != NULL_TREE
394 && !(this_ref_p && tree_is_indexable (expr)));
395
396 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
397 expr, hash, &ix);
398 gcc_assert (!exists_p);
399 if (streamer_handle_as_builtin_p (expr))
400 {
401 /* MD and NORMAL builtins do not need to be written out
402 completely as they are always instantiated by the
403 compiler on startup. The only builtins that need to
404 be written out are BUILT_IN_FRONTEND. For all other
405 builtins, we simply write the class and code. */
406 streamer_write_builtin (ob, expr);
407 }
408 else if (TREE_CODE (expr) == INTEGER_CST
409 && !TREE_OVERFLOW (expr))
410 {
411 /* Shared INTEGER_CST nodes are special because they need their
412 original type to be materialized by the reader (to implement
413 TYPE_CACHED_VALUES). */
414 streamer_write_integer_cst (ob, expr, ref_p);
415 }
416 else
417 {
418 /* This is the first time we see EXPR, write its fields
419 to OB. */
420 lto_write_tree (ob, expr, ref_p);
421 }
422 }
423
424 struct sccs
425 {
426 unsigned int dfsnum;
427 unsigned int low;
428 };
429
430 struct scc_entry
431 {
432 tree t;
433 hashval_t hash;
434 };
435
436 static unsigned int next_dfs_num;
437 static vec<scc_entry> sccstack;
438 static struct pointer_map_t *sccstate;
439 static struct obstack sccstate_obstack;
440
441 static void
442 DFS_write_tree (struct output_block *ob, sccs *from_state,
443 tree expr, bool ref_p, bool this_ref_p);
444
445 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
446 DFS recurse for all tree edges originating from it. */
447
448 static void
449 DFS_write_tree_body (struct output_block *ob,
450 tree expr, sccs *expr_state, bool ref_p)
451 {
452 #define DFS_follow_tree_edge(DEST) \
453 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
454
455 enum tree_code code;
456
457 code = TREE_CODE (expr);
458
459 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
460 {
461 if (TREE_CODE (expr) != IDENTIFIER_NODE)
462 DFS_follow_tree_edge (TREE_TYPE (expr));
463 }
464
465 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
466 {
467 for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
468 DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
469 }
470
471 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
472 {
473 DFS_follow_tree_edge (TREE_REALPART (expr));
474 DFS_follow_tree_edge (TREE_IMAGPART (expr));
475 }
476
477 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
478 {
479 /* Drop names that were created for anonymous entities. */
480 if (DECL_NAME (expr)
481 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
482 && ANON_AGGRNAME_P (DECL_NAME (expr)))
483 ;
484 else
485 DFS_follow_tree_edge (DECL_NAME (expr));
486 DFS_follow_tree_edge (DECL_CONTEXT (expr));
487 }
488
489 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
490 {
491 DFS_follow_tree_edge (DECL_SIZE (expr));
492 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
493
494 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
495 special handling in LTO, it must be handled by streamer hooks. */
496
497 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
498
499 /* Do not follow DECL_ABSTRACT_ORIGIN. We cannot handle debug information
500 for early inlining so drop it on the floor instead of ICEing in
501 dwarf2out.c. */
502
503 if ((TREE_CODE (expr) == VAR_DECL
504 || TREE_CODE (expr) == PARM_DECL)
505 && DECL_HAS_VALUE_EXPR_P (expr))
506 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
507 if (TREE_CODE (expr) == VAR_DECL)
508 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
509 }
510
511 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
512 {
513 if (TREE_CODE (expr) == TYPE_DECL)
514 DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
515 DFS_follow_tree_edge (DECL_VINDEX (expr));
516 }
517
518 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
519 {
520 /* Make sure we don't inadvertently set the assembler name. */
521 if (DECL_ASSEMBLER_NAME_SET_P (expr))
522 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
523 DFS_follow_tree_edge (DECL_SECTION_NAME (expr));
524 DFS_follow_tree_edge (DECL_COMDAT_GROUP (expr));
525 }
526
527 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
528 {
529 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
530 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
531 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
532 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
533 DFS_follow_tree_edge (DECL_FCONTEXT (expr));
534 }
535
536 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
537 {
538 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
539 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
540 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
541 }
542
543 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
544 {
545 DFS_follow_tree_edge (TYPE_SIZE (expr));
546 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
547 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
548 DFS_follow_tree_edge (TYPE_NAME (expr));
549 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
550 reconstructed during fixup. */
551 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
552 during fixup. */
553 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
554 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
555 /* TYPE_CANONICAL is re-computed during type merging, so no need
556 to follow it here. */
557 DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
558 }
559
560 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
561 {
562 if (TREE_CODE (expr) == ENUMERAL_TYPE)
563 DFS_follow_tree_edge (TYPE_VALUES (expr));
564 else if (TREE_CODE (expr) == ARRAY_TYPE)
565 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
566 else if (RECORD_OR_UNION_TYPE_P (expr))
567 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
568 DFS_follow_tree_edge (t);
569 else if (TREE_CODE (expr) == FUNCTION_TYPE
570 || TREE_CODE (expr) == METHOD_TYPE)
571 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
572
573 if (!POINTER_TYPE_P (expr))
574 DFS_follow_tree_edge (TYPE_MINVAL (expr));
575 DFS_follow_tree_edge (TYPE_MAXVAL (expr));
576 if (RECORD_OR_UNION_TYPE_P (expr))
577 DFS_follow_tree_edge (TYPE_BINFO (expr));
578 }
579
580 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
581 {
582 DFS_follow_tree_edge (TREE_PURPOSE (expr));
583 DFS_follow_tree_edge (TREE_VALUE (expr));
584 DFS_follow_tree_edge (TREE_CHAIN (expr));
585 }
586
587 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
588 {
589 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
590 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
591 }
592
593 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
594 {
595 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
596 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
597 DFS_follow_tree_edge (TREE_BLOCK (expr));
598 }
599
600 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
601 {
602 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
603 /* ??? FIXME. See also streamer_write_chain. */
604 if (!(VAR_OR_FUNCTION_DECL_P (t)
605 && DECL_EXTERNAL (t)))
606 DFS_follow_tree_edge (t);
607
608 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
609
610 /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
611 handle - those that represent inlined function scopes.
612    For the rest, drop them on the floor instead of ICEing
613 in dwarf2out.c. */
614 if (inlined_function_outer_scope_p (expr))
615 {
616 tree ultimate_origin = block_ultimate_origin (expr);
617 DFS_follow_tree_edge (ultimate_origin);
618 }
619 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
620 information for early inlined BLOCKs so drop it on the floor instead
621 of ICEing in dwarf2out.c. */
622
623    /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN are not live at LTO
624 streaming time. */
625
626    /* Do not output BLOCK_SUBBLOCKS.  Instead, on streaming-in, this
627 list is re-constructed from BLOCK_SUPERCONTEXT. */
628 }
629
630 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
631 {
632 unsigned i;
633 tree t;
634
635 /* Note that the number of BINFO slots has already been emitted in
636 EXPR's header (see streamer_write_tree_header) because this length
637 is needed to build the empty BINFO node on the reader side. */
638 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
639 DFS_follow_tree_edge (t);
640 DFS_follow_tree_edge (BINFO_OFFSET (expr));
641 DFS_follow_tree_edge (BINFO_VTABLE (expr));
642 DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));
643
644 /* The number of BINFO_BASE_ACCESSES has already been emitted in
645 EXPR's bitfield section. */
646 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
647 DFS_follow_tree_edge (t);
648
649 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
650 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
651 }
652
653 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
654 {
655 unsigned i;
656 tree index, value;
657
658 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
659 {
660 DFS_follow_tree_edge (index);
661 DFS_follow_tree_edge (value);
662 }
663 }
664
665 #undef DFS_follow_tree_edge
666 }
667
668 /* Return a hash value for the tree T. */
669
670 static hashval_t
671 hash_tree (struct streamer_tree_cache_d *cache, tree t)
672 {
673 #define visit(SIBLING) \
674 do { \
675 unsigned ix; \
676 if (SIBLING && streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
677 v = iterative_hash_hashval_t (streamer_tree_cache_get_hash (cache, ix), v); \
678 } while (0)
679
680 /* Hash TS_BASE. */
681 enum tree_code code = TREE_CODE (t);
682 hashval_t v = iterative_hash_host_wide_int (code, 0);
683 if (!TYPE_P (t))
684 {
685 v = iterative_hash_host_wide_int (TREE_SIDE_EFFECTS (t)
686 | (TREE_CONSTANT (t) << 1)
687 | (TREE_READONLY (t) << 2)
688 | (TREE_PUBLIC (t) << 3), v);
689 }
690 v = iterative_hash_host_wide_int (TREE_ADDRESSABLE (t)
691 | (TREE_THIS_VOLATILE (t) << 1), v);
692 if (DECL_P (t))
693 v = iterative_hash_host_wide_int (DECL_UNSIGNED (t), v);
694 else if (TYPE_P (t))
695 v = iterative_hash_host_wide_int (TYPE_UNSIGNED (t), v);
696 if (TYPE_P (t))
697 v = iterative_hash_host_wide_int (TYPE_ARTIFICIAL (t), v);
698 else
699 v = iterative_hash_host_wide_int (TREE_NO_WARNING (t), v);
700 v = iterative_hash_host_wide_int (TREE_NOTHROW (t)
701 | (TREE_STATIC (t) << 1)
702 | (TREE_PROTECTED (t) << 2)
703 | (TREE_DEPRECATED (t) << 3), v);
704 if (code != TREE_BINFO)
705 v = iterative_hash_host_wide_int (TREE_PRIVATE (t), v);
706 if (TYPE_P (t))
707 v = iterative_hash_host_wide_int (TYPE_SATURATING (t)
708 | (TYPE_ADDR_SPACE (t) << 1), v);
709 else if (code == SSA_NAME)
710 v = iterative_hash_host_wide_int (SSA_NAME_IS_DEFAULT_DEF (t), v);
711
712 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
713 {
714 v = iterative_hash_host_wide_int (TREE_INT_CST_LOW (t), v);
715 v = iterative_hash_host_wide_int (TREE_INT_CST_HIGH (t), v);
716 }
717
718 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
719 {
720 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
721 v = iterative_hash_host_wide_int (r.cl, v);
722 v = iterative_hash_host_wide_int (r.decimal
723 | (r.sign << 1)
724 | (r.signalling << 2)
725 | (r.canonical << 3), v);
726 v = iterative_hash_host_wide_int (r.uexp, v);
727 for (unsigned i = 0; i < SIGSZ; ++i)
728 v = iterative_hash_host_wide_int (r.sig[i], v);
729 }
730
731 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
732 {
733 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
734 v = iterative_hash_host_wide_int (f.mode, v);
735 v = iterative_hash_host_wide_int (f.data.low, v);
736 v = iterative_hash_host_wide_int (f.data.high, v);
737 }
738
739 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
740 {
741 v = iterative_hash_host_wide_int (DECL_MODE (t), v);
742 v = iterative_hash_host_wide_int (DECL_NONLOCAL (t)
743 | (DECL_VIRTUAL_P (t) << 1)
744 | (DECL_IGNORED_P (t) << 2)
745 | (DECL_ABSTRACT (t) << 3)
746 | (DECL_ARTIFICIAL (t) << 4)
747 | (DECL_USER_ALIGN (t) << 5)
748 | (DECL_PRESERVE_P (t) << 6)
749 | (DECL_EXTERNAL (t) << 7)
750 | (DECL_GIMPLE_REG_P (t) << 8), v);
751 v = iterative_hash_host_wide_int (DECL_ALIGN (t), v);
752 if (code == LABEL_DECL)
753 {
754 v = iterative_hash_host_wide_int (EH_LANDING_PAD_NR (t), v);
755 v = iterative_hash_host_wide_int (LABEL_DECL_UID (t), v);
756 }
757 else if (code == FIELD_DECL)
758 {
759 v = iterative_hash_host_wide_int (DECL_PACKED (t)
760 | (DECL_NONADDRESSABLE_P (t) << 1),
761 v);
762 v = iterative_hash_host_wide_int (DECL_OFFSET_ALIGN (t), v);
763 }
764 else if (code == VAR_DECL)
765 {
766 v = iterative_hash_host_wide_int (DECL_HAS_DEBUG_EXPR_P (t)
767 | (DECL_NONLOCAL_FRAME (t) << 1),
768 v);
769 }
770 if (code == RESULT_DECL
771 || code == PARM_DECL
772 || code == VAR_DECL)
773 {
774 v = iterative_hash_host_wide_int (DECL_BY_REFERENCE (t), v);
775 if (code == VAR_DECL
776 || code == PARM_DECL)
777 v = iterative_hash_host_wide_int (DECL_HAS_VALUE_EXPR_P (t), v);
778 }
779 }
780
781 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
782 v = iterative_hash_host_wide_int (DECL_REGISTER (t), v);
783
784 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
785 {
786 v = iterative_hash_host_wide_int ((DECL_COMMON (t))
787 | (DECL_DLLIMPORT_P (t) << 1)
788 | (DECL_WEAK (t) << 2)
789 | (DECL_SEEN_IN_BIND_EXPR_P (t) << 3)
790 | (DECL_COMDAT (t) << 4)
791 | (DECL_VISIBILITY_SPECIFIED (t) << 6),
792 v);
793 v = iterative_hash_host_wide_int (DECL_VISIBILITY (t), v);
794 if (code == VAR_DECL)
795 {
796 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
797 v = iterative_hash_host_wide_int (DECL_HARD_REGISTER (t)
798 | (DECL_IN_CONSTANT_POOL (t) << 1),
799 v);
800 v = iterative_hash_host_wide_int (DECL_TLS_MODEL (t), v);
801 }
802 if (TREE_CODE (t) == FUNCTION_DECL)
803 v = iterative_hash_host_wide_int (DECL_FINAL_P (t)
804 | (DECL_CXX_CONSTRUCTOR_P (t) << 1)
805 | (DECL_CXX_DESTRUCTOR_P (t) << 2),
806 v);
807 if (VAR_OR_FUNCTION_DECL_P (t))
808 v = iterative_hash_host_wide_int (DECL_INIT_PRIORITY (t), v);
809 }
810
811 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
812 {
813 v = iterative_hash_host_wide_int (DECL_BUILT_IN_CLASS (t), v);
814 v = iterative_hash_host_wide_int (DECL_STATIC_CONSTRUCTOR (t)
815 | (DECL_STATIC_DESTRUCTOR (t) << 1)
816 | (DECL_UNINLINABLE (t) << 2)
817 | (DECL_POSSIBLY_INLINED (t) << 3)
818 | (DECL_IS_NOVOPS (t) << 4)
819 | (DECL_IS_RETURNS_TWICE (t) << 5)
820 | (DECL_IS_MALLOC (t) << 6)
821 | (DECL_IS_OPERATOR_NEW (t) << 7)
822 | (DECL_DECLARED_INLINE_P (t) << 8)
823 | (DECL_STATIC_CHAIN (t) << 9)
824 | (DECL_NO_INLINE_WARNING_P (t) << 10)
825 | (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t) << 11)
826 | (DECL_NO_LIMIT_STACK (t) << 12)
827 | (DECL_DISREGARD_INLINE_LIMITS (t) << 13)
828 | (DECL_PURE_P (t) << 14)
829 | (DECL_LOOPING_CONST_OR_PURE_P (t) << 15), v);
830 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
831 v = iterative_hash_host_wide_int (DECL_FUNCTION_CODE (t), v);
832 if (DECL_STATIC_DESTRUCTOR (t))
833 v = iterative_hash_host_wide_int (DECL_FINI_PRIORITY (t), v);
834 }
835
836 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
837 {
838 v = iterative_hash_host_wide_int (TYPE_MODE (t), v);
839 v = iterative_hash_host_wide_int (TYPE_STRING_FLAG (t)
840 | (TYPE_NO_FORCE_BLK (t) << 1)
841 | (TYPE_NEEDS_CONSTRUCTING (t) << 2)
842 | (TYPE_PACKED (t) << 3)
843 | (TYPE_RESTRICT (t) << 4)
844 | (TYPE_USER_ALIGN (t) << 5)
845 | (TYPE_READONLY (t) << 6), v);
846 if (RECORD_OR_UNION_TYPE_P (t))
847 {
848 v = iterative_hash_host_wide_int (TYPE_TRANSPARENT_AGGR (t)
849 | (TYPE_FINAL_P (t) << 1), v);
850 }
851 else if (code == ARRAY_TYPE)
852 v = iterative_hash_host_wide_int (TYPE_NONALIASED_COMPONENT (t), v);
853 v = iterative_hash_host_wide_int (TYPE_PRECISION (t), v);
854 v = iterative_hash_host_wide_int (TYPE_ALIGN (t), v);
855 v = iterative_hash_host_wide_int ((TYPE_ALIAS_SET (t) == 0
856 || (!in_lto_p
857 && get_alias_set (t) == 0))
858 ? 0 : -1, v);
859 }
860
861 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
862 v = iterative_hash (TRANSLATION_UNIT_LANGUAGE (t),
863 strlen (TRANSLATION_UNIT_LANGUAGE (t)), v);
864
865 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
866 v = iterative_hash (t, sizeof (struct cl_target_option), v);
867
868 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
869 v = iterative_hash (t, sizeof (struct cl_optimization), v);
870
871 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
872 v = iterative_hash_host_wide_int (IDENTIFIER_HASH_VALUE (t), v);
873
874 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
875 v = iterative_hash (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t), v);
876
877 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
878 {
879 if (POINTER_TYPE_P (t))
880 {
881 /* For pointers factor in the pointed-to type recursively as
882 we cannot recurse through only pointers.
883 ??? We can generalize this by keeping track of the
884 in-SCC edges for each tree (or arbitrarily the first
885 such edge) and hashing that in in a second stage
886 (instead of the quadratic mixing of the SCC we do now). */
887 hashval_t x;
888 unsigned ix;
889 if (streamer_tree_cache_lookup (cache, TREE_TYPE (t), &ix))
890 x = streamer_tree_cache_get_hash (cache, ix);
891 else
892 x = hash_tree (cache, TREE_TYPE (t));
893 v = iterative_hash_hashval_t (x, v);
894 }
895 else if (code != IDENTIFIER_NODE)
896 visit (TREE_TYPE (t));
897 }
898
899 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
900 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
901 visit (VECTOR_CST_ELT (t, i));
902
903 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
904 {
905 visit (TREE_REALPART (t));
906 visit (TREE_IMAGPART (t));
907 }
908
909 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
910 {
911 /* Drop names that were created for anonymous entities. */
912 if (DECL_NAME (t)
913 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
914 && ANON_AGGRNAME_P (DECL_NAME (t)))
915 ;
916 else
917 visit (DECL_NAME (t));
918 if (DECL_FILE_SCOPE_P (t))
919 ;
920 else
921 visit (DECL_CONTEXT (t));
922 }
923
924 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
925 {
926 visit (DECL_SIZE (t));
927 visit (DECL_SIZE_UNIT (t));
928 visit (DECL_ATTRIBUTES (t));
929 if ((code == VAR_DECL
930 || code == PARM_DECL)
931 && DECL_HAS_VALUE_EXPR_P (t))
932 visit (DECL_VALUE_EXPR (t));
933 if (code == VAR_DECL
934 && DECL_HAS_DEBUG_EXPR_P (t))
935 visit (DECL_DEBUG_EXPR (t));
936 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
937 be able to call get_symbol_initial_value. */
938 }
939
940 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
941 {
942 if (code == TYPE_DECL)
943 visit (DECL_ORIGINAL_TYPE (t));
944 visit (DECL_VINDEX (t));
945 }
946
947 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
948 {
949 if (DECL_ASSEMBLER_NAME_SET_P (t))
950 visit (DECL_ASSEMBLER_NAME (t));
951 visit (DECL_SECTION_NAME (t));
952 visit (DECL_COMDAT_GROUP (t));
953 }
954
955 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
956 {
957 visit (DECL_FIELD_OFFSET (t));
958 visit (DECL_BIT_FIELD_TYPE (t));
959 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
960 visit (DECL_FIELD_BIT_OFFSET (t));
961 visit (DECL_FCONTEXT (t));
962 }
963
964 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
965 {
966 visit (DECL_FUNCTION_PERSONALITY (t));
967 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
968 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
969 }
970
971 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
972 {
973 visit (TYPE_SIZE (t));
974 visit (TYPE_SIZE_UNIT (t));
975 visit (TYPE_ATTRIBUTES (t));
976 visit (TYPE_NAME (t));
977 visit (TYPE_MAIN_VARIANT (t));
978 if (TYPE_FILE_SCOPE_P (t))
979 ;
980 else
981 visit (TYPE_CONTEXT (t));
982 visit (TYPE_STUB_DECL (t));
983 }
984
985 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
986 {
987 if (code == ENUMERAL_TYPE)
988 visit (TYPE_VALUES (t));
989 else if (code == ARRAY_TYPE)
990 visit (TYPE_DOMAIN (t));
991 else if (RECORD_OR_UNION_TYPE_P (t))
992 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
993 visit (f);
994 else if (code == FUNCTION_TYPE
995 || code == METHOD_TYPE)
996 visit (TYPE_ARG_TYPES (t));
997 if (!POINTER_TYPE_P (t))
998 visit (TYPE_MINVAL (t));
999 visit (TYPE_MAXVAL (t));
1000 if (RECORD_OR_UNION_TYPE_P (t))
1001 visit (TYPE_BINFO (t));
1002 }
1003
1004 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1005 {
1006 visit (TREE_PURPOSE (t));
1007 visit (TREE_VALUE (t));
1008 visit (TREE_CHAIN (t));
1009 }
1010
1011 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1012 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1013 visit (TREE_VEC_ELT (t, i));
1014
1015 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1016 {
1017 v = iterative_hash_host_wide_int (TREE_OPERAND_LENGTH (t), v);
1018 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1019 visit (TREE_OPERAND (t, i));
1020 }
1021
1022 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1023 {
1024 unsigned i;
1025 tree b;
1026 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1027 visit (b);
1028 visit (BINFO_OFFSET (t));
1029 visit (BINFO_VTABLE (t));
1030 visit (BINFO_VPTR_FIELD (t));
1031 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1032 visit (b);
1033 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1034 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1035 }
1036
1037 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1038 {
1039 unsigned i;
1040 tree index, value;
1041 v = iterative_hash_host_wide_int (CONSTRUCTOR_NELTS (t), v);
1042 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1043 {
1044 visit (index);
1045 visit (value);
1046 }
1047 }
1048
1049 return v;
1050
1051 #undef visit
1052 }
1053
1054 /* Compare two SCC entries by their hash value for qsorting them. */
1055
1056 static int
1057 scc_entry_compare (const void *p1_, const void *p2_)
1058 {
1059 const scc_entry *p1 = (const scc_entry *) p1_;
1060 const scc_entry *p2 = (const scc_entry *) p2_;
1061 if (p1->hash < p2->hash)
1062 return -1;
1063 else if (p1->hash > p2->hash)
1064 return 1;
1065 return 0;
1066 }
1067
1068 /* Return a hash value for the SCC on the SCC stack from FIRST with
1069 size SIZE. */
1070
1071 static hashval_t
1072 hash_scc (struct streamer_tree_cache_d *cache, unsigned first, unsigned size)
1073 {
1074 /* Compute hash values for the SCC members. */
1075 for (unsigned i = 0; i < size; ++i)
1076 sccstack[first+i].hash = hash_tree (cache, sccstack[first+i].t);
1077
1078 if (size == 1)
1079 return sccstack[first].hash;
1080
1081   /* Sort the SCC's (tree, hash) pairs so that when we mix in
1082      all members of the SCC the hash value becomes independent of
1083      the order in which we visited the SCC.  Disregard hashes equal to
1084 the hash of the tree we mix into because we cannot guarantee
1085 a stable sort for those across different TUs. */
1086 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1087 hashval_t *tem = XALLOCAVEC (hashval_t, size);
1088 for (unsigned i = 0; i < size; ++i)
1089 {
1090 hashval_t hash = sccstack[first+i].hash;
1091 hashval_t orig_hash = hash;
1092 unsigned j;
1093 /* Skip same hashes. */
1094 for (j = i + 1;
1095 j < size && sccstack[first+j].hash == orig_hash; ++j)
1096 ;
1097 for (; j < size; ++j)
1098 hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
1099 for (j = 0; sccstack[first+j].hash != orig_hash; ++j)
1100 hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
1101 tem[i] = hash;
1102 }
1103 hashval_t scc_hash = 0;
1104 for (unsigned i = 0; i < size; ++i)
1105 {
1106 sccstack[first+i].hash = tem[i];
1107 scc_hash = iterative_hash_hashval_t (tem[i], scc_hash);
1108 }
1109 return scc_hash;
1110 }
1111
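/* Illustrative sketch, not part of GCC and kept under "#if 0": the
   order-independence idea used by hash_scc above, shown on a plain array
   of hash values.  Sorting the member hashes before mixing them makes the
   combined hash independent of the order in which the SCC members were
   visited; hash_scc itself does more work to also keep the per-member
   hashes stable.  */
#if 0
static int
example_hashval_compare (const void *p1_, const void *p2_)
{
  hashval_t h1 = *(const hashval_t *) p1_;
  hashval_t h2 = *(const hashval_t *) p2_;
  return h1 < h2 ? -1 : h1 > h2 ? 1 : 0;
}

static hashval_t
example_order_independent_hash (hashval_t *hashes, unsigned n)
{
  hashval_t v = 0;
  qsort (hashes, n, sizeof (hashval_t), example_hashval_compare);
  for (unsigned i = 0; i < n; ++i)
    v = iterative_hash_hashval_t (hashes[i], v);
  return v;
}
#endif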
1112 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1113 already in the streamer cache. Main routine called for
1114 each visit of EXPR. */
1115
1116 static void
1117 DFS_write_tree (struct output_block *ob, sccs *from_state,
1118 tree expr, bool ref_p, bool this_ref_p)
1119 {
1120 unsigned ix;
1121 sccs **slot;
1122
1123 /* Handle special cases. */
1124 if (expr == NULL_TREE)
1125 return;
1126
1127 /* Do not DFS walk into indexable trees. */
1128 if (this_ref_p && tree_is_indexable (expr))
1129 return;
1130
1131 /* Check if we already streamed EXPR. */
1132 if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
1133 return;
1134
1135 slot = (sccs **)pointer_map_insert (sccstate, expr);
1136 sccs *cstate = *slot;
1137 if (!cstate)
1138 {
1139 scc_entry e = { expr, 0 };
1140 /* Not yet visited. DFS recurse and push it onto the stack. */
1141 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
1142 sccstack.safe_push (e);
1143 cstate->dfsnum = next_dfs_num++;
1144 cstate->low = cstate->dfsnum;
1145
1146 if (streamer_handle_as_builtin_p (expr))
1147 ;
1148 else if (TREE_CODE (expr) == INTEGER_CST
1149 && !TREE_OVERFLOW (expr))
1150 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
1151 else
1152 {
1153 DFS_write_tree_body (ob, expr, cstate, ref_p);
1154
1155 /* Walk any LTO-specific edges. */
1156 if (DECL_P (expr)
1157 && TREE_CODE (expr) != FUNCTION_DECL
1158 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
1159 {
1160 /* Handle DECL_INITIAL for symbols. */
1161 tree initial = get_symbol_initial_value (ob, expr);
1162 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
1163 }
1164 }
1165
1166 /* See if we found an SCC. */
1167 if (cstate->low == cstate->dfsnum)
1168 {
1169 unsigned first, size;
1170 tree x;
1171
1172 /* Pop the SCC and compute its size. */
1173 first = sccstack.length ();
1174 do
1175 {
1176 x = sccstack[--first].t;
1177 }
1178 while (x != expr);
1179 size = sccstack.length () - first;
1180
1181 /* No need to compute hashes for LTRANS units, we don't perform
1182 any merging there. */
1183 hashval_t scc_hash = 0;
1184 unsigned scc_entry_len = 0;
1185 if (!flag_wpa)
1186 {
1187 scc_hash = hash_scc (ob->writer_cache, first, size);
1188
1189 /* Put the entries with the least number of collisions first. */
1190 unsigned entry_start = 0;
1191 scc_entry_len = size + 1;
1192 for (unsigned i = 0; i < size;)
1193 {
1194 unsigned from = i;
1195 for (i = i + 1; i < size
1196 && (sccstack[first + i].hash
1197 == sccstack[first + from].hash); ++i)
1198 ;
1199 if (i - from < scc_entry_len)
1200 {
1201 scc_entry_len = i - from;
1202 entry_start = from;
1203 }
1204 }
1205 for (unsigned i = 0; i < scc_entry_len; ++i)
1206 {
1207 scc_entry tem = sccstack[first + i];
1208 sccstack[first + i] = sccstack[first + entry_start + i];
1209 sccstack[first + entry_start + i] = tem;
1210 }
1211 }
1212
1213 /* Write LTO_tree_scc. */
1214 streamer_write_record_start (ob, LTO_tree_scc);
1215 streamer_write_uhwi (ob, size);
1216 streamer_write_uhwi (ob, scc_hash);
1217
1218 /* Write size-1 SCCs without wrapping them inside SCC bundles.
1219 All INTEGER_CSTs need to be handled this way as we need
1220 their type to materialize them. Also builtins are handled
1221 this way.
1222 ??? We still wrap these in LTO_tree_scc so at the
1223 input side we can properly identify the tree we want
1224      to ultimately return. */
1225 size_t old_len = ob->writer_cache->nodes.length ();
1226 if (size == 1)
1227 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
1228 else
1229 {
1230 /* Write the size of the SCC entry candidates. */
1231 streamer_write_uhwi (ob, scc_entry_len);
1232
1233 /* Write all headers and populate the streamer cache. */
1234 for (unsigned i = 0; i < size; ++i)
1235 {
1236 hashval_t hash = sccstack[first+i].hash;
1237 tree t = sccstack[first+i].t;
1238 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
1239 t, hash, &ix);
1240 gcc_assert (!exists_p);
1241
1242 if (!lto_is_streamable (t))
1243 internal_error ("tree code %qs is not supported "
1244 "in LTO streams",
1245 get_tree_code_name (TREE_CODE (t)));
1246
1247 gcc_checking_assert (!streamer_handle_as_builtin_p (t));
1248
1249 /* Write the header, containing everything needed to
1250 materialize EXPR on the reading side. */
1251 streamer_write_tree_header (ob, t);
1252 }
1253
1254 /* Write the bitpacks and tree references. */
1255 for (unsigned i = 0; i < size; ++i)
1256 {
1257 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
1258
1259 /* Mark the end of the tree. */
1260 streamer_write_zero (ob);
1261 }
1262 }
1263 gcc_assert (old_len + size == ob->writer_cache->nodes.length ());
1264
1265 /* Finally truncate the vector. */
1266 sccstack.truncate (first);
1267
1268 if (from_state)
1269 from_state->low = MIN (from_state->low, cstate->low);
1270 return;
1271 }
1272
1273 if (from_state)
1274 from_state->low = MIN (from_state->low, cstate->low);
1275 }
1276 gcc_checking_assert (from_state);
1277 if (cstate->dfsnum < from_state->dfsnum)
1278 from_state->low = MIN (cstate->dfsnum, from_state->low);
1279 }
1280
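/* Illustrative sketch, not part of GCC and kept under "#if 0": a minimal
   standalone version of the Tarjan low-link bookkeeping that
   DFS_write_tree uses, run on a tiny fixed graph.  The graph, array sizes
   and all names here are made up for illustration.  */
#if 0
#define EX_N 4

/* Adjacency matrix: 0 -> 1 -> 2 -> 0 form one SCC, 3 -> 0 is a singleton.  */
static int ex_succ[EX_N][EX_N] = {
  { 0, 1, 0, 0 },
  { 0, 0, 1, 0 },
  { 1, 0, 0, 0 },
  { 1, 0, 0, 0 }
};

static unsigned ex_dfsnum[EX_N], ex_low[EX_N], ex_next_dfs = 1;
static int ex_stack[EX_N], ex_sp, ex_on_stack[EX_N];

static void
example_tarjan_dfs (int u)
{
  ex_dfsnum[u] = ex_low[u] = ex_next_dfs++;
  ex_stack[ex_sp++] = u;
  ex_on_stack[u] = 1;

  for (int w = 0; w < EX_N; ++w)
    if (ex_succ[u][w])
      {
	if (ex_dfsnum[w] == 0)
	  {
	    /* Tree edge: recurse and propagate the low-link upwards, much
	       as DFS_write_tree does through FROM_STATE.  */
	    example_tarjan_dfs (w);
	    ex_low[u] = MIN (ex_low[u], ex_low[w]);
	  }
	else if (ex_on_stack[w])
	  /* Edge back into the SCC currently being built.  */
	  ex_low[u] = MIN (ex_low[u], ex_dfsnum[w]);
      }

  /* U is the root of an SCC: pop its members.  This is the point where
     DFS_write_tree streams out the LTO_tree_scc record.  */
  if (ex_low[u] == ex_dfsnum[u])
    {
      int w;
      do
	{
	  w = ex_stack[--ex_sp];
	  ex_on_stack[w] = 0;
	}
      while (w != u);
    }
}
#endif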
1281
1282 /* Emit the physical representation of tree node EXPR to output block
1283 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
1284 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1285
1286 void
1287 lto_output_tree (struct output_block *ob, tree expr,
1288 bool ref_p, bool this_ref_p)
1289 {
1290 unsigned ix;
1291 bool existed_p;
1292
1293 if (expr == NULL_TREE)
1294 {
1295 streamer_write_record_start (ob, LTO_null);
1296 return;
1297 }
1298
1299 if (this_ref_p && tree_is_indexable (expr))
1300 {
1301 lto_output_tree_ref (ob, expr);
1302 return;
1303 }
1304
1305 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1306 if (existed_p)
1307 {
1308 /* If a node has already been streamed out, make sure that
1309 we don't write it more than once. Otherwise, the reader
1310 will instantiate two different nodes for the same object. */
1311 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1312 streamer_write_uhwi (ob, ix);
1313 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1314 lto_tree_code_to_tag (TREE_CODE (expr)));
1315 lto_stats.num_pickle_refs_output++;
1316 }
1317 else
1318 {
1319 /* This is the first time we see EXPR, write all reachable
1320 trees to OB. */
1321 static bool in_dfs_walk;
1322
1323      /* Protect against recursion, which would mean a disconnect between
1324 what tree edges we walk in the DFS walk and what edges
1325 we stream out. */
1326 gcc_assert (!in_dfs_walk);
1327
1328 /* Start the DFS walk. */
1329 /* Save ob state ... */
1330 /* let's see ... */
1331 in_dfs_walk = true;
1332 sccstate = pointer_map_create ();
1333 gcc_obstack_init (&sccstate_obstack);
1334 next_dfs_num = 1;
1335 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
1336 sccstack.release ();
1337 pointer_map_destroy (sccstate);
1338 obstack_free (&sccstate_obstack, NULL);
1339 in_dfs_walk = false;
1340
1341 /* Finally append a reference to the tree we were writing.
1342 ??? If expr ended up as a singleton we could have
1343 inlined it here and avoid outputting a reference. */
1344 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1345 gcc_assert (existed_p);
1346 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1347 streamer_write_uhwi (ob, ix);
1348 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1349 lto_tree_code_to_tag (TREE_CODE (expr)));
1350 lto_stats.num_pickle_refs_output++;
1351 }
1352 }
1353
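/* Illustrative sketch, not part of GCC and kept under "#if 0": what a
   caller of lto_output_tree sees for a tree that is not indexable.  The
   first write streams the whole SCC containing the tree and appends a
   pickle reference; any later write of the same tree from the same output
   block finds it in ob->writer_cache and only emits an
   LTO_tree_pickle_reference record.  */
#if 0
static void
example_write_tree_twice (struct output_block *ob, tree t)
{
  lto_output_tree (ob, t, true, true);	/* Streams the SCC plus a reference.  */
  lto_output_tree (ob, t, true, true);	/* Only emits a pickle reference.  */
}
#endif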
1354
1355 /* Output to OB a list of try/catch handlers starting with FIRST. */
1356
1357 static void
1358 output_eh_try_list (struct output_block *ob, eh_catch first)
1359 {
1360 eh_catch n;
1361
1362 for (n = first; n; n = n->next_catch)
1363 {
1364 streamer_write_record_start (ob, LTO_eh_catch);
1365 stream_write_tree (ob, n->type_list, true);
1366 stream_write_tree (ob, n->filter_list, true);
1367 stream_write_tree (ob, n->label, true);
1368 }
1369
1370 streamer_write_record_start (ob, LTO_null);
1371 }
1372
1373
1374 /* Output EH region R to OB.  R may be NULL, in which case only an
1375    LTO_null record is written.  */
1377
1378 static void
1379 output_eh_region (struct output_block *ob, eh_region r)
1380 {
1381 enum LTO_tags tag;
1382
1383 if (r == NULL)
1384 {
1385 streamer_write_record_start (ob, LTO_null);
1386 return;
1387 }
1388
1389 if (r->type == ERT_CLEANUP)
1390 tag = LTO_ert_cleanup;
1391 else if (r->type == ERT_TRY)
1392 tag = LTO_ert_try;
1393 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1394 tag = LTO_ert_allowed_exceptions;
1395 else if (r->type == ERT_MUST_NOT_THROW)
1396 tag = LTO_ert_must_not_throw;
1397 else
1398 gcc_unreachable ();
1399
1400 streamer_write_record_start (ob, tag);
1401 streamer_write_hwi (ob, r->index);
1402
1403 if (r->outer)
1404 streamer_write_hwi (ob, r->outer->index);
1405 else
1406 streamer_write_zero (ob);
1407
1408 if (r->inner)
1409 streamer_write_hwi (ob, r->inner->index);
1410 else
1411 streamer_write_zero (ob);
1412
1413 if (r->next_peer)
1414 streamer_write_hwi (ob, r->next_peer->index);
1415 else
1416 streamer_write_zero (ob);
1417
1418 if (r->type == ERT_TRY)
1419 {
1420 output_eh_try_list (ob, r->u.eh_try.first_catch);
1421 }
1422 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1423 {
1424 stream_write_tree (ob, r->u.allowed.type_list, true);
1425 stream_write_tree (ob, r->u.allowed.label, true);
1426 streamer_write_uhwi (ob, r->u.allowed.filter);
1427 }
1428 else if (r->type == ERT_MUST_NOT_THROW)
1429 {
1430 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1431 bitpack_d bp = bitpack_create (ob->main_stream);
1432 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1433 streamer_write_bitpack (&bp);
1434 }
1435
1436 if (r->landing_pads)
1437 streamer_write_hwi (ob, r->landing_pads->index);
1438 else
1439 streamer_write_zero (ob);
1440 }
1441
1442
1443 /* Output landing pad LP to OB. */
1444
1445 static void
1446 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1447 {
1448 if (lp == NULL)
1449 {
1450 streamer_write_record_start (ob, LTO_null);
1451 return;
1452 }
1453
1454 streamer_write_record_start (ob, LTO_eh_landing_pad);
1455 streamer_write_hwi (ob, lp->index);
1456 if (lp->next_lp)
1457 streamer_write_hwi (ob, lp->next_lp->index);
1458 else
1459 streamer_write_zero (ob);
1460
1461 if (lp->region)
1462 streamer_write_hwi (ob, lp->region->index);
1463 else
1464 streamer_write_zero (ob);
1465
1466 stream_write_tree (ob, lp->post_landing_pad, true);
1467 }
1468
1469
1470 /* Output the existing eh_table to OB. */
1471
1472 static void
1473 output_eh_regions (struct output_block *ob, struct function *fn)
1474 {
1475 if (fn->eh && fn->eh->region_tree)
1476 {
1477 unsigned i;
1478 eh_region eh;
1479 eh_landing_pad lp;
1480 tree ttype;
1481
1482 streamer_write_record_start (ob, LTO_eh_table);
1483
1484 /* Emit the index of the root of the EH region tree. */
1485 streamer_write_hwi (ob, fn->eh->region_tree->index);
1486
1487 /* Emit all the EH regions in the region array. */
1488 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1489 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1490 output_eh_region (ob, eh);
1491
1492 /* Emit all landing pads. */
1493 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1494 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1495 output_eh_lp (ob, lp);
1496
1497 /* Emit all the runtime type data. */
1498 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1499 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1500 stream_write_tree (ob, ttype, true);
1501
1502 /* Emit the table of action chains. */
1503 if (targetm.arm_eabi_unwinder)
1504 {
1505 tree t;
1506 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1507 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1508 stream_write_tree (ob, t, true);
1509 }
1510 else
1511 {
1512 uchar c;
1513 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1514 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1515 streamer_write_char_stream (ob->main_stream, c);
1516 }
1517 }
1518
1519 /* The LTO_null either terminates the record or indicates that there
1520 are no eh_records at all. */
1521 streamer_write_record_start (ob, LTO_null);
1522 }
1523
1524
1525 /* Output all of the active ssa names to the ssa_names stream. */
1526
1527 static void
1528 output_ssa_names (struct output_block *ob, struct function *fn)
1529 {
1530 unsigned int i, len;
1531
1532 len = vec_safe_length (SSANAMES (fn));
1533 streamer_write_uhwi (ob, len);
1534
1535 for (i = 1; i < len; i++)
1536 {
1537 tree ptr = (*SSANAMES (fn))[i];
1538
1539 if (ptr == NULL_TREE
1540 || SSA_NAME_IN_FREE_LIST (ptr)
1541 || virtual_operand_p (ptr))
1542 continue;
1543
1544 streamer_write_uhwi (ob, i);
1545 streamer_write_char_stream (ob->main_stream,
1546 SSA_NAME_IS_DEFAULT_DEF (ptr));
1547 if (SSA_NAME_VAR (ptr))
1548 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1549 else
1550 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1551 stream_write_tree (ob, TREE_TYPE (ptr), true);
1552 }
1553
1554 streamer_write_zero (ob);
1555 }
1556
1557
1558 /* Output the cfg. */
1559
1560 static void
1561 output_cfg (struct output_block *ob, struct function *fn)
1562 {
1563 struct lto_output_stream *tmp_stream = ob->main_stream;
1564 basic_block bb;
1565
1566 ob->main_stream = ob->cfg_stream;
1567
1568 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1569 profile_status_for_function (fn));
1570
1571 /* Output the number of the highest basic block. */
1572 streamer_write_uhwi (ob, last_basic_block_for_function (fn));
1573
1574 FOR_ALL_BB_FN (bb, fn)
1575 {
1576 edge_iterator ei;
1577 edge e;
1578
1579 streamer_write_hwi (ob, bb->index);
1580
1581 /* Output the successors and the edge flags. */
1582 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1583 FOR_EACH_EDGE (e, ei, bb->succs)
1584 {
1585 streamer_write_uhwi (ob, e->dest->index);
1586 streamer_write_hwi (ob, e->probability);
1587 streamer_write_gcov_count (ob, e->count);
1588 streamer_write_uhwi (ob, e->flags);
1589 }
1590 }
1591
1592 streamer_write_hwi (ob, -1);
1593
1594 bb = ENTRY_BLOCK_PTR;
1595 while (bb->next_bb)
1596 {
1597 streamer_write_hwi (ob, bb->next_bb->index);
1598 bb = bb->next_bb;
1599 }
1600
1601 streamer_write_hwi (ob, -1);
1602
1603 /* ??? The cfgloop interface is tied to cfun. */
1604 gcc_assert (cfun == fn);
1605
1606 /* Output the number of loops. */
1607 streamer_write_uhwi (ob, number_of_loops (fn));
1608
1609 /* Output each loop, skipping the tree root which has number zero. */
1610 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1611 {
1612 struct loop *loop = get_loop (fn, i);
1613
1614 /* Write the index of the loop header. That's enough to rebuild
1615 the loop tree on the reader side. Stream -1 for an unused
1616 loop entry. */
1617 if (!loop)
1618 {
1619 streamer_write_hwi (ob, -1);
1620 continue;
1621 }
1622 else
1623 streamer_write_hwi (ob, loop->header->index);
1624
1625 /* Write everything copy_loop_info copies. */
1626 streamer_write_enum (ob->main_stream,
1627 loop_estimation, EST_LAST, loop->estimate_state);
1628 streamer_write_hwi (ob, loop->any_upper_bound);
1629 if (loop->any_upper_bound)
1630 {
1631 streamer_write_uhwi (ob, loop->nb_iterations_upper_bound.low);
1632 streamer_write_hwi (ob, loop->nb_iterations_upper_bound.high);
1633 }
1634 streamer_write_hwi (ob, loop->any_estimate);
1635 if (loop->any_estimate)
1636 {
1637 streamer_write_uhwi (ob, loop->nb_iterations_estimate.low);
1638 streamer_write_hwi (ob, loop->nb_iterations_estimate.high);
1639 }
1640 }
1641
1642 ob->main_stream = tmp_stream;
1643 }
1644
1645
1646 /* Create the header in the file using OB. If the section type is for
1647 a function, set FN to the decl for that function. */
1648
1649 void
1650 produce_asm (struct output_block *ob, tree fn)
1651 {
1652 enum lto_section_type section_type = ob->section_type;
1653 struct lto_function_header header;
1654 char *section_name;
1655 struct lto_output_stream *header_stream;
1656
1657 if (section_type == LTO_section_function_body)
1658 {
1659 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1660 section_name = lto_get_section_name (section_type, name, NULL);
1661 }
1662 else
1663 section_name = lto_get_section_name (section_type, NULL, NULL);
1664
1665 lto_begin_section (section_name, !flag_wpa);
1666 free (section_name);
1667
1668 /* The entire header stream is computed here. */
1669 memset (&header, 0, sizeof (struct lto_function_header));
1670
1671 /* Write the header. */
1672 header.lto_header.major_version = LTO_major_version;
1673 header.lto_header.minor_version = LTO_minor_version;
1674
1675 header.compressed_size = 0;
1676
1677 if (section_type == LTO_section_function_body)
1678 header.cfg_size = ob->cfg_stream->total_size;
1679 header.main_size = ob->main_stream->total_size;
1680 header.string_size = ob->string_stream->total_size;
1681
1682 header_stream = XCNEW (struct lto_output_stream);
1683 lto_output_data_stream (header_stream, &header, sizeof header);
1684 lto_write_stream (header_stream);
1685 free (header_stream);
1686
1687 /* Put all of the gimple and the string table out to the asm file as a
1688 block of text. */
1689 if (section_type == LTO_section_function_body)
1690 lto_write_stream (ob->cfg_stream);
1691 lto_write_stream (ob->main_stream);
1692 lto_write_stream (ob->string_stream);
1693
1694 lto_end_section ();
1695 }
1696
1697
1698 /* Output the base body of struct function FN using output block OB. */
1699
1700 static void
1701 output_struct_function_base (struct output_block *ob, struct function *fn)
1702 {
1703 struct bitpack_d bp;
1704 unsigned i;
1705 tree t;
1706
1707 /* Output the static chain and non-local goto save area. */
1708 stream_write_tree (ob, fn->static_chain_decl, true);
1709 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
1710
1711 /* Output all the local variables in the function. */
1712 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
1713 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
1714 stream_write_tree (ob, t, true);
1715
1716 /* Output current IL state of the function. */
1717 streamer_write_uhwi (ob, fn->curr_properties);
1718
1719 /* Write all the attributes for FN. */
1720 bp = bitpack_create (ob->main_stream);
1721 bp_pack_value (&bp, fn->is_thunk, 1);
1722 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
1723 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
1724 bp_pack_value (&bp, fn->returns_struct, 1);
1725 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
1726 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
1727 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
1728 bp_pack_value (&bp, fn->after_inlining, 1);
1729 bp_pack_value (&bp, fn->stdarg, 1);
1730 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
1731 bp_pack_value (&bp, fn->calls_alloca, 1);
1732 bp_pack_value (&bp, fn->calls_setjmp, 1);
1733 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
1734 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
1735
1736 /* Output the function start and end loci. */
1737 stream_output_location (ob, &bp, fn->function_start_locus);
1738 stream_output_location (ob, &bp, fn->function_end_locus);
1739
1740 streamer_write_bitpack (&bp);
1741 }
1742
1743
1744 /* Output the body of function NODE->DECL. */
1745
1746 static void
1747 output_function (struct cgraph_node *node)
1748 {
1749 tree function;
1750 struct function *fn;
1751 basic_block bb;
1752 struct output_block *ob;
1753
1754 function = node->symbol.decl;
1755 fn = DECL_STRUCT_FUNCTION (function);
1756 ob = create_output_block (LTO_section_function_body);
1757
1758 clear_line_info (ob);
1759 ob->cgraph_node = node;
1760
1761 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
1762
1763 /* Set current_function_decl and cfun. */
1764 push_cfun (fn);
1765
1766 /* Make string 0 be a NULL string. */
1767 streamer_write_char_stream (ob->string_stream, 0);
1768
1769 streamer_write_record_start (ob, LTO_function);
1770
1771 /* Output decls for parameters and args. */
1772 stream_write_tree (ob, DECL_RESULT (function), true);
1773 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
1774
1775 /* Output DECL_INITIAL for the function, which contains the tree of
1776 lexical scopes. */
1777 stream_write_tree (ob, DECL_INITIAL (function), true);
1778
1779 /* We also stream abstract functions, for which we stream only the parts
1780    needed for debug info.  */
1781 if (gimple_has_body_p (function))
1782 {
1783 streamer_write_uhwi (ob, 1);
1784 output_struct_function_base (ob, fn);
1785
1786 /* Output all the SSA names used in the function. */
1787 output_ssa_names (ob, fn);
1788
1789 /* Output any exception handling regions. */
1790 output_eh_regions (ob, fn);
1791
1792
1793 /* We will renumber the statements. The code that does this uses
1794 the same ordering that we use for serializing them so we can use
1795 the same code on the other end and not have to write out the
1796 statement numbers. We do not assign UIDs to virtual PHIs here
1797 because they get recomputed on the fly, which would make the
1798 numbers inconsistent. */
1799 set_gimple_stmt_max_uid (cfun, 0);
1800 FOR_ALL_BB (bb)
1801 {
1802 gimple_stmt_iterator gsi;
1803 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1804 {
1805 gimple stmt = gsi_stmt (gsi);
1806
1807 /* Virtual PHIs are not going to be streamed. */
1808 if (!virtual_operand_p (gimple_phi_result (stmt)))
1809 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1810 }
1811 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1812 {
1813 gimple stmt = gsi_stmt (gsi);
1814 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1815 }
1816 }
1817 /* To avoid keeping duplicate gimple UIDs in the statements, renumber
1818 the virtual PHIs now. */
1819 FOR_ALL_BB (bb)
1820 {
1821 gimple_stmt_iterator gsi;
1822 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1823 {
1824 gimple stmt = gsi_stmt (gsi);
1825 if (virtual_operand_p (gimple_phi_result (stmt)))
1826 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1827 }
1828 }
1829
1830 /* Output the code for the function. */
1831 FOR_ALL_BB_FN (bb, fn)
1832 output_bb (ob, bb, fn);
1833
1834 /* The terminator for this function. */
1835 streamer_write_record_start (ob, LTO_null);
1836
1837 output_cfg (ob, fn);
1838
1839 pop_cfun ();
1840 }
1841 else
1842 streamer_write_uhwi (ob, 0);
1843
1844 /* Create a section to hold the pickled output of this function. */
1845 produce_asm (ob, function);
1846
1847 destroy_output_block (ob);
1848 }
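/* To summarize the layout produced above: a function body section holds
   the LTO_function record, DECL_RESULT, the DECL_ARGUMENTS chain and
   DECL_INITIAL, followed by a word that is 1 when a gimple body follows
   (struct function bits, SSA names, EH regions, the basic blocks, an
   LTO_null terminator and the CFG) or 0 for an abstract function that
   is streamed only for debug info.  */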
1849
1850
1851 /* Emit toplevel asms. */
1852
1853 void
1854 lto_output_toplevel_asms (void)
1855 {
1856 struct output_block *ob;
1857 struct asm_node *can;
1858 char *section_name;
1859 struct lto_output_stream *header_stream;
1860 struct lto_asm_header header;
1861
1862 if (! asm_nodes)
1863 return;
1864
1865 ob = create_output_block (LTO_section_asm);
1866
1867 /* Make string 0 be a NULL string. */
1868 streamer_write_char_stream (ob->string_stream, 0);
1869
1870 for (can = asm_nodes; can; can = can->next)
1871 {
1872 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
1873 streamer_write_hwi (ob, can->order);
1874 }
1875
1876 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
1877
1878 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
1879 lto_begin_section (section_name, !flag_wpa);
1880 free (section_name);
1881
1882 /* The entire header stream is computed here. */
1883 memset (&header, 0, sizeof (header));
1884
1885 /* Write the header. */
1886 header.lto_header.major_version = LTO_major_version;
1887 header.lto_header.minor_version = LTO_minor_version;
1888
1889 header.main_size = ob->main_stream->total_size;
1890 header.string_size = ob->string_stream->total_size;
1891
1892 header_stream = XCNEW (struct lto_output_stream);
1893 lto_output_data_stream (header_stream, &header, sizeof (header));
1894 lto_write_stream (header_stream);
1895 free (header_stream);
1896
1897 /* Put all of the gimple and the string table out to the asm file as a
1898 block of text. */
1899 lto_write_stream (ob->main_stream);
1900 lto_write_stream (ob->string_stream);
1901
1902 lto_end_section ();
1903
1904 destroy_output_block (ob);
1905 }
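/* The resulting LTO_section_asm section is thus laid out as the
   lto_asm_header (versions plus the main and string stream sizes),
   then the main stream holding, for each top-level asm, a reference to
   its text and its order, terminated by a null string reference, and
   finally the string table with the text itself.  */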
1906
1907
1908 /* Copy the function body of NODE without deserializing. */
1909
1910 static void
1911 copy_function (struct cgraph_node *node)
1912 {
1913 tree function = node->symbol.decl;
1914 struct lto_file_decl_data *file_data = node->symbol.lto_file_data;
1915 struct lto_output_stream *output_stream = XCNEW (struct lto_output_stream);
1916 const char *data;
1917 size_t len;
1918 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
1919 char *section_name =
1920 lto_get_section_name (LTO_section_function_body, name, NULL);
1921 size_t i, j;
1922 struct lto_in_decl_state *in_state;
1923 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
1924
1925 lto_begin_section (section_name, !flag_wpa);
1926 free (section_name);
1927
1928 /* We may have renamed the declaration, e.g., a static function. */
1929 name = lto_get_decl_name_mapping (file_data, name);
1930
1931 data = lto_get_section_data (file_data, LTO_section_function_body,
1932 name, &len);
1933 gcc_assert (data);
1934
1935 /* Do a bit copy of the function body. */
1936 lto_output_data_stream (output_stream, data, len);
1937 lto_write_stream (output_stream);
1938
1939 /* Copy decls. */
1940 in_state =
1941 lto_get_function_in_decl_state (node->symbol.lto_file_data, function);
1942 gcc_assert (in_state);
1943
1944 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
1945 {
1946 size_t n = in_state->streams[i].size;
1947 tree *trees = in_state->streams[i].trees;
1948 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
1949
1950 /* The out state must have the same indices as the in state, so just
1951 copy the vector. All the encoders in the out state must be empty
1952 when we reach here. */
1953 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
1954 encoder->trees.reserve_exact (n);
1955 for (j = 0; j < n; j++)
1956 encoder->trees.safe_push (trees[j]);
1957 }
1958
1959 lto_free_section_data (file_data, LTO_section_function_body, name,
1960 data, len);
1961 free (output_stream);
1962 lto_end_section ();
1963 }
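/* The bit copy above relies on reference indices in the copied body
   resolving to the same trees on the way out as they did on the way in,
   which is why the out-state encoders start empty and receive the
   in-state trees in identical order.  A hypothetical checking loop (not
   present in this file) could state the invariant as:

     for (i = 0; i < LTO_N_DECL_STREAMS; i++)
       gcc_checking_assert (lto_tree_ref_encoder_size (&out_state->streams[i])
			    == in_state->streams[i].size);  */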
1964
1965
1966 /* Main entry point from the pass manager. */
1967
1968 static void
1969 lto_output (void)
1970 {
1971 struct lto_out_decl_state *decl_state;
1972 #ifdef ENABLE_CHECKING
1973 bitmap output = lto_bitmap_alloc ();
1974 #endif
1975 int i, n_nodes;
1976 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
1977
1978 /* Initialize the streamer. */
1979 lto_streamer_init ();
1980
1981 n_nodes = lto_symtab_encoder_size (encoder);
1982 /* Process only the functions with bodies. */
1983 for (i = 0; i < n_nodes; i++)
1984 {
1985 symtab_node snode = lto_symtab_encoder_deref (encoder, i);
1986 cgraph_node *node = dyn_cast <cgraph_node> (snode);
1987 if (node
1988 && lto_symtab_encoder_encode_body_p (encoder, node)
1989 && !node->symbol.alias)
1990 {
1991 #ifdef ENABLE_CHECKING
1992 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->symbol.decl)));
1993 bitmap_set_bit (output, DECL_UID (node->symbol.decl));
1994 #endif
1995 decl_state = lto_new_out_decl_state ();
1996 lto_push_out_decl_state (decl_state);
1997 if (gimple_has_body_p (node->symbol.decl) || !flag_wpa)
1998 output_function (node);
1999 else
2000 copy_function (node);
2001 gcc_assert (lto_get_out_decl_state () == decl_state);
2002 lto_pop_out_decl_state ();
2003 lto_record_function_out_decl_state (node->symbol.decl, decl_state);
2004 }
2005 }
2006
2007 /* Emit the callgraph after emitting function bodies. This needs to
2008 be done now to make sure that all the statements in every function
2009 have been renumbered so that edges can be associated with call
2010 statements using the statement UIDs. */
2011 output_symtab ();
2012
2013 #ifdef ENABLE_CHECKING
2014 lto_bitmap_free (output);
2015 #endif
2016 }
2017
2018 namespace {
2019
2020 const pass_data pass_data_ipa_lto_gimple_out =
2021 {
2022 IPA_PASS, /* type */
2023 "lto_gimple_out", /* name */
2024 OPTGROUP_NONE, /* optinfo_flags */
2025 true, /* has_gate */
2026 false, /* has_execute */
2027 TV_IPA_LTO_GIMPLE_OUT, /* tv_id */
2028 0, /* properties_required */
2029 0, /* properties_provided */
2030 0, /* properties_destroyed */
2031 0, /* todo_flags_start */
2032 0, /* todo_flags_finish */
2033 };
2034
2035 class pass_ipa_lto_gimple_out : public ipa_opt_pass_d
2036 {
2037 public:
2038 pass_ipa_lto_gimple_out (gcc::context *ctxt)
2039 : ipa_opt_pass_d (pass_data_ipa_lto_gimple_out, ctxt,
2040 NULL, /* generate_summary */
2041 lto_output, /* write_summary */
2042 NULL, /* read_summary */
2043 lto_output, /* write_optimization_summary */
2044 NULL, /* read_optimization_summary */
2045 NULL, /* stmt_fixup */
2046 0, /* function_transform_todo_flags_start */
2047 NULL, /* function_transform */
2048 NULL) /* variable_transform */
2049 {}
2050
2051 /* opt_pass methods: */
2052 bool gate () { return gate_lto_out (); }
2053
2054 }; // class pass_ipa_lto_gimple_out
2055
2056 } // anon namespace
2057
2058 ipa_opt_pass_d *
2059 make_pass_ipa_lto_gimple_out (gcc::context *ctxt)
2060 {
2061 return new pass_ipa_lto_gimple_out (ctxt);
2062 }
2063
2064
2065 /* Write each node encoded by ENCODER to OB, as well as those reachable
2066 from it and required for correct representation of its semantics.
2067 Each node in ENCODER must be a global declaration or a type. A node
2068 is written only once, even if it appears multiple times in the
2069 vector. Certain transitively-reachable nodes, such as those
2070 representing expressions, may be duplicated, but such nodes
2071 must not appear in ENCODER itself. */
2072
2073 static void
2074 write_global_stream (struct output_block *ob,
2075 struct lto_tree_ref_encoder *encoder)
2076 {
2077 tree t;
2078 size_t index;
2079 const size_t size = lto_tree_ref_encoder_size (encoder);
2080
2081 for (index = 0; index < size; index++)
2082 {
2083 t = lto_tree_ref_encoder_get_tree (encoder, index);
2084 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2085 stream_write_tree (ob, t, false);
2086 }
2087 }
2088
2089
2090 /* Write a sequence of indices into the globals vector corresponding
2091 to the trees in ENCODER. These are used by the reader to map the
2092 indices used to refer to global entities within function bodies to
2093 their referents. */
2094
2095 static void
2096 write_global_references (struct output_block *ob,
2097 struct lto_output_stream *ref_stream,
2098 struct lto_tree_ref_encoder *encoder)
2099 {
2100 tree t;
2101 uint32_t index;
2102 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2103
2104 /* Write size as 32-bit unsigned. */
2105 lto_output_data_stream (ref_stream, &size, sizeof (int32_t));
2106
2107 for (index = 0; index < size; index++)
2108 {
2109 uint32_t slot_num;
2110
2111 t = lto_tree_ref_encoder_get_tree (encoder, index);
2112 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2113 gcc_assert (slot_num != (unsigned)-1);
2114 lto_output_data_stream (ref_stream, &slot_num, sizeof slot_num);
2115 }
2116 }
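/* On the wire, the block written above is simply a 32-bit element count
   followed by that many 32-bit tree-cache slot numbers.  A consumer
   could walk it roughly as follows (illustrative only; P and
   MAP_NEXT_GLOBAL are hypothetical):

     uint32_t n, slot;
     memcpy (&n, p, 4); p += 4;
     while (n--)
       {
	 memcpy (&slot, p, 4); p += 4;
	 map_next_global (slot);
       }  */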
2117
2118
2119 /* Write all the streams in an lto_out_decl_state STATE using
2120 output block OB. */
2121
2122 void
2123 lto_output_decl_state_streams (struct output_block *ob,
2124 struct lto_out_decl_state *state)
2125 {
2126 int i;
2127
2128 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2129 write_global_stream (ob, &state->streams[i]);
2130 }
2131
2132
2133 /* Write all the references in an lto_out_decl_state STATE using
2134 output block OB and output stream OUT_STREAM. */
2135
2136 void
2137 lto_output_decl_state_refs (struct output_block *ob,
2138 struct lto_output_stream *out_stream,
2139 struct lto_out_decl_state *state)
2140 {
2141 unsigned i;
2142 uint32_t ref;
2143 tree decl;
2144
2145 /* Write a reference to the FUNCTION_DECL. If there is no function,
2146 write a reference to void_type_node. */
2147 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2148 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2149 gcc_assert (ref != (unsigned)-1);
2150 lto_output_data_stream (out_stream, &ref, sizeof (uint32_t));
2151
2152 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2153 write_global_references (ob, out_stream, &state->streams[i]);
2154 }
2155
2156
2157 /* Return the written size of STATE. */
2158
2159 static size_t
2160 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2161 {
2162 int i;
2163 size_t size;
2164
2165 size = sizeof (int32_t); /* fn_ref. */
2166 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2167 {
2168 size += sizeof (int32_t); /* vector size. */
2169 size += (lto_tree_ref_encoder_size (&state->streams[i])
2170 * sizeof (int32_t));
2171 }
2172 return size;
2173 }
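/* For example, a state whose first stream records three trees and whose
   remaining streams are empty is accounted for as 4 (fn_ref) + (4 + 3*4)
   for the first stream + 4 for each empty stream, i.e.
   16 + 4 * LTO_N_DECL_STREAMS bytes in total.  */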
2174
2175
2176 /* Write symbol T, which is held in CACHE, into STREAM. SEEN is the set
2177 of symbols we have already written. */
2178
2179 static void
2180 write_symbol (struct streamer_tree_cache_d *cache,
2181 struct lto_output_stream *stream,
2182 tree t, struct pointer_set_t *seen, bool alias)
2183 {
2184 const char *name;
2185 enum gcc_plugin_symbol_kind kind;
2186 enum gcc_plugin_symbol_visibility visibility;
2187 unsigned slot_num;
2188 unsigned HOST_WIDEST_INT size;
2189 const char *comdat;
2190 unsigned char c;
2191
2192 /* None of the following kinds of symbols are needed in the
2193 symbol table. */
2194 if (!TREE_PUBLIC (t)
2195 || is_builtin_fn (t)
2196 || DECL_ABSTRACT (t)
2197 || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
2198 return;
2199 gcc_assert (TREE_CODE (t) != RESULT_DECL);
2200
2201 gcc_assert (TREE_CODE (t) == VAR_DECL
2202 || TREE_CODE (t) == FUNCTION_DECL);
2203
2204 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2205
2206 /* This behaves like assemble_name_raw in varasm.c, performing the
2207 same name manipulations that ASM_OUTPUT_LABELREF does. */
2208 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2209
2210 if (pointer_set_contains (seen, name))
2211 return;
2212 pointer_set_insert (seen, name);
2213
2214 streamer_tree_cache_lookup (cache, t, &slot_num);
2215 gcc_assert (slot_num != (unsigned)-1);
2216
2217 if (DECL_EXTERNAL (t))
2218 {
2219 if (DECL_WEAK (t))
2220 kind = GCCPK_WEAKUNDEF;
2221 else
2222 kind = GCCPK_UNDEF;
2223 }
2224 else
2225 {
2226 if (DECL_WEAK (t))
2227 kind = GCCPK_WEAKDEF;
2228 else if (DECL_COMMON (t))
2229 kind = GCCPK_COMMON;
2230 else
2231 kind = GCCPK_DEF;
2232
2233 /* When something is defined, it should have a node attached. */
2234 gcc_assert (alias || TREE_CODE (t) != VAR_DECL
2235 || varpool_get_node (t)->symbol.definition);
2236 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2237 || (cgraph_get_node (t)
2238 && cgraph_get_node (t)->symbol.definition));
2239 }
2240
2241 /* Imitate what default_elf_asm_output_external does.
2242 When a symbol is external, we need to output it with DEFAULT visibility
2243 when compiling with -fvisibility=default, but with HIDDEN visibility
2244 when the symbol has the attribute (visibility("hidden")) specified.
2245 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2246 right. */
2247
2248 if (DECL_EXTERNAL (t)
2249 && !targetm.binds_local_p (t))
2250 visibility = GCCPV_DEFAULT;
2251 else
2252 switch (DECL_VISIBILITY (t))
2253 {
2254 case VISIBILITY_DEFAULT:
2255 visibility = GCCPV_DEFAULT;
2256 break;
2257 case VISIBILITY_PROTECTED:
2258 visibility = GCCPV_PROTECTED;
2259 break;
2260 case VISIBILITY_HIDDEN:
2261 visibility = GCCPV_HIDDEN;
2262 break;
2263 case VISIBILITY_INTERNAL:
2264 visibility = GCCPV_INTERNAL;
2265 break;
2266 }
2267
2268 if (kind == GCCPK_COMMON
2269 && DECL_SIZE_UNIT (t)
2270 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2271 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2272 else
2273 size = 0;
2274
2275 if (DECL_ONE_ONLY (t))
2276 comdat = IDENTIFIER_POINTER (DECL_COMDAT_GROUP (t));
2277 else
2278 comdat = "";
2279
2280 lto_output_data_stream (stream, name, strlen (name) + 1);
2281 lto_output_data_stream (stream, comdat, strlen (comdat) + 1);
2282 c = (unsigned char) kind;
2283 lto_output_data_stream (stream, &c, 1);
2284 c = (unsigned char) visibility;
2285 lto_output_data_stream (stream, &c, 1);
2286 lto_output_data_stream (stream, &size, 8);
2287 lto_output_data_stream (stream, &slot_num, 4);
2288 }
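/* Each record emitted above therefore consists of the NUL-terminated
   assembler name, the NUL-terminated comdat group (empty when there is
   none), one byte each for the symbol kind and visibility, an 8-byte
   size and a 4-byte tree-cache slot number - the layout the LTO linker
   plugin is expected to parse back from the symbol table section.  */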
2289
2290 /* Return true if NODE should appear in the plugin symbol table. */
2291
2292 bool
2293 output_symbol_p (symtab_node node)
2294 {
2295 struct cgraph_node *cnode;
2296 if (!symtab_real_symbol_p (node))
2297 return false;
2298 /* We keep external functions in the symtab for the sake of inlining
2299 and devirtualization. We do not want to see them in the symbol table
2300 as references unless they are really used. */
2301 cnode = dyn_cast <cgraph_node> (node);
2302 if (cnode && (!node->symbol.definition || DECL_EXTERNAL (cnode->symbol.decl))
2303 && cnode->callers)
2304 return true;
2305
2306 /* Ignore all references coming from the initializers of external vars -
2307 they are not really part of the compilation unit until folding uses them.
2308 Some symbols, such as references to external construction vtables,
2309 cannot be referred to at all. We decide this in can_refer_decl_in_current_unit_p. */
2310 if (!node->symbol.definition || DECL_EXTERNAL (node->symbol.decl))
2311 {
2312 int i;
2313 struct ipa_ref *ref;
2314 for (i = 0; ipa_ref_list_referring_iterate (&node->symbol.ref_list,
2315 i, ref); i++)
2316 {
2317 if (ref->use == IPA_REF_ALIAS)
2318 continue;
2319 if (is_a <cgraph_node> (ref->referring))
2320 return true;
2321 if (!DECL_EXTERNAL (ref->referring->symbol.decl))
2322 return true;
2323 }
2324 return false;
2325 }
2326 return true;
2327 }
2328
2329
2330 /* Write an IL symbol table to OB. The symbols to emit are taken from
2331 the symtab node encoder in OB's decl state. */
2332
2333 static void
2334 produce_symtab (struct output_block *ob)
2335 {
2336 struct streamer_tree_cache_d *cache = ob->writer_cache;
2337 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2338 struct pointer_set_t *seen;
2339 struct lto_output_stream stream;
2340 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2341 lto_symtab_encoder_iterator lsei;
2342
2343 lto_begin_section (section_name, false);
2344 free (section_name);
2345
2346 seen = pointer_set_create ();
2347 memset (&stream, 0, sizeof (stream));
2348
2349 /* Write the symbol table.
2350 First write everything defined and then all declarations.
2351 This is necessary to handle cases where we have duplicated symbols. */
2352 for (lsei = lsei_start (encoder);
2353 !lsei_end_p (lsei); lsei_next (&lsei))
2354 {
2355 symtab_node node = lsei_node (lsei);
2356
2357 if (!output_symbol_p (node) || DECL_EXTERNAL (node->symbol.decl))
2358 continue;
2359 write_symbol (cache, &stream, node->symbol.decl, seen, false);
2360 }
2361 for (lsei = lsei_start (encoder);
2362 !lsei_end_p (lsei); lsei_next (&lsei))
2363 {
2364 symtab_node node = lsei_node (lsei);
2365
2366 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->symbol.decl))
2367 continue;
2368 write_symbol (cache, &stream, node->symbol.decl, seen, false);
2369 }
2370
2371 lto_write_stream (&stream);
2372 pointer_set_destroy (seen);
2373
2374 lto_end_section ();
2375 }
2376
2377
2378 /* This pass is run after all of the functions are serialized and all
2379 of the IPA passes have written their serialized forms. This pass
2380 causes the vector of all of the global decls and types used from
2381 this file to be written into a section that can then be read back
2382 to recover these on the other side. */
2383
2384 static void
2385 produce_asm_for_decls (void)
2386 {
2387 struct lto_out_decl_state *out_state;
2388 struct lto_out_decl_state *fn_out_state;
2389 struct lto_decl_header header;
2390 char *section_name;
2391 struct output_block *ob;
2392 struct lto_output_stream *header_stream, *decl_state_stream;
2393 unsigned idx, num_fns;
2394 size_t decl_state_size;
2395 int32_t num_decl_states;
2396
2397 ob = create_output_block (LTO_section_decls);
2398 ob->global = true;
2399
2400 memset (&header, 0, sizeof (struct lto_decl_header));
2401
2402 section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2403 lto_begin_section (section_name, !flag_wpa);
2404 free (section_name);
2405
2406 /* Make string 0 be a NULL string. */
2407 streamer_write_char_stream (ob->string_stream, 0);
2408
2409 gcc_assert (!alias_pairs);
2410
2411 /* Write the global symbols. */
2412 out_state = lto_get_out_decl_state ();
2413 num_fns = lto_function_decl_states.length ();
2414 lto_output_decl_state_streams (ob, out_state);
2415 for (idx = 0; idx < num_fns; idx++)
2416 {
2417 fn_out_state =
2418 lto_function_decl_states[idx];
2419 lto_output_decl_state_streams (ob, fn_out_state);
2420 }
2421
2422 header.lto_header.major_version = LTO_major_version;
2423 header.lto_header.minor_version = LTO_minor_version;
2424
2425 /* Currently not used. This field would allow us to preallocate
2426 the globals vector, so that it need not be resized as it is extended. */
2427 header.num_nodes = -1;
2428
2429 /* Compute the total size of all decl out states. */
2430 decl_state_size = sizeof (int32_t);
2431 decl_state_size += lto_out_decl_state_written_size (out_state);
2432 for (idx = 0; idx < num_fns; idx++)
2433 {
2434 fn_out_state =
2435 lto_function_decl_states[idx];
2436 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2437 }
2438 header.decl_state_size = decl_state_size;
2439
2440 header.main_size = ob->main_stream->total_size;
2441 header.string_size = ob->string_stream->total_size;
2442
2443 header_stream = XCNEW (struct lto_output_stream);
2444 lto_output_data_stream (header_stream, &header, sizeof header);
2445 lto_write_stream (header_stream);
2446 free (header_stream);
2447
2448 /* Write the main out-decl state, followed by out-decl states of
2449 functions. */
2450 decl_state_stream = XCNEW (struct lto_output_stream);
2451 num_decl_states = num_fns + 1;
2452 lto_output_data_stream (decl_state_stream, &num_decl_states,
2453 sizeof (num_decl_states));
2454 lto_output_decl_state_refs (ob, decl_state_stream, out_state);
2455 for (idx = 0; idx < num_fns; idx++)
2456 {
2457 fn_out_state =
2458 lto_function_decl_states[idx];
2459 lto_output_decl_state_refs (ob, decl_state_stream, fn_out_state);
2460 }
2461 lto_write_stream (decl_state_stream);
2462 free (decl_state_stream);
2463
2464 lto_write_stream (ob->main_stream);
2465 lto_write_stream (ob->string_stream);
2466
2467 lto_end_section ();
2468
2469 /* Write the symbol table. It is used by the linker to determine
2470 dependencies, and thus we can skip it for WPA. */
2471 if (!flag_wpa)
2472 produce_symtab (ob);
2473
2474 /* Write command line opts. */
2475 lto_write_options ();
2476
2477 /* Deallocate memory and clean up. */
2478 for (idx = 0; idx < num_fns; idx++)
2479 {
2480 fn_out_state =
2481 lto_function_decl_states[idx];
2482 lto_delete_out_decl_state (fn_out_state);
2483 }
2484 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2485 lto_function_decl_states.release ();
2486 destroy_output_block (ob);
2487 }
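/* Putting the pieces together, the LTO_section_decls section written
   above consists of the lto_decl_header, a 32-bit count of decl states,
   the reference block of the global out-decl state followed by one
   block per function, the main stream with the pickled global trees,
   and finally the string table.  */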
2488
2489
2490 namespace {
2491
2492 const pass_data pass_data_ipa_lto_finish_out =
2493 {
2494 IPA_PASS, /* type */
2495 "lto_decls_out", /* name */
2496 OPTGROUP_NONE, /* optinfo_flags */
2497 true, /* has_gate */
2498 false, /* has_execute */
2499 TV_IPA_LTO_DECL_OUT, /* tv_id */
2500 0, /* properties_required */
2501 0, /* properties_provided */
2502 0, /* properties_destroyed */
2503 0, /* todo_flags_start */
2504 0, /* todo_flags_finish */
2505 };
2506
2507 class pass_ipa_lto_finish_out : public ipa_opt_pass_d
2508 {
2509 public:
2510 pass_ipa_lto_finish_out (gcc::context *ctxt)
2511 : ipa_opt_pass_d (pass_data_ipa_lto_finish_out, ctxt,
2512 NULL, /* generate_summary */
2513 produce_asm_for_decls, /* write_summary */
2514 NULL, /* read_summary */
2515 produce_asm_for_decls, /* write_optimization_summary */
2516 NULL, /* read_optimization_summary */
2517 NULL, /* stmt_fixup */
2518 0, /* function_transform_todo_flags_start */
2519 NULL, /* function_transform */
2520 NULL) /* variable_transform */
2521 {}
2522
2523 /* opt_pass methods: */
2524 bool gate () { return gate_lto_out (); }
2525
2526 }; // class pass_ipa_lto_finish_out
2527
2528 } // anon namespace
2529
2530 ipa_opt_pass_d *
2531 make_pass_ipa_lto_finish_out (gcc::context *ctxt)
2532 {
2533 return new pass_ipa_lto_finish_out (ctxt);
2534 }