]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/lto-streamer-out.c
re PR go/78628 (GO fails to build a translation unit decl)
[thirdparty/gcc.git] / gcc / lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2017 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-streamer.h"
34 #include "alias.h"
35 #include "stor-layout.h"
36 #include "gimple-iterator.h"
37 #include "except.h"
38 #include "lto-symtab.h"
39 #include "cgraph.h"
40 #include "cfgloop.h"
41 #include "builtins.h"
42 #include "gomp-constants.h"
43 #include "debug.h"
44
45
46 static void lto_write_tree (struct output_block*, tree, bool);
47
/* Reset the location info cached in output block OB.  This forces the
   next lto_output_location call to emit file/line/column in full
   rather than as "unchanged" deltas against a previous location.  */

static void
clear_line_info (struct output_block *ob)
{
  ob->current_file = NULL;
  ob->current_line = 0;
  ob->current_col = 0;
  ob->current_sysp = false;
}
58
59
60 /* Create the output block and return it. SECTION_TYPE is
61 LTO_section_function_body or LTO_static_initializer. */
62
63 struct output_block *
64 create_output_block (enum lto_section_type section_type)
65 {
66 struct output_block *ob = XCNEW (struct output_block);
67
68 ob->section_type = section_type;
69 ob->decl_state = lto_get_out_decl_state ();
70 ob->main_stream = XCNEW (struct lto_output_stream);
71 ob->string_stream = XCNEW (struct lto_output_stream);
72 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
73
74 if (section_type == LTO_section_function_body)
75 ob->cfg_stream = XCNEW (struct lto_output_stream);
76
77 clear_line_info (ob);
78
79 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
80 gcc_obstack_init (&ob->obstack);
81
82 return ob;
83 }
84
85
86 /* Destroy the output block OB. */
87
88 void
89 destroy_output_block (struct output_block *ob)
90 {
91 enum lto_section_type section_type = ob->section_type;
92
93 delete ob->string_hash_table;
94 ob->string_hash_table = NULL;
95
96 free (ob->main_stream);
97 free (ob->string_stream);
98 if (section_type == LTO_section_function_body)
99 free (ob->cfg_stream);
100
101 streamer_tree_cache_delete (ob->writer_cache);
102 obstack_free (&ob->obstack, NULL);
103
104 free (ob);
105 }
106
107
/* Look up NODE in the type table and write the index for it to OB:
   an LTO_type_ref record tag followed by NODE's position in the type
   index table of OB's decl state.  */

static void
output_type_ref (struct output_block *ob, tree node)
{
  streamer_write_record_start (ob, LTO_type_ref);
  lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
}
116
117
/* Return true if tree node T is written to various tables.  For these
   nodes, we sometimes want to write their physical representation
   (via lto_output_tree), and sometimes we need to emit an index
   reference into a table (via lto_output_tree_ref).

   Note the order of the tests below matters: later tests assume the
   earlier ones did not match.  */

static bool
tree_is_indexable (tree t)
{
  /* Parameters and return values of functions of variably modified types
     must go to global stream, because they may be used in the type
     definition.  */
  if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
      && DECL_CONTEXT (t))
    return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
  /* IMPORTED_DECL is put into BLOCK and thus it never can be shared.  */
  else if (TREE_CODE (t) == IMPORTED_DECL)
    return false;
  /* Function-local entities are streamed with the body they belong
     to, never through a global index.  */
  else if (((VAR_P (t) && !TREE_STATIC (t))
	    || TREE_CODE (t) == TYPE_DECL
	    || TREE_CODE (t) == CONST_DECL
	    || TREE_CODE (t) == NAMELIST_DECL)
	   && decl_function_context (t))
    return false;
  else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
    return false;
  /* Variably modified types need to be streamed alongside function
     bodies because they can refer to local entities.  Together with
     them we have to localize their members as well.
     ??? In theory that includes non-FIELD_DECLs as well.  */
  else if (TYPE_P (t)
	   && variably_modified_type_p (t, NULL_TREE))
    return false;
  else if (TREE_CODE (t) == FIELD_DECL
	   && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
    return false;
  else
    /* Everything remaining: all types, all other decls, and SSA names
       are indexable.  */
    return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
}
156
157
/* Output info about new location into bitpack BP.
   After outputting bitpack, lto_output_location_data has
   to be done to output actual data.

   The encoding is delta-based against the location last emitted
   through OB (current_file/line/col), so the emission order here is
   part of the wire format and must match the reader.  */

void
lto_output_location (struct output_block *ob, struct bitpack_d *bp,
		     location_t loc)
{
  expanded_location xloc;

  loc = LOCATION_LOCUS (loc);
  /* Reserved locations (e.g. UNKNOWN_LOCATION) are packed as a small
     enum value; anything else gets RESERVED_LOCATION_COUNT as a marker
     followed by the expanded components below.  */
  bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
			loc < RESERVED_LOCATION_COUNT
			? loc : RESERVED_LOCATION_COUNT);
  if (loc < RESERVED_LOCATION_COUNT)
    return;

  xloc = expand_location (loc);

  /* One "changed" bit per component so the reader knows which of the
     fields below actually follow.  */
  bp_pack_value (bp, ob->current_file != xloc.file, 1);
  bp_pack_value (bp, ob->current_line != xloc.line, 1);
  bp_pack_value (bp, ob->current_col != xloc.column, 1);

  if (ob->current_file != xloc.file)
    {
      bp_pack_string (ob, bp, xloc.file, true);
      bp_pack_value (bp, xloc.sysp, 1);
    }
  ob->current_file = xloc.file;
  ob->current_sysp = xloc.sysp;

  if (ob->current_line != xloc.line)
    bp_pack_var_len_unsigned (bp, xloc.line);
  ob->current_line = xloc.line;

  if (ob->current_col != xloc.column)
    bp_pack_var_len_unsigned (bp, xloc.column);
  ob->current_col = xloc.column;
}
197
198
/* If EXPR is an indexable tree node, output a reference to it to
   output block OB.  Otherwise, output the physical representation of
   EXPR to OB.

   Each case emits a record tag followed by EXPR's position in the
   matching index table of OB's decl state; the tags are part of the
   LTO wire format.  */

static void
lto_output_tree_ref (struct output_block *ob, tree expr)
{
  enum tree_code code;

  /* Types are always referenced through the type table.  */
  if (TYPE_P (expr))
    {
      output_type_ref (ob, expr);
      return;
    }

  code = TREE_CODE (expr);
  switch (code)
    {
    case SSA_NAME:
      /* Only the version number is emitted; the reader resolves it in
	 the function's SSA name table.  */
      streamer_write_record_start (ob, LTO_ssa_name_ref);
      streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
      break;

    case FIELD_DECL:
      streamer_write_record_start (ob, LTO_field_decl_ref);
      lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case FUNCTION_DECL:
      streamer_write_record_start (ob, LTO_function_decl_ref);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case VAR_DECL:
    case DEBUG_EXPR_DECL:
      /* Only globals (or function-scope statics) may be referenced
	 through the global decl table.  */
      gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
      /* FALLTHRU */
    case PARM_DECL:
      streamer_write_record_start (ob, LTO_global_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case CONST_DECL:
      streamer_write_record_start (ob, LTO_const_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case IMPORTED_DECL:
      gcc_assert (decl_function_context (expr) == NULL);
      streamer_write_record_start (ob, LTO_imported_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TYPE_DECL:
      streamer_write_record_start (ob, LTO_type_decl_ref);
      lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMELIST_DECL:
      streamer_write_record_start (ob, LTO_namelist_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMESPACE_DECL:
      streamer_write_record_start (ob, LTO_namespace_decl_ref);
      lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case LABEL_DECL:
      streamer_write_record_start (ob, LTO_label_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case RESULT_DECL:
      streamer_write_record_start (ob, LTO_result_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TRANSLATION_UNIT_DECL:
      streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    default:
      /* No other node is indexable, so it should have been handled by
	 lto_output_tree.  */
      gcc_unreachable ();
    }
}
288
289
290 /* Return true if EXPR is a tree node that can be written to disk. */
291
292 static inline bool
293 lto_is_streamable (tree expr)
294 {
295 enum tree_code code = TREE_CODE (expr);
296
297 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
298 name version in lto_output_tree_ref (see output_ssa_names). */
299 return !is_lang_specific (expr)
300 && code != SSA_NAME
301 && code != CALL_EXPR
302 && code != LANG_TYPE
303 && code != MODIFY_EXPR
304 && code != INIT_EXPR
305 && code != TARGET_EXPR
306 && code != BIND_EXPR
307 && code != WITH_CLEANUP_EXPR
308 && code != STATEMENT_LIST
309 && (code == CASE_LABEL_EXPR
310 || code == DECL_EXPR
311 || TREE_CODE_CLASS (code) != tcc_statement);
312 }
313
/* Very rough estimate of streaming size of the initializer.  If we ignored
   presence of strings, we could simply just count number of non-indexable
   tree nodes and number of references to indexable nodes.  Strings however
   may be very large and we do not want to dump them into the global stream.

   walk_tree callback: subtract an estimated per-node size from the
   budget pointed to by DATA; the walk stops (by returning the current
   node) once the budget goes negative.  */

static tree
subtract_estimated_size (tree *tp, int *ws, void *data)
{
  long *sum = (long *)data;
  if (tree_is_indexable (*tp))
    {
      /* Indexable tree is one reference to global stream.
	 Guess it may be about 4 bytes.  */
      *sum -= 4;
      /* Do not walk into children of an indexable node; they belong
	 to the global stream, not to this initializer.  */
      *ws = 0;
    }
  /* String table entry + base of tree node needs to be streamed.  */
  if (TREE_CODE (*tp) == STRING_CST)
    *sum -= TREE_STRING_LENGTH (*tp) + 8;
  else
    {
      /* Identifiers are also variable length but should not appear
	 naked in constructor.  */
      gcc_checking_assert (TREE_CODE (*tp) != IDENTIFIER_NODE);
      /* We do not really make attempt to work out size of pickled tree, as
	 it is very variable.  Make it bigger than the reference.  */
      *sum -= 16;
    }
  /* Budget exhausted: returning non-NULL terminates walk_tree.  */
  if (*sum < 0)
    return *tp;
  return NULL_TREE;
}
348
349
350 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
351
352 static tree
353 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
354 {
355 gcc_checking_assert (DECL_P (expr)
356 && TREE_CODE (expr) != FUNCTION_DECL
357 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
358
359 /* Handle DECL_INITIAL for symbols. */
360 tree initial = DECL_INITIAL (expr);
361 if (VAR_P (expr)
362 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
363 && !DECL_IN_CONSTANT_POOL (expr)
364 && initial)
365 {
366 varpool_node *vnode;
367 /* Extra section needs about 30 bytes; do not produce it for simple
368 scalar values. */
369 if (!(vnode = varpool_node::get (expr))
370 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
371 initial = error_mark_node;
372 if (initial != error_mark_node)
373 {
374 long max_size = 30;
375 if (walk_tree (&initial, subtract_estimated_size, (void *)&max_size,
376 NULL))
377 initial = error_mark_node;
378 }
379 }
380
381 return initial;
382 }
383
384
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  The header for EXPR must already have
   been written by the caller (see lto_write_tree).  */

static void
lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
{
  /* Pack all the non-pointer fields in EXPR into a bitpack and write
     the resulting bitpack.  */
  streamer_write_tree_bitfields (ob, expr);

  /* Write all the pointer fields in EXPR.  */
  streamer_write_tree_body (ob, expr, ref_p);

  /* Write any LTO-specific data to OB.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    {
      /* Handle DECL_INITIAL for symbols.  */
      tree initial = get_symbol_initial_value
			 (ob->decl_state->symtab_node_encoder, expr);
      stream_write_tree (ob, initial, ref_p);
    }

  /* Stream references to early generated DIEs.  Keep in sync with the
     trees handled in dwarf2out_die_ref_for_decl.  */
  if ((DECL_P (expr)
       && TREE_CODE (expr) != FIELD_DECL
       && TREE_CODE (expr) != DEBUG_EXPR_DECL
       && TREE_CODE (expr) != TYPE_DECL)
      || TREE_CODE (expr) == BLOCK)
    {
      const char *sym;
      unsigned HOST_WIDE_INT off;
      if (debug_info_level > DINFO_LEVEL_NONE
	  && debug_hooks->die_ref_for_decl (expr, &sym, &off))
	{
	  /* A DIE reference is a symbol name plus an offset.  */
	  streamer_write_string (ob, ob->main_stream, sym, true);
	  streamer_write_uhwi (ob, off);
	}
      else
	/* A NULL string marks "no early DIE" for the reader.  */
	streamer_write_string (ob, ob->main_stream, NULL, true);
    }
}
431
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  */

static void
lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
{
  if (!lto_is_streamable (expr))
    internal_error ("tree code %qs is not supported in LTO streams",
		    get_tree_code_name (TREE_CODE (expr)));

  /* Write the header, containing everything needed to materialize
     EXPR on the reading side.  */
  streamer_write_tree_header (ob, expr);

  /* Write the bitfields, pointer fields and LTO-specific data.  */
  lto_write_tree_1 (ob, expr, ref_p);

  /* Mark the end of EXPR.  */
  streamer_write_zero (ob);
}
453
/* Emit the physical representation of tree node EXPR to output block OB.
   If THIS_REF_P is true, the leaves of EXPR are emitted as references via
   lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.
   HASH is the SCC hash under which EXPR is recorded in the writer
   cache.  */

static void
lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
		   bool ref_p, bool this_ref_p)
{
  unsigned ix;

  /* An indexable node must have been emitted as a reference instead.  */
  gcc_checking_assert (expr != NULL_TREE
		       && !(this_ref_p && tree_is_indexable (expr)));

  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
					      expr, hash, &ix);
  /* EXPR must not have been streamed before.  */
  gcc_assert (!exists_p);
  if (TREE_CODE (expr) == INTEGER_CST
      && !TREE_OVERFLOW (expr))
    {
      /* Shared INTEGER_CST nodes are special because they need their
	 original type to be materialized by the reader (to implement
	 TYPE_CACHED_VALUES).  */
      streamer_write_integer_cst (ob, expr, ref_p);
    }
  else
    {
      /* This is the first time we see EXPR, write its fields
	 to OB.  */
      lto_write_tree (ob, expr, ref_p);
    }
}
485
/* Worker that walks the tree graph reachable from a root expression,
   partitions it into strongly connected components and streams each
   SCC as one LTO_tree_scc record.  All work happens in the
   constructor.  */

class DFS
{
public:
  DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
       bool single_p);
  ~DFS ();

  /* A tree paired with its hash (zero until hashed by hash_scc).  */
  struct scc_entry
  {
    tree t;
    hashval_t hash;
  };
  /* Stack of trees belonging to SCCs that are not yet complete.  */
  vec<scc_entry> sccstack;

private:
  /* Per-tree DFS state: discovery number and the smallest discovery
     number reachable from the node (its "lowlink").  */
  struct sccs
  {
    unsigned int dfsnum;
    unsigned int low;
  };
  /* One pending edge of the explicit (non-recursive) DFS walk.  */
  struct worklist
  {
    tree expr;
    sccs *from_state;
    sccs *cstate;
    bool ref_p;
    bool this_ref_p;
  };

  static int scc_entry_compare (const void *, const void *);

  void DFS_write_tree_body (struct output_block *ob,
			    tree expr, sccs *expr_state, bool ref_p);

  void DFS_write_tree (struct output_block *ob, sccs *from_state,
		       tree expr, bool ref_p, bool this_ref_p);

  hashval_t
  hash_scc (struct output_block *ob, unsigned first, unsigned size,
	    bool ref_p, bool this_ref_p);

  /* Map from tree to its DFS state; states live on SCCSTATE_OBSTACK.  */
  hash_map<tree, sccs *> sccstate;
  vec<worklist> worklist_vec;
  struct obstack sccstate_obstack;
};
531
/* Emit the physical representation of tree node EXPR to output block OB,
   using depth-first search on the subgraph.  If THIS_REF_P is true, the
   leaves of EXPR are emitted as references via lto_output_tree_ref.
   REF_P is used for streaming siblings of EXPR.  If SINGLE_P is true,
   this is for a rewalk of a single leaf SCC.

   The walk uses an explicit worklist instead of recursion; each
   worklist entry is processed twice — once to discover the node
   (CSTATE == NULL) and once after its children are done.  */

DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
	  bool single_p)
{
  unsigned int next_dfs_num = 1;
  sccstack.create (0);
  gcc_obstack_init (&sccstate_obstack);
  worklist_vec = vNULL;
  DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
  while (!worklist_vec.is_empty ())
    {
      worklist &w = worklist_vec.last ();
      expr = w.expr;
      sccs *from_state = w.from_state;
      sccs *cstate = w.cstate;
      ref_p = w.ref_p;
      this_ref_p = w.this_ref_p;
      if (cstate == NULL)
	{
	  /* First visit of this worklist entry.  */
	  sccs **slot = &sccstate.get_or_insert (expr);
	  cstate = *slot;
	  if (cstate)
	    {
	      /* EXPR was reached before; only update the parent's
		 lowlink and retire the edge.  */
	      gcc_checking_assert (from_state);
	      if (cstate->dfsnum < from_state->dfsnum)
		from_state->low = MIN (cstate->dfsnum, from_state->low);
	      worklist_vec.pop ();
	      continue;
	    }

	  scc_entry e = { expr, 0 };
	  /* Not yet visited.  DFS recurse and push it onto the stack.  */
	  *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
	  sccstack.safe_push (e);
	  cstate->dfsnum = next_dfs_num++;
	  cstate->low = cstate->dfsnum;
	  w.cstate = cstate;

	  if (TREE_CODE (expr) == INTEGER_CST
	      && !TREE_OVERFLOW (expr))
	    /* INTEGER_CSTs only need their type edge followed.  */
	    DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
	  else
	    {
	      DFS_write_tree_body (ob, expr, cstate, ref_p);

	      /* Walk any LTO-specific edges.  */
	      if (DECL_P (expr)
		  && TREE_CODE (expr) != FUNCTION_DECL
		  && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
		{
		  /* Handle DECL_INITIAL for symbols.  */
		  tree initial
		    = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
						expr);
		  DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
		}
	    }
	  continue;
	}

      /* Second visit: children are done.  See if we found an SCC.  */
      if (cstate->low == cstate->dfsnum)
	{
	  unsigned first, size;
	  tree x;

	  /* If we are re-walking a single leaf SCC just pop it,
	     let earlier worklist item access the sccstack.  */
	  if (single_p)
	    {
	      worklist_vec.pop ();
	      continue;
	    }

	  /* Pop the SCC and compute its size.  */
	  first = sccstack.length ();
	  do
	    {
	      x = sccstack[--first].t;
	    }
	  while (x != expr);
	  size = sccstack.length () - first;

	  /* No need to compute hashes for LTRANS units, we don't perform
	     any merging there.  */
	  hashval_t scc_hash = 0;
	  unsigned scc_entry_len = 0;
	  if (!flag_wpa)
	    {
	      scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);

	      /* Put the entries with the least number of collisions first.  */
	      unsigned entry_start = 0;
	      scc_entry_len = size + 1;
	      for (unsigned i = 0; i < size;)
		{
		  /* Find the run of entries sharing one hash value.  */
		  unsigned from = i;
		  for (i = i + 1; i < size
		       && (sccstack[first + i].hash
			   == sccstack[first + from].hash); ++i)
		    ;
		  if (i - from < scc_entry_len)
		    {
		      scc_entry_len = i - from;
		      entry_start = from;
		    }
		}
	      for (unsigned i = 0; i < scc_entry_len; ++i)
		std::swap (sccstack[first + i],
			   sccstack[first + entry_start + i]);

	      /* We already sorted SCC deterministically in hash_scc.  */

	      /* Check that we have only one SCC.
		 Naturally we may have conflicts if hash function is not
		 strong enough.  Let's see how far this gets.  */
	      gcc_checking_assert (scc_entry_len == 1);
	    }

	  /* Write LTO_tree_scc.  */
	  streamer_write_record_start (ob, LTO_tree_scc);
	  streamer_write_uhwi (ob, size);
	  streamer_write_uhwi (ob, scc_hash);

	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
	     All INTEGER_CSTs need to be handled this way as we need
	     their type to materialize them.  Also builtins are handled
	     this way.
	     ??? We still wrap these in LTO_tree_scc so at the
	     input side we can properly identify the tree we want
	     to ultimately return.  */
	  if (size == 1)
	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
	  else
	    {
	      /* Write the size of the SCC entry candidates.  */
	      streamer_write_uhwi (ob, scc_entry_len);

	      /* Write all headers and populate the streamer cache.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  hashval_t hash = sccstack[first+i].hash;
		  tree t = sccstack[first+i].t;
		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
							      t, hash, NULL);
		  gcc_assert (!exists_p);

		  if (!lto_is_streamable (t))
		    internal_error ("tree code %qs is not supported "
				    "in LTO streams",
				    get_tree_code_name (TREE_CODE (t)));

		  /* Write the header, containing everything needed to
		     materialize EXPR on the reading side.  */
		  streamer_write_tree_header (ob, t);
		}

	      /* Write the bitpacks and tree references.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

		  /* Mark the end of the tree.  */
		  streamer_write_zero (ob);
		}
	    }

	  /* Finally truncate the vector.  */
	  sccstack.truncate (first);

	  if (from_state)
	    from_state->low = MIN (from_state->low, cstate->low);
	  worklist_vec.pop ();
	  continue;
	}

      /* Not an SCC root: propagate the lowlink to the parent.  */
      gcc_checking_assert (from_state);
      from_state->low = MIN (from_state->low, cstate->low);
      if (cstate->dfsnum < from_state->dfsnum)
	from_state->low = MIN (cstate->dfsnum, from_state->low);
      worklist_vec.pop ();
    }
  worklist_vec.release ();
}
721
/* Release the resources owned by the DFS walker.  */

DFS::~DFS ()
{
  sccstack.release ();
  obstack_free (&sccstate_obstack, NULL);
}
727
/* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
   DFS recurse for all tree edges originating from it.  The set of
   edges followed here must be kept in sync with the pointer fields
   written by streamer_write_tree_body.  */

void
DFS::DFS_write_tree_body (struct output_block *ob,
			  tree expr, sccs *expr_state, bool ref_p)
{
#define DFS_follow_tree_edge(DEST) \
  DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)

  enum tree_code code;

  code = TREE_CODE (expr);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (TREE_CODE (expr) != IDENTIFIER_NODE)
	DFS_follow_tree_edge (TREE_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
	DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      DFS_follow_tree_edge (TREE_REALPART (expr));
      DFS_follow_tree_edge (TREE_IMAGPART (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (expr)
	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
	  && anon_aggrname_p (DECL_NAME (expr)))
	;
      else
	DFS_follow_tree_edge (DECL_NAME (expr));
      /* A decl without context is parked in the translation unit.  */
      if (TREE_CODE (expr) != TRANSLATION_UNIT_DECL
	  && ! DECL_CONTEXT (expr))
	DFS_follow_tree_edge ((*all_translation_units)[0]);
      else
	DFS_follow_tree_edge (DECL_CONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      DFS_follow_tree_edge (DECL_SIZE (expr));
      DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));

      /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
	 special handling in LTO, it must be handled by streamer hooks.  */

      DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));

      /* Do not follow DECL_ABSTRACT_ORIGIN.  We cannot handle debug information
	 for early inlining so drop it on the floor instead of ICEing in
	 dwarf2out.c.
	 We however use DECL_ABSTRACT_ORIGIN == error_mark_node to mark
	 declarations which should be eliminated by decl merging.  Be sure none
	 leaks to this point.  */
      gcc_assert (DECL_ABSTRACT_ORIGIN (expr) != error_mark_node);
      DFS_follow_tree_edge (DECL_ABSTRACT_ORIGIN (expr));

      if ((VAR_P (expr)
	   || TREE_CODE (expr) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (expr))
	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
      if (VAR_P (expr))
	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (TREE_CODE (expr) == TYPE_DECL)
	DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (expr))
	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
      DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
      DFS_follow_tree_edge (DECL_FCONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      DFS_follow_tree_edge (DECL_VINDEX (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      DFS_follow_tree_edge (TYPE_SIZE (expr));
      DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
      DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
      DFS_follow_tree_edge (TYPE_NAME (expr));
      /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
	 reconstructed during fixup.  */
      /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
	 during fixup.  */
      DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
      DFS_follow_tree_edge (TYPE_CONTEXT (expr));
      /* TYPE_CANONICAL is re-computed during type merging, so no need
	 to follow it here.  */
      DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	DFS_follow_tree_edge (TYPE_VALUES (expr));
      else if (TREE_CODE (expr) == ARRAY_TYPE)
	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
      else if (RECORD_OR_UNION_TYPE_P (expr))
	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
	  DFS_follow_tree_edge (t);
      else if (TREE_CODE (expr) == FUNCTION_TYPE
	       || TREE_CODE (expr) == METHOD_TYPE)
	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));

      if (!POINTER_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_MIN_VALUE_RAW (expr));
      DFS_follow_tree_edge (TYPE_MAX_VALUE_RAW (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      DFS_follow_tree_edge (TREE_PURPOSE (expr));
      DFS_follow_tree_edge (TREE_VALUE (expr));
      DFS_follow_tree_edge (TREE_CHAIN (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    {
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
      DFS_follow_tree_edge (TREE_BLOCK (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    {
      for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
	if (VAR_OR_FUNCTION_DECL_P (t)
	    && DECL_EXTERNAL (t))
	  /* We have to stream externals in the block chain as
	     non-references.  See also
	     tree-streamer-out.c:streamer_write_chain.  */
	  DFS_write_tree (ob, expr_state, t, ref_p, false);
	else
	  DFS_follow_tree_edge (t);

      DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));

      /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
	 handle - those that represent inlined function scopes.
	 For the rest, drop them on the floor instead of ICEing
	 in dwarf2out.c, but keep the notion of whether the block
	 is an inlined block by referring to itself for the sake of
	 tree_nonartificial_location.  */
      if (inlined_function_outer_scope_p (expr))
	{
	  tree ultimate_origin = block_ultimate_origin (expr);
	  DFS_follow_tree_edge (ultimate_origin);
	}
      else if (BLOCK_ABSTRACT_ORIGIN (expr))
	DFS_follow_tree_edge (expr);
      /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
	 information for early inlined BLOCKs so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
	 streaming time.  */

      /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;

      /* Note that the number of BINFO slots has already been emitted in
	 EXPR's header (see streamer_write_tree_header) because this length
	 is needed to build the empty BINFO node on the reader side.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
	DFS_follow_tree_edge (t);
      DFS_follow_tree_edge (BINFO_OFFSET (expr));
      DFS_follow_tree_edge (BINFO_VTABLE (expr));
      DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));

      /* The number of BINFO_BASE_ACCESSES has already been emitted in
	 EXPR's bitfield section.  */
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
	DFS_follow_tree_edge (t);

      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
	{
	  DFS_follow_tree_edge (index);
	  DFS_follow_tree_edge (value);
	}
    }

  if (code == OMP_CLAUSE)
    {
      int i;
      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
	DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
      DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
    }

#undef DFS_follow_tree_edge
}
971
/* Return a hash value for the tree T.
   CACHE holds hash values of trees outside the current SCC.  MAP, if
   non-NULL, may hold hash values of trees inside the current SCC.  */
975
static hashval_t
hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
{
  inchash::hash hstate;

  /* Mix the hash of the tree edge SIBLING into HSTATE.  Trees already in
     the writer CACHE (outside the current SCC) contribute their cached
     hash; trees inside the current SCC contribute their tentative hash
     from MAP when one is supplied, otherwise the constant 1.  NULL edges
     contribute 0 so presence/absence of an edge is distinguished.  */
#define visit(SIBLING) \
  do { \
    unsigned ix; \
    if (!SIBLING) \
      hstate.add_int (0); \
    else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
      hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
    else if (map) \
      hstate.add_int (*map->get (SIBLING)); \
    else \
      hstate.add_int (1); \
  } while (0)

  /* Hash TS_BASE.  The flag set hashed here must match what the
     streamer writes so equal streamed trees get equal hashes.  */
  enum tree_code code = TREE_CODE (t);
  hstate.add_int (code);
  if (!TYPE_P (t))
    {
      hstate.add_flag (TREE_SIDE_EFFECTS (t));
      hstate.add_flag (TREE_CONSTANT (t));
      hstate.add_flag (TREE_READONLY (t));
      hstate.add_flag (TREE_PUBLIC (t));
    }
  hstate.add_flag (TREE_ADDRESSABLE (t));
  hstate.add_flag (TREE_THIS_VOLATILE (t));
  if (DECL_P (t))
    hstate.add_flag (DECL_UNSIGNED (t));
  else if (TYPE_P (t))
    hstate.add_flag (TYPE_UNSIGNED (t));
  if (TYPE_P (t))
    hstate.add_flag (TYPE_ARTIFICIAL (t));
  else
    hstate.add_flag (TREE_NO_WARNING (t));
  hstate.add_flag (TREE_NOTHROW (t));
  hstate.add_flag (TREE_STATIC (t));
  hstate.add_flag (TREE_PROTECTED (t));
  hstate.add_flag (TREE_DEPRECATED (t));
  if (code != TREE_BINFO)
    hstate.add_flag (TREE_PRIVATE (t));
  if (TYPE_P (t))
    {
      /* The same bit means different things for aggregates and
	 non-aggregates; hash whichever interpretation applies.  */
      hstate.add_flag (AGGREGATE_TYPE_P (t)
		       ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
      hstate.add_flag (TYPE_ADDR_SPACE (t));
    }
  else if (code == SSA_NAME)
    hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
  hstate.commit_flag ();

  if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
    {
      int i;
      hstate.add_wide_int (TREE_INT_CST_NUNITS (t));
      hstate.add_wide_int (TREE_INT_CST_EXT_NUNITS (t));
      for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
	hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
    {
      REAL_VALUE_TYPE r = TREE_REAL_CST (t);
      hstate.add_flag (r.cl);
      hstate.add_flag (r.sign);
      hstate.add_flag (r.signalling);
      hstate.add_flag (r.canonical);
      hstate.commit_flag ();
      hstate.add_int (r.uexp);
      hstate.add (r.sig, sizeof (r.sig));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
    {
      FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
      hstate.add_int (f.mode);
      hstate.add_int (f.data.low);
      hstate.add_int (f.data.high);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      hstate.add_wide_int (DECL_MODE (t));
      hstate.add_flag (DECL_NONLOCAL (t));
      hstate.add_flag (DECL_VIRTUAL_P (t));
      hstate.add_flag (DECL_IGNORED_P (t));
      hstate.add_flag (DECL_ABSTRACT_P (t));
      hstate.add_flag (DECL_ARTIFICIAL (t));
      hstate.add_flag (DECL_USER_ALIGN (t));
      hstate.add_flag (DECL_PRESERVE_P (t));
      hstate.add_flag (DECL_EXTERNAL (t));
      hstate.add_flag (DECL_GIMPLE_REG_P (t));
      hstate.commit_flag ();
      hstate.add_int (DECL_ALIGN (t));
      if (code == LABEL_DECL)
	{
	  hstate.add_int (EH_LANDING_PAD_NR (t));
	  hstate.add_int (LABEL_DECL_UID (t));
	}
      else if (code == FIELD_DECL)
	{
	  hstate.add_flag (DECL_PACKED (t));
	  hstate.add_flag (DECL_NONADDRESSABLE_P (t));
	  hstate.add_int (DECL_OFFSET_ALIGN (t));
	}
      else if (code == VAR_DECL)
	{
	  hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
	  hstate.add_flag (DECL_NONLOCAL_FRAME (t));
	}
      if (code == RESULT_DECL
	  || code == PARM_DECL
	  || code == VAR_DECL)
	{
	  hstate.add_flag (DECL_BY_REFERENCE (t));
	  if (code == VAR_DECL
	      || code == PARM_DECL)
	    hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
	}
      hstate.commit_flag ();
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
    hstate.add_int (DECL_REGISTER (t));

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      hstate.add_flag (DECL_COMMON (t));
      hstate.add_flag (DECL_DLLIMPORT_P (t));
      hstate.add_flag (DECL_WEAK (t));
      hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
      hstate.add_flag (DECL_COMDAT (t));
      hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
      hstate.add_int (DECL_VISIBILITY (t));
      if (code == VAR_DECL)
	{
	  /* DECL_IN_TEXT_SECTION is set during final asm output only.  */
	  hstate.add_flag (DECL_HARD_REGISTER (t));
	  hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
	}
      if (TREE_CODE (t) == FUNCTION_DECL)
	{
	  hstate.add_flag (DECL_FINAL_P (t));
	  hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
	  hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
	}
      hstate.commit_flag ();
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      hstate.add_int (DECL_BUILT_IN_CLASS (t));
      hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
      hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
      hstate.add_flag (DECL_UNINLINABLE (t));
      hstate.add_flag (DECL_POSSIBLY_INLINED (t));
      hstate.add_flag (DECL_IS_NOVOPS (t));
      hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
      hstate.add_flag (DECL_IS_MALLOC (t));
      hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
      hstate.add_flag (DECL_DECLARED_INLINE_P (t));
      hstate.add_flag (DECL_STATIC_CHAIN (t));
      hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
      hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
      hstate.add_flag (DECL_NO_LIMIT_STACK (t));
      hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
      hstate.add_flag (DECL_PURE_P (t));
      hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
      hstate.commit_flag ();
      if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
	hstate.add_int (DECL_FUNCTION_CODE (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      hstate.add_wide_int (TYPE_MODE (t));
      hstate.add_flag (TYPE_STRING_FLAG (t));
      /* TYPE_NO_FORCE_BLK is private to stor-layout and need
 	 no streaming.  */
      hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
      hstate.add_flag (TYPE_PACKED (t));
      hstate.add_flag (TYPE_RESTRICT (t));
      hstate.add_flag (TYPE_USER_ALIGN (t));
      hstate.add_flag (TYPE_READONLY (t));
      if (RECORD_OR_UNION_TYPE_P (t))
	{
	  hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
	  hstate.add_flag (TYPE_FINAL_P (t));
	}
      else if (code == ARRAY_TYPE)
	hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
      if (AGGREGATE_TYPE_P (t))
	hstate.add_flag (TYPE_TYPELESS_STORAGE (t));
      hstate.commit_flag ();
      hstate.add_int (TYPE_PRECISION (t));
      hstate.add_int (TYPE_ALIGN (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
    hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
		strlen (TRANSLATION_UNIT_LANGUAGE (t)));

  if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
      /* We don't stream these when passing things to a different target.  */
      && !lto_stream_offload_p)
    hstate.add_wide_int (cl_target_option_hash (TREE_TARGET_OPTION (t)));

  if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
    hstate.add_wide_int (cl_optimization_hash (TREE_OPTIMIZATION (t)));

  if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
    hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));

  if (CODE_CONTAINS_STRUCT (code, TS_STRING))
    hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));

  /* From here on hash the tree edges of T.  The walk mirrors the DFS
     edge walk used when streaming (see DFS_follow_tree_edge uses above)
     so members of a streamed SCC receive comparable hashes.  */
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (code != IDENTIFIER_NODE)
	visit (TREE_TYPE (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
      visit (VECTOR_CST_ELT (t, i));

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      visit (TREE_REALPART (t));
      visit (TREE_IMAGPART (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (t)
	  && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
	  && anon_aggrname_p (DECL_NAME (t)))
	;
      else
	visit (DECL_NAME (t));
      if (DECL_FILE_SCOPE_P (t))
	;
      else
	visit (DECL_CONTEXT (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      visit (DECL_SIZE (t));
      visit (DECL_SIZE_UNIT (t));
      visit (DECL_ATTRIBUTES (t));
      if ((code == VAR_DECL
	   || code == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (t))
	visit (DECL_VALUE_EXPR (t));
      if (code == VAR_DECL
	  && DECL_HAS_DEBUG_EXPR_P (t))
	visit (DECL_DEBUG_EXPR (t));
      /* ??? Hash DECL_INITIAL as streamed.  Needs the output-block to
	 be able to call get_symbol_initial_value.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (code == TYPE_DECL)
	visit (DECL_ORIGINAL_TYPE (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      if (DECL_ASSEMBLER_NAME_SET_P (t))
	visit (DECL_ASSEMBLER_NAME (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      visit (DECL_FIELD_OFFSET (t));
      visit (DECL_BIT_FIELD_TYPE (t));
      visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
      visit (DECL_FIELD_BIT_OFFSET (t));
      visit (DECL_FCONTEXT (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      visit (DECL_VINDEX (t));
      visit (DECL_FUNCTION_PERSONALITY (t));
      visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
      visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      visit (TYPE_SIZE (t));
      visit (TYPE_SIZE_UNIT (t));
      visit (TYPE_ATTRIBUTES (t));
      visit (TYPE_NAME (t));
      visit (TYPE_MAIN_VARIANT (t));
      if (TYPE_FILE_SCOPE_P (t))
	;
      else
	visit (TYPE_CONTEXT (t));
      visit (TYPE_STUB_DECL (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (code == ENUMERAL_TYPE)
	visit (TYPE_VALUES (t));
      else if (code == ARRAY_TYPE)
	visit (TYPE_DOMAIN (t));
      else if (RECORD_OR_UNION_TYPE_P (t))
	for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
	  visit (f);
      else if (code == FUNCTION_TYPE
	       || code == METHOD_TYPE)
	visit (TYPE_ARG_TYPES (t));
      if (!POINTER_TYPE_P (t))
	visit (TYPE_MIN_VALUE_RAW (t));
      visit (TYPE_MAX_VALUE_RAW (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      visit (TREE_PURPOSE (t));
      visit (TREE_VALUE (t));
      visit (TREE_CHAIN (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
      visit (TREE_VEC_ELT (t, i));

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      hstate.add_wide_int (TREE_OPERAND_LENGTH (t));
      for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
	visit (TREE_OPERAND (t, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree b;
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
	visit (b);
      visit (BINFO_OFFSET (t));
      visit (BINFO_VTABLE (t));
      visit (BINFO_VPTR_FIELD (t));
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
	visit (b);
      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;
      hstate.add_wide_int (CONSTRUCTOR_NELTS (t));
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
	{
	  visit (index);
	  visit (value);
	}
    }

  if (code == OMP_CLAUSE)
    {
      int i;
      HOST_WIDE_INT val;

      hstate.add_wide_int (OMP_CLAUSE_CODE (t));
      /* Clause sub-kinds live in different accessors per clause code;
	 fold the relevant one into VAL (0 for kinds without one).  */
      switch (OMP_CLAUSE_CODE (t))
	{
	case OMP_CLAUSE_DEFAULT:
	  val = OMP_CLAUSE_DEFAULT_KIND (t);
	  break;
	case OMP_CLAUSE_SCHEDULE:
	  val = OMP_CLAUSE_SCHEDULE_KIND (t);
	  break;
	case OMP_CLAUSE_DEPEND:
	  val = OMP_CLAUSE_DEPEND_KIND (t);
	  break;
	case OMP_CLAUSE_MAP:
	  val = OMP_CLAUSE_MAP_KIND (t);
	  break;
	case OMP_CLAUSE_PROC_BIND:
	  val = OMP_CLAUSE_PROC_BIND_KIND (t);
	  break;
	case OMP_CLAUSE_REDUCTION:
	  val = OMP_CLAUSE_REDUCTION_CODE (t);
	  break;
	default:
	  val = 0;
	  break;
	}
      hstate.add_wide_int (val);
      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
	visit (OMP_CLAUSE_OPERAND (t, i));
      visit (OMP_CLAUSE_CHAIN (t));
    }

  return hstate.end ();

#undef visit
}
1387
1388 /* Compare two SCC entries by their hash value for qsorting them. */
1389
1390 int
1391 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1392 {
1393 const scc_entry *p1 = (const scc_entry *) p1_;
1394 const scc_entry *p2 = (const scc_entry *) p2_;
1395 if (p1->hash < p2->hash)
1396 return -1;
1397 else if (p1->hash > p2->hash)
1398 return 1;
1399 return 0;
1400 }
1401
1402 /* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
1403 THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST. */
1404
hashval_t
DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
	       bool ref_p, bool this_ref_p)
{
  /* LAST_CLASSES tracks the equivalence-class count of the previous
     iteration so we can detect when propagation stops making progress;
     ITERATIONS bounds the propagation loop.  */
  unsigned int last_classes = 0, iterations = 0;

  /* Compute hash values for the SCC members.  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash
      = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);

  /* Singleton SCCs need no disambiguation work.  */
  if (size == 1)
    return sccstack[first].hash;

  /* We aim to get unique hash for every tree within SCC and compute hash value
     of the whole SCC by combining all values together in a stable (entry-point
     independent) order.  This guarantees that the same SCC regions within
     different translation units will get the same hash values and therefore
     will be merged at WPA time.

     Often the hashes are already unique.  In that case we compute the SCC hash
     by combining individual hash values in an increasing order.

     If there are duplicates, we seek at least one tree with unique hash (and
     pick one with minimal hash and this property).  Then we obtain a stable
     order by DFS walk starting from this unique tree and then use the index
     within this order to make individual hash values unique.

     If there is no tree with unique hash, we iteratively propagate the hash
     values across the internal edges of SCC.  This usually quickly leads
     to unique hashes.  Consider, for example, an SCC containing two pointers
     that are identical except for the types they point to and assume that
     these types are also part of the SCC.  The propagation will add the
     points-to type information into their hash values.  */
  do
    {
      /* Sort the SCC so we can easily check for uniqueness.  */
      qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);

      unsigned int classes = 1;
      int firstunique = -1;

      /* Find the tree with lowest unique hash (if it exists) and compute
	 the number of equivalence classes.  */
      if (sccstack[first].hash != sccstack[first+1].hash)
	firstunique = 0;
      for (unsigned i = 1; i < size; ++i)
	if (sccstack[first+i-1].hash != sccstack[first+i].hash)
	  {
	    classes++;
	    if (firstunique == -1
		&& (i == size - 1
		    || sccstack[first+i+1].hash != sccstack[first+i].hash))
	      firstunique = i;
	  }

      /* If we found a tree with unique hash, stop the iteration.  */
      if (firstunique != -1
	  /* Also terminate if we run out of iterations or if the number of
	     equivalence classes is no longer increasing.
	     For example a cyclic list of trees that are all equivalent will
	     never have unique entry point; we however do not build such SCCs
	     in our IL.  */
	  || classes <= last_classes || iterations > 16)
	{
          hashval_t scc_hash;

	  /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
	     starting from FIRSTUNIQUE to obtain a stable order.  */
	  if (classes != size && firstunique != -1)
	    {
	      hash_map <tree, hashval_t> map(size*2);

	      /* Store hash values into a map, so we can associate them with
		 the reordered SCC.  */
	      for (unsigned i = 0; i < size; ++i)
		map.put (sccstack[first+i].t, sccstack[first+i].hash);

	      /* Re-do the DFS from the unique entry point; the resulting
		 stack order is entry-point independent.  */
	      DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
			 true);
	      gcc_assert (again.sccstack.length () == size);

	      memcpy (sccstack.address () + first,
		      again.sccstack.address (),
		      sizeof (scc_entry) * size);

	      /* Update hash values of individual members by hashing in the
		 index within the stable order.  This ensures uniqueness.
		 Also compute the SCC hash by mixing in all hash values in
		 the stable order we obtained.  */
	      sccstack[first].hash = *map.get (sccstack[first].t);
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		{
		  sccstack[first+i].hash
		    = iterative_hash_hashval_t (i,
						*map.get (sccstack[first+i].t));
		  scc_hash
		    = iterative_hash_hashval_t (scc_hash,
						sccstack[first+i].hash);
		}
	    }
	  /* If we got a unique hash value for each tree, then sort already
	     ensured entry-point independent order.  Only compute the final
	     SCC hash.

	     If we failed to find the unique entry point, we go by the same
	     route.  We will eventually introduce unwanted hash conflicts.  */
	  else
	    {
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		scc_hash
		  = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);

	      /* We cannot 100% guarantee that the hash won't conflict so as
		 to make it impossible to find a unique hash.  This however
		 should be an extremely rare case.  ICE for now so possible
		 issues are found and evaluated.  */
	      gcc_checking_assert (classes == size);
	    }

	  /* To avoid conflicts across SCCs, iteratively hash the whole SCC
	     hash into the hash of each element.  */
	  for (unsigned i = 0; i < size; ++i)
	    sccstack[first+i].hash
	      = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
	  return scc_hash;
	}

      last_classes = classes;
      iterations++;

      /* We failed to identify the entry point; propagate hash values across
	 the edges.  */
      hash_map <tree, hashval_t> map(size*2);

      for (unsigned i = 0; i < size; ++i)
	map.put (sccstack[first+i].t, sccstack[first+i].hash);

      /* Re-hash every member, this time letting hash_tree pick up the
	 current member hashes through MAP, so intra-SCC edges refine the
	 individual hashes.  */
      for (unsigned i = 0; i < size; i++)
	sccstack[first+i].hash
	  = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
    }
  while (true);
}
1551
1552 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1553 already in the streamer cache. Main routine called for
1554 each visit of EXPR. */
1555
1556 void
1557 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1558 tree expr, bool ref_p, bool this_ref_p)
1559 {
1560 /* Handle special cases. */
1561 if (expr == NULL_TREE)
1562 return;
1563
1564 /* Do not DFS walk into indexable trees. */
1565 if (this_ref_p && tree_is_indexable (expr))
1566 return;
1567
1568 /* Check if we already streamed EXPR. */
1569 if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1570 return;
1571
1572 worklist w;
1573 w.expr = expr;
1574 w.from_state = from_state;
1575 w.cstate = NULL;
1576 w.ref_p = ref_p;
1577 w.this_ref_p = this_ref_p;
1578 worklist_vec.safe_push (w);
1579 }
1580
1581
1582 /* Emit the physical representation of tree node EXPR to output block OB.
1583 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
1584 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1585
void
lto_output_tree (struct output_block *ob, tree expr,
		 bool ref_p, bool this_ref_p)
{
  unsigned ix;
  bool existed_p;

  /* NULL is streamed as a bare LTO_null record.  */
  if (expr == NULL_TREE)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  /* Indexable trees (decls, types reachable by index) are emitted as
     references into the appropriate index rather than by value.  */
  if (this_ref_p && tree_is_indexable (expr))
    {
      lto_output_tree_ref (ob, expr);
      return;
    }

  existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
  if (existed_p)
    {
      /* If a node has already been streamed out, make sure that
	 we don't write it more than once.  Otherwise, the reader
	 will instantiate two different nodes for the same object.  */
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
  else
    {
      /* This is the first time we see EXPR, write all reachable
	 trees to OB.  */
      static bool in_dfs_walk;

      /* Protect against recursion which means disconnect between
	 what tree edges we walk in the DFS walk and what edges
	 we stream out.  */
      gcc_assert (!in_dfs_walk);

      /* Start the DFS walk; constructing the DFS object performs the
	 walk and streams out all SCCs reachable from EXPR.  */
      in_dfs_walk = true;
      DFS (ob, expr, ref_p, this_ref_p, false);
      in_dfs_walk = false;

      /* Finally append a reference to the tree we were writing.
	 ??? If expr ended up as a singleton we could have
	 inlined it here and avoid outputting a reference.  */
      existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
      gcc_assert (existed_p);
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
}
1647
1648
1649 /* Output to OB a list of try/catch handlers starting with FIRST. */
1650
1651 static void
1652 output_eh_try_list (struct output_block *ob, eh_catch first)
1653 {
1654 eh_catch n;
1655
1656 for (n = first; n; n = n->next_catch)
1657 {
1658 streamer_write_record_start (ob, LTO_eh_catch);
1659 stream_write_tree (ob, n->type_list, true);
1660 stream_write_tree (ob, n->filter_list, true);
1661 stream_write_tree (ob, n->label, true);
1662 }
1663
1664 streamer_write_record_start (ob, LTO_null);
1665 }
1666
1667
/* Output EH region R to OB, or a bare LTO_null record when R is NULL.
   Related regions (outer, inner, peers, landing pads) are referenced by
   their index in the function's EH region array, which lets the reader
   detect EH region sharing when rebuilding the tree.  */
1671
1672 static void
1673 output_eh_region (struct output_block *ob, eh_region r)
1674 {
1675 enum LTO_tags tag;
1676
1677 if (r == NULL)
1678 {
1679 streamer_write_record_start (ob, LTO_null);
1680 return;
1681 }
1682
1683 if (r->type == ERT_CLEANUP)
1684 tag = LTO_ert_cleanup;
1685 else if (r->type == ERT_TRY)
1686 tag = LTO_ert_try;
1687 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1688 tag = LTO_ert_allowed_exceptions;
1689 else if (r->type == ERT_MUST_NOT_THROW)
1690 tag = LTO_ert_must_not_throw;
1691 else
1692 gcc_unreachable ();
1693
1694 streamer_write_record_start (ob, tag);
1695 streamer_write_hwi (ob, r->index);
1696
1697 if (r->outer)
1698 streamer_write_hwi (ob, r->outer->index);
1699 else
1700 streamer_write_zero (ob);
1701
1702 if (r->inner)
1703 streamer_write_hwi (ob, r->inner->index);
1704 else
1705 streamer_write_zero (ob);
1706
1707 if (r->next_peer)
1708 streamer_write_hwi (ob, r->next_peer->index);
1709 else
1710 streamer_write_zero (ob);
1711
1712 if (r->type == ERT_TRY)
1713 {
1714 output_eh_try_list (ob, r->u.eh_try.first_catch);
1715 }
1716 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1717 {
1718 stream_write_tree (ob, r->u.allowed.type_list, true);
1719 stream_write_tree (ob, r->u.allowed.label, true);
1720 streamer_write_uhwi (ob, r->u.allowed.filter);
1721 }
1722 else if (r->type == ERT_MUST_NOT_THROW)
1723 {
1724 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1725 bitpack_d bp = bitpack_create (ob->main_stream);
1726 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1727 streamer_write_bitpack (&bp);
1728 }
1729
1730 if (r->landing_pads)
1731 streamer_write_hwi (ob, r->landing_pads->index);
1732 else
1733 streamer_write_zero (ob);
1734 }
1735
1736
1737 /* Output landing pad LP to OB. */
1738
1739 static void
1740 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1741 {
1742 if (lp == NULL)
1743 {
1744 streamer_write_record_start (ob, LTO_null);
1745 return;
1746 }
1747
1748 streamer_write_record_start (ob, LTO_eh_landing_pad);
1749 streamer_write_hwi (ob, lp->index);
1750 if (lp->next_lp)
1751 streamer_write_hwi (ob, lp->next_lp->index);
1752 else
1753 streamer_write_zero (ob);
1754
1755 if (lp->region)
1756 streamer_write_hwi (ob, lp->region->index);
1757 else
1758 streamer_write_zero (ob);
1759
1760 stream_write_tree (ob, lp->post_landing_pad, true);
1761 }
1762
1763
1764 /* Output the existing eh_table to OB. */
1765
static void
output_eh_regions (struct output_block *ob, struct function *fn)
{
  if (fn->eh && fn->eh->region_tree)
    {
      unsigned i;
      eh_region eh;
      eh_landing_pad lp;
      tree ttype;

      streamer_write_record_start (ob, LTO_eh_table);

      /* Emit the index of the root of the EH region tree.  */
      streamer_write_hwi (ob, fn->eh->region_tree->index);

      /* Emit all the EH regions in the region array.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
	output_eh_region (ob, eh);

      /* Emit all landing pads.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
	output_eh_lp (ob, lp);

      /* Emit all the runtime type data.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
	stream_write_tree (ob, ttype, true);

      /* Emit the table of action chains.  The representation differs
	 depending on the unwinder: ARM EABI stores trees, others store
	 raw bytes.  */
      if (targetm.arm_eabi_unwinder)
	{
	  tree t;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
	    stream_write_tree (ob, t, true);
	}
      else
	{
	  uchar c;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
	    streamer_write_char_stream (ob->main_stream, c);
	}
    }

  /* The LTO_null either terminates the record or indicates that there
     are no eh_records at all.  */
  streamer_write_record_start (ob, LTO_null);
}
1817
1818
1819 /* Output all of the active ssa names to the ssa_names stream. */
1820
static void
output_ssa_names (struct output_block *ob, struct function *fn)
{
  unsigned int i, len;

  /* Stream the size of the SSA-name array first so the reader can
     pre-allocate; individual entries follow as (index, flags, var/type)
     triples, terminated by a zero index.  */
  len = vec_safe_length (SSANAMES (fn));
  streamer_write_uhwi (ob, len);

  /* Index 0 is never a valid SSA name, start at 1.  */
  for (i = 1; i < len; i++)
    {
      tree ptr = (*SSANAMES (fn))[i];

      if (ptr == NULL_TREE
	  || SSA_NAME_IN_FREE_LIST (ptr)
	  || virtual_operand_p (ptr)
	  /* Simply skip unreleased SSA names.  */
	  || (! SSA_NAME_IS_DEFAULT_DEF (ptr)
	      && (! SSA_NAME_DEF_STMT (ptr)
		  || ! gimple_bb (SSA_NAME_DEF_STMT (ptr)))))
	continue;

      streamer_write_uhwi (ob, i);
      streamer_write_char_stream (ob->main_stream,
				  SSA_NAME_IS_DEFAULT_DEF (ptr));
      if (SSA_NAME_VAR (ptr))
	stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
      else
	/* ??? This drops SSA_NAME_IDENTIFIER on the floor.  */
	stream_write_tree (ob, TREE_TYPE (ptr), true);
    }

  /* Zero index terminates the list.  */
  streamer_write_zero (ob);
}
1854
1855
1856
1857 /* Output the cfg. */
1858
static void
output_cfg (struct output_block *ob, struct function *fn)
{
  /* The CFG goes into its own sub-stream; temporarily redirect
     main_stream so the streamer helpers write there, and restore it at
     the end.  */
  struct lto_output_stream *tmp_stream = ob->main_stream;
  basic_block bb;

  ob->main_stream = ob->cfg_stream;

  streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
		       profile_status_for_fn (fn));

  /* Output the number of the highest basic block.  */
  streamer_write_uhwi (ob, last_basic_block_for_fn (fn));

  FOR_ALL_BB_FN (bb, fn)
    {
      edge_iterator ei;
      edge e;

      streamer_write_hwi (ob, bb->index);

      /* Output the successors and the edge flags.  */
      streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  streamer_write_uhwi (ob, e->dest->index);
	  e->probability.stream_out (ob);
	  e->count.stream_out (ob);
	  streamer_write_uhwi (ob, e->flags);
	}
    }

  /* -1 terminates the per-block records.  */
  streamer_write_hwi (ob, -1);

  /* Emit the linear order of basic blocks (next_bb chain).
     NOTE(review): this reads ENTRY_BLOCK_PTR_FOR_FN (cfun) while the
     rest of the function uses FN; equivalent because cfun == fn is
     asserted below, but FN would be clearer.  */
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  while (bb->next_bb)
    {
      streamer_write_hwi (ob, bb->next_bb->index);
      bb = bb->next_bb;
    }

  streamer_write_hwi (ob, -1);

  /* ??? The cfgloop interface is tied to cfun.  */
  gcc_assert (cfun == fn);

  /* Output the number of loops.  */
  streamer_write_uhwi (ob, number_of_loops (fn));

  /* Output each loop, skipping the tree root which has number zero.  */
  for (unsigned i = 1; i < number_of_loops (fn); ++i)
    {
      struct loop *loop = get_loop (fn, i);

      /* Write the index of the loop header.  That's enough to rebuild
	 the loop tree on the reader side.  Stream -1 for an unused
	 loop entry.  */
      if (!loop)
	{
	  streamer_write_hwi (ob, -1);
	  continue;
	}
      else
	streamer_write_hwi (ob, loop->header->index);

      /* Write everything copy_loop_info copies.  */
      streamer_write_enum (ob->main_stream,
			   loop_estimation, EST_LAST, loop->estimate_state);
      streamer_write_hwi (ob, loop->any_upper_bound);
      if (loop->any_upper_bound)
	streamer_write_widest_int (ob, loop->nb_iterations_upper_bound);
      streamer_write_hwi (ob, loop->any_likely_upper_bound);
      if (loop->any_likely_upper_bound)
	streamer_write_widest_int (ob, loop->nb_iterations_likely_upper_bound);
      streamer_write_hwi (ob, loop->any_estimate);
      if (loop->any_estimate)
	streamer_write_widest_int (ob, loop->nb_iterations_estimate);

      /* Write OMP SIMD related info.  */
      streamer_write_hwi (ob, loop->safelen);
      streamer_write_hwi (ob, loop->dont_vectorize);
      streamer_write_hwi (ob, loop->force_vectorize);
      stream_write_tree (ob, loop->simduid, true);
    }

  /* Restore the main stream redirected at the top.  */
  ob->main_stream = tmp_stream;
}
1946
1947
1948 /* Create the header in the file using OB. If the section type is for
1949 a function, set FN to the decl for that function. */
1950
void
produce_asm (struct output_block *ob, tree fn)
{
  enum lto_section_type section_type = ob->section_type;
  struct lto_function_header header;
  char *section_name;

  /* Function-body sections are named after the function's assembler
     name; other section types get a generic name.  */
  if (section_type == LTO_section_function_body)
    {
      const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
      section_name = lto_get_section_name (section_type, name, NULL);
    }
  else
    section_name = lto_get_section_name (section_type, NULL, NULL);

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header is stream computed here.  */
  memset (&header, 0, sizeof (struct lto_function_header));

  /* Write the header.  */
  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  /* Only function bodies carry a CFG stream.  */
  if (section_type == LTO_section_function_body)
    header.cfg_size = ob->cfg_stream->total_size;
  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  The order (cfg, main, strings) must match what the
     header sizes describe.  */
  if (section_type == LTO_section_function_body)
    lto_write_stream (ob->cfg_stream);
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();
}
1991
1992
1993 /* Output the base body of struct function FN using output block OB. */
1994
static void
output_struct_function_base (struct output_block *ob, struct function *fn)
{
  struct bitpack_d bp;
  unsigned i;
  tree t;

  /* Output the static chain and non-local goto save area.  */
  stream_write_tree (ob, fn->static_chain_decl, true);
  stream_write_tree (ob, fn->nonlocal_goto_save_area, true);

  /* Output all the local variables in the function.  */
  streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
  FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
    stream_write_tree (ob, t, true);

  /* Output current IL state of the function.  */
  streamer_write_uhwi (ob, fn->curr_properties);

  /* Write all the attributes for FN as a single bitpack; the field
     order here must match the reader exactly.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, fn->is_thunk, 1);
  bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
  bp_pack_value (&bp, fn->returns_pcc_struct, 1);
  bp_pack_value (&bp, fn->returns_struct, 1);
  bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
  bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
  bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
  bp_pack_value (&bp, fn->after_inlining, 1);
  bp_pack_value (&bp, fn->stdarg, 1);
  bp_pack_value (&bp, fn->has_nonlocal_label, 1);
  bp_pack_value (&bp, fn->has_forced_label_in_static, 1);
  bp_pack_value (&bp, fn->calls_alloca, 1);
  bp_pack_value (&bp, fn->calls_setjmp, 1);
  bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
  bp_pack_value (&bp, fn->has_simduid_loops, 1);
  bp_pack_value (&bp, fn->va_list_fpr_size, 8);
  bp_pack_value (&bp, fn->va_list_gpr_size, 8);
  bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);

  /* Output the function start and end loci.  */
  stream_output_location (ob, &bp, fn->function_start_locus);
  stream_output_location (ob, &bp, fn->function_end_locus);

  streamer_write_bitpack (&bp);
}
2041
2042
2043 /* Collect all leaf BLOCKs beyond ROOT into LEAFS. */
2044
2045 static void
2046 collect_block_tree_leafs (tree root, vec<tree> &leafs)
2047 {
2048 for (root = BLOCK_SUBBLOCKS (root); root; root = BLOCK_CHAIN (root))
2049 if (! BLOCK_SUBBLOCKS (root))
2050 leafs.safe_push (root);
2051 else
2052 collect_block_tree_leafs (BLOCK_SUBBLOCKS (root), leafs);
2053 }
2054
/* Output the body of function NODE->DECL to its own LTO section.

   Emits, in order: an LTO_function record, the result/argument decls,
   optional debug args, the DECL_INITIAL scope-block tree plus its leaf
   blocks, and then either a "1" marker followed by the full gimple body
   (for functions with a body) or a "0" marker (abstract functions kept
   only for debug info).  Finally the section is produced via
   produce_asm.  The write order is part of the LTO wire format.  */

static void
output_function (struct cgraph_node *node)
{
  tree function;
  struct function *fn;
  basic_block bb;
  struct output_block *ob;

  function = node->decl;
  fn = DECL_STRUCT_FUNCTION (function);
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->symbol = node;

  /* No function context may be active; push_cfun below installs one.  */
  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  /* Set current_function_decl and cfun.  */
  push_cfun (fn);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  streamer_write_record_start (ob, LTO_function);

  /* Output decls for parameters and args.  */
  stream_write_tree (ob, DECL_RESULT (function), true);
  streamer_write_chain (ob, DECL_ARGUMENTS (function), true);

  /* Output debug args if available.  A zero count means none.  */
  vec<tree, va_gc> **debugargs = decl_debug_args_lookup (function);
  if (! debugargs)
    streamer_write_uhwi (ob, 0);
  else
    {
      streamer_write_uhwi (ob, (*debugargs)->length ());
      for (unsigned i = 0; i < (*debugargs)->length (); ++i)
	stream_write_tree (ob, (**debugargs)[i], true);
    }

  /* Output DECL_INITIAL for the function, which contains the tree of
     lexical scopes.  */
  stream_write_tree (ob, DECL_INITIAL (function), true);
  /* As we do not recurse into BLOCK_SUBBLOCKS but only BLOCK_SUPERCONTEXT
     collect block tree leafs and stream those.  The reader reconstructs
     interior blocks by walking BLOCK_SUPERCONTEXT from the leaves.  */
  auto_vec<tree> block_tree_leafs;
  if (DECL_INITIAL (function))
    collect_block_tree_leafs (DECL_INITIAL (function), block_tree_leafs);
  streamer_write_uhwi (ob, block_tree_leafs.length ());
  for (unsigned i = 0; i < block_tree_leafs.length (); ++i)
    stream_write_tree (ob, block_tree_leafs[i], true);

  /* We also stream abstract functions where we stream only stuff needed for
     debug info.  */
  if (gimple_has_body_p (function))
    {
      /* Marker: a body follows.  */
      streamer_write_uhwi (ob, 1);
      output_struct_function_base (ob, fn);

      /* Output all the SSA names used in the function.  */
      output_ssa_names (ob, fn);

      /* Output any exception handling regions.  */
      output_eh_regions (ob, fn);


      /* We will renumber the statements.  The code that does this uses
	 the same ordering that we use for serializing them so we can use
	 the same code on the other end and not have to write out the
	 statement numbers.  We do not assign UIDs to PHIs here because
	 virtual PHIs get re-computed on-the-fly which would make numbers
	 inconsistent.  */
      set_gimple_stmt_max_uid (cfun, 0);
      FOR_ALL_BB_FN (bb, cfun)
	{
	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gphi *stmt = gsi.phi ();

	      /* Virtual PHIs are not going to be streamed.  */
	      if (!virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gimple *stmt = gsi_stmt (gsi);
	      gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}
      /* To avoid keeping duplicate gimple IDs in the statements, renumber
	 virtual phis now.  They get UIDs after all streamed statements so
	 the streamed numbering stays dense and reproducible.  */
      FOR_ALL_BB_FN (bb, cfun)
	{
	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gphi *stmt = gsi.phi ();
	      if (virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}

      /* Output the code for the function.  */
      FOR_ALL_BB_FN (bb, fn)
	output_bb (ob, bb, fn);

      /* The terminator for this function.  */
      streamer_write_record_start (ob, LTO_null);

      output_cfg (ob, fn);

      /* Matches the push_cfun above; only done on this path since the
	 else branch streams no body.  */
      pop_cfun ();
   }
  else
    /* Marker: no body streamed (abstract function).  */
    streamer_write_uhwi (ob, 0);

  /* Create a section to hold the pickled output of this function.  */
  produce_asm (ob, function);

  destroy_output_block (ob);
}
2180
/* Output the initializer (constructor) of variable NODE->DECL to its
   own LTO section.  Unlike output_function this streams only the
   DECL_INITIAL tree — there is no gimple body for a variable.  */

static void
output_constructor (struct varpool_node *node)
{
  tree var = node->decl;
  struct output_block *ob;

  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->symbol = node;

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* Output DECL_INITIAL of the variable, i.e. its initializer tree.  */
  stream_write_tree (ob, DECL_INITIAL (var), true);

  /* Create a section to hold the pickled output of this initializer.  */
  produce_asm (ob, var);

  destroy_output_block (ob);
}
2206
2207
2208 /* Emit toplevel asms. */
2209
2210 void
2211 lto_output_toplevel_asms (void)
2212 {
2213 struct output_block *ob;
2214 struct asm_node *can;
2215 char *section_name;
2216 struct lto_simple_header_with_strings header;
2217
2218 if (!symtab->first_asm_symbol ())
2219 return;
2220
2221 ob = create_output_block (LTO_section_asm);
2222
2223 /* Make string 0 be a NULL string. */
2224 streamer_write_char_stream (ob->string_stream, 0);
2225
2226 for (can = symtab->first_asm_symbol (); can; can = can->next)
2227 {
2228 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2229 streamer_write_hwi (ob, can->order);
2230 }
2231
2232 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2233
2234 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
2235 lto_begin_section (section_name, !flag_wpa);
2236 free (section_name);
2237
2238 /* The entire header stream is computed here. */
2239 memset (&header, 0, sizeof (header));
2240
2241 /* Write the header. */
2242 header.major_version = LTO_major_version;
2243 header.minor_version = LTO_minor_version;
2244
2245 header.main_size = ob->main_stream->total_size;
2246 header.string_size = ob->string_stream->total_size;
2247 lto_write_data (&header, sizeof header);
2248
2249 /* Put all of the gimple and the string table out the asm file as a
2250 block of text. */
2251 lto_write_stream (ob->main_stream);
2252 lto_write_stream (ob->string_stream);
2253
2254 lto_end_section ();
2255
2256 destroy_output_block (ob);
2257 }
2258
2259
2260 /* Copy the function body or variable constructor of NODE without deserializing. */
2261
2262 static void
2263 copy_function_or_variable (struct symtab_node *node)
2264 {
2265 tree function = node->decl;
2266 struct lto_file_decl_data *file_data = node->lto_file_data;
2267 const char *data;
2268 size_t len;
2269 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2270 char *section_name =
2271 lto_get_section_name (LTO_section_function_body, name, NULL);
2272 size_t i, j;
2273 struct lto_in_decl_state *in_state;
2274 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2275
2276 lto_begin_section (section_name, false);
2277 free (section_name);
2278
2279 /* We may have renamed the declaration, e.g., a static function. */
2280 name = lto_get_decl_name_mapping (file_data, name);
2281
2282 data = lto_get_raw_section_data (file_data, LTO_section_function_body,
2283 name, &len);
2284 gcc_assert (data);
2285
2286 /* Do a bit copy of the function body. */
2287 lto_write_raw_data (data, len);
2288
2289 /* Copy decls. */
2290 in_state =
2291 lto_get_function_in_decl_state (node->lto_file_data, function);
2292 out_state->compressed = in_state->compressed;
2293 gcc_assert (in_state);
2294
2295 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2296 {
2297 size_t n = vec_safe_length (in_state->streams[i]);
2298 vec<tree, va_gc> *trees = in_state->streams[i];
2299 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2300
2301 /* The out state must have the same indices and the in state.
2302 So just copy the vector. All the encoders in the in state
2303 must be empty where we reach here. */
2304 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2305 encoder->trees.reserve_exact (n);
2306 for (j = 0; j < n; j++)
2307 encoder->trees.safe_push ((*trees)[j]);
2308 }
2309
2310 lto_free_raw_section_data (file_data, LTO_section_function_body, name,
2311 data, len);
2312 lto_end_section ();
2313 }
2314
2315 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2316
2317 static tree
2318 wrap_refs (tree *tp, int *ws, void *)
2319 {
2320 tree t = *tp;
2321 if (handled_component_p (t)
2322 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
2323 && TREE_PUBLIC (TREE_OPERAND (t, 0)))
2324 {
2325 tree decl = TREE_OPERAND (t, 0);
2326 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2327 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2328 build1 (ADDR_EXPR, ptrtype, decl),
2329 build_int_cst (ptrtype, 0));
2330 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2331 *ws = 0;
2332 }
2333 else if (TREE_CODE (t) == CONSTRUCTOR)
2334 ;
2335 else if (!EXPR_P (t))
2336 *ws = 0;
2337 return NULL_TREE;
2338 }
2339
2340 /* Main entry point from the pass manager. */
2341
2342 void
2343 lto_output (void)
2344 {
2345 struct lto_out_decl_state *decl_state;
2346 bitmap output = NULL;
2347 int i, n_nodes;
2348 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2349
2350 if (flag_checking)
2351 output = lto_bitmap_alloc ();
2352
2353 /* Initialize the streamer. */
2354 lto_streamer_init ();
2355
2356 n_nodes = lto_symtab_encoder_size (encoder);
2357 /* Process only the functions with bodies. */
2358 for (i = 0; i < n_nodes; i++)
2359 {
2360 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2361 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2362 {
2363 if (lto_symtab_encoder_encode_body_p (encoder, node)
2364 && !node->alias
2365 && (!node->thunk.thunk_p || !node->thunk.add_pointer_bounds_args))
2366 {
2367 if (flag_checking)
2368 {
2369 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2370 bitmap_set_bit (output, DECL_UID (node->decl));
2371 }
2372 decl_state = lto_new_out_decl_state ();
2373 lto_push_out_decl_state (decl_state);
2374 if (gimple_has_body_p (node->decl) || !flag_wpa
2375 /* Thunks have no body but they may be synthetized
2376 at WPA time. */
2377 || DECL_ARGUMENTS (node->decl))
2378 output_function (node);
2379 else
2380 copy_function_or_variable (node);
2381 gcc_assert (lto_get_out_decl_state () == decl_state);
2382 lto_pop_out_decl_state ();
2383 lto_record_function_out_decl_state (node->decl, decl_state);
2384 }
2385 }
2386 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2387 {
2388 /* Wrap symbol references inside the ctor in a type
2389 preserving MEM_REF. */
2390 tree ctor = DECL_INITIAL (node->decl);
2391 if (ctor && !in_lto_p)
2392 walk_tree (&ctor, wrap_refs, NULL, NULL);
2393 if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2394 && lto_symtab_encoder_encode_initializer_p (encoder, node)
2395 && !node->alias)
2396 {
2397 timevar_push (TV_IPA_LTO_CTORS_OUT);
2398 if (flag_checking)
2399 {
2400 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2401 bitmap_set_bit (output, DECL_UID (node->decl));
2402 }
2403 decl_state = lto_new_out_decl_state ();
2404 lto_push_out_decl_state (decl_state);
2405 if (DECL_INITIAL (node->decl) != error_mark_node
2406 || !flag_wpa)
2407 output_constructor (node);
2408 else
2409 copy_function_or_variable (node);
2410 gcc_assert (lto_get_out_decl_state () == decl_state);
2411 lto_pop_out_decl_state ();
2412 lto_record_function_out_decl_state (node->decl, decl_state);
2413 timevar_pop (TV_IPA_LTO_CTORS_OUT);
2414 }
2415 }
2416 }
2417
2418 /* Emit the callgraph after emitting function bodies. This needs to
2419 be done now to make sure that all the statements in every function
2420 have been renumbered so that edges can be associated with call
2421 statements using the statement UIDs. */
2422 output_symtab ();
2423
2424 output_offload_tables ();
2425
2426 #if CHECKING_P
2427 lto_bitmap_free (output);
2428 #endif
2429 }
2430
2431 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2432 from it and required for correct representation of its semantics.
2433 Each node in ENCODER must be a global declaration or a type. A node
2434 is written only once, even if it appears multiple times in the
2435 vector. Certain transitively-reachable nodes, such as those
2436 representing expressions, may be duplicated, but such nodes
2437 must not appear in ENCODER itself. */
2438
2439 static void
2440 write_global_stream (struct output_block *ob,
2441 struct lto_tree_ref_encoder *encoder)
2442 {
2443 tree t;
2444 size_t index;
2445 const size_t size = lto_tree_ref_encoder_size (encoder);
2446
2447 for (index = 0; index < size; index++)
2448 {
2449 t = lto_tree_ref_encoder_get_tree (encoder, index);
2450 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2451 stream_write_tree (ob, t, false);
2452 }
2453 }
2454
2455
2456 /* Write a sequence of indices into the globals vector corresponding
2457 to the trees in ENCODER. These are used by the reader to map the
2458 indices used to refer to global entities within function bodies to
2459 their referents. */
2460
2461 static void
2462 write_global_references (struct output_block *ob,
2463 struct lto_tree_ref_encoder *encoder)
2464 {
2465 tree t;
2466 uint32_t index;
2467 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2468
2469 /* Write size and slot indexes as 32-bit unsigned numbers. */
2470 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2471 data[0] = size;
2472
2473 for (index = 0; index < size; index++)
2474 {
2475 unsigned slot_num;
2476
2477 t = lto_tree_ref_encoder_get_tree (encoder, index);
2478 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2479 gcc_assert (slot_num != (unsigned)-1);
2480 data[index + 1] = slot_num;
2481 }
2482
2483 lto_write_data (data, sizeof (int32_t) * (size + 1));
2484 free (data);
2485 }
2486
2487
2488 /* Write all the streams in an lto_out_decl_state STATE using
2489 output block OB and output stream OUT_STREAM. */
2490
2491 void
2492 lto_output_decl_state_streams (struct output_block *ob,
2493 struct lto_out_decl_state *state)
2494 {
2495 int i;
2496
2497 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2498 write_global_stream (ob, &state->streams[i]);
2499 }
2500
2501
2502 /* Write all the references in an lto_out_decl_state STATE using
2503 output block OB and output stream OUT_STREAM. */
2504
2505 void
2506 lto_output_decl_state_refs (struct output_block *ob,
2507 struct lto_out_decl_state *state)
2508 {
2509 unsigned i;
2510 unsigned ref;
2511 tree decl;
2512
2513 /* Write reference to FUNCTION_DECL. If there is not function,
2514 write reference to void_type_node. */
2515 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2516 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2517 gcc_assert (ref != (unsigned)-1);
2518 ref = ref * 2 + (state->compressed ? 1 : 0);
2519 lto_write_data (&ref, sizeof (uint32_t));
2520
2521 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2522 write_global_references (ob, &state->streams[i]);
2523 }
2524
2525
2526 /* Return the written size of STATE. */
2527
2528 static size_t
2529 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2530 {
2531 int i;
2532 size_t size;
2533
2534 size = sizeof (int32_t); /* fn_ref. */
2535 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2536 {
2537 size += sizeof (int32_t); /* vector size. */
2538 size += (lto_tree_ref_encoder_size (&state->streams[i])
2539 * sizeof (int32_t));
2540 }
2541 return size;
2542 }
2543
2544
/* Write symbol T into STREAM in CACHE.  SEEN specifies symbols we wrote
   so far; a symbol already in SEEN is skipped.  ALIAS is true when T is
   being written on behalf of an alias, which relaxes the definition
   asserts below.

   Wire format per symbol (consumed by the linker plugin): NUL-terminated
   assembler name, NUL-terminated comdat group, 1-byte kind, 1-byte
   visibility, 8-byte size, 4-byte writer-cache slot number.  */

static void
write_symbol (struct streamer_tree_cache_d *cache,
	      tree t, hash_set<const char *> *seen, bool alias)
{
  const char *name;
  enum gcc_plugin_symbol_kind kind;
  enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
  unsigned slot_num;
  uint64_t size;
  const char *comdat;
  unsigned char c;

  /* None of the following kinds of symbols are needed in the
     symbol table.  */
  if (!TREE_PUBLIC (t)
      || is_builtin_fn (t)
      || DECL_ABSTRACT_P (t)
      || (VAR_P (t) && DECL_HARD_REGISTER (t)))
    return;

  gcc_assert (VAR_OR_FUNCTION_DECL_P (t));

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));

  /* This behaves like assemble_name_raw in varasm.c, performing the
     same name manipulations that ASM_OUTPUT_LABELREF does.  */
  name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));

  /* hash_set::add returns true if the name was already present.  */
  if (seen->add (name))
    return;

  streamer_tree_cache_lookup (cache, t, &slot_num);
  gcc_assert (slot_num != (unsigned)-1);

  /* Classify the symbol for the plugin: undefined (weak or strong)
     when external, otherwise weak/common/strong definition.  */
  if (DECL_EXTERNAL (t))
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKUNDEF;
      else
	kind = GCCPK_UNDEF;
    }
  else
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKDEF;
      else if (DECL_COMMON (t))
	kind = GCCPK_COMMON;
      else
	kind = GCCPK_DEF;

      /* When something is defined, it should have node attached.  */
      gcc_assert (alias || !VAR_P (t) || varpool_node::get (t)->definition);
      gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
		  || (cgraph_node::get (t)
		      && cgraph_node::get (t)->definition));
    }

  /* Imitate what default_elf_asm_output_external do.
     When symbol is external, we need to output it with DEFAULT visibility
     when compiling with -fvisibility=default, while with HIDDEN visibility
     when symbol has attribute (visibility("hidden")) specified.
     targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
     right.  */

  if (DECL_EXTERNAL (t)
      && !targetm.binds_local_p (t))
    visibility = GCCPV_DEFAULT;
  else
    switch (DECL_VISIBILITY (t))
      {
      case VISIBILITY_DEFAULT:
	visibility = GCCPV_DEFAULT;
	break;
      case VISIBILITY_PROTECTED:
	visibility = GCCPV_PROTECTED;
	break;
      case VISIBILITY_HIDDEN:
	visibility = GCCPV_HIDDEN;
	break;
      case VISIBILITY_INTERNAL:
	visibility = GCCPV_INTERNAL;
	break;
      }

  /* Only common symbols report their size; others write 0.  */
  if (kind == GCCPK_COMMON
      && DECL_SIZE_UNIT (t)
      && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
    size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
  else
    size = 0;

  if (DECL_ONE_ONLY (t))
    comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
  else
    comdat = "";

  /* Emit the fixed-layout record; the 8- and 4-byte widths below are
     part of the plugin interface and must not change.  */
  lto_write_data (name, strlen (name) + 1);
  lto_write_data (comdat, strlen (comdat) + 1);
  c = (unsigned char) kind;
  lto_write_data (&c, 1);
  c = (unsigned char) visibility;
  lto_write_data (&c, 1);
  lto_write_data (&size, 8);
  lto_write_data (&slot_num, 4);
}
2653
/* Return true if NODE should appear in the plugin symbol table.  */

bool
output_symbol_p (symtab_node *node)
{
  struct cgraph_node *cnode;
  if (!node->real_symbol_p ())
    return false;
  /* We keep external functions in symtab for sake of inlining
     and devirtualization.  We do not want to see them in symbol table as
     references unless they are really used.  A function counts as used
     when it has at least one caller.  */
  cnode = dyn_cast <cgraph_node *> (node);
  if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
      && cnode->callers)
    return true;

 /* Ignore all references from external vars initializers - they are not really
    part of the compilation unit until they are used by folding.  Some symbols,
    like references to external construction vtables can not be referred to at all.
    We decide this at can_refer_decl_in_current_unit_p.  For such undefined
    symbols we only answer true when some non-alias reference comes from a
    function or from a non-external variable's initializer.  */
 if (!node->definition || DECL_EXTERNAL (node->decl))
    {
      int i;
      struct ipa_ref *ref;
      for (i = 0; node->iterate_referring (i, ref); i++)
	{
	  /* Aliases do not count as real uses.  */
	  if (ref->use == IPA_REF_ALIAS)
	    continue;
	  if (is_a <cgraph_node *> (ref->referring))
	    return true;
	  if (!DECL_EXTERNAL (ref->referring->decl))
	    return true;
	}
      return false;
    }
  /* Defined, non-external symbols are always listed.  */
  return true;
}
2691
2692
2693 /* Write an IL symbol table to OB.
2694 SET and VSET are cgraph/varpool node sets we are outputting. */
2695
2696 static void
2697 produce_symtab (struct output_block *ob)
2698 {
2699 struct streamer_tree_cache_d *cache = ob->writer_cache;
2700 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2701 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2702 lto_symtab_encoder_iterator lsei;
2703
2704 lto_begin_section (section_name, false);
2705 free (section_name);
2706
2707 hash_set<const char *> seen;
2708
2709 /* Write the symbol table.
2710 First write everything defined and then all declarations.
2711 This is necessary to handle cases where we have duplicated symbols. */
2712 for (lsei = lsei_start (encoder);
2713 !lsei_end_p (lsei); lsei_next (&lsei))
2714 {
2715 symtab_node *node = lsei_node (lsei);
2716
2717 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2718 continue;
2719 write_symbol (cache, node->decl, &seen, false);
2720 }
2721 for (lsei = lsei_start (encoder);
2722 !lsei_end_p (lsei); lsei_next (&lsei))
2723 {
2724 symtab_node *node = lsei_node (lsei);
2725
2726 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2727 continue;
2728 write_symbol (cache, node->decl, &seen, false);
2729 }
2730
2731 lto_end_section ();
2732 }
2733
2734
2735 /* Init the streamer_mode_table for output, where we collect info on what
2736 machine_mode values have been streamed. */
2737 void
2738 lto_output_init_mode_table (void)
2739 {
2740 memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
2741 }
2742
2743
/* Write the mode table: a bitpacked description of every machine_mode
   recorded in streamer_mode_table, terminated by VOIDmode, so an
   offload target can map host mode numbers to its own.  */
static void
lto_write_mode_table (void)
{
  struct output_block *ob;
  ob = create_output_block (LTO_section_mode_table);
  bitpack_d bp = bitpack_create (ob->main_stream);

  /* Ensure that for GET_MODE_INNER (m) != m we have
     also the inner mode marked.  */
  for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
    if (streamer_mode_table[i])
      {
	machine_mode m = (machine_mode) i;
	machine_mode inner_m = GET_MODE_INNER (m);
	if (inner_m != m)
	  streamer_mode_table[(int) inner_m] = 1;
      }
  /* First stream modes that have GET_MODE_INNER (m) == m,
     so that we can refer to them afterwards.  Pass 0 emits the
     self-inner modes, pass 1 the rest.  VOIDmode and BLKmode are
     never emitted; VOIDmode doubles as the table terminator.  */
  for (int pass = 0; pass < 2; pass++)
    for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
      if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
	{
	  machine_mode m = (machine_mode) i;
	  if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
	    continue;
	  /* Per-mode record: mode number, class, size, precision,
	     inner mode, nunits, class-specific extras, then name.
	     This order must match the reader.  */
	  bp_pack_value (&bp, m, 8);
	  bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
	  bp_pack_value (&bp, GET_MODE_SIZE (m), 8);
	  bp_pack_value (&bp, GET_MODE_PRECISION (m), 16);
	  bp_pack_value (&bp, GET_MODE_INNER (m), 8);
	  bp_pack_value (&bp, GET_MODE_NUNITS (m), 8);
	  switch (GET_MODE_CLASS (m))
	    {
	    case MODE_FRACT:
	    case MODE_UFRACT:
	    case MODE_ACCUM:
	    case MODE_UACCUM:
	      /* Fixed-point modes carry their integral/fractional bits.  */
	      bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
	      bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
	      break;
	    case MODE_FLOAT:
	    case MODE_DECIMAL_FLOAT:
	      /* Float modes are identified by their format name.  */
	      bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
	      break;
	    default:
	      break;
	    }
	  bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
	}
  /* Terminator record.  */
  bp_pack_value (&bp, VOIDmode, 8);

  streamer_write_bitpack (&bp);

  char *section_name
    = lto_get_section_name (LTO_section_mode_table, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header stream is computed here.  */
  struct lto_simple_header_with_strings header;
  memset (&header, 0, sizeof (header));

  /* Write the header.  */
  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();
  destroy_output_block (ob);
}
2824
2825
/* This pass is run after all of the functions are serialized and all
   of the IPA passes have written their serialized forms.  This pass
   causes the vector of all of the global decls and types used from
   this file to be written in to a section that can then be read in to
   recover these on other side.

   Section layout: lto_decl_header, a 32-bit decl-state count, the
   global out-decl state refs followed by the per-function ones, then
   the main stream and the string table.  */

void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* All alias pairs must have been resolved by now.  */
  gcc_assert (!alias_pairs);

  /* Get rid of the global decl state hash tables to save some memory.
     They are only needed while building the encoders, not for the
     actual streaming below.  */
  out_state = lto_get_out_decl_state ();
  for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
    if (out_state->streams[i].tree_hash_table)
      {
	delete out_state->streams[i].tree_hash_table;
	out_state->streams[i].tree_hash_table = NULL;
      }

  /* Write the global symbols: the global state first, then each
     function's state, into the main stream.  */
  lto_output_decl_state_streams (ob, out_state);
  num_fns = lto_function_decl_states.length ();
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended.  */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states.  */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  lto_write_data (&header, sizeof header);

  /* Write the main out-decl state, followed by out-decl states of
     functions.  */
  num_decl_states = num_fns + 1;
  lto_write_data (&num_decl_states, sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state = lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, fn_out_state);
    }

  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by linker to determine dependencies
     and thus we can skip it for WPA.  */
  if (!flag_wpa)
    produce_symtab (ob);

  /* Write command line opts.  */
  lto_write_options ();

  /* Deallocate memory and clean up.  */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
  /* The mode table is only needed for offload (accelerator) streams,
     where host and target mode numbering may differ.  */
  if (lto_stream_offload_p)
    lto_write_mode_table ();
}