]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/lto-streamer-out.c
lto-streamer-out.c (DFS::DFS_write_tree_body): Do not walk DECL_VINDEX.
[thirdparty/gcc.git] / gcc / lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2018 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-streamer.h"
34 #include "alias.h"
35 #include "stor-layout.h"
36 #include "gimple-iterator.h"
37 #include "except.h"
38 #include "lto-symtab.h"
39 #include "cgraph.h"
40 #include "cfgloop.h"
41 #include "builtins.h"
42 #include "gomp-constants.h"
43 #include "debug.h"
44 #include "omp-offload.h"
45 #include "print-tree.h"
46
47
48 static void lto_write_tree (struct output_block*, tree, bool);
49
50 /* Clear the line info stored in DATA_IN. */
51
52 static void
53 clear_line_info (struct output_block *ob)
54 {
55 ob->current_file = NULL;
56 ob->current_line = 0;
57 ob->current_col = 0;
58 ob->current_sysp = false;
59 }
60
61
62 /* Create the output block and return it. SECTION_TYPE is
63 LTO_section_function_body or LTO_static_initializer. */
64
65 struct output_block *
66 create_output_block (enum lto_section_type section_type)
67 {
68 struct output_block *ob = XCNEW (struct output_block);
69 if (streamer_dump_file)
70 fprintf (streamer_dump_file, "Creating output block for %s\n",
71 lto_section_name [section_type]);
72
73 ob->section_type = section_type;
74 ob->decl_state = lto_get_out_decl_state ();
75 ob->main_stream = XCNEW (struct lto_output_stream);
76 ob->string_stream = XCNEW (struct lto_output_stream);
77 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
78
79 if (section_type == LTO_section_function_body)
80 ob->cfg_stream = XCNEW (struct lto_output_stream);
81
82 clear_line_info (ob);
83
84 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
85 gcc_obstack_init (&ob->obstack);
86
87 return ob;
88 }
89
90
91 /* Destroy the output block OB. */
92
93 void
94 destroy_output_block (struct output_block *ob)
95 {
96 enum lto_section_type section_type = ob->section_type;
97
98 delete ob->string_hash_table;
99 ob->string_hash_table = NULL;
100
101 free (ob->main_stream);
102 free (ob->string_stream);
103 if (section_type == LTO_section_function_body)
104 free (ob->cfg_stream);
105
106 streamer_tree_cache_delete (ob->writer_cache);
107 obstack_free (&ob->obstack, NULL);
108
109 free (ob);
110 }
111
112
113 /* Look up NODE in the type table and write the index for it to OB. */
114
115 static void
116 output_type_ref (struct output_block *ob, tree node)
117 {
118 streamer_write_record_start (ob, LTO_type_ref);
119 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
120 }
121
122
123 /* Return true if tree node T is written to various tables. For these
124 nodes, we sometimes want to write their phyiscal representation
125 (via lto_output_tree), and sometimes we need to emit an index
126 reference into a table (via lto_output_tree_ref). */
127
128 static bool
129 tree_is_indexable (tree t)
130 {
131 /* Parameters and return values of functions of variably modified types
132 must go to global stream, because they may be used in the type
133 definition. */
134 if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
135 && DECL_CONTEXT (t))
136 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
137 /* IMPORTED_DECL is put into BLOCK and thus it never can be shared.
138 We should no longer need to stream it. */
139 else if (TREE_CODE (t) == IMPORTED_DECL)
140 gcc_unreachable ();
141 else if (TREE_CODE (t) == LABEL_DECL)
142 return FORCED_LABEL (t) || DECL_NONLOCAL (t);
143 else if (((VAR_P (t) && !TREE_STATIC (t))
144 || TREE_CODE (t) == TYPE_DECL
145 || TREE_CODE (t) == CONST_DECL
146 || TREE_CODE (t) == NAMELIST_DECL)
147 && decl_function_context (t))
148 return false;
149 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
150 return false;
151 /* Variably modified types need to be streamed alongside function
152 bodies because they can refer to local entities. Together with
153 them we have to localize their members as well.
154 ??? In theory that includes non-FIELD_DECLs as well. */
155 else if (TYPE_P (t)
156 && variably_modified_type_p (t, NULL_TREE))
157 return false;
158 else if (TREE_CODE (t) == FIELD_DECL
159 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
160 return false;
161 else
162 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
163 }
164
165
166 /* Output info about new location into bitpack BP.
167 After outputting bitpack, lto_output_location_data has
168 to be done to output actual data. */
169
170 void
171 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
172 location_t loc)
173 {
174 expanded_location xloc;
175
176 loc = LOCATION_LOCUS (loc);
177 bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
178 loc < RESERVED_LOCATION_COUNT
179 ? loc : RESERVED_LOCATION_COUNT);
180 if (loc < RESERVED_LOCATION_COUNT)
181 return;
182
183 xloc = expand_location (loc);
184
185 bp_pack_value (bp, ob->current_file != xloc.file, 1);
186 bp_pack_value (bp, ob->current_line != xloc.line, 1);
187 bp_pack_value (bp, ob->current_col != xloc.column, 1);
188
189 if (ob->current_file != xloc.file)
190 {
191 bp_pack_string (ob, bp, xloc.file, true);
192 bp_pack_value (bp, xloc.sysp, 1);
193 }
194 ob->current_file = xloc.file;
195 ob->current_sysp = xloc.sysp;
196
197 if (ob->current_line != xloc.line)
198 bp_pack_var_len_unsigned (bp, xloc.line);
199 ob->current_line = xloc.line;
200
201 if (ob->current_col != xloc.column)
202 bp_pack_var_len_unsigned (bp, xloc.column);
203 ob->current_col = xloc.column;
204 }
205
206
207 /* If EXPR is an indexable tree node, output a reference to it to
208 output block OB. Otherwise, output the physical representation of
209 EXPR to OB. */
210
211 static void
212 lto_output_tree_ref (struct output_block *ob, tree expr)
213 {
214 enum tree_code code;
215
216 if (TYPE_P (expr))
217 {
218 output_type_ref (ob, expr);
219 return;
220 }
221
222 code = TREE_CODE (expr);
223 switch (code)
224 {
225 case SSA_NAME:
226 streamer_write_record_start (ob, LTO_ssa_name_ref);
227 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
228 break;
229
230 case FIELD_DECL:
231 streamer_write_record_start (ob, LTO_field_decl_ref);
232 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
233 break;
234
235 case FUNCTION_DECL:
236 streamer_write_record_start (ob, LTO_function_decl_ref);
237 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
238 break;
239
240 case VAR_DECL:
241 case DEBUG_EXPR_DECL:
242 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
243 /* FALLTHRU */
244 case PARM_DECL:
245 streamer_write_record_start (ob, LTO_global_decl_ref);
246 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
247 break;
248
249 case CONST_DECL:
250 streamer_write_record_start (ob, LTO_const_decl_ref);
251 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
252 break;
253
254 case IMPORTED_DECL:
255 gcc_assert (decl_function_context (expr) == NULL);
256 streamer_write_record_start (ob, LTO_imported_decl_ref);
257 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
258 break;
259
260 case TYPE_DECL:
261 streamer_write_record_start (ob, LTO_type_decl_ref);
262 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
263 break;
264
265 case NAMELIST_DECL:
266 streamer_write_record_start (ob, LTO_namelist_decl_ref);
267 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
268 break;
269
270 case NAMESPACE_DECL:
271 streamer_write_record_start (ob, LTO_namespace_decl_ref);
272 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
273 break;
274
275 case LABEL_DECL:
276 streamer_write_record_start (ob, LTO_label_decl_ref);
277 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
278 break;
279
280 case RESULT_DECL:
281 streamer_write_record_start (ob, LTO_result_decl_ref);
282 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
283 break;
284
285 case TRANSLATION_UNIT_DECL:
286 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
287 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
288 break;
289
290 default:
291 /* No other node is indexable, so it should have been handled by
292 lto_output_tree. */
293 gcc_unreachable ();
294 }
295 }
296
297
298 /* Return true if EXPR is a tree node that can be written to disk. */
299
300 static inline bool
301 lto_is_streamable (tree expr)
302 {
303 enum tree_code code = TREE_CODE (expr);
304
305 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
306 name version in lto_output_tree_ref (see output_ssa_names). */
307 return !is_lang_specific (expr)
308 && code != SSA_NAME
309 && code != CALL_EXPR
310 && code != LANG_TYPE
311 && code != MODIFY_EXPR
312 && code != INIT_EXPR
313 && code != TARGET_EXPR
314 && code != BIND_EXPR
315 && code != WITH_CLEANUP_EXPR
316 && code != STATEMENT_LIST
317 && (code == CASE_LABEL_EXPR
318 || code == DECL_EXPR
319 || TREE_CODE_CLASS (code) != tcc_statement);
320 }
321
322 /* Very rough estimate of streaming size of the initializer. If we ignored
323 presence of strings, we could simply just count number of non-indexable
324 tree nodes and number of references to indexable nodes. Strings however
325 may be very large and we do not want to dump them int othe global stream.
326
327 Count the size of initializer until the size in DATA is positive. */
328
329 static tree
330 subtract_estimated_size (tree *tp, int *ws, void *data)
331 {
332 long *sum = (long *)data;
333 if (tree_is_indexable (*tp))
334 {
335 /* Indexable tree is one reference to global stream.
336 Guess it may be about 4 bytes. */
337 *sum -= 4;
338 *ws = 0;
339 }
340 /* String table entry + base of tree node needs to be streamed. */
341 if (TREE_CODE (*tp) == STRING_CST)
342 *sum -= TREE_STRING_LENGTH (*tp) + 8;
343 else
344 {
345 /* Identifiers are also variable length but should not appear
346 naked in constructor. */
347 gcc_checking_assert (TREE_CODE (*tp) != IDENTIFIER_NODE);
348 /* We do not really make attempt to work out size of pickled tree, as
349 it is very variable. Make it bigger than the reference. */
350 *sum -= 16;
351 }
352 if (*sum < 0)
353 return *tp;
354 return NULL_TREE;
355 }
356
357
358 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
359
360 static tree
361 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
362 {
363 gcc_checking_assert (DECL_P (expr)
364 && TREE_CODE (expr) != FUNCTION_DECL
365 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
366
367 /* Handle DECL_INITIAL for symbols. */
368 tree initial = DECL_INITIAL (expr);
369 if (VAR_P (expr)
370 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
371 && !DECL_IN_CONSTANT_POOL (expr)
372 && initial)
373 {
374 varpool_node *vnode;
375 /* Extra section needs about 30 bytes; do not produce it for simple
376 scalar values. */
377 if (!(vnode = varpool_node::get (expr))
378 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
379 initial = error_mark_node;
380 if (initial != error_mark_node)
381 {
382 long max_size = 30;
383 if (walk_tree (&initial, subtract_estimated_size, (void *)&max_size,
384 NULL))
385 initial = error_mark_node;
386 }
387 }
388
389 return initial;
390 }
391
392
393 /* Write a physical representation of tree node EXPR to output block
394 OB. If REF_P is true, the leaves of EXPR are emitted as references
395 via lto_output_tree_ref. IX is the index into the streamer cache
396 where EXPR is stored. */
397
398 static void
399 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
400 {
401 /* Pack all the non-pointer fields in EXPR into a bitpack and write
402 the resulting bitpack. */
403 streamer_write_tree_bitfields (ob, expr);
404
405 /* Write all the pointer fields in EXPR. */
406 streamer_write_tree_body (ob, expr, ref_p);
407
408 /* Write any LTO-specific data to OB. */
409 if (DECL_P (expr)
410 && TREE_CODE (expr) != FUNCTION_DECL
411 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
412 {
413 /* Handle DECL_INITIAL for symbols. */
414 tree initial = get_symbol_initial_value
415 (ob->decl_state->symtab_node_encoder, expr);
416 stream_write_tree (ob, initial, ref_p);
417 }
418
419 /* Stream references to early generated DIEs. Keep in sync with the
420 trees handled in dwarf2out_die_ref_for_decl. */
421 if ((DECL_P (expr)
422 && TREE_CODE (expr) != FIELD_DECL
423 && TREE_CODE (expr) != DEBUG_EXPR_DECL
424 && TREE_CODE (expr) != TYPE_DECL)
425 || TREE_CODE (expr) == BLOCK)
426 {
427 const char *sym;
428 unsigned HOST_WIDE_INT off;
429 if (debug_info_level > DINFO_LEVEL_NONE
430 && debug_hooks->die_ref_for_decl (expr, &sym, &off))
431 {
432 streamer_write_string (ob, ob->main_stream, sym, true);
433 streamer_write_uhwi (ob, off);
434 }
435 else
436 streamer_write_string (ob, ob->main_stream, NULL, true);
437 }
438 }
439
440 /* Write a physical representation of tree node EXPR to output block
441 OB. If REF_P is true, the leaves of EXPR are emitted as references
442 via lto_output_tree_ref. IX is the index into the streamer cache
443 where EXPR is stored. */
444
445 static void
446 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
447 {
448 if (!lto_is_streamable (expr))
449 internal_error ("tree code %qs is not supported in LTO streams",
450 get_tree_code_name (TREE_CODE (expr)));
451
452 /* Write the header, containing everything needed to materialize
453 EXPR on the reading side. */
454 streamer_write_tree_header (ob, expr);
455
456 lto_write_tree_1 (ob, expr, ref_p);
457
458 /* Mark the end of EXPR. */
459 streamer_write_zero (ob);
460 }
461
462 /* Emit the physical representation of tree node EXPR to output block OB,
463 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
464 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
465
466 static void
467 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
468 bool ref_p, bool this_ref_p)
469 {
470 unsigned ix;
471
472 gcc_checking_assert (expr != NULL_TREE
473 && !(this_ref_p && tree_is_indexable (expr)));
474
475 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
476 expr, hash, &ix);
477 gcc_assert (!exists_p);
478 if (TREE_CODE (expr) == INTEGER_CST
479 && !TREE_OVERFLOW (expr))
480 {
481 /* Shared INTEGER_CST nodes are special because they need their
482 original type to be materialized by the reader (to implement
483 TYPE_CACHED_VALUES). */
484 streamer_write_integer_cst (ob, expr, ref_p);
485 }
486 else
487 {
488 /* This is the first time we see EXPR, write its fields
489 to OB. */
490 lto_write_tree (ob, expr, ref_p);
491 }
492 }
493
494 class DFS
495 {
496 public:
497 DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
498 bool single_p);
499 ~DFS ();
500
501 struct scc_entry
502 {
503 tree t;
504 hashval_t hash;
505 };
506 vec<scc_entry> sccstack;
507
508 private:
509 struct sccs
510 {
511 unsigned int dfsnum;
512 unsigned int low;
513 };
514 struct worklist
515 {
516 tree expr;
517 sccs *from_state;
518 sccs *cstate;
519 bool ref_p;
520 bool this_ref_p;
521 };
522
523 static int scc_entry_compare (const void *, const void *);
524
525 void DFS_write_tree_body (struct output_block *ob,
526 tree expr, sccs *expr_state, bool ref_p);
527
528 void DFS_write_tree (struct output_block *ob, sccs *from_state,
529 tree expr, bool ref_p, bool this_ref_p);
530
531 hashval_t
532 hash_scc (struct output_block *ob, unsigned first, unsigned size,
533 bool ref_p, bool this_ref_p);
534
535 hash_map<tree, sccs *> sccstate;
536 vec<worklist> worklist_vec;
537 struct obstack sccstate_obstack;
538 };
539
540 /* Emit the physical representation of tree node EXPR to output block OB,
541 using depth-first search on the subgraph. If THIS_REF_P is true, the
542 leaves of EXPR are emitted as references via lto_output_tree_ref.
543 REF_P is used for streaming siblings of EXPR. If SINGLE_P is true,
544 this is for a rewalk of a single leaf SCC. */
545
546 DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
547 bool single_p)
548 {
549 unsigned int next_dfs_num = 1;
550 sccstack.create (0);
551 gcc_obstack_init (&sccstate_obstack);
552 worklist_vec = vNULL;
553 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
554 while (!worklist_vec.is_empty ())
555 {
556 worklist &w = worklist_vec.last ();
557 expr = w.expr;
558 sccs *from_state = w.from_state;
559 sccs *cstate = w.cstate;
560 ref_p = w.ref_p;
561 this_ref_p = w.this_ref_p;
562 if (cstate == NULL)
563 {
564 sccs **slot = &sccstate.get_or_insert (expr);
565 cstate = *slot;
566 if (cstate)
567 {
568 gcc_checking_assert (from_state);
569 if (cstate->dfsnum < from_state->dfsnum)
570 from_state->low = MIN (cstate->dfsnum, from_state->low);
571 worklist_vec.pop ();
572 continue;
573 }
574
575 scc_entry e = { expr, 0 };
576 /* Not yet visited. DFS recurse and push it onto the stack. */
577 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
578 sccstack.safe_push (e);
579 cstate->dfsnum = next_dfs_num++;
580 cstate->low = cstate->dfsnum;
581 w.cstate = cstate;
582
583 if (TREE_CODE (expr) == INTEGER_CST
584 && !TREE_OVERFLOW (expr))
585 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
586 else
587 {
588 DFS_write_tree_body (ob, expr, cstate, ref_p);
589
590 /* Walk any LTO-specific edges. */
591 if (DECL_P (expr)
592 && TREE_CODE (expr) != FUNCTION_DECL
593 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
594 {
595 /* Handle DECL_INITIAL for symbols. */
596 tree initial
597 = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
598 expr);
599 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
600 }
601 }
602 continue;
603 }
604
605 /* See if we found an SCC. */
606 if (cstate->low == cstate->dfsnum)
607 {
608 unsigned first, size;
609 tree x;
610
611 /* If we are re-walking a single leaf SCC just pop it,
612 let earlier worklist item access the sccstack. */
613 if (single_p)
614 {
615 worklist_vec.pop ();
616 continue;
617 }
618
619 /* Pop the SCC and compute its size. */
620 first = sccstack.length ();
621 do
622 {
623 x = sccstack[--first].t;
624 }
625 while (x != expr);
626 size = sccstack.length () - first;
627
628 /* No need to compute hashes for LTRANS units, we don't perform
629 any merging there. */
630 hashval_t scc_hash = 0;
631 unsigned scc_entry_len = 0;
632 if (!flag_wpa)
633 {
634 scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);
635
636 /* Put the entries with the least number of collisions first. */
637 unsigned entry_start = 0;
638 scc_entry_len = size + 1;
639 for (unsigned i = 0; i < size;)
640 {
641 unsigned from = i;
642 for (i = i + 1; i < size
643 && (sccstack[first + i].hash
644 == sccstack[first + from].hash); ++i)
645 ;
646 if (i - from < scc_entry_len)
647 {
648 scc_entry_len = i - from;
649 entry_start = from;
650 }
651 }
652 for (unsigned i = 0; i < scc_entry_len; ++i)
653 std::swap (sccstack[first + i],
654 sccstack[first + entry_start + i]);
655
656 /* We already sorted SCC deterministically in hash_scc. */
657
658 /* Check that we have only one SCC.
659 Naturally we may have conflicts if hash function is not
660 strong enough. Lets see how far this gets. */
661 gcc_checking_assert (scc_entry_len == 1);
662 }
663
664 /* Write LTO_tree_scc. */
665 streamer_write_record_start (ob, LTO_tree_scc);
666 streamer_write_uhwi (ob, size);
667 streamer_write_uhwi (ob, scc_hash);
668
669 /* Write size-1 SCCs without wrapping them inside SCC bundles.
670 All INTEGER_CSTs need to be handled this way as we need
671 their type to materialize them. Also builtins are handled
672 this way.
673 ??? We still wrap these in LTO_tree_scc so at the
674 input side we can properly identify the tree we want
675 to ultimatively return. */
676 if (size == 1)
677 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
678 else
679 {
680 /* Write the size of the SCC entry candidates. */
681 streamer_write_uhwi (ob, scc_entry_len);
682
683 /* Write all headers and populate the streamer cache. */
684 for (unsigned i = 0; i < size; ++i)
685 {
686 hashval_t hash = sccstack[first+i].hash;
687 tree t = sccstack[first+i].t;
688 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
689 t, hash, NULL);
690 gcc_assert (!exists_p);
691
692 if (!lto_is_streamable (t))
693 internal_error ("tree code %qs is not supported "
694 "in LTO streams",
695 get_tree_code_name (TREE_CODE (t)));
696
697 /* Write the header, containing everything needed to
698 materialize EXPR on the reading side. */
699 streamer_write_tree_header (ob, t);
700 }
701
702 /* Write the bitpacks and tree references. */
703 for (unsigned i = 0; i < size; ++i)
704 {
705 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
706
707 /* Mark the end of the tree. */
708 streamer_write_zero (ob);
709 }
710 }
711
712 /* Finally truncate the vector. */
713 sccstack.truncate (first);
714
715 if (from_state)
716 from_state->low = MIN (from_state->low, cstate->low);
717 worklist_vec.pop ();
718 continue;
719 }
720
721 gcc_checking_assert (from_state);
722 from_state->low = MIN (from_state->low, cstate->low);
723 if (cstate->dfsnum < from_state->dfsnum)
724 from_state->low = MIN (cstate->dfsnum, from_state->low);
725 worklist_vec.pop ();
726 }
727 worklist_vec.release ();
728 }
729
730 DFS::~DFS ()
731 {
732 sccstack.release ();
733 obstack_free (&sccstate_obstack, NULL);
734 }
735
736 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
737 DFS recurse for all tree edges originating from it. */
738
739 void
740 DFS::DFS_write_tree_body (struct output_block *ob,
741 tree expr, sccs *expr_state, bool ref_p)
742 {
743 #define DFS_follow_tree_edge(DEST) \
744 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
745
746 enum tree_code code;
747
748 if (streamer_dump_file)
749 {
750 print_node_brief (streamer_dump_file, " Streaming ",
751 expr, 4);
752 fprintf (streamer_dump_file, " to %s\n",
753 lto_section_name [ob->section_type]);
754 }
755
756 code = TREE_CODE (expr);
757
758 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
759 {
760 if (TREE_CODE (expr) != IDENTIFIER_NODE)
761 DFS_follow_tree_edge (TREE_TYPE (expr));
762 }
763
764 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
765 {
766 unsigned int count = vector_cst_encoded_nelts (expr);
767 for (unsigned int i = 0; i < count; ++i)
768 DFS_follow_tree_edge (VECTOR_CST_ENCODED_ELT (expr, i));
769 }
770
771 if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
772 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
773 DFS_follow_tree_edge (POLY_INT_CST_COEFF (expr, i));
774
775 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
776 {
777 DFS_follow_tree_edge (TREE_REALPART (expr));
778 DFS_follow_tree_edge (TREE_IMAGPART (expr));
779 }
780
781 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
782 {
783 /* Drop names that were created for anonymous entities. */
784 if (DECL_NAME (expr)
785 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
786 && anon_aggrname_p (DECL_NAME (expr)))
787 ;
788 else
789 DFS_follow_tree_edge (DECL_NAME (expr));
790 if (TREE_CODE (expr) != TRANSLATION_UNIT_DECL
791 && ! DECL_CONTEXT (expr))
792 DFS_follow_tree_edge ((*all_translation_units)[0]);
793 else
794 DFS_follow_tree_edge (DECL_CONTEXT (expr));
795 }
796
797 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
798 {
799 DFS_follow_tree_edge (DECL_SIZE (expr));
800 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
801
802 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
803 special handling in LTO, it must be handled by streamer hooks. */
804
805 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
806
807 /* We use DECL_ABSTRACT_ORIGIN == error_mark_node to mark
808 declarations which should be eliminated by decl merging. Be sure none
809 leaks to this point. */
810 gcc_assert (DECL_ABSTRACT_ORIGIN (expr) != error_mark_node);
811 DFS_follow_tree_edge (DECL_ABSTRACT_ORIGIN (expr));
812
813 if ((VAR_P (expr)
814 || TREE_CODE (expr) == PARM_DECL)
815 && DECL_HAS_VALUE_EXPR_P (expr))
816 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
817 if (VAR_P (expr)
818 && DECL_HAS_DEBUG_EXPR_P (expr))
819 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
820 }
821
822 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
823 {
824 /* Make sure we don't inadvertently set the assembler name. */
825 if (DECL_ASSEMBLER_NAME_SET_P (expr))
826 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
827 }
828
829 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
830 {
831 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
832 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
833 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
834 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
835 gcc_checking_assert (!DECL_FCONTEXT (expr));
836 }
837
838 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
839 {
840 gcc_checking_assert (DECL_VINDEX (expr) == NULL);
841 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
842 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
843 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
844 }
845
846 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
847 {
848 DFS_follow_tree_edge (TYPE_SIZE (expr));
849 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
850 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
851 DFS_follow_tree_edge (TYPE_NAME (expr));
852 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
853 reconstructed during fixup. */
854 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
855 during fixup. */
856 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
857 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
858 /* TYPE_CANONICAL is re-computed during type merging, so no need
859 to follow it here. */
860 DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
861 }
862
863 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
864 {
865 if (TREE_CODE (expr) == ENUMERAL_TYPE)
866 DFS_follow_tree_edge (TYPE_VALUES (expr));
867 else if (TREE_CODE (expr) == ARRAY_TYPE)
868 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
869 else if (RECORD_OR_UNION_TYPE_P (expr))
870 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
871 DFS_follow_tree_edge (t);
872 else if (TREE_CODE (expr) == FUNCTION_TYPE
873 || TREE_CODE (expr) == METHOD_TYPE)
874 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
875
876 if (!POINTER_TYPE_P (expr))
877 DFS_follow_tree_edge (TYPE_MIN_VALUE_RAW (expr));
878 DFS_follow_tree_edge (TYPE_MAX_VALUE_RAW (expr));
879 }
880
881 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
882 {
883 DFS_follow_tree_edge (TREE_PURPOSE (expr));
884 DFS_follow_tree_edge (TREE_VALUE (expr));
885 DFS_follow_tree_edge (TREE_CHAIN (expr));
886 }
887
888 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
889 {
890 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
891 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
892 }
893
894 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
895 {
896 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
897 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
898 DFS_follow_tree_edge (TREE_BLOCK (expr));
899 }
900
901 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
902 {
903 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
904 {
905 /* We would have to stream externals in the block chain as
906 non-references but we should have dropped them in
907 free-lang-data. */
908 gcc_assert (!VAR_OR_FUNCTION_DECL_P (t) || !DECL_EXTERNAL (t));
909 DFS_follow_tree_edge (t);
910 }
911
912 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
913 DFS_follow_tree_edge (BLOCK_ABSTRACT_ORIGIN (expr));
914
915 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
916 information for early inlined BLOCKs so drop it on the floor instead
917 of ICEing in dwarf2out.c. */
918
919 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
920 streaming time. */
921
922 /* Do not output BLOCK_SUBBLOCKS. Instead on streaming-in this
923 list is re-constructed from BLOCK_SUPERCONTEXT. */
924 }
925
926 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
927 {
928 unsigned i;
929 tree t;
930
931 /* Note that the number of BINFO slots has already been emitted in
932 EXPR's header (see streamer_write_tree_header) because this length
933 is needed to build the empty BINFO node on the reader side. */
934 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
935 DFS_follow_tree_edge (t);
936 DFS_follow_tree_edge (BINFO_OFFSET (expr));
937 DFS_follow_tree_edge (BINFO_VTABLE (expr));
938
939 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX,
940 BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
941 by C++ FE only. */
942 }
943
944 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
945 {
946 unsigned i;
947 tree index, value;
948
949 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
950 {
951 DFS_follow_tree_edge (index);
952 DFS_follow_tree_edge (value);
953 }
954 }
955
956 if (code == OMP_CLAUSE)
957 {
958 int i;
959 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
960 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
961 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
962 }
963
964 #undef DFS_follow_tree_edge
965 }
966
967 /* Return a hash value for the tree T.
968 CACHE holds hash values of trees outside current SCC. MAP, if non-NULL,
969 may hold hash values if trees inside current SCC. */
970
971 static hashval_t
972 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
973 {
974 inchash::hash hstate;
975
976 #define visit(SIBLING) \
977 do { \
978 unsigned ix; \
979 if (!SIBLING) \
980 hstate.add_int (0); \
981 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
982 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
983 else if (map) \
984 hstate.add_int (*map->get (SIBLING)); \
985 else \
986 hstate.add_int (1); \
987 } while (0)
988
989 /* Hash TS_BASE. */
990 enum tree_code code = TREE_CODE (t);
991 hstate.add_int (code);
992 if (!TYPE_P (t))
993 {
994 hstate.add_flag (TREE_SIDE_EFFECTS (t));
995 hstate.add_flag (TREE_CONSTANT (t));
996 hstate.add_flag (TREE_READONLY (t));
997 hstate.add_flag (TREE_PUBLIC (t));
998 }
999 hstate.add_flag (TREE_ADDRESSABLE (t));
1000 hstate.add_flag (TREE_THIS_VOLATILE (t));
1001 if (DECL_P (t))
1002 hstate.add_flag (DECL_UNSIGNED (t));
1003 else if (TYPE_P (t))
1004 hstate.add_flag (TYPE_UNSIGNED (t));
1005 if (TYPE_P (t))
1006 hstate.add_flag (TYPE_ARTIFICIAL (t));
1007 else
1008 hstate.add_flag (TREE_NO_WARNING (t));
1009 hstate.add_flag (TREE_NOTHROW (t));
1010 hstate.add_flag (TREE_STATIC (t));
1011 hstate.add_flag (TREE_PROTECTED (t));
1012 hstate.add_flag (TREE_DEPRECATED (t));
1013 if (code != TREE_BINFO)
1014 hstate.add_flag (TREE_PRIVATE (t));
1015 if (TYPE_P (t))
1016 {
1017 hstate.add_flag (AGGREGATE_TYPE_P (t)
1018 ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
1019 hstate.add_flag (TYPE_ADDR_SPACE (t));
1020 }
1021 else if (code == SSA_NAME)
1022 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
1023 hstate.commit_flag ();
1024
1025 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1026 hstate.add_wide_int (wi::to_widest (t));
1027
1028 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1029 {
1030 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
1031 hstate.add_flag (r.cl);
1032 hstate.add_flag (r.sign);
1033 hstate.add_flag (r.signalling);
1034 hstate.add_flag (r.canonical);
1035 hstate.commit_flag ();
1036 hstate.add_int (r.uexp);
1037 hstate.add (r.sig, sizeof (r.sig));
1038 }
1039
1040 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1041 {
1042 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
1043 hstate.add_int (f.mode);
1044 hstate.add_int (f.data.low);
1045 hstate.add_int (f.data.high);
1046 }
1047
1048 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1049 {
1050 hstate.add_hwi (DECL_MODE (t));
1051 hstate.add_flag (DECL_NONLOCAL (t));
1052 hstate.add_flag (DECL_VIRTUAL_P (t));
1053 hstate.add_flag (DECL_IGNORED_P (t));
1054 hstate.add_flag (DECL_ABSTRACT_P (t));
1055 hstate.add_flag (DECL_ARTIFICIAL (t));
1056 hstate.add_flag (DECL_USER_ALIGN (t));
1057 hstate.add_flag (DECL_PRESERVE_P (t));
1058 hstate.add_flag (DECL_EXTERNAL (t));
1059 hstate.add_flag (DECL_GIMPLE_REG_P (t));
1060 hstate.commit_flag ();
1061 hstate.add_int (DECL_ALIGN (t));
1062 if (code == LABEL_DECL)
1063 {
1064 hstate.add_int (EH_LANDING_PAD_NR (t));
1065 hstate.add_int (LABEL_DECL_UID (t));
1066 }
1067 else if (code == FIELD_DECL)
1068 {
1069 hstate.add_flag (DECL_PACKED (t));
1070 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1071 hstate.add_flag (DECL_PADDING_P (t));
1072 hstate.add_int (DECL_OFFSET_ALIGN (t));
1073 }
1074 else if (code == VAR_DECL)
1075 {
1076 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1077 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1078 }
1079 if (code == RESULT_DECL
1080 || code == PARM_DECL
1081 || code == VAR_DECL)
1082 {
1083 hstate.add_flag (DECL_BY_REFERENCE (t));
1084 if (code == VAR_DECL
1085 || code == PARM_DECL)
1086 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1087 }
1088 hstate.commit_flag ();
1089 }
1090
1091 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1092 hstate.add_int (DECL_REGISTER (t));
1093
1094 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1095 {
1096 hstate.add_flag (DECL_COMMON (t));
1097 hstate.add_flag (DECL_DLLIMPORT_P (t));
1098 hstate.add_flag (DECL_WEAK (t));
1099 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1100 hstate.add_flag (DECL_COMDAT (t));
1101 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1102 hstate.add_int (DECL_VISIBILITY (t));
1103 if (code == VAR_DECL)
1104 {
1105 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1106 hstate.add_flag (DECL_HARD_REGISTER (t));
1107 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1108 }
1109 if (TREE_CODE (t) == FUNCTION_DECL)
1110 {
1111 hstate.add_flag (DECL_FINAL_P (t));
1112 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1113 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1114 }
1115 hstate.commit_flag ();
1116 }
1117
1118 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1119 {
1120 hstate.add_int (DECL_BUILT_IN_CLASS (t));
1121 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1122 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1123 hstate.add_flag (DECL_UNINLINABLE (t));
1124 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1125 hstate.add_flag (DECL_IS_NOVOPS (t));
1126 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1127 hstate.add_flag (DECL_IS_MALLOC (t));
1128 hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
1129 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1130 hstate.add_flag (DECL_STATIC_CHAIN (t));
1131 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1132 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1133 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1134 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1135 hstate.add_flag (DECL_PURE_P (t));
1136 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1137 hstate.commit_flag ();
1138 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1139 hstate.add_int (DECL_FUNCTION_CODE (t));
1140 }
1141
1142 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1143 {
1144 hstate.add_hwi (TYPE_MODE (t));
1145 hstate.add_flag (TYPE_STRING_FLAG (t));
1146 /* TYPE_NO_FORCE_BLK is private to stor-layout and need
1147 no streaming. */
1148 hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
1149 hstate.add_flag (TYPE_PACKED (t));
1150 hstate.add_flag (TYPE_RESTRICT (t));
1151 hstate.add_flag (TYPE_USER_ALIGN (t));
1152 hstate.add_flag (TYPE_READONLY (t));
1153 if (RECORD_OR_UNION_TYPE_P (t))
1154 {
1155 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1156 hstate.add_flag (TYPE_FINAL_P (t));
1157 }
1158 else if (code == ARRAY_TYPE)
1159 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1160 if (AGGREGATE_TYPE_P (t))
1161 hstate.add_flag (TYPE_TYPELESS_STORAGE (t));
1162 hstate.commit_flag ();
1163 hstate.add_int (TYPE_PRECISION (t));
1164 hstate.add_int (TYPE_ALIGN (t));
1165 hstate.add_int (TYPE_EMPTY_P (t));
1166 }
1167
1168 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1169 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1170 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1171
1172 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1173 /* We don't stream these when passing things to a different target. */
1174 && !lto_stream_offload_p)
1175 hstate.add_hwi (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1176
1177 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1178 hstate.add_hwi (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1179
1180 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1181 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1182
1183 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1184 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1185
1186 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1187 {
1188 if (code != IDENTIFIER_NODE)
1189 visit (TREE_TYPE (t));
1190 }
1191
1192 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1193 {
1194 unsigned int count = vector_cst_encoded_nelts (t);
1195 for (unsigned int i = 0; i < count; ++i)
1196 visit (VECTOR_CST_ENCODED_ELT (t, i));
1197 }
1198
1199 if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
1200 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1201 visit (POLY_INT_CST_COEFF (t, i));
1202
1203 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1204 {
1205 visit (TREE_REALPART (t));
1206 visit (TREE_IMAGPART (t));
1207 }
1208
1209 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1210 {
1211 /* Drop names that were created for anonymous entities. */
1212 if (DECL_NAME (t)
1213 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1214 && anon_aggrname_p (DECL_NAME (t)))
1215 ;
1216 else
1217 visit (DECL_NAME (t));
1218 if (DECL_FILE_SCOPE_P (t))
1219 ;
1220 else
1221 visit (DECL_CONTEXT (t));
1222 }
1223
1224 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1225 {
1226 visit (DECL_SIZE (t));
1227 visit (DECL_SIZE_UNIT (t));
1228 visit (DECL_ATTRIBUTES (t));
1229 if ((code == VAR_DECL
1230 || code == PARM_DECL)
1231 && DECL_HAS_VALUE_EXPR_P (t))
1232 visit (DECL_VALUE_EXPR (t));
1233 if (code == VAR_DECL
1234 && DECL_HAS_DEBUG_EXPR_P (t))
1235 visit (DECL_DEBUG_EXPR (t));
1236 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
1237 be able to call get_symbol_initial_value. */
1238 }
1239
1240 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1241 {
1242 if (DECL_ASSEMBLER_NAME_SET_P (t))
1243 visit (DECL_ASSEMBLER_NAME (t));
1244 }
1245
1246 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1247 {
1248 visit (DECL_FIELD_OFFSET (t));
1249 visit (DECL_BIT_FIELD_TYPE (t));
1250 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1251 visit (DECL_FIELD_BIT_OFFSET (t));
1252 }
1253
1254 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1255 {
1256 visit (DECL_FUNCTION_PERSONALITY (t));
1257 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1258 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1259 }
1260
1261 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1262 {
1263 visit (TYPE_SIZE (t));
1264 visit (TYPE_SIZE_UNIT (t));
1265 visit (TYPE_ATTRIBUTES (t));
1266 visit (TYPE_NAME (t));
1267 visit (TYPE_MAIN_VARIANT (t));
1268 if (TYPE_FILE_SCOPE_P (t))
1269 ;
1270 else
1271 visit (TYPE_CONTEXT (t));
1272 visit (TYPE_STUB_DECL (t));
1273 }
1274
1275 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1276 {
1277 if (code == ENUMERAL_TYPE)
1278 visit (TYPE_VALUES (t));
1279 else if (code == ARRAY_TYPE)
1280 visit (TYPE_DOMAIN (t));
1281 else if (RECORD_OR_UNION_TYPE_P (t))
1282 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1283 visit (f);
1284 else if (code == FUNCTION_TYPE
1285 || code == METHOD_TYPE)
1286 visit (TYPE_ARG_TYPES (t));
1287 if (!POINTER_TYPE_P (t))
1288 visit (TYPE_MIN_VALUE_RAW (t));
1289 visit (TYPE_MAX_VALUE_RAW (t));
1290 }
1291
1292 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1293 {
1294 visit (TREE_PURPOSE (t));
1295 visit (TREE_VALUE (t));
1296 visit (TREE_CHAIN (t));
1297 }
1298
1299 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1300 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1301 visit (TREE_VEC_ELT (t, i));
1302
1303 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1304 {
1305 hstate.add_hwi (TREE_OPERAND_LENGTH (t));
1306 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1307 visit (TREE_OPERAND (t, i));
1308 }
1309
1310 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1311 {
1312 unsigned i;
1313 tree b;
1314 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1315 visit (b);
1316 visit (BINFO_OFFSET (t));
1317 visit (BINFO_VTABLE (t));
1318 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1319 BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
1320 by C++ FE only. */
1321 }
1322
1323 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1324 {
1325 unsigned i;
1326 tree index, value;
1327 hstate.add_hwi (CONSTRUCTOR_NELTS (t));
1328 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1329 {
1330 visit (index);
1331 visit (value);
1332 }
1333 }
1334
1335 if (code == OMP_CLAUSE)
1336 {
1337 int i;
1338 HOST_WIDE_INT val;
1339
1340 hstate.add_hwi (OMP_CLAUSE_CODE (t));
1341 switch (OMP_CLAUSE_CODE (t))
1342 {
1343 case OMP_CLAUSE_DEFAULT:
1344 val = OMP_CLAUSE_DEFAULT_KIND (t);
1345 break;
1346 case OMP_CLAUSE_SCHEDULE:
1347 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1348 break;
1349 case OMP_CLAUSE_DEPEND:
1350 val = OMP_CLAUSE_DEPEND_KIND (t);
1351 break;
1352 case OMP_CLAUSE_MAP:
1353 val = OMP_CLAUSE_MAP_KIND (t);
1354 break;
1355 case OMP_CLAUSE_PROC_BIND:
1356 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1357 break;
1358 case OMP_CLAUSE_REDUCTION:
1359 val = OMP_CLAUSE_REDUCTION_CODE (t);
1360 break;
1361 default:
1362 val = 0;
1363 break;
1364 }
1365 hstate.add_hwi (val);
1366 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1367 visit (OMP_CLAUSE_OPERAND (t, i));
1368 visit (OMP_CLAUSE_CHAIN (t));
1369 }
1370
1371 return hstate.end ();
1372
1373 #undef visit
1374 }
1375
1376 /* Compare two SCC entries by their hash value for qsorting them. */
1377
1378 int
1379 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1380 {
1381 const scc_entry *p1 = (const scc_entry *) p1_;
1382 const scc_entry *p2 = (const scc_entry *) p2_;
1383 if (p1->hash < p2->hash)
1384 return -1;
1385 else if (p1->hash > p2->hash)
1386 return 1;
1387 return 0;
1388 }
1389
1390 /* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
1391 THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST. */
1392
1393 hashval_t
1394 DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
1395 bool ref_p, bool this_ref_p)
1396 {
1397 unsigned int last_classes = 0, iterations = 0;
1398
1399 /* Compute hash values for the SCC members. */
1400 for (unsigned i = 0; i < size; ++i)
1401 sccstack[first+i].hash
1402 = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);
1403
1404 if (size == 1)
1405 return sccstack[first].hash;
1406
1407 /* We aim to get unique hash for every tree within SCC and compute hash value
1408 of the whole SCC by combining all values together in a stable (entry-point
1409 independent) order. This guarantees that the same SCC regions within
1410 different translation units will get the same hash values and therefore
1411 will be merged at WPA time.
1412
1413 Often the hashes are already unique. In that case we compute the SCC hash
1414 by combining individual hash values in an increasing order.
1415
1416 If there are duplicates, we seek at least one tree with unique hash (and
1417 pick one with minimal hash and this property). Then we obtain a stable
1418 order by DFS walk starting from this unique tree and then use the index
1419 within this order to make individual hash values unique.
1420
1421 If there is no tree with unique hash, we iteratively propagate the hash
1422 values across the internal edges of SCC. This usually quickly leads
1423 to unique hashes. Consider, for example, an SCC containing two pointers
1424 that are identical except for the types they point to and assume that
1425 these types are also part of the SCC. The propagation will add the
1426 points-to type information into their hash values. */
1427 do
1428 {
1429 /* Sort the SCC so we can easily check for uniqueness. */
1430 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1431
1432 unsigned int classes = 1;
1433 int firstunique = -1;
1434
1435 /* Find the tree with lowest unique hash (if it exists) and compute
1436 the number of equivalence classes. */
1437 if (sccstack[first].hash != sccstack[first+1].hash)
1438 firstunique = 0;
1439 for (unsigned i = 1; i < size; ++i)
1440 if (sccstack[first+i-1].hash != sccstack[first+i].hash)
1441 {
1442 classes++;
1443 if (firstunique == -1
1444 && (i == size - 1
1445 || sccstack[first+i+1].hash != sccstack[first+i].hash))
1446 firstunique = i;
1447 }
1448
1449 /* If we found a tree with unique hash, stop the iteration. */
1450 if (firstunique != -1
1451 /* Also terminate if we run out of iterations or if the number of
1452 equivalence classes is no longer increasing.
1453 For example a cyclic list of trees that are all equivalent will
1454 never have unique entry point; we however do not build such SCCs
1455 in our IL. */
1456 || classes <= last_classes || iterations > 16)
1457 {
1458 hashval_t scc_hash;
1459
1460 /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
1461 starting from FIRSTUNIQUE to obtain a stable order. */
1462 if (classes != size && firstunique != -1)
1463 {
1464 hash_map <tree, hashval_t> map(size*2);
1465
1466 /* Store hash values into a map, so we can associate them with
1467 the reordered SCC. */
1468 for (unsigned i = 0; i < size; ++i)
1469 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1470
1471 DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
1472 true);
1473 gcc_assert (again.sccstack.length () == size);
1474
1475 memcpy (sccstack.address () + first,
1476 again.sccstack.address (),
1477 sizeof (scc_entry) * size);
1478
1479 /* Update hash values of individual members by hashing in the
1480 index within the stable order. This ensures uniqueness.
1481 Also compute the SCC hash by mixing in all hash values in
1482 the stable order we obtained. */
1483 sccstack[first].hash = *map.get (sccstack[first].t);
1484 scc_hash = sccstack[first].hash;
1485 for (unsigned i = 1; i < size; ++i)
1486 {
1487 sccstack[first+i].hash
1488 = iterative_hash_hashval_t (i,
1489 *map.get (sccstack[first+i].t));
1490 scc_hash
1491 = iterative_hash_hashval_t (scc_hash,
1492 sccstack[first+i].hash);
1493 }
1494 }
1495 /* If we got a unique hash value for each tree, then sort already
1496 ensured entry-point independent order. Only compute the final
1497 SCC hash.
1498
1499 If we failed to find the unique entry point, we go by the same
1500 route. We will eventually introduce unwanted hash conflicts. */
1501 else
1502 {
1503 scc_hash = sccstack[first].hash;
1504 for (unsigned i = 1; i < size; ++i)
1505 scc_hash
1506 = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);
1507
1508 /* We cannot 100% guarantee that the hash won't conflict so as
1509 to make it impossible to find a unique hash. This however
1510 should be an extremely rare case. ICE for now so possible
1511 issues are found and evaluated. */
1512 gcc_checking_assert (classes == size);
1513 }
1514
1515 /* To avoid conflicts across SCCs, iteratively hash the whole SCC
1516 hash into the hash of each element. */
1517 for (unsigned i = 0; i < size; ++i)
1518 sccstack[first+i].hash
1519 = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
1520 return scc_hash;
1521 }
1522
1523 last_classes = classes;
1524 iterations++;
1525
1526 /* We failed to identify the entry point; propagate hash values across
1527 the edges. */
1528 hash_map <tree, hashval_t> map(size*2);
1529
1530 for (unsigned i = 0; i < size; ++i)
1531 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1532
1533 for (unsigned i = 0; i < size; i++)
1534 sccstack[first+i].hash
1535 = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
1536 }
1537 while (true);
1538 }
1539
1540 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1541 already in the streamer cache. Main routine called for
1542 each visit of EXPR. */
1543
1544 void
1545 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1546 tree expr, bool ref_p, bool this_ref_p)
1547 {
1548 /* Handle special cases. */
1549 if (expr == NULL_TREE)
1550 return;
1551
1552 /* Do not DFS walk into indexable trees. */
1553 if (this_ref_p && tree_is_indexable (expr))
1554 return;
1555
1556 /* Check if we already streamed EXPR. */
1557 if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1558 return;
1559
1560 worklist w;
1561 w.expr = expr;
1562 w.from_state = from_state;
1563 w.cstate = NULL;
1564 w.ref_p = ref_p;
1565 w.this_ref_p = this_ref_p;
1566 worklist_vec.safe_push (w);
1567 }
1568
1569
1570 /* Emit the physical representation of tree node EXPR to output block OB.
1571 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
1572 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1573
1574 void
1575 lto_output_tree (struct output_block *ob, tree expr,
1576 bool ref_p, bool this_ref_p)
1577 {
1578 unsigned ix;
1579 bool existed_p;
1580
1581 if (expr == NULL_TREE)
1582 {
1583 streamer_write_record_start (ob, LTO_null);
1584 return;
1585 }
1586
1587 if (this_ref_p && tree_is_indexable (expr))
1588 {
1589 lto_output_tree_ref (ob, expr);
1590 return;
1591 }
1592
1593 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1594 if (existed_p)
1595 {
1596 /* If a node has already been streamed out, make sure that
1597 we don't write it more than once. Otherwise, the reader
1598 will instantiate two different nodes for the same object. */
1599 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1600 streamer_write_uhwi (ob, ix);
1601 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1602 lto_tree_code_to_tag (TREE_CODE (expr)));
1603 lto_stats.num_pickle_refs_output++;
1604 }
1605 else
1606 {
1607 /* This is the first time we see EXPR, write all reachable
1608 trees to OB. */
1609 static bool in_dfs_walk;
1610
1611 /* Protect against recursion which means disconnect between
1612 what tree edges we walk in the DFS walk and what edges
1613 we stream out. */
1614 gcc_assert (!in_dfs_walk);
1615
1616 if (streamer_dump_file)
1617 {
1618 print_node_brief (streamer_dump_file, " Streaming SCC of ",
1619 expr, 4);
1620 fprintf (streamer_dump_file, "\n");
1621 }
1622
1623 /* Start the DFS walk. */
1624 /* Save ob state ... */
1625 /* let's see ... */
1626 in_dfs_walk = true;
1627 DFS (ob, expr, ref_p, this_ref_p, false);
1628 in_dfs_walk = false;
1629
1630 /* Finally append a reference to the tree we were writing.
1631 ??? If expr ended up as a singleton we could have
1632 inlined it here and avoid outputting a reference. */
1633 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1634 gcc_assert (existed_p);
1635 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1636 streamer_write_uhwi (ob, ix);
1637 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1638 lto_tree_code_to_tag (TREE_CODE (expr)));
1639 if (streamer_dump_file)
1640 {
1641 print_node_brief (streamer_dump_file, " Finished SCC of ",
1642 expr, 4);
1643 fprintf (streamer_dump_file, "\n\n");
1644 }
1645 lto_stats.num_pickle_refs_output++;
1646 }
1647 }
1648
1649
1650 /* Output to OB a list of try/catch handlers starting with FIRST. */
1651
1652 static void
1653 output_eh_try_list (struct output_block *ob, eh_catch first)
1654 {
1655 eh_catch n;
1656
1657 for (n = first; n; n = n->next_catch)
1658 {
1659 streamer_write_record_start (ob, LTO_eh_catch);
1660 stream_write_tree (ob, n->type_list, true);
1661 stream_write_tree (ob, n->filter_list, true);
1662 stream_write_tree (ob, n->label, true);
1663 }
1664
1665 streamer_write_record_start (ob, LTO_null);
1666 }
1667
1668
1669 /* Output EH region R in function FN to OB. CURR_RN is the slot index
1670 that is being emitted in FN->EH->REGION_ARRAY. This is used to
1671 detect EH region sharing. */
1672
1673 static void
1674 output_eh_region (struct output_block *ob, eh_region r)
1675 {
1676 enum LTO_tags tag;
1677
1678 if (r == NULL)
1679 {
1680 streamer_write_record_start (ob, LTO_null);
1681 return;
1682 }
1683
1684 if (r->type == ERT_CLEANUP)
1685 tag = LTO_ert_cleanup;
1686 else if (r->type == ERT_TRY)
1687 tag = LTO_ert_try;
1688 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1689 tag = LTO_ert_allowed_exceptions;
1690 else if (r->type == ERT_MUST_NOT_THROW)
1691 tag = LTO_ert_must_not_throw;
1692 else
1693 gcc_unreachable ();
1694
1695 streamer_write_record_start (ob, tag);
1696 streamer_write_hwi (ob, r->index);
1697
1698 if (r->outer)
1699 streamer_write_hwi (ob, r->outer->index);
1700 else
1701 streamer_write_zero (ob);
1702
1703 if (r->inner)
1704 streamer_write_hwi (ob, r->inner->index);
1705 else
1706 streamer_write_zero (ob);
1707
1708 if (r->next_peer)
1709 streamer_write_hwi (ob, r->next_peer->index);
1710 else
1711 streamer_write_zero (ob);
1712
1713 if (r->type == ERT_TRY)
1714 {
1715 output_eh_try_list (ob, r->u.eh_try.first_catch);
1716 }
1717 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1718 {
1719 stream_write_tree (ob, r->u.allowed.type_list, true);
1720 stream_write_tree (ob, r->u.allowed.label, true);
1721 streamer_write_uhwi (ob, r->u.allowed.filter);
1722 }
1723 else if (r->type == ERT_MUST_NOT_THROW)
1724 {
1725 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1726 bitpack_d bp = bitpack_create (ob->main_stream);
1727 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1728 streamer_write_bitpack (&bp);
1729 }
1730
1731 if (r->landing_pads)
1732 streamer_write_hwi (ob, r->landing_pads->index);
1733 else
1734 streamer_write_zero (ob);
1735 }
1736
1737
1738 /* Output landing pad LP to OB. */
1739
1740 static void
1741 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1742 {
1743 if (lp == NULL)
1744 {
1745 streamer_write_record_start (ob, LTO_null);
1746 return;
1747 }
1748
1749 streamer_write_record_start (ob, LTO_eh_landing_pad);
1750 streamer_write_hwi (ob, lp->index);
1751 if (lp->next_lp)
1752 streamer_write_hwi (ob, lp->next_lp->index);
1753 else
1754 streamer_write_zero (ob);
1755
1756 if (lp->region)
1757 streamer_write_hwi (ob, lp->region->index);
1758 else
1759 streamer_write_zero (ob);
1760
1761 stream_write_tree (ob, lp->post_landing_pad, true);
1762 }
1763
1764
1765 /* Output the existing eh_table to OB. */
1766
1767 static void
1768 output_eh_regions (struct output_block *ob, struct function *fn)
1769 {
1770 if (fn->eh && fn->eh->region_tree)
1771 {
1772 unsigned i;
1773 eh_region eh;
1774 eh_landing_pad lp;
1775 tree ttype;
1776
1777 streamer_write_record_start (ob, LTO_eh_table);
1778
1779 /* Emit the index of the root of the EH region tree. */
1780 streamer_write_hwi (ob, fn->eh->region_tree->index);
1781
1782 /* Emit all the EH regions in the region array. */
1783 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1784 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1785 output_eh_region (ob, eh);
1786
1787 /* Emit all landing pads. */
1788 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1789 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1790 output_eh_lp (ob, lp);
1791
1792 /* Emit all the runtime type data. */
1793 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1794 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1795 stream_write_tree (ob, ttype, true);
1796
1797 /* Emit the table of action chains. */
1798 if (targetm.arm_eabi_unwinder)
1799 {
1800 tree t;
1801 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1802 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1803 stream_write_tree (ob, t, true);
1804 }
1805 else
1806 {
1807 uchar c;
1808 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1809 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1810 streamer_write_char_stream (ob->main_stream, c);
1811 }
1812 }
1813
1814 /* The LTO_null either terminates the record or indicates that there
1815 are no eh_records at all. */
1816 streamer_write_record_start (ob, LTO_null);
1817 }
1818
1819
1820 /* Output all of the active ssa names to the ssa_names stream. */
1821
1822 static void
1823 output_ssa_names (struct output_block *ob, struct function *fn)
1824 {
1825 unsigned int i, len;
1826
1827 len = vec_safe_length (SSANAMES (fn));
1828 streamer_write_uhwi (ob, len);
1829
1830 for (i = 1; i < len; i++)
1831 {
1832 tree ptr = (*SSANAMES (fn))[i];
1833
1834 if (ptr == NULL_TREE
1835 || SSA_NAME_IN_FREE_LIST (ptr)
1836 || virtual_operand_p (ptr)
1837 /* Simply skip unreleased SSA names. */
1838 || (! SSA_NAME_IS_DEFAULT_DEF (ptr)
1839 && (! SSA_NAME_DEF_STMT (ptr)
1840 || ! gimple_bb (SSA_NAME_DEF_STMT (ptr)))))
1841 continue;
1842
1843 streamer_write_uhwi (ob, i);
1844 streamer_write_char_stream (ob->main_stream,
1845 SSA_NAME_IS_DEFAULT_DEF (ptr));
1846 if (SSA_NAME_VAR (ptr))
1847 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1848 else
1849 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1850 stream_write_tree (ob, TREE_TYPE (ptr), true);
1851 }
1852
1853 streamer_write_zero (ob);
1854 }
1855
1856
1857
1858 /* Output the cfg. */
1859
1860 static void
1861 output_cfg (struct output_block *ob, struct function *fn)
1862 {
1863 struct lto_output_stream *tmp_stream = ob->main_stream;
1864 basic_block bb;
1865
1866 ob->main_stream = ob->cfg_stream;
1867
1868 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1869 profile_status_for_fn (fn));
1870
1871 /* Output the number of the highest basic block. */
1872 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1873
1874 FOR_ALL_BB_FN (bb, fn)
1875 {
1876 edge_iterator ei;
1877 edge e;
1878
1879 streamer_write_hwi (ob, bb->index);
1880
1881 /* Output the successors and the edge flags. */
1882 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1883 FOR_EACH_EDGE (e, ei, bb->succs)
1884 {
1885 streamer_write_uhwi (ob, e->dest->index);
1886 e->probability.stream_out (ob);
1887 streamer_write_uhwi (ob, e->flags);
1888 }
1889 }
1890
1891 streamer_write_hwi (ob, -1);
1892
1893 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1894 while (bb->next_bb)
1895 {
1896 streamer_write_hwi (ob, bb->next_bb->index);
1897 bb = bb->next_bb;
1898 }
1899
1900 streamer_write_hwi (ob, -1);
1901
1902 /* ??? The cfgloop interface is tied to cfun. */
1903 gcc_assert (cfun == fn);
1904
1905 /* Output the number of loops. */
1906 streamer_write_uhwi (ob, number_of_loops (fn));
1907
1908 /* Output each loop, skipping the tree root which has number zero. */
1909 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1910 {
1911 struct loop *loop = get_loop (fn, i);
1912
1913 /* Write the index of the loop header. That's enough to rebuild
1914 the loop tree on the reader side. Stream -1 for an unused
1915 loop entry. */
1916 if (!loop)
1917 {
1918 streamer_write_hwi (ob, -1);
1919 continue;
1920 }
1921 else
1922 streamer_write_hwi (ob, loop->header->index);
1923
1924 /* Write everything copy_loop_info copies. */
1925 streamer_write_enum (ob->main_stream,
1926 loop_estimation, EST_LAST, loop->estimate_state);
1927 streamer_write_hwi (ob, loop->any_upper_bound);
1928 if (loop->any_upper_bound)
1929 streamer_write_widest_int (ob, loop->nb_iterations_upper_bound);
1930 streamer_write_hwi (ob, loop->any_likely_upper_bound);
1931 if (loop->any_likely_upper_bound)
1932 streamer_write_widest_int (ob, loop->nb_iterations_likely_upper_bound);
1933 streamer_write_hwi (ob, loop->any_estimate);
1934 if (loop->any_estimate)
1935 streamer_write_widest_int (ob, loop->nb_iterations_estimate);
1936
1937 /* Write OMP SIMD related info. */
1938 streamer_write_hwi (ob, loop->safelen);
1939 streamer_write_hwi (ob, loop->unroll);
1940 streamer_write_hwi (ob, loop->dont_vectorize);
1941 streamer_write_hwi (ob, loop->force_vectorize);
1942 stream_write_tree (ob, loop->simduid, true);
1943 }
1944
1945 ob->main_stream = tmp_stream;
1946 }
1947
1948
1949 /* Create the header in the file using OB. If the section type is for
1950 a function, set FN to the decl for that function. */
1951
1952 void
1953 produce_asm (struct output_block *ob, tree fn)
1954 {
1955 enum lto_section_type section_type = ob->section_type;
1956 struct lto_function_header header;
1957 char *section_name;
1958
1959 if (section_type == LTO_section_function_body)
1960 {
1961 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1962 section_name = lto_get_section_name (section_type, name, NULL);
1963 }
1964 else
1965 section_name = lto_get_section_name (section_type, NULL, NULL);
1966
1967 lto_begin_section (section_name, !flag_wpa);
1968 free (section_name);
1969
1970 /* The entire header is stream computed here. */
1971 memset (&header, 0, sizeof (struct lto_function_header));
1972
1973 /* Write the header. */
1974 header.major_version = LTO_major_version;
1975 header.minor_version = LTO_minor_version;
1976
1977 if (section_type == LTO_section_function_body)
1978 header.cfg_size = ob->cfg_stream->total_size;
1979 header.main_size = ob->main_stream->total_size;
1980 header.string_size = ob->string_stream->total_size;
1981 lto_write_data (&header, sizeof header);
1982
1983 /* Put all of the gimple and the string table out the asm file as a
1984 block of text. */
1985 if (section_type == LTO_section_function_body)
1986 lto_write_stream (ob->cfg_stream);
1987 lto_write_stream (ob->main_stream);
1988 lto_write_stream (ob->string_stream);
1989
1990 lto_end_section ();
1991 }
1992
1993
1994 /* Output the base body of struct function FN using output block OB. */
1995
1996 static void
1997 output_struct_function_base (struct output_block *ob, struct function *fn)
1998 {
1999 struct bitpack_d bp;
2000 unsigned i;
2001 tree t;
2002
2003 /* Output the static chain and non-local goto save area. */
2004 stream_write_tree (ob, fn->static_chain_decl, true);
2005 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
2006
2007 /* Output all the local variables in the function. */
2008 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
2009 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
2010 stream_write_tree (ob, t, true);
2011
2012 /* Output current IL state of the function. */
2013 streamer_write_uhwi (ob, fn->curr_properties);
2014
2015 /* Write all the attributes for FN. */
2016 bp = bitpack_create (ob->main_stream);
2017 bp_pack_value (&bp, fn->is_thunk, 1);
2018 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
2019 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
2020 bp_pack_value (&bp, fn->returns_struct, 1);
2021 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
2022 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
2023 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
2024 bp_pack_value (&bp, fn->after_inlining, 1);
2025 bp_pack_value (&bp, fn->stdarg, 1);
2026 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
2027 bp_pack_value (&bp, fn->has_forced_label_in_static, 1);
2028 bp_pack_value (&bp, fn->calls_alloca, 1);
2029 bp_pack_value (&bp, fn->calls_setjmp, 1);
2030 bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
2031 bp_pack_value (&bp, fn->has_simduid_loops, 1);
2032 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
2033 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
2034 bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);
2035
2036 /* Output the function start and end loci. */
2037 stream_output_location (ob, &bp, fn->function_start_locus);
2038 stream_output_location (ob, &bp, fn->function_end_locus);
2039
2040 /* Save the instance discriminator if present. */
2041 int *instance_number_p = NULL;
2042 if (decl_to_instance_map)
2043 instance_number_p = decl_to_instance_map->get (fn->decl);
2044 bp_pack_value (&bp, !!instance_number_p, 1);
2045 if (instance_number_p)
2046 bp_pack_value (&bp, *instance_number_p, sizeof (int) * CHAR_BIT);
2047
2048 streamer_write_bitpack (&bp);
2049 }
2050
2051
2052 /* Collect all leaf BLOCKs beyond ROOT into LEAFS. */
2053
2054 static void
2055 collect_block_tree_leafs (tree root, vec<tree> &leafs)
2056 {
2057 for (root = BLOCK_SUBBLOCKS (root); root; root = BLOCK_CHAIN (root))
2058 if (! BLOCK_SUBBLOCKS (root))
2059 leafs.safe_push (root);
2060 else
2061 collect_block_tree_leafs (BLOCK_SUBBLOCKS (root), leafs);
2062 }
2063
2064 /* Output the body of function NODE->DECL. */
2065
2066 static void
2067 output_function (struct cgraph_node *node)
2068 {
2069 tree function;
2070 struct function *fn;
2071 basic_block bb;
2072 struct output_block *ob;
2073
2074 if (streamer_dump_file)
2075 fprintf (streamer_dump_file, "\nStreaming body of %s\n",
2076 node->name ());
2077
2078 function = node->decl;
2079 fn = DECL_STRUCT_FUNCTION (function);
2080 ob = create_output_block (LTO_section_function_body);
2081
2082 clear_line_info (ob);
2083 ob->symbol = node;
2084
2085 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
2086
2087 /* Set current_function_decl and cfun. */
2088 push_cfun (fn);
2089
2090 /* Make string 0 be a NULL string. */
2091 streamer_write_char_stream (ob->string_stream, 0);
2092
2093 streamer_write_record_start (ob, LTO_function);
2094
2095 /* Output decls for parameters and args. */
2096 stream_write_tree (ob, DECL_RESULT (function), true);
2097 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
2098
2099 /* Output debug args if available. */
2100 vec<tree, va_gc> **debugargs = decl_debug_args_lookup (function);
2101 if (! debugargs)
2102 streamer_write_uhwi (ob, 0);
2103 else
2104 {
2105 streamer_write_uhwi (ob, (*debugargs)->length ());
2106 for (unsigned i = 0; i < (*debugargs)->length (); ++i)
2107 stream_write_tree (ob, (**debugargs)[i], true);
2108 }
2109
2110 /* Output DECL_INITIAL for the function, which contains the tree of
2111 lexical scopes. */
2112 stream_write_tree (ob, DECL_INITIAL (function), true);
2113 /* As we do not recurse into BLOCK_SUBBLOCKS but only BLOCK_SUPERCONTEXT
2114 collect block tree leafs and stream those. */
2115 auto_vec<tree> block_tree_leafs;
2116 if (DECL_INITIAL (function))
2117 collect_block_tree_leafs (DECL_INITIAL (function), block_tree_leafs);
2118 streamer_write_uhwi (ob, block_tree_leafs.length ());
2119 for (unsigned i = 0; i < block_tree_leafs.length (); ++i)
2120 stream_write_tree (ob, block_tree_leafs[i], true);
2121
2122 /* We also stream abstract functions where we stream only stuff needed for
2123 debug info. */
2124 if (gimple_has_body_p (function))
2125 {
2126 /* Fixup loops if required to match discovery done in the reader. */
2127 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
2128
2129 streamer_write_uhwi (ob, 1);
2130 output_struct_function_base (ob, fn);
2131
2132 /* Output all the SSA names used in the function. */
2133 output_ssa_names (ob, fn);
2134
2135 /* Output any exception handling regions. */
2136 output_eh_regions (ob, fn);
2137
2138
2139 /* We will renumber the statements. The code that does this uses
2140 the same ordering that we use for serializing them so we can use
2141 the same code on the other end and not have to write out the
2142 statement numbers. We do not assign UIDs to PHIs here because
2143 virtual PHIs get re-computed on-the-fly which would make numbers
2144 inconsistent. */
2145 set_gimple_stmt_max_uid (cfun, 0);
2146 FOR_ALL_BB_FN (bb, cfun)
2147 {
2148 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2149 gsi_next (&gsi))
2150 {
2151 gphi *stmt = gsi.phi ();
2152
2153 /* Virtual PHIs are not going to be streamed. */
2154 if (!virtual_operand_p (gimple_phi_result (stmt)))
2155 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2156 }
2157 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
2158 gsi_next (&gsi))
2159 {
2160 gimple *stmt = gsi_stmt (gsi);
2161 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2162 }
2163 }
2164 /* To avoid keeping duplicate gimple IDs in the statements, renumber
2165 virtual phis now. */
2166 FOR_ALL_BB_FN (bb, cfun)
2167 {
2168 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2169 gsi_next (&gsi))
2170 {
2171 gphi *stmt = gsi.phi ();
2172 if (virtual_operand_p (gimple_phi_result (stmt)))
2173 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2174 }
2175 }
2176
2177 /* Output the code for the function. */
2178 FOR_ALL_BB_FN (bb, fn)
2179 output_bb (ob, bb, fn);
2180
2181 /* The terminator for this function. */
2182 streamer_write_record_start (ob, LTO_null);
2183
2184 output_cfg (ob, fn);
2185
2186 loop_optimizer_finalize ();
2187 pop_cfun ();
2188 }
2189 else
2190 streamer_write_uhwi (ob, 0);
2191
2192 /* Create a section to hold the pickled output of this function. */
2193 produce_asm (ob, function);
2194
2195 destroy_output_block (ob);
2196 if (streamer_dump_file)
2197 fprintf (streamer_dump_file, "Finished streaming %s\n",
2198 node->name ());
2199 }
2200
2201 /* Output the body of function NODE->DECL. */
2202
2203 static void
2204 output_constructor (struct varpool_node *node)
2205 {
2206 tree var = node->decl;
2207 struct output_block *ob;
2208
2209 if (streamer_dump_file)
2210 fprintf (streamer_dump_file, "\nStreaming constructor of %s\n",
2211 node->name ());
2212
2213 ob = create_output_block (LTO_section_function_body);
2214
2215 clear_line_info (ob);
2216 ob->symbol = node;
2217
2218 /* Make string 0 be a NULL string. */
2219 streamer_write_char_stream (ob->string_stream, 0);
2220
2221 /* Output DECL_INITIAL for the function, which contains the tree of
2222 lexical scopes. */
2223 stream_write_tree (ob, DECL_INITIAL (var), true);
2224
2225 /* Create a section to hold the pickled output of this function. */
2226 produce_asm (ob, var);
2227
2228 destroy_output_block (ob);
2229 if (streamer_dump_file)
2230 fprintf (streamer_dump_file, "Finished streaming %s\n",
2231 node->name ());
2232 }
2233
2234
2235 /* Emit toplevel asms. */
2236
2237 void
2238 lto_output_toplevel_asms (void)
2239 {
2240 struct output_block *ob;
2241 struct asm_node *can;
2242 char *section_name;
2243 struct lto_simple_header_with_strings header;
2244
2245 if (!symtab->first_asm_symbol ())
2246 return;
2247
2248 ob = create_output_block (LTO_section_asm);
2249
2250 /* Make string 0 be a NULL string. */
2251 streamer_write_char_stream (ob->string_stream, 0);
2252
2253 for (can = symtab->first_asm_symbol (); can; can = can->next)
2254 {
2255 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2256 streamer_write_hwi (ob, can->order);
2257 }
2258
2259 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2260
2261 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
2262 lto_begin_section (section_name, !flag_wpa);
2263 free (section_name);
2264
2265 /* The entire header stream is computed here. */
2266 memset (&header, 0, sizeof (header));
2267
2268 /* Write the header. */
2269 header.major_version = LTO_major_version;
2270 header.minor_version = LTO_minor_version;
2271
2272 header.main_size = ob->main_stream->total_size;
2273 header.string_size = ob->string_stream->total_size;
2274 lto_write_data (&header, sizeof header);
2275
2276 /* Put all of the gimple and the string table out the asm file as a
2277 block of text. */
2278 lto_write_stream (ob->main_stream);
2279 lto_write_stream (ob->string_stream);
2280
2281 lto_end_section ();
2282
2283 destroy_output_block (ob);
2284 }
2285
2286
2287 /* Copy the function body or variable constructor of NODE without deserializing. */
2288
2289 static void
2290 copy_function_or_variable (struct symtab_node *node)
2291 {
2292 tree function = node->decl;
2293 struct lto_file_decl_data *file_data = node->lto_file_data;
2294 const char *data;
2295 size_t len;
2296 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2297 char *section_name =
2298 lto_get_section_name (LTO_section_function_body, name, NULL);
2299 size_t i, j;
2300 struct lto_in_decl_state *in_state;
2301 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2302
2303 if (streamer_dump_file)
2304 fprintf (streamer_dump_file, "Copying section for %s\n", name);
2305 lto_begin_section (section_name, false);
2306 free (section_name);
2307
2308 /* We may have renamed the declaration, e.g., a static function. */
2309 name = lto_get_decl_name_mapping (file_data, name);
2310
2311 data = lto_get_raw_section_data (file_data, LTO_section_function_body,
2312 name, &len);
2313 gcc_assert (data);
2314
2315 /* Do a bit copy of the function body. */
2316 lto_write_raw_data (data, len);
2317
2318 /* Copy decls. */
2319 in_state =
2320 lto_get_function_in_decl_state (node->lto_file_data, function);
2321 out_state->compressed = in_state->compressed;
2322 gcc_assert (in_state);
2323
2324 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2325 {
2326 size_t n = vec_safe_length (in_state->streams[i]);
2327 vec<tree, va_gc> *trees = in_state->streams[i];
2328 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2329
2330 /* The out state must have the same indices and the in state.
2331 So just copy the vector. All the encoders in the in state
2332 must be empty where we reach here. */
2333 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2334 encoder->trees.reserve_exact (n);
2335 for (j = 0; j < n; j++)
2336 encoder->trees.safe_push ((*trees)[j]);
2337 }
2338
2339 lto_free_raw_section_data (file_data, LTO_section_function_body, name,
2340 data, len);
2341 lto_end_section ();
2342 }
2343
2344 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2345
2346 static tree
2347 wrap_refs (tree *tp, int *ws, void *)
2348 {
2349 tree t = *tp;
2350 if (handled_component_p (t)
2351 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
2352 && TREE_PUBLIC (TREE_OPERAND (t, 0)))
2353 {
2354 tree decl = TREE_OPERAND (t, 0);
2355 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2356 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2357 build1 (ADDR_EXPR, ptrtype, decl),
2358 build_int_cst (ptrtype, 0));
2359 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2360 *ws = 0;
2361 }
2362 else if (TREE_CODE (t) == CONSTRUCTOR)
2363 ;
2364 else if (!EXPR_P (t))
2365 *ws = 0;
2366 return NULL_TREE;
2367 }
2368
2369 /* Remove functions that are no longer used from offload_funcs, and mark the
2370 remaining ones with DECL_PRESERVE_P. */
2371
2372 static void
2373 prune_offload_funcs (void)
2374 {
2375 if (!offload_funcs)
2376 return;
2377
2378 unsigned ix, ix2;
2379 tree *elem_ptr;
2380 VEC_ORDERED_REMOVE_IF (*offload_funcs, ix, ix2, elem_ptr,
2381 cgraph_node::get (*elem_ptr) == NULL);
2382
2383 tree fn_decl;
2384 FOR_EACH_VEC_ELT (*offload_funcs, ix, fn_decl)
2385 DECL_PRESERVE_P (fn_decl) = 1;
2386 }
2387
2388 /* Main entry point from the pass manager. */
2389
2390 void
2391 lto_output (void)
2392 {
2393 struct lto_out_decl_state *decl_state;
2394 bitmap output = NULL;
2395 int i, n_nodes;
2396 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2397
2398 prune_offload_funcs ();
2399
2400 if (flag_checking)
2401 output = lto_bitmap_alloc ();
2402
2403 /* Initialize the streamer. */
2404 lto_streamer_init ();
2405
2406 n_nodes = lto_symtab_encoder_size (encoder);
2407 /* Process only the functions with bodies. */
2408 for (i = 0; i < n_nodes; i++)
2409 {
2410 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2411 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2412 {
2413 if (lto_symtab_encoder_encode_body_p (encoder, node)
2414 && !node->alias
2415 && (!node->thunk.thunk_p || !node->thunk.add_pointer_bounds_args))
2416 {
2417 if (flag_checking)
2418 {
2419 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2420 bitmap_set_bit (output, DECL_UID (node->decl));
2421 }
2422 decl_state = lto_new_out_decl_state ();
2423 lto_push_out_decl_state (decl_state);
2424 if (gimple_has_body_p (node->decl)
2425 || (!flag_wpa
2426 && flag_incremental_link != INCREMENTAL_LINK_LTO)
2427 /* Thunks have no body but they may be synthetized
2428 at WPA time. */
2429 || DECL_ARGUMENTS (node->decl))
2430 output_function (node);
2431 else
2432 copy_function_or_variable (node);
2433 gcc_assert (lto_get_out_decl_state () == decl_state);
2434 lto_pop_out_decl_state ();
2435 lto_record_function_out_decl_state (node->decl, decl_state);
2436 }
2437 }
2438 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2439 {
2440 /* Wrap symbol references inside the ctor in a type
2441 preserving MEM_REF. */
2442 tree ctor = DECL_INITIAL (node->decl);
2443 if (ctor && !in_lto_p)
2444 walk_tree (&ctor, wrap_refs, NULL, NULL);
2445 if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2446 && lto_symtab_encoder_encode_initializer_p (encoder, node)
2447 && !node->alias)
2448 {
2449 timevar_push (TV_IPA_LTO_CTORS_OUT);
2450 if (flag_checking)
2451 {
2452 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2453 bitmap_set_bit (output, DECL_UID (node->decl));
2454 }
2455 decl_state = lto_new_out_decl_state ();
2456 lto_push_out_decl_state (decl_state);
2457 if (DECL_INITIAL (node->decl) != error_mark_node
2458 || (!flag_wpa
2459 && flag_incremental_link != INCREMENTAL_LINK_LTO))
2460 output_constructor (node);
2461 else
2462 copy_function_or_variable (node);
2463 gcc_assert (lto_get_out_decl_state () == decl_state);
2464 lto_pop_out_decl_state ();
2465 lto_record_function_out_decl_state (node->decl, decl_state);
2466 timevar_pop (TV_IPA_LTO_CTORS_OUT);
2467 }
2468 }
2469 }
2470
2471 /* Emit the callgraph after emitting function bodies. This needs to
2472 be done now to make sure that all the statements in every function
2473 have been renumbered so that edges can be associated with call
2474 statements using the statement UIDs. */
2475 output_symtab ();
2476
2477 output_offload_tables ();
2478
2479 #if CHECKING_P
2480 lto_bitmap_free (output);
2481 #endif
2482 }
2483
2484 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2485 from it and required for correct representation of its semantics.
2486 Each node in ENCODER must be a global declaration or a type. A node
2487 is written only once, even if it appears multiple times in the
2488 vector. Certain transitively-reachable nodes, such as those
2489 representing expressions, may be duplicated, but such nodes
2490 must not appear in ENCODER itself. */
2491
2492 static void
2493 write_global_stream (struct output_block *ob,
2494 struct lto_tree_ref_encoder *encoder)
2495 {
2496 tree t;
2497 size_t index;
2498 const size_t size = lto_tree_ref_encoder_size (encoder);
2499
2500 for (index = 0; index < size; index++)
2501 {
2502 t = lto_tree_ref_encoder_get_tree (encoder, index);
2503 if (streamer_dump_file)
2504 {
2505 fprintf (streamer_dump_file, " %i:", (int)index);
2506 print_node_brief (streamer_dump_file, "", t, 4);
2507 fprintf (streamer_dump_file, "\n");
2508 }
2509 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2510 stream_write_tree (ob, t, false);
2511 }
2512 }
2513
2514
2515 /* Write a sequence of indices into the globals vector corresponding
2516 to the trees in ENCODER. These are used by the reader to map the
2517 indices used to refer to global entities within function bodies to
2518 their referents. */
2519
2520 static void
2521 write_global_references (struct output_block *ob,
2522 struct lto_tree_ref_encoder *encoder)
2523 {
2524 tree t;
2525 uint32_t index;
2526 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2527
2528 /* Write size and slot indexes as 32-bit unsigned numbers. */
2529 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2530 data[0] = size;
2531
2532 for (index = 0; index < size; index++)
2533 {
2534 unsigned slot_num;
2535
2536 t = lto_tree_ref_encoder_get_tree (encoder, index);
2537 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2538 gcc_assert (slot_num != (unsigned)-1);
2539 data[index + 1] = slot_num;
2540 }
2541
2542 lto_write_data (data, sizeof (int32_t) * (size + 1));
2543 free (data);
2544 }
2545
2546
2547 /* Write all the streams in an lto_out_decl_state STATE using
2548 output block OB and output stream OUT_STREAM. */
2549
2550 void
2551 lto_output_decl_state_streams (struct output_block *ob,
2552 struct lto_out_decl_state *state)
2553 {
2554 int i;
2555
2556 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2557 write_global_stream (ob, &state->streams[i]);
2558 }
2559
2560
2561 /* Write all the references in an lto_out_decl_state STATE using
2562 output block OB and output stream OUT_STREAM. */
2563
2564 void
2565 lto_output_decl_state_refs (struct output_block *ob,
2566 struct lto_out_decl_state *state)
2567 {
2568 unsigned i;
2569 unsigned ref;
2570 tree decl;
2571
2572 /* Write reference to FUNCTION_DECL. If there is not function,
2573 write reference to void_type_node. */
2574 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2575 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2576 gcc_assert (ref != (unsigned)-1);
2577 ref = ref * 2 + (state->compressed ? 1 : 0);
2578 lto_write_data (&ref, sizeof (uint32_t));
2579
2580 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2581 write_global_references (ob, &state->streams[i]);
2582 }
2583
2584
2585 /* Return the written size of STATE. */
2586
2587 static size_t
2588 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2589 {
2590 int i;
2591 size_t size;
2592
2593 size = sizeof (int32_t); /* fn_ref. */
2594 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2595 {
2596 size += sizeof (int32_t); /* vector size. */
2597 size += (lto_tree_ref_encoder_size (&state->streams[i])
2598 * sizeof (int32_t));
2599 }
2600 return size;
2601 }
2602
2603
2604 /* Write symbol T into STREAM in CACHE. SEEN specifies symbols we wrote
2605 so far. */
2606
2607 static void
2608 write_symbol (struct streamer_tree_cache_d *cache,
2609 tree t, hash_set<const char *> *seen, bool alias)
2610 {
2611 const char *name;
2612 enum gcc_plugin_symbol_kind kind;
2613 enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
2614 unsigned slot_num;
2615 uint64_t size;
2616 const char *comdat;
2617 unsigned char c;
2618
2619 gcc_checking_assert (TREE_PUBLIC (t)
2620 && (TREE_CODE (t) != FUNCTION_DECL
2621 || !fndecl_built_in_p (t))
2622 && !DECL_ABSTRACT_P (t)
2623 && (!VAR_P (t) || !DECL_HARD_REGISTER (t)));
2624
2625 gcc_assert (VAR_OR_FUNCTION_DECL_P (t));
2626
2627 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2628
2629 /* This behaves like assemble_name_raw in varasm.c, performing the
2630 same name manipulations that ASM_OUTPUT_LABELREF does. */
2631 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2632
2633 if (seen->add (name))
2634 return;
2635
2636 streamer_tree_cache_lookup (cache, t, &slot_num);
2637 gcc_assert (slot_num != (unsigned)-1);
2638
2639 if (DECL_EXTERNAL (t))
2640 {
2641 if (DECL_WEAK (t))
2642 kind = GCCPK_WEAKUNDEF;
2643 else
2644 kind = GCCPK_UNDEF;
2645 }
2646 else
2647 {
2648 if (DECL_WEAK (t))
2649 kind = GCCPK_WEAKDEF;
2650 else if (DECL_COMMON (t))
2651 kind = GCCPK_COMMON;
2652 else
2653 kind = GCCPK_DEF;
2654
2655 /* When something is defined, it should have node attached. */
2656 gcc_assert (alias || !VAR_P (t) || varpool_node::get (t)->definition);
2657 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2658 || (cgraph_node::get (t)
2659 && cgraph_node::get (t)->definition));
2660 }
2661
2662 /* Imitate what default_elf_asm_output_external do.
2663 When symbol is external, we need to output it with DEFAULT visibility
2664 when compiling with -fvisibility=default, while with HIDDEN visibility
2665 when symbol has attribute (visibility("hidden")) specified.
2666 targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
2667 right. */
2668
2669 if (DECL_EXTERNAL (t)
2670 && !targetm.binds_local_p (t))
2671 visibility = GCCPV_DEFAULT;
2672 else
2673 switch (DECL_VISIBILITY (t))
2674 {
2675 case VISIBILITY_DEFAULT:
2676 visibility = GCCPV_DEFAULT;
2677 break;
2678 case VISIBILITY_PROTECTED:
2679 visibility = GCCPV_PROTECTED;
2680 break;
2681 case VISIBILITY_HIDDEN:
2682 visibility = GCCPV_HIDDEN;
2683 break;
2684 case VISIBILITY_INTERNAL:
2685 visibility = GCCPV_INTERNAL;
2686 break;
2687 }
2688
2689 if (kind == GCCPK_COMMON
2690 && DECL_SIZE_UNIT (t)
2691 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2692 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2693 else
2694 size = 0;
2695
2696 if (DECL_ONE_ONLY (t))
2697 comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
2698 else
2699 comdat = "";
2700
2701 lto_write_data (name, strlen (name) + 1);
2702 lto_write_data (comdat, strlen (comdat) + 1);
2703 c = (unsigned char) kind;
2704 lto_write_data (&c, 1);
2705 c = (unsigned char) visibility;
2706 lto_write_data (&c, 1);
2707 lto_write_data (&size, 8);
2708 lto_write_data (&slot_num, 4);
2709 }
2710
2711 /* Write an IL symbol table to OB.
2712 SET and VSET are cgraph/varpool node sets we are outputting. */
2713
2714 static void
2715 produce_symtab (struct output_block *ob)
2716 {
2717 struct streamer_tree_cache_d *cache = ob->writer_cache;
2718 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2719 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2720 lto_symtab_encoder_iterator lsei;
2721
2722 lto_begin_section (section_name, false);
2723 free (section_name);
2724
2725 hash_set<const char *> seen;
2726
2727 /* Write the symbol table.
2728 First write everything defined and then all declarations.
2729 This is necessary to handle cases where we have duplicated symbols. */
2730 for (lsei = lsei_start (encoder);
2731 !lsei_end_p (lsei); lsei_next (&lsei))
2732 {
2733 symtab_node *node = lsei_node (lsei);
2734
2735 if (DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
2736 continue;
2737 write_symbol (cache, node->decl, &seen, false);
2738 }
2739 for (lsei = lsei_start (encoder);
2740 !lsei_end_p (lsei); lsei_next (&lsei))
2741 {
2742 symtab_node *node = lsei_node (lsei);
2743
2744 if (!DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
2745 continue;
2746 write_symbol (cache, node->decl, &seen, false);
2747 }
2748
2749 lto_end_section ();
2750 }
2751
2752
2753 /* Init the streamer_mode_table for output, where we collect info on what
2754 machine_mode values have been streamed. */
2755 void
2756 lto_output_init_mode_table (void)
2757 {
2758 memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
2759 }
2760
2761
2762 /* Write the mode table. */
2763 static void
2764 lto_write_mode_table (void)
2765 {
2766 struct output_block *ob;
2767 ob = create_output_block (LTO_section_mode_table);
2768 bitpack_d bp = bitpack_create (ob->main_stream);
2769
2770 /* Ensure that for GET_MODE_INNER (m) != m we have
2771 also the inner mode marked. */
2772 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2773 if (streamer_mode_table[i])
2774 {
2775 machine_mode m = (machine_mode) i;
2776 machine_mode inner_m = GET_MODE_INNER (m);
2777 if (inner_m != m)
2778 streamer_mode_table[(int) inner_m] = 1;
2779 }
2780 /* First stream modes that have GET_MODE_INNER (m) == m,
2781 so that we can refer to them afterwards. */
2782 for (int pass = 0; pass < 2; pass++)
2783 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2784 if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
2785 {
2786 machine_mode m = (machine_mode) i;
2787 if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
2788 continue;
2789 bp_pack_value (&bp, m, 8);
2790 bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
2791 bp_pack_poly_value (&bp, GET_MODE_SIZE (m), 16);
2792 bp_pack_poly_value (&bp, GET_MODE_PRECISION (m), 16);
2793 bp_pack_value (&bp, GET_MODE_INNER (m), 8);
2794 bp_pack_poly_value (&bp, GET_MODE_NUNITS (m), 16);
2795 switch (GET_MODE_CLASS (m))
2796 {
2797 case MODE_FRACT:
2798 case MODE_UFRACT:
2799 case MODE_ACCUM:
2800 case MODE_UACCUM:
2801 bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
2802 bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
2803 break;
2804 case MODE_FLOAT:
2805 case MODE_DECIMAL_FLOAT:
2806 bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
2807 break;
2808 default:
2809 break;
2810 }
2811 bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
2812 }
2813 bp_pack_value (&bp, VOIDmode, 8);
2814
2815 streamer_write_bitpack (&bp);
2816
2817 char *section_name
2818 = lto_get_section_name (LTO_section_mode_table, NULL, NULL);
2819 lto_begin_section (section_name, !flag_wpa);
2820 free (section_name);
2821
2822 /* The entire header stream is computed here. */
2823 struct lto_simple_header_with_strings header;
2824 memset (&header, 0, sizeof (header));
2825
2826 /* Write the header. */
2827 header.major_version = LTO_major_version;
2828 header.minor_version = LTO_minor_version;
2829
2830 header.main_size = ob->main_stream->total_size;
2831 header.string_size = ob->string_stream->total_size;
2832 lto_write_data (&header, sizeof header);
2833
2834 /* Put all of the gimple and the string table out the asm file as a
2835 block of text. */
2836 lto_write_stream (ob->main_stream);
2837 lto_write_stream (ob->string_stream);
2838
2839 lto_end_section ();
2840 destroy_output_block (ob);
2841 }
2842
2843
2844 /* This pass is run after all of the functions are serialized and all
2845 of the IPA passes have written their serialized forms. This pass
2846 causes the vector of all of the global decls and types used from
2847 this file to be written in to a section that can then be read in to
2848 recover these on other side. */
2849
2850 void
2851 produce_asm_for_decls (void)
2852 {
2853 struct lto_out_decl_state *out_state;
2854 struct lto_out_decl_state *fn_out_state;
2855 struct lto_decl_header header;
2856 char *section_name;
2857 struct output_block *ob;
2858 unsigned idx, num_fns;
2859 size_t decl_state_size;
2860 int32_t num_decl_states;
2861
2862 ob = create_output_block (LTO_section_decls);
2863
2864 memset (&header, 0, sizeof (struct lto_decl_header));
2865
2866 section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2867 lto_begin_section (section_name, !flag_wpa);
2868 free (section_name);
2869
2870 /* Make string 0 be a NULL string. */
2871 streamer_write_char_stream (ob->string_stream, 0);
2872
2873 gcc_assert (!alias_pairs);
2874
2875 /* Get rid of the global decl state hash tables to save some memory. */
2876 out_state = lto_get_out_decl_state ();
2877 for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
2878 if (out_state->streams[i].tree_hash_table)
2879 {
2880 delete out_state->streams[i].tree_hash_table;
2881 out_state->streams[i].tree_hash_table = NULL;
2882 }
2883
2884 /* Write the global symbols. */
2885 if (streamer_dump_file)
2886 fprintf (streamer_dump_file, "Outputting global stream\n");
2887 lto_output_decl_state_streams (ob, out_state);
2888 num_fns = lto_function_decl_states.length ();
2889 for (idx = 0; idx < num_fns; idx++)
2890 {
2891 fn_out_state =
2892 lto_function_decl_states[idx];
2893 if (streamer_dump_file)
2894 fprintf (streamer_dump_file, "Outputting stream for %s\n",
2895 IDENTIFIER_POINTER
2896 (DECL_ASSEMBLER_NAME (fn_out_state->fn_decl)));
2897 lto_output_decl_state_streams (ob, fn_out_state);
2898 }
2899
2900 header.major_version = LTO_major_version;
2901 header.minor_version = LTO_minor_version;
2902
2903 /* Currently not used. This field would allow us to preallocate
2904 the globals vector, so that it need not be resized as it is extended. */
2905 header.num_nodes = -1;
2906
2907 /* Compute the total size of all decl out states. */
2908 decl_state_size = sizeof (int32_t);
2909 decl_state_size += lto_out_decl_state_written_size (out_state);
2910 for (idx = 0; idx < num_fns; idx++)
2911 {
2912 fn_out_state =
2913 lto_function_decl_states[idx];
2914 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2915 }
2916 header.decl_state_size = decl_state_size;
2917
2918 header.main_size = ob->main_stream->total_size;
2919 header.string_size = ob->string_stream->total_size;
2920
2921 lto_write_data (&header, sizeof header);
2922
2923 /* Write the main out-decl state, followed by out-decl states of
2924 functions. */
2925 num_decl_states = num_fns + 1;
2926 lto_write_data (&num_decl_states, sizeof (num_decl_states));
2927 lto_output_decl_state_refs (ob, out_state);
2928 for (idx = 0; idx < num_fns; idx++)
2929 {
2930 fn_out_state = lto_function_decl_states[idx];
2931 lto_output_decl_state_refs (ob, fn_out_state);
2932 }
2933
2934 lto_write_stream (ob->main_stream);
2935 lto_write_stream (ob->string_stream);
2936
2937 lto_end_section ();
2938
2939 /* Write the symbol table. It is used by linker to determine dependencies
2940 and thus we can skip it for WPA. */
2941 if (!flag_wpa)
2942 produce_symtab (ob);
2943
2944 /* Write command line opts. */
2945 lto_write_options ();
2946
2947 /* Deallocate memory and clean up. */
2948 for (idx = 0; idx < num_fns; idx++)
2949 {
2950 fn_out_state =
2951 lto_function_decl_states[idx];
2952 lto_delete_out_decl_state (fn_out_state);
2953 }
2954 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2955 lto_function_decl_states.release ();
2956 destroy_output_block (ob);
2957 if (lto_stream_offload_p)
2958 lto_write_mode_table ();
2959 }