]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/lto-streamer-out.c
[PATCH] Commonize anon-name generation
[thirdparty/gcc.git] / gcc / lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2019 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-streamer.h"
34 #include "alias.h"
35 #include "stor-layout.h"
36 #include "gimple-iterator.h"
37 #include "except.h"
38 #include "lto-symtab.h"
39 #include "cgraph.h"
40 #include "cfgloop.h"
41 #include "builtins.h"
42 #include "gomp-constants.h"
43 #include "debug.h"
44 #include "omp-offload.h"
45 #include "print-tree.h"
46
47
48 static void lto_write_tree (struct output_block*, tree, bool);
49
/* Reset the current-location fields of OB so that location streaming
   starts from a known state (no file, line 0, column 0, non-system).
   lto_output_location delta-encodes against these fields.  */

static void
clear_line_info (struct output_block *ob)
{
  ob->current_file = NULL;
  ob->current_line = 0;
  ob->current_col = 0;
  ob->current_sysp = false;
}
60
61
62 /* Create the output block and return it. SECTION_TYPE is
63 LTO_section_function_body or LTO_static_initializer. */
64
65 struct output_block *
66 create_output_block (enum lto_section_type section_type)
67 {
68 struct output_block *ob = XCNEW (struct output_block);
69 if (streamer_dump_file)
70 fprintf (streamer_dump_file, "Creating output block for %s\n",
71 lto_section_name [section_type]);
72
73 ob->section_type = section_type;
74 ob->decl_state = lto_get_out_decl_state ();
75 ob->main_stream = XCNEW (struct lto_output_stream);
76 ob->string_stream = XCNEW (struct lto_output_stream);
77 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
78
79 if (section_type == LTO_section_function_body)
80 ob->cfg_stream = XCNEW (struct lto_output_stream);
81
82 clear_line_info (ob);
83
84 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
85 gcc_obstack_init (&ob->obstack);
86
87 return ob;
88 }
89
90
91 /* Destroy the output block OB. */
92
93 void
94 destroy_output_block (struct output_block *ob)
95 {
96 enum lto_section_type section_type = ob->section_type;
97
98 delete ob->string_hash_table;
99 ob->string_hash_table = NULL;
100
101 free (ob->main_stream);
102 free (ob->string_stream);
103 if (section_type == LTO_section_function_body)
104 free (ob->cfg_stream);
105
106 streamer_tree_cache_delete (ob->writer_cache);
107 obstack_free (&ob->obstack, NULL);
108
109 free (ob);
110 }
111
112
/* Look up NODE in the type table and write the index for it to OB.
   The LTO_type_ref tag is emitted first so the reader knows a type
   table index follows.  */

static void
output_type_ref (struct output_block *ob, tree node)
{
  streamer_write_record_start (ob, LTO_type_ref);
  lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
}
121
122
/* Return true if tree node T is written to various tables.  For these
   nodes, we sometimes want to write their physical representation
   (via lto_output_tree), and sometimes we need to emit an index
   reference into a table (via lto_output_tree_ref).  */

static bool
tree_is_indexable (tree t)
{
  /* Parameters and return values of functions of variably modified types
     must go to global stream, because they may be used in the type
     definition.  */
  if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
      && DECL_CONTEXT (t))
    return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
  /* IMPORTED_DECL is put into BLOCK and thus it never can be shared.
     We should no longer need to stream it.  */
  else if (TREE_CODE (t) == IMPORTED_DECL)
    gcc_unreachable ();
  /* Only forced or non-local labels are indexable; others stay with
     their function body.  */
  else if (TREE_CODE (t) == LABEL_DECL)
    return FORCED_LABEL (t) || DECL_NONLOCAL (t);
  /* Function-local variables, type/const/namelist decls with a function
     context are streamed with the body, not the global stream.  */
  else if (((VAR_P (t) && !TREE_STATIC (t))
	    || TREE_CODE (t) == TYPE_DECL
	    || TREE_CODE (t) == CONST_DECL
	    || TREE_CODE (t) == NAMELIST_DECL)
	   && decl_function_context (t))
    return false;
  else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
    return false;
  /* Variably modified types need to be streamed alongside function
     bodies because they can refer to local entities.  Together with
     them we have to localize their members as well.
     ??? In theory that includes non-FIELD_DECLs as well.  */
  else if (TYPE_P (t)
	   && variably_modified_type_p (t, NULL_TREE))
    return false;
  else if (TREE_CODE (t) == FIELD_DECL
	   && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
    return false;
  else
    return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
}
164
165
/* Output info about new location into bitpack BP.
   After outputting bitpack, lto_output_location_data has
   to be done to output actual data.
   Locations are delta-encoded against the current_* fields of OB:
   only components that changed since the last emitted location are
   streamed.  */

void
lto_output_location (struct output_block *ob, struct bitpack_d *bp,
		     location_t loc)
{
  expanded_location xloc;

  loc = LOCATION_LOCUS (loc);
  /* Reserved locations are encoded by their raw value; anything else
     is tagged RESERVED_LOCATION_COUNT and followed by expanded data.  */
  bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
			loc < RESERVED_LOCATION_COUNT
			? loc : RESERVED_LOCATION_COUNT);
  if (loc < RESERVED_LOCATION_COUNT)
    return;

  xloc = expand_location (loc);

  /* One changed-bit per component tells the reader which of the
     file/line/column fields follow.  */
  bp_pack_value (bp, ob->current_file != xloc.file, 1);
  bp_pack_value (bp, ob->current_line != xloc.line, 1);
  bp_pack_value (bp, ob->current_col != xloc.column, 1);

  if (ob->current_file != xloc.file)
    {
      bp_pack_string (ob, bp, xloc.file, true);
      bp_pack_value (bp, xloc.sysp, 1);
    }
  ob->current_file = xloc.file;
  ob->current_sysp = xloc.sysp;

  if (ob->current_line != xloc.line)
    bp_pack_var_len_unsigned (bp, xloc.line);
  ob->current_line = xloc.line;

  if (ob->current_col != xloc.column)
    bp_pack_var_len_unsigned (bp, xloc.column);
  ob->current_col = xloc.column;
}
205
206
/* If EXPR is an indexable tree node, output a reference to it to
   output block OB.  Otherwise, output the physical representation of
   EXPR to OB.  Each case emits a record tag identifying which decl
   table the index refers to, followed by the index itself.  */

static void
lto_output_tree_ref (struct output_block *ob, tree expr)
{
  enum tree_code code;

  if (TYPE_P (expr))
    {
      output_type_ref (ob, expr);
      return;
    }

  code = TREE_CODE (expr);
  switch (code)
    {
    case SSA_NAME:
      /* SSA names are referenced by their version number only.  */
      streamer_write_record_start (ob, LTO_ssa_name_ref);
      streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
      break;

    case FIELD_DECL:
      streamer_write_record_start (ob, LTO_field_decl_ref);
      lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case FUNCTION_DECL:
      streamer_write_record_start (ob, LTO_function_decl_ref);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case VAR_DECL:
    case DEBUG_EXPR_DECL:
      /* Only file-scope or static variables may be emitted as global
	 references.  */
      gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
      /* FALLTHRU */
    case PARM_DECL:
      streamer_write_record_start (ob, LTO_global_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case CONST_DECL:
      streamer_write_record_start (ob, LTO_const_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case IMPORTED_DECL:
      gcc_assert (decl_function_context (expr) == NULL);
      streamer_write_record_start (ob, LTO_imported_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TYPE_DECL:
      streamer_write_record_start (ob, LTO_type_decl_ref);
      lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMELIST_DECL:
      streamer_write_record_start (ob, LTO_namelist_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMESPACE_DECL:
      streamer_write_record_start (ob, LTO_namespace_decl_ref);
      lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case LABEL_DECL:
      streamer_write_record_start (ob, LTO_label_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case RESULT_DECL:
      streamer_write_record_start (ob, LTO_result_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TRANSLATION_UNIT_DECL:
      streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    default:
      /* No other node is indexable, so it should have been handled by
	 lto_output_tree.  */
      gcc_unreachable ();
    }
}
296
297
298 /* Return true if EXPR is a tree node that can be written to disk. */
299
300 static inline bool
301 lto_is_streamable (tree expr)
302 {
303 enum tree_code code = TREE_CODE (expr);
304
305 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
306 name version in lto_output_tree_ref (see output_ssa_names). */
307 return !is_lang_specific (expr)
308 && code != SSA_NAME
309 && code != LANG_TYPE
310 && code != MODIFY_EXPR
311 && code != INIT_EXPR
312 && code != TARGET_EXPR
313 && code != BIND_EXPR
314 && code != WITH_CLEANUP_EXPR
315 && code != STATEMENT_LIST
316 && (code == CASE_LABEL_EXPR
317 || code == DECL_EXPR
318 || TREE_CODE_CLASS (code) != tcc_statement);
319 }
320
/* Very rough estimate of streaming size of the initializer.  If we ignored
   presence of strings, we could simply just count number of non-indexable
   tree nodes and number of references to indexable nodes.  Strings however
   may be very large and we do not want to dump them into the global stream.

   Count the size of initializer until the size in DATA is positive.

   walk_tree callback: DATA points to a signed byte budget; returns the
   current node (stopping the walk) once the budget goes negative.  */

static tree
subtract_estimated_size (tree *tp, int *ws, void *data)
{
  long *sum = (long *)data;
  if (tree_is_indexable (*tp))
    {
      /* Indexable tree is one reference to global stream.
	 Guess it may be about 4 bytes.  */
      *sum -= 4;
      /* Do not walk into the children of an indexable node.  */
      *ws = 0;
    }
  /* String table entry + base of tree node needs to be streamed.  */
  if (TREE_CODE (*tp) == STRING_CST)
    *sum -= TREE_STRING_LENGTH (*tp) + 8;
  else
    {
      /* Identifiers are also variable length but should not appear
	 naked in constructor.  */
      gcc_checking_assert (TREE_CODE (*tp) != IDENTIFIER_NODE);
      /* We do not really make attempt to work out size of pickled tree, as
	 it is very variable.  Make it bigger than the reference.  */
      *sum -= 16;
    }
  if (*sum < 0)
    return *tp;
  return NULL_TREE;
}
355
356
357 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
358
359 static tree
360 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
361 {
362 gcc_checking_assert (DECL_P (expr)
363 && TREE_CODE (expr) != FUNCTION_DECL
364 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
365
366 /* Handle DECL_INITIAL for symbols. */
367 tree initial = DECL_INITIAL (expr);
368 if (VAR_P (expr)
369 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
370 && !DECL_IN_CONSTANT_POOL (expr)
371 && initial)
372 {
373 varpool_node *vnode;
374 /* Extra section needs about 30 bytes; do not produce it for simple
375 scalar values. */
376 if (!(vnode = varpool_node::get (expr))
377 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
378 initial = error_mark_node;
379 if (initial != error_mark_node)
380 {
381 long max_size = 30;
382 if (walk_tree (&initial, subtract_estimated_size, (void *)&max_size,
383 NULL))
384 initial = error_mark_node;
385 }
386 }
387
388 return initial;
389 }
390
391
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  The header for EXPR has already been
   written by the caller.  */

static void
lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
{
  /* Pack all the non-pointer fields in EXPR into a bitpack and write
     the resulting bitpack.  */
  streamer_write_tree_bitfields (ob, expr);

  /* Write all the pointer fields in EXPR.  */
  streamer_write_tree_body (ob, expr, ref_p);

  /* Write any LTO-specific data to OB.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    {
      /* Handle DECL_INITIAL for symbols.  */
      tree initial = get_symbol_initial_value
			 (ob->decl_state->symtab_node_encoder, expr);
      stream_write_tree (ob, initial, ref_p);
    }

  /* Stream references to early generated DIEs.  Keep in sync with the
     trees handled in dwarf2out_die_ref_for_decl.  */
  if ((DECL_P (expr)
       && TREE_CODE (expr) != FIELD_DECL
       && TREE_CODE (expr) != DEBUG_EXPR_DECL
       && TREE_CODE (expr) != TYPE_DECL)
      || TREE_CODE (expr) == BLOCK)
    {
      const char *sym;
      unsigned HOST_WIDE_INT off;
      if (debug_info_level > DINFO_LEVEL_NONE
	  && debug_hooks->die_ref_for_decl (expr, &sym, &off))
	{
	  streamer_write_string (ob, ob->main_stream, sym, true);
	  streamer_write_uhwi (ob, off);
	}
      else
	/* A NULL string tells the reader there is no early DIE ref.  */
	streamer_write_string (ob, ob->main_stream, NULL, true);
    }
}
438
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  Emits header, bitfields, body and a
   terminating zero marker.  */

static void
lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
{
  if (!lto_is_streamable (expr))
    internal_error ("tree code %qs is not supported in LTO streams",
		    get_tree_code_name (TREE_CODE (expr)));

  /* Write the header, containing everything needed to materialize
     EXPR on the reading side.  */
  streamer_write_tree_header (ob, expr);

  lto_write_tree_1 (ob, expr, ref_p);

  /* Mark the end of EXPR.  */
  streamer_write_zero (ob);
}
460
/* Emit the physical representation of tree node EXPR to output block OB.
   If THIS_REF_P is true, the leaves of EXPR are emitted as references via
   lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.
   HASH is the SCC hash recorded in the writer cache.  */

static void
lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
		   bool ref_p, bool this_ref_p)
{
  unsigned ix;

  gcc_checking_assert (expr != NULL_TREE
		       && !(this_ref_p && tree_is_indexable (expr)));

  /* EXPR must not have been seen before; insert it into the cache now.  */
  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
					      expr, hash, &ix);
  gcc_assert (!exists_p);
  if (TREE_CODE (expr) == INTEGER_CST
      && !TREE_OVERFLOW (expr))
    {
      /* Shared INTEGER_CST nodes are special because they need their
	 original type to be materialized by the reader (to implement
	 TYPE_CACHED_VALUES).  */
      streamer_write_integer_cst (ob, expr, ref_p);
    }
  else
    {
      /* This is the first time we see EXPR, write its fields
	 to OB.  */
      lto_write_tree (ob, expr, ref_p);
    }
}
492
/* Depth-first walk over a tree graph that discovers SCCs (see the
   dfsnum/low fields below) and streams each SCC as a unit.  The walk
   itself happens in the constructor.  */

class DFS
{
public:
  DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
       bool single_p);
  ~DFS ();

  /* One tree on the SCC stack together with its computed hash.  */
  struct scc_entry
  {
    tree t;
    hashval_t hash;
  };
  /* Stack of trees in the SCC currently being discovered.  */
  vec<scc_entry> sccstack;

private:
  /* Per-tree DFS state: discovery number and the smallest dfsnum
     reachable from the node (Tarjan-style lowlink).  */
  struct sccs
  {
    unsigned int dfsnum;
    unsigned int low;
  };
  /* One pending item of the explicit (non-recursive) DFS worklist.  */
  struct worklist
  {
    tree expr;
    sccs *from_state;
    sccs *cstate;
    bool ref_p;
    bool this_ref_p;
  };

  static int scc_entry_compare (const void *, const void *);

  void DFS_write_tree_body (struct output_block *ob,
			    tree expr, sccs *expr_state, bool ref_p);

  void DFS_write_tree (struct output_block *ob, sccs *from_state,
		       tree expr, bool ref_p, bool this_ref_p);

  hashval_t
  hash_scc (struct output_block *ob, unsigned first, unsigned size,
	    bool ref_p, bool this_ref_p);

  /* Map from tree to its DFS state; states live on sccstate_obstack.  */
  hash_map<tree, sccs *> sccstate;
  vec<worklist> worklist_vec;
  struct obstack sccstate_obstack;
};
538
/* Emit the physical representation of tree node EXPR to output block OB,
   using depth-first search on the subgraph.  If THIS_REF_P is true, the
   leaves of EXPR are emitted as references via lto_output_tree_ref.
   REF_P is used for streaming siblings of EXPR.  If SINGLE_P is true,
   this is for a rewalk of a single leaf SCC.

   This is an iterative (worklist-based) Tarjan-style SCC walk: each
   node gets a discovery number (dfsnum) and a lowlink (low); when a
   node's low equals its dfsnum the nodes above it on sccstack form a
   complete SCC, which is then streamed.  */

DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
	  bool single_p)
{
  unsigned int next_dfs_num = 1;
  sccstack.create (0);
  gcc_obstack_init (&sccstate_obstack);
  worklist_vec = vNULL;
  DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
  while (!worklist_vec.is_empty ())
    {
      worklist &w = worklist_vec.last ();
      expr = w.expr;
      sccs *from_state = w.from_state;
      sccs *cstate = w.cstate;
      ref_p = w.ref_p;
      this_ref_p = w.this_ref_p;
      /* A NULL cstate means this worklist entry is visited for the
	 first time (pre-order); otherwise we are post-processing it.  */
      if (cstate == NULL)
	{
	  sccs **slot = &sccstate.get_or_insert (expr);
	  cstate = *slot;
	  if (cstate)
	    {
	      /* Already visited: just update the parent's lowlink.  */
	      gcc_checking_assert (from_state);
	      if (cstate->dfsnum < from_state->dfsnum)
		from_state->low = MIN (cstate->dfsnum, from_state->low);
	      worklist_vec.pop ();
	      continue;
	    }

	  scc_entry e = { expr, 0 };
	  /* Not yet visited.  DFS recurse and push it onto the stack.  */
	  *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
	  sccstack.safe_push (e);
	  cstate->dfsnum = next_dfs_num++;
	  cstate->low = cstate->dfsnum;
	  w.cstate = cstate;

	  if (TREE_CODE (expr) == INTEGER_CST
	      && !TREE_OVERFLOW (expr))
	    /* INTEGER_CSTs only need their type edge walked.  */
	    DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
	  else
	    {
	      DFS_write_tree_body (ob, expr, cstate, ref_p);

	      /* Walk any LTO-specific edges.  */
	      if (DECL_P (expr)
		  && TREE_CODE (expr) != FUNCTION_DECL
		  && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
		{
		  /* Handle DECL_INITIAL for symbols.  */
		  tree initial
		    = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
						expr);
		  DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
		}
	    }
	  continue;
	}

      /* See if we found an SCC.  */
      if (cstate->low == cstate->dfsnum)
	{
	  unsigned first, size;
	  tree x;

	  /* If we are re-walking a single leaf SCC just pop it,
	     let earlier worklist item access the sccstack.  */
	  if (single_p)
	    {
	      worklist_vec.pop ();
	      continue;
	    }

	  /* Pop the SCC and compute its size.  */
	  first = sccstack.length ();
	  do
	    {
	      x = sccstack[--first].t;
	    }
	  while (x != expr);
	  size = sccstack.length () - first;

	  /* No need to compute hashes for LTRANS units, we don't perform
	     any merging there.  */
	  hashval_t scc_hash = 0;
	  unsigned scc_entry_len = 0;
	  if (!flag_wpa)
	    {
	      scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);

	      /* Put the entries with the least number of collisions first.  */
	      unsigned entry_start = 0;
	      scc_entry_len = size + 1;
	      for (unsigned i = 0; i < size;)
		{
		  unsigned from = i;
		  for (i = i + 1; i < size
		       && (sccstack[first + i].hash
			   == sccstack[first + from].hash); ++i)
		    ;
		  if (i - from < scc_entry_len)
		    {
		      scc_entry_len = i - from;
		      entry_start = from;
		    }
		}
	      for (unsigned i = 0; i < scc_entry_len; ++i)
		std::swap (sccstack[first + i],
			   sccstack[first + entry_start + i]);

	      /* We already sorted SCC deterministically in hash_scc.  */

	      /* Check that we have only one SCC.
		 Naturally we may have conflicts if hash function is not
		 strong enough.  Lets see how far this gets.  */
	      gcc_checking_assert (scc_entry_len == 1);
	    }

	  /* Write LTO_tree_scc.  */
	  streamer_write_record_start (ob, LTO_tree_scc);
	  streamer_write_uhwi (ob, size);
	  streamer_write_uhwi (ob, scc_hash);

	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
	     All INTEGER_CSTs need to be handled this way as we need
	     their type to materialize them.  Also builtins are handled
	     this way.
	     ??? We still wrap these in LTO_tree_scc so at the
	     input side we can properly identify the tree we want
	     to ultimatively return.  */
	  if (size == 1)
	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
	  else
	    {
	      /* Write the size of the SCC entry candidates.  */
	      streamer_write_uhwi (ob, scc_entry_len);

	      /* Write all headers and populate the streamer cache.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  hashval_t hash = sccstack[first+i].hash;
		  tree t = sccstack[first+i].t;
		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
							      t, hash, NULL);
		  gcc_assert (!exists_p);

		  if (!lto_is_streamable (t))
		    internal_error ("tree code %qs is not supported "
				    "in LTO streams",
				    get_tree_code_name (TREE_CODE (t)));

		  /* Write the header, containing everything needed to
		     materialize EXPR on the reading side.  */
		  streamer_write_tree_header (ob, t);
		}

	      /* Write the bitpacks and tree references.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

		  /* Mark the end of the tree.  */
		  streamer_write_zero (ob);
		}
	    }

	  /* Finally truncate the vector.  */
	  sccstack.truncate (first);

	  if (from_state)
	    from_state->low = MIN (from_state->low, cstate->low);
	  worklist_vec.pop ();
	  continue;
	}

      /* Post-order without a completed SCC: propagate lowlink upward.  */
      gcc_checking_assert (from_state);
      from_state->low = MIN (from_state->low, cstate->low);
      if (cstate->dfsnum < from_state->dfsnum)
	from_state->low = MIN (cstate->dfsnum, from_state->low);
      worklist_vec.pop ();
    }
  worklist_vec.release ();
}
728
/* Release the SCC stack and the obstack holding per-node DFS state.
   (sccstate and worklist_vec clean up via their own destructors /
   the release in the constructor.)  */

DFS::~DFS ()
{
  sccstack.release ();
  obstack_free (&sccstate_obstack, NULL);
}
734
/* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
   DFS recurse for all tree edges originating from it.  Each edge is
   queued through DFS_write_tree (wrapped by the DFS_follow_tree_edge
   macro); which edges are walked is decided per tree-structure via
   CODE_CONTAINS_STRUCT.  */

void
DFS::DFS_write_tree_body (struct output_block *ob,
			  tree expr, sccs *expr_state, bool ref_p)
{
#define DFS_follow_tree_edge(DEST) \
  DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)

  enum tree_code code;

  if (streamer_dump_file)
    {
      print_node_brief (streamer_dump_file, " Streaming ",
			expr, 4);
      fprintf (streamer_dump_file, " to %s\n",
	       lto_section_name [ob->section_type]);
    }

  code = TREE_CODE (expr);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (TREE_CODE (expr) != IDENTIFIER_NODE)
	DFS_follow_tree_edge (TREE_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      unsigned int count = vector_cst_encoded_nelts (expr);
      for (unsigned int i = 0; i < count; ++i)
	DFS_follow_tree_edge (VECTOR_CST_ENCODED_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
    for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
      DFS_follow_tree_edge (POLY_INT_CST_COEFF (expr, i));

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      DFS_follow_tree_edge (TREE_REALPART (expr));
      DFS_follow_tree_edge (TREE_IMAGPART (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (expr)
	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
	  && IDENTIFIER_ANON_P (DECL_NAME (expr)))
	;
      else
	DFS_follow_tree_edge (DECL_NAME (expr));
      /* Decls without a context hang off the first translation unit.  */
      if (TREE_CODE (expr) != TRANSLATION_UNIT_DECL
	  && ! DECL_CONTEXT (expr))
	DFS_follow_tree_edge ((*all_translation_units)[0]);
      else
	DFS_follow_tree_edge (DECL_CONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      DFS_follow_tree_edge (DECL_SIZE (expr));
      DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));

      /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
	 special handling in LTO, it must be handled by streamer hooks.  */

      DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));

      /* We use DECL_ABSTRACT_ORIGIN == error_mark_node to mark
	 declarations which should be eliminated by decl merging.  Be sure none
	 leaks to this point.  */
      gcc_assert (DECL_ABSTRACT_ORIGIN (expr) != error_mark_node);
      DFS_follow_tree_edge (DECL_ABSTRACT_ORIGIN (expr));

      if ((VAR_P (expr)
	   || TREE_CODE (expr) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (expr))
	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
      if (VAR_P (expr)
	  && DECL_HAS_DEBUG_EXPR_P (expr))
	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (expr))
	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
      DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
      gcc_checking_assert (!DECL_FCONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      gcc_checking_assert (DECL_VINDEX (expr) == NULL);
      DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      DFS_follow_tree_edge (TYPE_SIZE (expr));
      DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
      DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
      DFS_follow_tree_edge (TYPE_NAME (expr));
      /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
	 reconstructed during fixup.  */
      /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
	 during fixup.  */
      DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
      DFS_follow_tree_edge (TYPE_CONTEXT (expr));
      /* TYPE_CANONICAL is re-computed during type merging, so no need
	 to follow it here.  */
      /* Do not stream TYPE_STUB_DECL; it is not needed by LTO but currently
	 it cannot be freed by free_lang_data without triggering ICEs in
	 langhooks.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	DFS_follow_tree_edge (TYPE_VALUES (expr));
      else if (TREE_CODE (expr) == ARRAY_TYPE)
	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
      else if (RECORD_OR_UNION_TYPE_P (expr))
	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
	  DFS_follow_tree_edge (t);
      else if (TREE_CODE (expr) == FUNCTION_TYPE
	       || TREE_CODE (expr) == METHOD_TYPE)
	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));

      if (!POINTER_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_MIN_VALUE_RAW (expr));
      DFS_follow_tree_edge (TYPE_MAX_VALUE_RAW (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      DFS_follow_tree_edge (TREE_PURPOSE (expr));
      DFS_follow_tree_edge (TREE_VALUE (expr));
      DFS_follow_tree_edge (TREE_CHAIN (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    {
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
      DFS_follow_tree_edge (TREE_BLOCK (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    {
      for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
	{
	  /* We would have to stream externals in the block chain as
	     non-references but we should have dropped them in
	     free-lang-data.  */
	  gcc_assert (!VAR_OR_FUNCTION_DECL_P (t) || !DECL_EXTERNAL (t));
	  DFS_follow_tree_edge (t);
	}

      DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
      DFS_follow_tree_edge (BLOCK_ABSTRACT_ORIGIN (expr));

      /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
	 information for early inlined BLOCKs so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
	 streaming time.  */

      /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;

      /* Note that the number of BINFO slots has already been emitted in
	 EXPR's header (see streamer_write_tree_header) because this length
	 is needed to build the empty BINFO node on the reader side.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
	DFS_follow_tree_edge (t);
      DFS_follow_tree_edge (BINFO_OFFSET (expr));
      DFS_follow_tree_edge (BINFO_VTABLE (expr));

      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX,
	 BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
	 by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
	{
	  DFS_follow_tree_edge (index);
	  DFS_follow_tree_edge (value);
	}
    }

  if (code == OMP_CLAUSE)
    {
      int i;
      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
	DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
      DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
    }

#undef DFS_follow_tree_edge
}
967
/* Return a hash value for the tree T.
   CACHE holds hash values of trees outside current SCC.  MAP, if non-NULL,
   may hold hash values of trees inside current SCC.  */
971
972 static hashval_t
973 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
974 {
975 inchash::hash hstate;
976
977 #define visit(SIBLING) \
978 do { \
979 unsigned ix; \
980 if (!SIBLING) \
981 hstate.add_int (0); \
982 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
983 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
984 else if (map) \
985 hstate.add_int (*map->get (SIBLING)); \
986 else \
987 hstate.add_int (1); \
988 } while (0)
989
990 /* Hash TS_BASE. */
991 enum tree_code code = TREE_CODE (t);
992 hstate.add_int (code);
993 if (!TYPE_P (t))
994 {
995 hstate.add_flag (TREE_SIDE_EFFECTS (t));
996 hstate.add_flag (TREE_CONSTANT (t));
997 hstate.add_flag (TREE_READONLY (t));
998 hstate.add_flag (TREE_PUBLIC (t));
999 }
1000 hstate.add_flag (TREE_ADDRESSABLE (t));
1001 hstate.add_flag (TREE_THIS_VOLATILE (t));
1002 if (DECL_P (t))
1003 hstate.add_flag (DECL_UNSIGNED (t));
1004 else if (TYPE_P (t))
1005 hstate.add_flag (TYPE_UNSIGNED (t));
1006 if (TYPE_P (t))
1007 hstate.add_flag (TYPE_ARTIFICIAL (t));
1008 else
1009 hstate.add_flag (TREE_NO_WARNING (t));
1010 hstate.add_flag (TREE_NOTHROW (t));
1011 hstate.add_flag (TREE_STATIC (t));
1012 hstate.add_flag (TREE_PROTECTED (t));
1013 hstate.add_flag (TREE_DEPRECATED (t));
1014 if (code != TREE_BINFO)
1015 hstate.add_flag (TREE_PRIVATE (t));
1016 if (TYPE_P (t))
1017 {
1018 hstate.add_flag (AGGREGATE_TYPE_P (t)
1019 ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
1020 hstate.add_flag (TYPE_ADDR_SPACE (t));
1021 }
1022 else if (code == SSA_NAME)
1023 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
1024 hstate.commit_flag ();
1025
1026 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1027 hstate.add_wide_int (wi::to_widest (t));
1028
1029 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1030 {
1031 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
1032 hstate.add_flag (r.cl);
1033 hstate.add_flag (r.sign);
1034 hstate.add_flag (r.signalling);
1035 hstate.add_flag (r.canonical);
1036 hstate.commit_flag ();
1037 hstate.add_int (r.uexp);
1038 hstate.add (r.sig, sizeof (r.sig));
1039 }
1040
1041 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1042 {
1043 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
1044 hstate.add_int (f.mode);
1045 hstate.add_int (f.data.low);
1046 hstate.add_int (f.data.high);
1047 }
1048
1049 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1050 {
1051 hstate.add_hwi (DECL_MODE (t));
1052 hstate.add_flag (DECL_NONLOCAL (t));
1053 hstate.add_flag (DECL_VIRTUAL_P (t));
1054 hstate.add_flag (DECL_IGNORED_P (t));
1055 hstate.add_flag (DECL_ABSTRACT_P (t));
1056 hstate.add_flag (DECL_ARTIFICIAL (t));
1057 hstate.add_flag (DECL_USER_ALIGN (t));
1058 hstate.add_flag (DECL_PRESERVE_P (t));
1059 hstate.add_flag (DECL_EXTERNAL (t));
1060 hstate.add_flag (DECL_GIMPLE_REG_P (t));
1061 hstate.commit_flag ();
1062 hstate.add_int (DECL_ALIGN (t));
1063 if (code == LABEL_DECL)
1064 {
1065 hstate.add_int (EH_LANDING_PAD_NR (t));
1066 hstate.add_int (LABEL_DECL_UID (t));
1067 }
1068 else if (code == FIELD_DECL)
1069 {
1070 hstate.add_flag (DECL_PACKED (t));
1071 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1072 hstate.add_flag (DECL_PADDING_P (t));
1073 hstate.add_int (DECL_OFFSET_ALIGN (t));
1074 }
1075 else if (code == VAR_DECL)
1076 {
1077 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1078 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1079 }
1080 if (code == RESULT_DECL
1081 || code == PARM_DECL
1082 || code == VAR_DECL)
1083 {
1084 hstate.add_flag (DECL_BY_REFERENCE (t));
1085 if (code == VAR_DECL
1086 || code == PARM_DECL)
1087 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1088 }
1089 hstate.commit_flag ();
1090 }
1091
1092 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1093 hstate.add_int (DECL_REGISTER (t));
1094
1095 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1096 {
1097 hstate.add_flag (DECL_COMMON (t));
1098 hstate.add_flag (DECL_DLLIMPORT_P (t));
1099 hstate.add_flag (DECL_WEAK (t));
1100 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1101 hstate.add_flag (DECL_COMDAT (t));
1102 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1103 hstate.add_int (DECL_VISIBILITY (t));
1104 if (code == VAR_DECL)
1105 {
1106 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1107 hstate.add_flag (DECL_HARD_REGISTER (t));
1108 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1109 }
1110 if (TREE_CODE (t) == FUNCTION_DECL)
1111 {
1112 hstate.add_flag (DECL_FINAL_P (t));
1113 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1114 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1115 }
1116 hstate.commit_flag ();
1117 }
1118
1119 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1120 {
1121 hstate.add_int (DECL_BUILT_IN_CLASS (t));
1122 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1123 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1124 hstate.add_flag (DECL_UNINLINABLE (t));
1125 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1126 hstate.add_flag (DECL_IS_NOVOPS (t));
1127 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1128 hstate.add_flag (DECL_IS_MALLOC (t));
1129 hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
1130 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1131 hstate.add_flag (DECL_STATIC_CHAIN (t));
1132 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1133 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1134 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1135 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1136 hstate.add_flag (DECL_PURE_P (t));
1137 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1138 hstate.commit_flag ();
1139 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1140 hstate.add_int (DECL_FUNCTION_CODE (t));
1141 }
1142
1143 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1144 {
1145 hstate.add_hwi (TYPE_MODE (t));
1146 hstate.add_flag (TYPE_STRING_FLAG (t));
1147 /* TYPE_NO_FORCE_BLK is private to stor-layout and need
1148 no streaming. */
1149 hstate.add_flag (TYPE_PACKED (t));
1150 hstate.add_flag (TYPE_RESTRICT (t));
1151 hstate.add_flag (TYPE_USER_ALIGN (t));
1152 hstate.add_flag (TYPE_READONLY (t));
1153 if (RECORD_OR_UNION_TYPE_P (t))
1154 {
1155 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1156 hstate.add_flag (TYPE_FINAL_P (t));
1157 }
1158 else if (code == ARRAY_TYPE)
1159 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1160 if (AGGREGATE_TYPE_P (t))
1161 hstate.add_flag (TYPE_TYPELESS_STORAGE (t));
1162 hstate.commit_flag ();
1163 hstate.add_int (TYPE_PRECISION (t));
1164 hstate.add_int (TYPE_ALIGN (t));
1165 hstate.add_int (TYPE_EMPTY_P (t));
1166 }
1167
1168 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1169 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1170 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1171
1172 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1173 /* We don't stream these when passing things to a different target. */
1174 && !lto_stream_offload_p)
1175 hstate.add_hwi (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1176
1177 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1178 hstate.add_hwi (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1179
1180 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1181 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1182
1183 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1184 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1185
1186 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1187 {
1188 if (code != IDENTIFIER_NODE)
1189 visit (TREE_TYPE (t));
1190 }
1191
1192 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1193 {
1194 unsigned int count = vector_cst_encoded_nelts (t);
1195 for (unsigned int i = 0; i < count; ++i)
1196 visit (VECTOR_CST_ENCODED_ELT (t, i));
1197 }
1198
1199 if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
1200 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1201 visit (POLY_INT_CST_COEFF (t, i));
1202
1203 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1204 {
1205 visit (TREE_REALPART (t));
1206 visit (TREE_IMAGPART (t));
1207 }
1208
1209 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1210 {
1211 /* Drop names that were created for anonymous entities. */
1212 if (DECL_NAME (t)
1213 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1214 && IDENTIFIER_ANON_P (DECL_NAME (t)))
1215 ;
1216 else
1217 visit (DECL_NAME (t));
1218 if (DECL_FILE_SCOPE_P (t))
1219 ;
1220 else
1221 visit (DECL_CONTEXT (t));
1222 }
1223
1224 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1225 {
1226 visit (DECL_SIZE (t));
1227 visit (DECL_SIZE_UNIT (t));
1228 visit (DECL_ATTRIBUTES (t));
1229 if ((code == VAR_DECL
1230 || code == PARM_DECL)
1231 && DECL_HAS_VALUE_EXPR_P (t))
1232 visit (DECL_VALUE_EXPR (t));
1233 if (code == VAR_DECL
1234 && DECL_HAS_DEBUG_EXPR_P (t))
1235 visit (DECL_DEBUG_EXPR (t));
1236 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
1237 be able to call get_symbol_initial_value. */
1238 }
1239
1240 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1241 {
1242 if (DECL_ASSEMBLER_NAME_SET_P (t))
1243 visit (DECL_ASSEMBLER_NAME (t));
1244 }
1245
1246 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1247 {
1248 visit (DECL_FIELD_OFFSET (t));
1249 visit (DECL_BIT_FIELD_TYPE (t));
1250 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1251 visit (DECL_FIELD_BIT_OFFSET (t));
1252 }
1253
1254 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1255 {
1256 visit (DECL_FUNCTION_PERSONALITY (t));
1257 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1258 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1259 }
1260
1261 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1262 {
1263 visit (TYPE_SIZE (t));
1264 visit (TYPE_SIZE_UNIT (t));
1265 visit (TYPE_ATTRIBUTES (t));
1266 visit (TYPE_NAME (t));
1267 visit (TYPE_MAIN_VARIANT (t));
1268 if (TYPE_FILE_SCOPE_P (t))
1269 ;
1270 else
1271 visit (TYPE_CONTEXT (t));
1272 }
1273
1274 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1275 {
1276 if (code == ENUMERAL_TYPE)
1277 visit (TYPE_VALUES (t));
1278 else if (code == ARRAY_TYPE)
1279 visit (TYPE_DOMAIN (t));
1280 else if (RECORD_OR_UNION_TYPE_P (t))
1281 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1282 visit (f);
1283 else if (code == FUNCTION_TYPE
1284 || code == METHOD_TYPE)
1285 visit (TYPE_ARG_TYPES (t));
1286 if (!POINTER_TYPE_P (t))
1287 visit (TYPE_MIN_VALUE_RAW (t));
1288 visit (TYPE_MAX_VALUE_RAW (t));
1289 }
1290
1291 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1292 {
1293 visit (TREE_PURPOSE (t));
1294 visit (TREE_VALUE (t));
1295 visit (TREE_CHAIN (t));
1296 }
1297
1298 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1299 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1300 visit (TREE_VEC_ELT (t, i));
1301
1302 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1303 {
1304 hstate.add_hwi (TREE_OPERAND_LENGTH (t));
1305 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1306 visit (TREE_OPERAND (t, i));
1307 }
1308
1309 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1310 {
1311 unsigned i;
1312 tree b;
1313 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1314 visit (b);
1315 visit (BINFO_OFFSET (t));
1316 visit (BINFO_VTABLE (t));
1317 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1318 BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
1319 by C++ FE only. */
1320 }
1321
1322 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1323 {
1324 unsigned i;
1325 tree index, value;
1326 hstate.add_hwi (CONSTRUCTOR_NELTS (t));
1327 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1328 {
1329 visit (index);
1330 visit (value);
1331 }
1332 }
1333
1334 if (code == OMP_CLAUSE)
1335 {
1336 int i;
1337 HOST_WIDE_INT val;
1338
1339 hstate.add_hwi (OMP_CLAUSE_CODE (t));
1340 switch (OMP_CLAUSE_CODE (t))
1341 {
1342 case OMP_CLAUSE_DEFAULT:
1343 val = OMP_CLAUSE_DEFAULT_KIND (t);
1344 break;
1345 case OMP_CLAUSE_SCHEDULE:
1346 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1347 break;
1348 case OMP_CLAUSE_DEPEND:
1349 val = OMP_CLAUSE_DEPEND_KIND (t);
1350 break;
1351 case OMP_CLAUSE_MAP:
1352 val = OMP_CLAUSE_MAP_KIND (t);
1353 break;
1354 case OMP_CLAUSE_PROC_BIND:
1355 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1356 break;
1357 case OMP_CLAUSE_REDUCTION:
1358 case OMP_CLAUSE_TASK_REDUCTION:
1359 case OMP_CLAUSE_IN_REDUCTION:
1360 val = OMP_CLAUSE_REDUCTION_CODE (t);
1361 break;
1362 default:
1363 val = 0;
1364 break;
1365 }
1366 hstate.add_hwi (val);
1367 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1368 visit (OMP_CLAUSE_OPERAND (t, i));
1369 visit (OMP_CLAUSE_CHAIN (t));
1370 }
1371
1372 return hstate.end ();
1373
1374 #undef visit
1375 }
1376
1377 /* Compare two SCC entries by their hash value for qsorting them. */
1378
1379 int
1380 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1381 {
1382 const scc_entry *p1 = (const scc_entry *) p1_;
1383 const scc_entry *p2 = (const scc_entry *) p2_;
1384 if (p1->hash < p2->hash)
1385 return -1;
1386 else if (p1->hash > p2->hash)
1387 return 1;
1388 return 0;
1389 }
1390
/* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
   THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST.

   On return the hash field of every SCC member has been made unique
   (or as unique as the iteration below can make it) and mixed with the
   overall SCC hash, which is returned.  */

hashval_t
DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
	       bool ref_p, bool this_ref_p)
{
  unsigned int last_classes = 0, iterations = 0;

  /* Compute hash values for the SCC members.  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash
      = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);

  if (size == 1)
    return sccstack[first].hash;

  /* We aim to get unique hash for every tree within SCC and compute hash value
     of the whole SCC by combining all values together in a stable (entry-point
     independent) order.  This guarantees that the same SCC regions within
     different translation units will get the same hash values and therefore
     will be merged at WPA time.

     Often the hashes are already unique.  In that case we compute the SCC hash
     by combining individual hash values in an increasing order.

     If there are duplicates, we seek at least one tree with unique hash (and
     pick one with minimal hash and this property).  Then we obtain a stable
     order by DFS walk starting from this unique tree and then use the index
     within this order to make individual hash values unique.

     If there is no tree with unique hash, we iteratively propagate the hash
     values across the internal edges of SCC.  This usually quickly leads
     to unique hashes.  Consider, for example, an SCC containing two pointers
     that are identical except for the types they point to and assume that
     these types are also part of the SCC.  The propagation will add the
     points-to type information into their hash values.  */
  do
    {
      /* Sort the SCC so we can easily check for uniqueness.  */
      qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);

      unsigned int classes = 1;
      int firstunique = -1;

      /* Find the tree with lowest unique hash (if it exists) and compute
	 the number of equivalence classes.  */
      if (sccstack[first].hash != sccstack[first+1].hash)
	firstunique = 0;
      for (unsigned i = 1; i < size; ++i)
	if (sccstack[first+i-1].hash != sccstack[first+i].hash)
	  {
	    classes++;
	    if (firstunique == -1
		&& (i == size - 1
		    || sccstack[first+i+1].hash != sccstack[first+i].hash))
	      firstunique = i;
	  }

      /* If we found a tree with unique hash, stop the iteration.  */
      if (firstunique != -1
	  /* Also terminate if we run out of iterations or if the number of
	     equivalence classes is no longer increasing.
	     For example a cyclic list of trees that are all equivalent will
	     never have unique entry point; we however do not build such SCCs
	     in our IL.  */
	  || classes <= last_classes || iterations > 16)
        {
	  hashval_t scc_hash;

	  /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
	     starting from FIRSTUNIQUE to obtain a stable order.  */
	  if (classes != size && firstunique != -1)
	    {
	      hash_map <tree, hashval_t> map(size*2);

	      /* Store hash values into a map, so we can associate them with
		 the reordered SCC.  */
	      for (unsigned i = 0; i < size; ++i)
		map.put (sccstack[first+i].t, sccstack[first+i].hash);

	      /* Re-walk the SCC from the unique entry point; the resulting
		 stack order is entry-point independent.  */
	      DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
			 true);
	      gcc_assert (again.sccstack.length () == size);

	      memcpy (sccstack.address () + first,
		      again.sccstack.address (),
		      sizeof (scc_entry) * size);

	      /* Update hash values of individual members by hashing in the
		 index within the stable order.  This ensures uniqueness.
		 Also compute the SCC hash by mixing in all hash values in
		 the stable order we obtained.  */
	      sccstack[first].hash = *map.get (sccstack[first].t);
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		{
		  sccstack[first+i].hash
		    = iterative_hash_hashval_t (i,
						*map.get (sccstack[first+i].t));
		  scc_hash
		    = iterative_hash_hashval_t (scc_hash,
						sccstack[first+i].hash);
		}
	    }
	  /* If we got a unique hash value for each tree, then sort already
	     ensured entry-point independent order.  Only compute the final
	     SCC hash.

	     If we failed to find the unique entry point, we go by the same
	     route.  We will eventually introduce unwanted hash conflicts.  */
	  else
	    {
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		scc_hash
		  = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);

	      /* We cannot 100% guarantee that the hash won't conflict so as
		 to make it impossible to find a unique hash.  This however
		 should be an extremely rare case.  ICE for now so possible
		 issues are found and evaluated.  */
	      gcc_checking_assert (classes == size);
	    }

	  /* To avoid conflicts across SCCs, iteratively hash the whole SCC
	     hash into the hash of each element.  */
	  for (unsigned i = 0; i < size; ++i)
	    sccstack[first+i].hash
	      = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
	  return scc_hash;
	}

      last_classes = classes;
      iterations++;

      /* We failed to identify the entry point; propagate hash values across
	 the edges.  */
      hash_map <tree, hashval_t> map(size*2);

      for (unsigned i = 0; i < size; ++i)
	map.put (sccstack[first+i].t, sccstack[first+i].hash);

      for (unsigned i = 0; i < size; i++)
	sccstack[first+i].hash
	  = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
    }
  while (true);
}
1540
1541 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1542 already in the streamer cache. Main routine called for
1543 each visit of EXPR. */
1544
1545 void
1546 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1547 tree expr, bool ref_p, bool this_ref_p)
1548 {
1549 /* Handle special cases. */
1550 if (expr == NULL_TREE)
1551 return;
1552
1553 /* Do not DFS walk into indexable trees. */
1554 if (this_ref_p && tree_is_indexable (expr))
1555 return;
1556
1557 /* Check if we already streamed EXPR. */
1558 if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1559 return;
1560
1561 worklist w;
1562 w.expr = expr;
1563 w.from_state = from_state;
1564 w.cstate = NULL;
1565 w.ref_p = ref_p;
1566 w.this_ref_p = this_ref_p;
1567 worklist_vec.safe_push (w);
1568 }
1569
1570
/* Emit the physical representation of tree node EXPR to output block OB.
   If THIS_REF_P is true, the leaves of EXPR are emitted as references via
   lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.  */

void
lto_output_tree (struct output_block *ob, tree expr,
		 bool ref_p, bool this_ref_p)
{
  unsigned ix;
  bool existed_p;

  /* A NULL tree is represented by an explicit null record.  */
  if (expr == NULL_TREE)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  /* Indexable trees are emitted as references rather than by value.  */
  if (this_ref_p && tree_is_indexable (expr))
    {
      lto_output_tree_ref (ob, expr);
      return;
    }

  existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
  if (existed_p)
    {
      /* If a node has already been streamed out, make sure that
	 we don't write it more than once.  Otherwise, the reader
	 will instantiate two different nodes for the same object.  */
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
  else
    {
      /* This is the first time we see EXPR, write all reachable
	 trees to OB.  */
      static bool in_dfs_walk;

      /* Protect against recursion which means disconnect between
	 what tree edges we walk in the DFS walk and what edges
	 we stream out.  */
      gcc_assert (!in_dfs_walk);

      if (streamer_dump_file)
	{
	  print_node_brief (streamer_dump_file, "     Streaming SCC of ",
			    expr, 4);
	  fprintf (streamer_dump_file, "\n");
	}

      /* Start the DFS walk.  The DFS constructor does the walk and streams
	 out each SCC of trees reachable from EXPR as a side effect.  */
      in_dfs_walk = true;
      DFS (ob, expr, ref_p, this_ref_p, false);
      in_dfs_walk = false;

      /* Finally append a reference to the tree we were writing.
	 ??? If expr ended up as a singleton we could have
	 inlined it here and avoid outputting a reference.  */
      existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
      gcc_assert (existed_p);
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      if (streamer_dump_file)
	{
	  print_node_brief (streamer_dump_file, "     Finished SCC of ",
			    expr, 4);
	  fprintf (streamer_dump_file, "\n\n");
	}
      lto_stats.num_pickle_refs_output++;
    }
}
1649
1650
1651 /* Output to OB a list of try/catch handlers starting with FIRST. */
1652
1653 static void
1654 output_eh_try_list (struct output_block *ob, eh_catch first)
1655 {
1656 eh_catch n;
1657
1658 for (n = first; n; n = n->next_catch)
1659 {
1660 streamer_write_record_start (ob, LTO_eh_catch);
1661 stream_write_tree (ob, n->type_list, true);
1662 stream_write_tree (ob, n->filter_list, true);
1663 stream_write_tree (ob, n->label, true);
1664 }
1665
1666 streamer_write_record_start (ob, LTO_null);
1667 }
1668
1669
1670 /* Output EH region R in function FN to OB. CURR_RN is the slot index
1671 that is being emitted in FN->EH->REGION_ARRAY. This is used to
1672 detect EH region sharing. */
1673
1674 static void
1675 output_eh_region (struct output_block *ob, eh_region r)
1676 {
1677 enum LTO_tags tag;
1678
1679 if (r == NULL)
1680 {
1681 streamer_write_record_start (ob, LTO_null);
1682 return;
1683 }
1684
1685 if (r->type == ERT_CLEANUP)
1686 tag = LTO_ert_cleanup;
1687 else if (r->type == ERT_TRY)
1688 tag = LTO_ert_try;
1689 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1690 tag = LTO_ert_allowed_exceptions;
1691 else if (r->type == ERT_MUST_NOT_THROW)
1692 tag = LTO_ert_must_not_throw;
1693 else
1694 gcc_unreachable ();
1695
1696 streamer_write_record_start (ob, tag);
1697 streamer_write_hwi (ob, r->index);
1698
1699 if (r->outer)
1700 streamer_write_hwi (ob, r->outer->index);
1701 else
1702 streamer_write_zero (ob);
1703
1704 if (r->inner)
1705 streamer_write_hwi (ob, r->inner->index);
1706 else
1707 streamer_write_zero (ob);
1708
1709 if (r->next_peer)
1710 streamer_write_hwi (ob, r->next_peer->index);
1711 else
1712 streamer_write_zero (ob);
1713
1714 if (r->type == ERT_TRY)
1715 {
1716 output_eh_try_list (ob, r->u.eh_try.first_catch);
1717 }
1718 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1719 {
1720 stream_write_tree (ob, r->u.allowed.type_list, true);
1721 stream_write_tree (ob, r->u.allowed.label, true);
1722 streamer_write_uhwi (ob, r->u.allowed.filter);
1723 }
1724 else if (r->type == ERT_MUST_NOT_THROW)
1725 {
1726 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1727 bitpack_d bp = bitpack_create (ob->main_stream);
1728 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1729 streamer_write_bitpack (&bp);
1730 }
1731
1732 if (r->landing_pads)
1733 streamer_write_hwi (ob, r->landing_pads->index);
1734 else
1735 streamer_write_zero (ob);
1736 }
1737
1738
1739 /* Output landing pad LP to OB. */
1740
1741 static void
1742 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1743 {
1744 if (lp == NULL)
1745 {
1746 streamer_write_record_start (ob, LTO_null);
1747 return;
1748 }
1749
1750 streamer_write_record_start (ob, LTO_eh_landing_pad);
1751 streamer_write_hwi (ob, lp->index);
1752 if (lp->next_lp)
1753 streamer_write_hwi (ob, lp->next_lp->index);
1754 else
1755 streamer_write_zero (ob);
1756
1757 if (lp->region)
1758 streamer_write_hwi (ob, lp->region->index);
1759 else
1760 streamer_write_zero (ob);
1761
1762 stream_write_tree (ob, lp->post_landing_pad, true);
1763 }
1764
1765
/* Output the existing eh_table of function FN to OB.  The record order
   written here (regions, landing pads, runtime types, action chains)
   is fixed by the reader; do not reorder.  */

static void
output_eh_regions (struct output_block *ob, struct function *fn)
{
  if (fn->eh && fn->eh->region_tree)
    {
      unsigned i;
      eh_region eh;
      eh_landing_pad lp;
      tree ttype;

      streamer_write_record_start (ob, LTO_eh_table);

      /* Emit the index of the root of the EH region tree.  */
      streamer_write_hwi (ob, fn->eh->region_tree->index);

      /* Emit all the EH regions in the region array.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
	output_eh_region (ob, eh);

      /* Emit all landing pads.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
	output_eh_lp (ob, lp);

      /* Emit all the runtime type data.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
	stream_write_tree (ob, ttype, true);

      /* Emit the table of action chains.  The representation differs
	 between the ARM EABI unwinder (trees) and the generic one
	 (raw bytes).  */
      if (targetm.arm_eabi_unwinder)
	{
	  tree t;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
	    stream_write_tree (ob, t, true);
	}
      else
	{
	  uchar c;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
	    streamer_write_char_stream (ob->main_stream, c);
	}
    }

  /* The LTO_null either terminates the record or indicates that there
     are no eh_records at all.  */
  streamer_write_record_start (ob, LTO_null);
}
1819
1820
1821 /* Output all of the active ssa names to the ssa_names stream. */
1822
1823 static void
1824 output_ssa_names (struct output_block *ob, struct function *fn)
1825 {
1826 unsigned int i, len;
1827
1828 len = vec_safe_length (SSANAMES (fn));
1829 streamer_write_uhwi (ob, len);
1830
1831 for (i = 1; i < len; i++)
1832 {
1833 tree ptr = (*SSANAMES (fn))[i];
1834
1835 if (ptr == NULL_TREE
1836 || SSA_NAME_IN_FREE_LIST (ptr)
1837 || virtual_operand_p (ptr)
1838 /* Simply skip unreleased SSA names. */
1839 || (! SSA_NAME_IS_DEFAULT_DEF (ptr)
1840 && (! SSA_NAME_DEF_STMT (ptr)
1841 || ! gimple_bb (SSA_NAME_DEF_STMT (ptr)))))
1842 continue;
1843
1844 streamer_write_uhwi (ob, i);
1845 streamer_write_char_stream (ob->main_stream,
1846 SSA_NAME_IS_DEFAULT_DEF (ptr));
1847 if (SSA_NAME_VAR (ptr))
1848 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1849 else
1850 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1851 stream_write_tree (ob, TREE_TYPE (ptr), true);
1852 }
1853
1854 streamer_write_zero (ob);
1855 }
1856
1857
1858
1859 /* Output the cfg. */
1860
1861 static void
1862 output_cfg (struct output_block *ob, struct function *fn)
1863 {
1864 struct lto_output_stream *tmp_stream = ob->main_stream;
1865 basic_block bb;
1866
1867 ob->main_stream = ob->cfg_stream;
1868
1869 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1870 profile_status_for_fn (fn));
1871
1872 /* Output the number of the highest basic block. */
1873 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1874
1875 FOR_ALL_BB_FN (bb, fn)
1876 {
1877 edge_iterator ei;
1878 edge e;
1879
1880 streamer_write_hwi (ob, bb->index);
1881
1882 /* Output the successors and the edge flags. */
1883 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1884 FOR_EACH_EDGE (e, ei, bb->succs)
1885 {
1886 streamer_write_uhwi (ob, e->dest->index);
1887 e->probability.stream_out (ob);
1888 streamer_write_uhwi (ob, e->flags);
1889 }
1890 }
1891
1892 streamer_write_hwi (ob, -1);
1893
1894 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1895 while (bb->next_bb)
1896 {
1897 streamer_write_hwi (ob, bb->next_bb->index);
1898 bb = bb->next_bb;
1899 }
1900
1901 streamer_write_hwi (ob, -1);
1902
1903 /* ??? The cfgloop interface is tied to cfun. */
1904 gcc_assert (cfun == fn);
1905
1906 /* Output the number of loops. */
1907 streamer_write_uhwi (ob, number_of_loops (fn));
1908
1909 /* Output each loop, skipping the tree root which has number zero. */
1910 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1911 {
1912 struct loop *loop = get_loop (fn, i);
1913
1914 /* Write the index of the loop header. That's enough to rebuild
1915 the loop tree on the reader side. Stream -1 for an unused
1916 loop entry. */
1917 if (!loop)
1918 {
1919 streamer_write_hwi (ob, -1);
1920 continue;
1921 }
1922 else
1923 streamer_write_hwi (ob, loop->header->index);
1924
1925 /* Write everything copy_loop_info copies. */
1926 streamer_write_enum (ob->main_stream,
1927 loop_estimation, EST_LAST, loop->estimate_state);
1928 streamer_write_hwi (ob, loop->any_upper_bound);
1929 if (loop->any_upper_bound)
1930 streamer_write_widest_int (ob, loop->nb_iterations_upper_bound);
1931 streamer_write_hwi (ob, loop->any_likely_upper_bound);
1932 if (loop->any_likely_upper_bound)
1933 streamer_write_widest_int (ob, loop->nb_iterations_likely_upper_bound);
1934 streamer_write_hwi (ob, loop->any_estimate);
1935 if (loop->any_estimate)
1936 streamer_write_widest_int (ob, loop->nb_iterations_estimate);
1937
1938 /* Write OMP SIMD related info. */
1939 streamer_write_hwi (ob, loop->safelen);
1940 streamer_write_hwi (ob, loop->unroll);
1941 streamer_write_hwi (ob, loop->owned_clique);
1942 streamer_write_hwi (ob, loop->dont_vectorize);
1943 streamer_write_hwi (ob, loop->force_vectorize);
1944 stream_write_tree (ob, loop->simduid, true);
1945 }
1946
1947 ob->main_stream = tmp_stream;
1948 }
1949
1950
1951 /* Create the header in the file using OB. If the section type is for
1952 a function, set FN to the decl for that function. */
1953
1954 void
1955 produce_asm (struct output_block *ob, tree fn)
1956 {
1957 enum lto_section_type section_type = ob->section_type;
1958 struct lto_function_header header;
1959 char *section_name;
1960
1961 if (section_type == LTO_section_function_body)
1962 {
1963 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1964 section_name = lto_get_section_name (section_type, name, NULL);
1965 }
1966 else
1967 section_name = lto_get_section_name (section_type, NULL, NULL);
1968
1969 lto_begin_section (section_name, !flag_wpa);
1970 free (section_name);
1971
1972 /* The entire header is stream computed here. */
1973 memset (&header, 0, sizeof (struct lto_function_header));
1974
1975 /* Write the header. */
1976 header.major_version = LTO_major_version;
1977 header.minor_version = LTO_minor_version;
1978
1979 if (section_type == LTO_section_function_body)
1980 header.cfg_size = ob->cfg_stream->total_size;
1981 header.main_size = ob->main_stream->total_size;
1982 header.string_size = ob->string_stream->total_size;
1983 lto_write_data (&header, sizeof header);
1984
1985 /* Put all of the gimple and the string table out the asm file as a
1986 block of text. */
1987 if (section_type == LTO_section_function_body)
1988 lto_write_stream (ob->cfg_stream);
1989 lto_write_stream (ob->main_stream);
1990 lto_write_stream (ob->string_stream);
1991
1992 lto_end_section ();
1993 }
1994
1995
/* Output the base body of struct function FN using output block OB.
   NOTE(review): the bitpack field order below presumably must match the
   reader side exactly — keep additions strictly appended and mirrored
   in the input routine.  */

static void
output_struct_function_base (struct output_block *ob, struct function *fn)
{
  struct bitpack_d bp;
  unsigned i;
  tree t;

  /* Output the static chain and non-local goto save area.  */
  stream_write_tree (ob, fn->static_chain_decl, true);
  stream_write_tree (ob, fn->nonlocal_goto_save_area, true);

  /* Output all the local variables in the function.  */
  streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
  FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
    stream_write_tree (ob, t, true);

  /* Output current IL state of the function.  */
  streamer_write_uhwi (ob, fn->curr_properties);

  /* Write all the attributes for FN as one bitpack.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, fn->is_thunk, 1);
  bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
  bp_pack_value (&bp, fn->returns_pcc_struct, 1);
  bp_pack_value (&bp, fn->returns_struct, 1);
  bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
  bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
  bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
  bp_pack_value (&bp, fn->after_inlining, 1);
  bp_pack_value (&bp, fn->stdarg, 1);
  bp_pack_value (&bp, fn->has_nonlocal_label, 1);
  bp_pack_value (&bp, fn->has_forced_label_in_static, 1);
  bp_pack_value (&bp, fn->calls_alloca, 1);
  bp_pack_value (&bp, fn->calls_setjmp, 1);
  bp_pack_value (&bp, fn->calls_eh_return, 1);
  bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
  bp_pack_value (&bp, fn->has_simduid_loops, 1);
  bp_pack_value (&bp, fn->va_list_fpr_size, 8);
  bp_pack_value (&bp, fn->va_list_gpr_size, 8);
  bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);

  /* Output the function start and end loci.  */
  stream_output_location (ob, &bp, fn->function_start_locus);
  stream_output_location (ob, &bp, fn->function_end_locus);

  /* Save the instance discriminator if present.  */
  int *instance_number_p = NULL;
  if (decl_to_instance_map)
    instance_number_p = decl_to_instance_map->get (fn->decl);
  bp_pack_value (&bp, !!instance_number_p, 1);
  if (instance_number_p)
    bp_pack_value (&bp, *instance_number_p, sizeof (int) * CHAR_BIT);

  streamer_write_bitpack (&bp);
}
2053
2054
2055 /* Collect all leaf BLOCKs beyond ROOT into LEAFS. */
2056
2057 static void
2058 collect_block_tree_leafs (tree root, vec<tree> &leafs)
2059 {
2060 for (root = BLOCK_SUBBLOCKS (root); root; root = BLOCK_CHAIN (root))
2061 if (! BLOCK_SUBBLOCKS (root))
2062 leafs.safe_push (root);
2063 else
2064 collect_block_tree_leafs (BLOCK_SUBBLOCKS (root), leafs);
2065 }
2066
/* Output the body of function NODE->DECL to its own
   LTO_section_function_body section.  Streams the result/argument
   decls, debug args, the lexical block tree, and -- when a gimple body
   exists -- the function base data, SSA names, EH regions, all basic
   blocks and finally the CFG.  Abstract functions (no body) stream only
   what is needed for debug info.  */

static void
output_function (struct cgraph_node *node)
{
  tree function;
  struct function *fn;
  basic_block bb;
  struct output_block *ob;

  if (streamer_dump_file)
    fprintf (streamer_dump_file, "\nStreaming body of %s\n",
	     node->name ());

  function = node->decl;
  fn = DECL_STRUCT_FUNCTION (function);
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->symbol = node;

  /* No function context may be active while streaming.  */
  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  /* Set current_function_decl and cfun.  */
  push_cfun (fn);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  streamer_write_record_start (ob, LTO_function);

  /* Output decls for parameters and args.  */
  stream_write_tree (ob, DECL_RESULT (function), true);
  streamer_write_chain (ob, DECL_ARGUMENTS (function), true);

  /* Output debug args if available: a count followed by the trees,
     or 0 when there are none.  */
  vec<tree, va_gc> **debugargs = decl_debug_args_lookup (function);
  if (! debugargs)
    streamer_write_uhwi (ob, 0);
  else
    {
      streamer_write_uhwi (ob, (*debugargs)->length ());
      for (unsigned i = 0; i < (*debugargs)->length (); ++i)
	stream_write_tree (ob, (**debugargs)[i], true);
    }

  /* Output DECL_INITIAL for the function, which contains the tree of
     lexical scopes.  */
  stream_write_tree (ob, DECL_INITIAL (function), true);
  /* As we do not recurse into BLOCK_SUBBLOCKS but only BLOCK_SUPERCONTEXT
     collect block tree leafs and stream those.  */
  auto_vec<tree> block_tree_leafs;
  if (DECL_INITIAL (function))
    collect_block_tree_leafs (DECL_INITIAL (function), block_tree_leafs);
  streamer_write_uhwi (ob, block_tree_leafs.length ());
  for (unsigned i = 0; i < block_tree_leafs.length (); ++i)
    stream_write_tree (ob, block_tree_leafs[i], true);

  /* We also stream abstract functions where we stream only stuff needed for
     debug info.  A leading 1/0 tells the reader whether a body follows.  */
  if (gimple_has_body_p (function))
    {
      /* Fixup loops if required to match discovery done in the reader.  */
      loop_optimizer_init (AVOID_CFG_MODIFICATIONS);

      streamer_write_uhwi (ob, 1);
      output_struct_function_base (ob, fn);

      /* Output all the SSA names used in the function.  */
      output_ssa_names (ob, fn);

      /* Output any exception handling regions.  */
      output_eh_regions (ob, fn);


      /* We will renumber the statements.  The code that does this uses
	 the same ordering that we use for serializing them so we can use
	 the same code on the other end and not have to write out the
	 statement numbers.  We do not assign UIDs to PHIs here because
	 virtual PHIs get re-computed on-the-fly which would make numbers
	 inconsistent.  */
      set_gimple_stmt_max_uid (cfun, 0);
      FOR_ALL_BB_FN (bb, cfun)
	{
	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gphi *stmt = gsi.phi ();

	      /* Virtual PHIs are not going to be streamed.  */
	      if (!virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gimple *stmt = gsi_stmt (gsi);
	      gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}
      /* To avoid keeping duplicate gimple IDs in the statements, renumber
	 virtual phis now.  */
      FOR_ALL_BB_FN (bb, cfun)
	{
	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gphi *stmt = gsi.phi ();
	      if (virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}

      /* Output the code for the function.  */
      FOR_ALL_BB_FN (bb, fn)
	output_bb (ob, bb, fn);

      /* The terminator for this function.  */
      streamer_write_record_start (ob, LTO_null);

      output_cfg (ob, fn);

      loop_optimizer_finalize ();
      pop_cfun ();
    }
  else
    streamer_write_uhwi (ob, 0);

  /* Create a section to hold the pickled output of this function.  */
  produce_asm (ob, function);

  destroy_output_block (ob);
  if (streamer_dump_file)
    fprintf (streamer_dump_file, "Finished streaming %s\n",
	     node->name ());
}
2203
/* Output the initializer (constructor) of variable NODE->DECL into its
   own LTO_section_function_body section.  Unlike output_function, only
   DECL_INITIAL is streamed -- there is no body, SSA or CFG data.  */

static void
output_constructor (struct varpool_node *node)
{
  tree var = node->decl;
  struct output_block *ob;

  if (streamer_dump_file)
    fprintf (streamer_dump_file, "\nStreaming constructor of %s\n",
	     node->name ());

  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->symbol = node;

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* Output DECL_INITIAL for the variable, which contains the initializer
     expression.  */
  stream_write_tree (ob, DECL_INITIAL (var), true);

  /* Create a section to hold the pickled output of this initializer.  */
  produce_asm (ob, var);

  destroy_output_block (ob);
  if (streamer_dump_file)
    fprintf (streamer_dump_file, "Finished streaming %s\n",
	     node->name ());
}
2236
2237
/* Emit toplevel asms into an LTO_section_asm section.  Each asm node is
   streamed as its string constant followed by its symtab order; the list
   is terminated by a NULL string.  Does nothing when there are no asm
   symbols.  */

void
lto_output_toplevel_asms (void)
{
  struct output_block *ob;
  struct asm_node *can;
  char *section_name;
  struct lto_simple_header_with_strings header;

  if (!symtab->first_asm_symbol ())
    return;

  ob = create_output_block (LTO_section_asm);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  for (can = symtab->first_asm_symbol (); can; can = can->next)
    {
      streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
      streamer_write_hwi (ob, can->order);
    }

  /* Terminate the asm list with a NULL string constant.  */
  streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);

  section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header stream is computed here.  */
  memset (&header, 0, sizeof (header));

  /* Write the header.  */
  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  destroy_output_block (ob);
}
2288
2289
2290 /* Copy the function body or variable constructor of NODE without deserializing. */
2291
2292 static void
2293 copy_function_or_variable (struct symtab_node *node)
2294 {
2295 tree function = node->decl;
2296 struct lto_file_decl_data *file_data = node->lto_file_data;
2297 const char *data;
2298 size_t len;
2299 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2300 char *section_name =
2301 lto_get_section_name (LTO_section_function_body, name, NULL);
2302 size_t i, j;
2303 struct lto_in_decl_state *in_state;
2304 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2305
2306 if (streamer_dump_file)
2307 fprintf (streamer_dump_file, "Copying section for %s\n", name);
2308 lto_begin_section (section_name, false);
2309 free (section_name);
2310
2311 /* We may have renamed the declaration, e.g., a static function. */
2312 name = lto_get_decl_name_mapping (file_data, name);
2313
2314 data = lto_get_raw_section_data (file_data, LTO_section_function_body,
2315 name, &len);
2316 gcc_assert (data);
2317
2318 /* Do a bit copy of the function body. */
2319 lto_write_raw_data (data, len);
2320
2321 /* Copy decls. */
2322 in_state =
2323 lto_get_function_in_decl_state (node->lto_file_data, function);
2324 out_state->compressed = in_state->compressed;
2325 gcc_assert (in_state);
2326
2327 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2328 {
2329 size_t n = vec_safe_length (in_state->streams[i]);
2330 vec<tree, va_gc> *trees = in_state->streams[i];
2331 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2332
2333 /* The out state must have the same indices and the in state.
2334 So just copy the vector. All the encoders in the in state
2335 must be empty where we reach here. */
2336 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2337 encoder->trees.reserve_exact (n);
2338 for (j = 0; j < n; j++)
2339 encoder->trees.safe_push ((*trees)[j]);
2340 }
2341
2342 lto_free_raw_section_data (file_data, LTO_section_function_body, name,
2343 data, len);
2344 lto_end_section ();
2345 }
2346
2347 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2348
2349 static tree
2350 wrap_refs (tree *tp, int *ws, void *)
2351 {
2352 tree t = *tp;
2353 if (handled_component_p (t)
2354 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
2355 && TREE_PUBLIC (TREE_OPERAND (t, 0)))
2356 {
2357 tree decl = TREE_OPERAND (t, 0);
2358 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2359 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2360 build1 (ADDR_EXPR, ptrtype, decl),
2361 build_int_cst (ptrtype, 0));
2362 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2363 *ws = 0;
2364 }
2365 else if (TREE_CODE (t) == CONSTRUCTOR)
2366 ;
2367 else if (!EXPR_P (t))
2368 *ws = 0;
2369 return NULL_TREE;
2370 }
2371
/* Remove functions that are no longer used from offload_funcs, and mark the
   remaining ones with DECL_PRESERVE_P.  A function is considered unused
   when it no longer has a cgraph node.  No-op when offload_funcs is not
   allocated.  */

static void
prune_offload_funcs (void)
{
  if (!offload_funcs)
    return;

  /* Compact the vector in place, dropping entries whose cgraph node is
     gone while preserving the order of the survivors.  */
  unsigned ix, ix2;
  tree *elem_ptr;
  VEC_ORDERED_REMOVE_IF (*offload_funcs, ix, ix2, elem_ptr,
			 cgraph_node::get (*elem_ptr) == NULL);

  /* Keep the remaining offload functions from being removed later.  */
  tree fn_decl;
  FOR_EACH_VEC_ELT (*offload_funcs, ix, fn_decl)
    DECL_PRESERVE_P (fn_decl) = 1;
}
2390
2391 /* Main entry point from the pass manager. */
2392
2393 void
2394 lto_output (void)
2395 {
2396 struct lto_out_decl_state *decl_state;
2397 bitmap output = NULL;
2398 int i, n_nodes;
2399 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2400
2401 prune_offload_funcs ();
2402
2403 if (flag_checking)
2404 output = lto_bitmap_alloc ();
2405
2406 /* Initialize the streamer. */
2407 lto_streamer_init ();
2408
2409 n_nodes = lto_symtab_encoder_size (encoder);
2410 /* Process only the functions with bodies. */
2411 for (i = 0; i < n_nodes; i++)
2412 {
2413 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2414 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2415 {
2416 if (lto_symtab_encoder_encode_body_p (encoder, node)
2417 && !node->alias)
2418 {
2419 if (flag_checking)
2420 {
2421 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2422 bitmap_set_bit (output, DECL_UID (node->decl));
2423 }
2424 decl_state = lto_new_out_decl_state ();
2425 lto_push_out_decl_state (decl_state);
2426 if (gimple_has_body_p (node->decl)
2427 || (!flag_wpa
2428 && flag_incremental_link != INCREMENTAL_LINK_LTO)
2429 /* Thunks have no body but they may be synthetized
2430 at WPA time. */
2431 || DECL_ARGUMENTS (node->decl))
2432 output_function (node);
2433 else
2434 copy_function_or_variable (node);
2435 gcc_assert (lto_get_out_decl_state () == decl_state);
2436 lto_pop_out_decl_state ();
2437 lto_record_function_out_decl_state (node->decl, decl_state);
2438 }
2439 }
2440 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2441 {
2442 /* Wrap symbol references inside the ctor in a type
2443 preserving MEM_REF. */
2444 tree ctor = DECL_INITIAL (node->decl);
2445 if (ctor && !in_lto_p)
2446 walk_tree (&ctor, wrap_refs, NULL, NULL);
2447 if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2448 && lto_symtab_encoder_encode_initializer_p (encoder, node)
2449 && !node->alias)
2450 {
2451 timevar_push (TV_IPA_LTO_CTORS_OUT);
2452 if (flag_checking)
2453 {
2454 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2455 bitmap_set_bit (output, DECL_UID (node->decl));
2456 }
2457 decl_state = lto_new_out_decl_state ();
2458 lto_push_out_decl_state (decl_state);
2459 if (DECL_INITIAL (node->decl) != error_mark_node
2460 || (!flag_wpa
2461 && flag_incremental_link != INCREMENTAL_LINK_LTO))
2462 output_constructor (node);
2463 else
2464 copy_function_or_variable (node);
2465 gcc_assert (lto_get_out_decl_state () == decl_state);
2466 lto_pop_out_decl_state ();
2467 lto_record_function_out_decl_state (node->decl, decl_state);
2468 timevar_pop (TV_IPA_LTO_CTORS_OUT);
2469 }
2470 }
2471 }
2472
2473 /* Emit the callgraph after emitting function bodies. This needs to
2474 be done now to make sure that all the statements in every function
2475 have been renumbered so that edges can be associated with call
2476 statements using the statement UIDs. */
2477 output_symtab ();
2478
2479 output_offload_tables ();
2480
2481 #if CHECKING_P
2482 lto_bitmap_free (output);
2483 #endif
2484 }
2485
2486 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2487 from it and required for correct representation of its semantics.
2488 Each node in ENCODER must be a global declaration or a type. A node
2489 is written only once, even if it appears multiple times in the
2490 vector. Certain transitively-reachable nodes, such as those
2491 representing expressions, may be duplicated, but such nodes
2492 must not appear in ENCODER itself. */
2493
2494 static void
2495 write_global_stream (struct output_block *ob,
2496 struct lto_tree_ref_encoder *encoder)
2497 {
2498 tree t;
2499 size_t index;
2500 const size_t size = lto_tree_ref_encoder_size (encoder);
2501
2502 for (index = 0; index < size; index++)
2503 {
2504 t = lto_tree_ref_encoder_get_tree (encoder, index);
2505 if (streamer_dump_file)
2506 {
2507 fprintf (streamer_dump_file, " %i:", (int)index);
2508 print_node_brief (streamer_dump_file, "", t, 4);
2509 fprintf (streamer_dump_file, "\n");
2510 }
2511 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2512 stream_write_tree (ob, t, false);
2513 }
2514 }
2515
2516
2517 /* Write a sequence of indices into the globals vector corresponding
2518 to the trees in ENCODER. These are used by the reader to map the
2519 indices used to refer to global entities within function bodies to
2520 their referents. */
2521
2522 static void
2523 write_global_references (struct output_block *ob,
2524 struct lto_tree_ref_encoder *encoder)
2525 {
2526 tree t;
2527 uint32_t index;
2528 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2529
2530 /* Write size and slot indexes as 32-bit unsigned numbers. */
2531 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2532 data[0] = size;
2533
2534 for (index = 0; index < size; index++)
2535 {
2536 unsigned slot_num;
2537
2538 t = lto_tree_ref_encoder_get_tree (encoder, index);
2539 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2540 gcc_assert (slot_num != (unsigned)-1);
2541 data[index + 1] = slot_num;
2542 }
2543
2544 lto_write_data (data, sizeof (int32_t) * (size + 1));
2545 free (data);
2546 }
2547
2548
2549 /* Write all the streams in an lto_out_decl_state STATE using
2550 output block OB and output stream OUT_STREAM. */
2551
2552 void
2553 lto_output_decl_state_streams (struct output_block *ob,
2554 struct lto_out_decl_state *state)
2555 {
2556 int i;
2557
2558 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2559 write_global_stream (ob, &state->streams[i]);
2560 }
2561
2562
/* Write all the references in an lto_out_decl_state STATE using
   output block OB and output stream OUT_STREAM.  The function reference
   and the compression flag share one 32-bit word: the cache slot is
   doubled and the low bit carries STATE->compressed.  */

void
lto_output_decl_state_refs (struct output_block *ob,
			    struct lto_out_decl_state *state)
{
  unsigned i;
  unsigned ref;
  tree decl;

  /* Write reference to FUNCTION_DECL.  If there is not function,
     write reference to void_type_node.  */
  decl = (state->fn_decl) ? state->fn_decl : void_type_node;
  streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
  gcc_assert (ref != (unsigned)-1);
  /* Pack the compression flag into the low bit of the reference.  */
  ref = ref * 2 + (state->compressed ? 1 : 0);
  lto_write_data (&ref, sizeof (uint32_t));

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    write_global_references (ob, &state->streams[i]);
}
2585
2586
2587 /* Return the written size of STATE. */
2588
2589 static size_t
2590 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2591 {
2592 int i;
2593 size_t size;
2594
2595 size = sizeof (int32_t); /* fn_ref. */
2596 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2597 {
2598 size += sizeof (int32_t); /* vector size. */
2599 size += (lto_tree_ref_encoder_size (&state->streams[i])
2600 * sizeof (int32_t));
2601 }
2602 return size;
2603 }
2604
2605
/* Write symbol T into STREAM in CACHE.  SEEN specifies symbols we wrote
   so far, keyed by mangled assembler name; duplicates are skipped.
   ALIAS says whether T is emitted on behalf of an alias, which relaxes
   the definition checks.  The record layout (name, comdat, kind byte,
   visibility byte, 64-bit size, 32-bit slot) is part of the linker
   plugin interface -- do not change it unilaterally.  */

static void
write_symbol (struct streamer_tree_cache_d *cache,
	      tree t, hash_set<const char *> *seen, bool alias)
{
  const char *name;
  enum gcc_plugin_symbol_kind kind;
  enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
  unsigned slot_num;
  uint64_t size;
  const char *comdat;
  unsigned char c;

  /* Only public, non-builtin, non-abstract VAR/FUNCTION_DECLs that are
     not hard registers belong in the symbol table.  */
  gcc_checking_assert (TREE_PUBLIC (t)
		       && (TREE_CODE (t) != FUNCTION_DECL
			   || !fndecl_built_in_p (t))
		       && !DECL_ABSTRACT_P (t)
		       && (!VAR_P (t) || !DECL_HARD_REGISTER (t)));

  gcc_assert (VAR_OR_FUNCTION_DECL_P (t));

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));

  /* This behaves like assemble_name_raw in varasm.c, performing the
     same name manipulations that ASM_OUTPUT_LABELREF does.  */
  name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));

  /* Emit each name at most once.  */
  if (seen->add (name))
    return;

  streamer_tree_cache_lookup (cache, t, &slot_num);
  gcc_assert (slot_num != (unsigned)-1);

  /* Classify the symbol for the linker plugin.  */
  if (DECL_EXTERNAL (t))
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKUNDEF;
      else
	kind = GCCPK_UNDEF;
    }
  else
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKDEF;
      else if (DECL_COMMON (t))
	kind = GCCPK_COMMON;
      else
	kind = GCCPK_DEF;

      /* When something is defined, it should have node attached.  */
      gcc_assert (alias || !VAR_P (t) || varpool_node::get (t)->definition);
      gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
		  || (cgraph_node::get (t)
		      && cgraph_node::get (t)->definition));
    }

  /* Imitate what default_elf_asm_output_external do.
     When symbol is external, we need to output it with DEFAULT visibility
     when compiling with -fvisibility=default, while with HIDDEN visibility
     when symbol has attribute (visibility("hidden")) specified.
     targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
     right.  */

  if (DECL_EXTERNAL (t)
      && !targetm.binds_local_p (t))
    visibility = GCCPV_DEFAULT;
  else
    switch (DECL_VISIBILITY (t))
      {
      case VISIBILITY_DEFAULT:
	visibility = GCCPV_DEFAULT;
	break;
      case VISIBILITY_PROTECTED:
	visibility = GCCPV_PROTECTED;
	break;
      case VISIBILITY_HIDDEN:
	visibility = GCCPV_HIDDEN;
	break;
      case VISIBILITY_INTERNAL:
	visibility = GCCPV_INTERNAL;
	break;
      }

  /* Only COMMON symbols carry a size; it lets the linker pick the
     largest common block.  */
  if (kind == GCCPK_COMMON
      && DECL_SIZE_UNIT (t)
      && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
    size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
  else
    size = 0;

  if (DECL_ONE_ONLY (t))
    comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
  else
    comdat = "";

  /* Emit the fixed-layout plugin record.  */
  lto_write_data (name, strlen (name) + 1);
  lto_write_data (comdat, strlen (comdat) + 1);
  c = (unsigned char) kind;
  lto_write_data (&c, 1);
  c = (unsigned char) visibility;
  lto_write_data (&c, 1);
  lto_write_data (&size, 8);
  lto_write_data (&slot_num, 4);
}
2712
/* Write an IL symbol table to OB.
   SET and VSET are cgraph/varpool node sets we are outputting.
   Two passes over the encoder: defined symbols first, then external
   declarations, so that on duplicated symbols the definition wins.  */

static void
produce_symtab (struct output_block *ob)
{
  struct streamer_tree_cache_d *cache = ob->writer_cache;
  char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
  lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
  lto_symtab_encoder_iterator lsei;

  lto_begin_section (section_name, false);
  free (section_name);

  /* Names already emitted; shared across both passes so a symbol is
     written at most once.  */
  hash_set<const char *> seen;

  /* Write the symbol table.
     First write everything defined and then all declarations.
     This is necessary to handle cases where we have duplicated symbols.  */
  for (lsei = lsei_start (encoder);
       !lsei_end_p (lsei); lsei_next (&lsei))
    {
      symtab_node *node = lsei_node (lsei);

      if (DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
	continue;
      write_symbol (cache, node->decl, &seen, false);
    }
  for (lsei = lsei_start (encoder);
       !lsei_end_p (lsei); lsei_next (&lsei))
    {
      symtab_node *node = lsei_node (lsei);

      if (!DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
	continue;
      write_symbol (cache, node->decl, &seen, false);
    }

  lto_end_section ();
}
2753
2754
/* Init the streamer_mode_table for output, where we collect info on what
   machine_mode values have been streamed.  Clears one flag byte per
   possible machine mode.  */
void
lto_output_init_mode_table (void)
{
  memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
}
2762
2763
/* Write the mode table: a description of every machine_mode that was
   streamed (per streamer_mode_table), so an offload target with a
   different mode layout can remap them.  Modes whose GET_MODE_INNER is
   themselves are emitted first so that inner-mode references resolve;
   the table is terminated by VOIDmode.  */
static void
lto_write_mode_table (void)
{
  struct output_block *ob;
  ob = create_output_block (LTO_section_mode_table);
  bitpack_d bp = bitpack_create (ob->main_stream);

  /* Ensure that for GET_MODE_INNER (m) != m we have
     also the inner mode marked.  */
  for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
    if (streamer_mode_table[i])
      {
	machine_mode m = (machine_mode) i;
	machine_mode inner_m = GET_MODE_INNER (m);
	if (inner_m != m)
	  streamer_mode_table[(int) inner_m] = 1;
      }
  /* First stream modes that have GET_MODE_INNER (m) == m,
     so that we can refer to them afterwards.  */
  for (int pass = 0; pass < 2; pass++)
    for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
      if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
	{
	  machine_mode m = (machine_mode) i;
	  /* Pass 0 emits self-inner modes, pass 1 the rest.  */
	  if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
	    continue;
	  /* Per-mode record: id, class, size, precision, inner mode,
	     nunits, class-specific extras, then the mode's name.  */
	  bp_pack_value (&bp, m, 8);
	  bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
	  bp_pack_poly_value (&bp, GET_MODE_SIZE (m), 16);
	  bp_pack_poly_value (&bp, GET_MODE_PRECISION (m), 16);
	  bp_pack_value (&bp, GET_MODE_INNER (m), 8);
	  bp_pack_poly_value (&bp, GET_MODE_NUNITS (m), 16);
	  switch (GET_MODE_CLASS (m))
	    {
	    case MODE_FRACT:
	    case MODE_UFRACT:
	    case MODE_ACCUM:
	    case MODE_UACCUM:
	      bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
	      bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
	      break;
	    case MODE_FLOAT:
	    case MODE_DECIMAL_FLOAT:
	      bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
	      break;
	    default:
	      break;
	    }
	  bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
	}
  /* Terminator record.  */
  bp_pack_value (&bp, VOIDmode, 8);

  streamer_write_bitpack (&bp);

  char *section_name
    = lto_get_section_name (LTO_section_mode_table, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header stream is computed here.  */
  struct lto_simple_header_with_strings header;
  memset (&header, 0, sizeof (header));

  /* Write the header.  */
  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();
  destroy_output_block (ob);
}
2844
2845
/* This pass is run after all of the functions are serialized and all
   of the IPA passes have written their serialized forms.  This pass
   causes the vector of all of the global decls and types used from
   this file to be written in to a section that can then be read in to
   recover these on other side.  Also emits the symbol table (except
   at WPA time) and the streamed command-line options, then tears down
   all out-decl states.  */

void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* All aliases must have been resolved by now.  */
  gcc_assert (!alias_pairs);

  /* Get rid of the global decl state hash tables to save some memory.  */
  out_state = lto_get_out_decl_state ();
  for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
    if (out_state->streams[i].tree_hash_table)
      {
	delete out_state->streams[i].tree_hash_table;
	out_state->streams[i].tree_hash_table = NULL;
      }

  /* Write the global symbols: the global stream first, then one stream
     per recorded function decl state.  */
  if (streamer_dump_file)
    fprintf (streamer_dump_file, "Outputting global stream\n");
  lto_output_decl_state_streams (ob, out_state);
  num_fns = lto_function_decl_states.length ();
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      if (streamer_dump_file)
	fprintf (streamer_dump_file, "Outputting stream for %s\n",
		 IDENTIFIER_POINTER
		     (DECL_ASSEMBLER_NAME (fn_out_state->fn_decl)));
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended.  */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states.  Must match what
     lto_output_decl_state_refs emits below.  */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  lto_write_data (&header, sizeof header);

  /* Write the main out-decl state, followed by out-decl states of
     functions.  */
  num_decl_states = num_fns + 1;
  lto_write_data (&num_decl_states, sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state = lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, fn_out_state);
    }

  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by linker to determine dependencies
     and thus we can skip it for WPA.  */
  if (!flag_wpa)
    produce_symtab (ob);

  /* Write command line opts.  */
  lto_write_options ();

  /* Deallocate memory and clean up.  */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
  /* The mode table is only needed when streaming for offload targets.  */
  if (lto_stream_offload_p)
    lto_write_mode_table ();
}