1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2019 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-streamer.h"
34 #include "alias.h"
35 #include "stor-layout.h"
36 #include "gimple-iterator.h"
37 #include "except.h"
38 #include "lto-symtab.h"
39 #include "cgraph.h"
40 #include "cfgloop.h"
41 #include "builtins.h"
42 #include "gomp-constants.h"
43 #include "debug.h"
44 #include "omp-offload.h"
45 #include "print-tree.h"
46
47
48 static void lto_write_tree (struct output_block*, tree, bool);
49
    50 /* Clear the line info stored in OB.  */
51
52 static void
53 clear_line_info (struct output_block *ob)
54 {
55 ob->current_file = NULL;
56 ob->current_line = 0;
57 ob->current_col = 0;
58 ob->current_sysp = false;
59 }
60
61
62 /* Create the output block and return it. SECTION_TYPE is
    63    LTO_section_function_body or LTO_section_static_initializer.  */
64
65 struct output_block *
66 create_output_block (enum lto_section_type section_type)
67 {
68 struct output_block *ob = XCNEW (struct output_block);
69 if (streamer_dump_file)
70 fprintf (streamer_dump_file, "Creating output block for %s\n",
71 lto_section_name [section_type]);
72
73 ob->section_type = section_type;
74 ob->decl_state = lto_get_out_decl_state ();
75 ob->main_stream = XCNEW (struct lto_output_stream);
76 ob->string_stream = XCNEW (struct lto_output_stream);
77 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
78
79 if (section_type == LTO_section_function_body)
80 ob->cfg_stream = XCNEW (struct lto_output_stream);
81
82 clear_line_info (ob);
83
84 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
85 gcc_obstack_init (&ob->obstack);
86
87 return ob;
88 }
89
90
91 /* Destroy the output block OB. */
92
93 void
94 destroy_output_block (struct output_block *ob)
95 {
96 enum lto_section_type section_type = ob->section_type;
97
98 delete ob->string_hash_table;
99 ob->string_hash_table = NULL;
100
101 free (ob->main_stream);
102 free (ob->string_stream);
103 if (section_type == LTO_section_function_body)
104 free (ob->cfg_stream);
105
106 streamer_tree_cache_delete (ob->writer_cache);
107 obstack_free (&ob->obstack, NULL);
108
109 free (ob);
110 }
111
112
113 /* Look up NODE in the type table and write the index for it to OB. */
114
115 static void
116 output_type_ref (struct output_block *ob, tree node)
117 {
118 streamer_write_record_start (ob, LTO_type_ref);
119 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
120 }
121
122
123 /* Return true if tree node T is written to various tables. For these
   124    nodes, we sometimes want to write their physical representation
125 (via lto_output_tree), and sometimes we need to emit an index
126 reference into a table (via lto_output_tree_ref). */
127
128 static bool
129 tree_is_indexable (tree t)
130 {
131 /* Parameters and return values of functions of variably modified types
   132      must go to the global stream, because they may be used in the type
133 definition. */
134 if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
135 && DECL_CONTEXT (t))
136 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
   137   /* IMPORTED_DECL is put into BLOCK and thus it can never be shared.
138 We should no longer need to stream it. */
139 else if (TREE_CODE (t) == IMPORTED_DECL)
140 gcc_unreachable ();
141 else if (TREE_CODE (t) == LABEL_DECL)
142 return FORCED_LABEL (t) || DECL_NONLOCAL (t);
143 else if (((VAR_P (t) && !TREE_STATIC (t))
144 || TREE_CODE (t) == TYPE_DECL
145 || TREE_CODE (t) == CONST_DECL
146 || TREE_CODE (t) == NAMELIST_DECL)
147 && decl_function_context (t))
148 return false;
149 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
150 return false;
151 /* Variably modified types need to be streamed alongside function
152 bodies because they can refer to local entities. Together with
153 them we have to localize their members as well.
154 ??? In theory that includes non-FIELD_DECLs as well. */
155 else if (TYPE_P (t)
156 && variably_modified_type_p (t, NULL_TREE))
157 return false;
158 else if (TREE_CODE (t) == FIELD_DECL
159 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
160 return false;
161 else
162 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
163 }
164
165
166 /* Output info about new location into bitpack BP.
   167    After outputting the bitpack, lto_output_location_data has
   168    to be called to output the actual data.  */
169
170 void
171 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
172 location_t loc)
173 {
174 expanded_location xloc;
175
176 loc = LOCATION_LOCUS (loc);
177 bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
178 loc < RESERVED_LOCATION_COUNT
179 ? loc : RESERVED_LOCATION_COUNT);
180 if (loc < RESERVED_LOCATION_COUNT)
181 return;
182
183 xloc = expand_location (loc);
184
185 bp_pack_value (bp, ob->current_file != xloc.file, 1);
186 bp_pack_value (bp, ob->current_line != xloc.line, 1);
187 bp_pack_value (bp, ob->current_col != xloc.column, 1);
188
189 if (ob->current_file != xloc.file)
190 {
191 bp_pack_string (ob, bp, xloc.file, true);
192 bp_pack_value (bp, xloc.sysp, 1);
193 }
194 ob->current_file = xloc.file;
195 ob->current_sysp = xloc.sysp;
196
197 if (ob->current_line != xloc.line)
198 bp_pack_var_len_unsigned (bp, xloc.line);
199 ob->current_line = xloc.line;
200
201 if (ob->current_col != xloc.column)
202 bp_pack_var_len_unsigned (bp, xloc.column);
203 ob->current_col = xloc.column;
204 }
205
206
207 /* If EXPR is an indexable tree node, output a reference to it to
208 output block OB. Otherwise, output the physical representation of
209 EXPR to OB. */
210
211 static void
212 lto_output_tree_ref (struct output_block *ob, tree expr)
213 {
214 enum tree_code code;
215
216 if (TYPE_P (expr))
217 {
218 output_type_ref (ob, expr);
219 return;
220 }
221
222 code = TREE_CODE (expr);
223 switch (code)
224 {
225 case SSA_NAME:
226 streamer_write_record_start (ob, LTO_ssa_name_ref);
227 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
228 break;
229
230 case FIELD_DECL:
231 streamer_write_record_start (ob, LTO_field_decl_ref);
232 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
233 break;
234
235 case FUNCTION_DECL:
236 streamer_write_record_start (ob, LTO_function_decl_ref);
237 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
238 break;
239
240 case VAR_DECL:
241 case DEBUG_EXPR_DECL:
242 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
243 /* FALLTHRU */
244 case PARM_DECL:
245 streamer_write_record_start (ob, LTO_global_decl_ref);
246 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
247 break;
248
249 case CONST_DECL:
250 streamer_write_record_start (ob, LTO_const_decl_ref);
251 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
252 break;
253
254 case IMPORTED_DECL:
255 gcc_assert (decl_function_context (expr) == NULL);
256 streamer_write_record_start (ob, LTO_imported_decl_ref);
257 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
258 break;
259
260 case TYPE_DECL:
261 streamer_write_record_start (ob, LTO_type_decl_ref);
262 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
263 break;
264
265 case NAMELIST_DECL:
266 streamer_write_record_start (ob, LTO_namelist_decl_ref);
267 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
268 break;
269
270 case NAMESPACE_DECL:
271 streamer_write_record_start (ob, LTO_namespace_decl_ref);
272 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
273 break;
274
275 case LABEL_DECL:
276 streamer_write_record_start (ob, LTO_label_decl_ref);
277 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
278 break;
279
280 case RESULT_DECL:
281 streamer_write_record_start (ob, LTO_result_decl_ref);
282 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
283 break;
284
285 case TRANSLATION_UNIT_DECL:
286 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
287 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
288 break;
289
290 default:
291 /* No other node is indexable, so it should have been handled by
292 lto_output_tree. */
293 gcc_unreachable ();
294 }
295 }
296
297
298 /* Return true if EXPR is a tree node that can be written to disk. */
299
300 static inline bool
301 lto_is_streamable (tree expr)
302 {
303 enum tree_code code = TREE_CODE (expr);
304
305 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
306 name version in lto_output_tree_ref (see output_ssa_names). */
307 return !is_lang_specific (expr)
308 && code != SSA_NAME
309 && code != LANG_TYPE
310 && code != MODIFY_EXPR
311 && code != INIT_EXPR
312 && code != TARGET_EXPR
313 && code != BIND_EXPR
314 && code != WITH_CLEANUP_EXPR
315 && code != STATEMENT_LIST
316 && (code == CASE_LABEL_EXPR
317 || code == DECL_EXPR
318 || TREE_CODE_CLASS (code) != tcc_statement);
319 }
320
   321 /* Very rough estimate of the streaming size of the initializer.  If we ignored
   322    the presence of strings, we could simply count the number of non-indexable
   323    tree nodes and the number of references to indexable nodes.  Strings however
   324    may be very large and we do not want to dump them into the global stream.
   325
   326    Count the size of the initializer until the size budget in DATA goes negative.  */
327
328 static tree
329 subtract_estimated_size (tree *tp, int *ws, void *data)
330 {
331 long *sum = (long *)data;
332 if (tree_is_indexable (*tp))
333 {
334 /* Indexable tree is one reference to global stream.
335 Guess it may be about 4 bytes. */
336 *sum -= 4;
337 *ws = 0;
338 }
339 /* String table entry + base of tree node needs to be streamed. */
340 if (TREE_CODE (*tp) == STRING_CST)
341 *sum -= TREE_STRING_LENGTH (*tp) + 8;
342 else
343 {
344 /* Identifiers are also variable length but should not appear
   345          naked in a constructor.  */
346 gcc_checking_assert (TREE_CODE (*tp) != IDENTIFIER_NODE);
   347       /* We do not really attempt to work out the size of a pickled tree, as
   348          it is very variable.  Make it bigger than the reference.  */
349 *sum -= 16;
350 }
351 if (*sum < 0)
352 return *tp;
353 return NULL_TREE;
354 }
355
356
   357 /* For EXPR, look up and return what we want to stream out as its DECL_INITIAL.  */
358
359 static tree
360 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
361 {
362 gcc_checking_assert (DECL_P (expr)
363 && TREE_CODE (expr) != FUNCTION_DECL
364 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
365
366 /* Handle DECL_INITIAL for symbols. */
367 tree initial = DECL_INITIAL (expr);
368 if (VAR_P (expr)
369 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
370 && !DECL_IN_CONSTANT_POOL (expr)
371 && initial)
372 {
373 varpool_node *vnode;
374 /* Extra section needs about 30 bytes; do not produce it for simple
375 scalar values. */
376 if (!(vnode = varpool_node::get (expr))
377 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
378 initial = error_mark_node;
379 if (initial != error_mark_node)
380 {
381 long max_size = 30;
382 if (walk_tree (&initial, subtract_estimated_size, (void *)&max_size,
383 NULL))
384 initial = error_mark_node;
385 }
386 }
387
388 return initial;
389 }
390
391
392 /* Write a physical representation of tree node EXPR to output block
393 OB. If REF_P is true, the leaves of EXPR are emitted as references
   394    via lto_output_tree_ref.  This writes EXPR's bitfields, its pointer
   395    fields, DECL_INITIAL where applicable, and any early-debug DIE reference.  */
396
397 static void
398 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
399 {
400 /* Pack all the non-pointer fields in EXPR into a bitpack and write
401 the resulting bitpack. */
402 streamer_write_tree_bitfields (ob, expr);
403
404 /* Write all the pointer fields in EXPR. */
405 streamer_write_tree_body (ob, expr, ref_p);
406
407 /* Write any LTO-specific data to OB. */
408 if (DECL_P (expr)
409 && TREE_CODE (expr) != FUNCTION_DECL
410 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
411 {
412 /* Handle DECL_INITIAL for symbols. */
413 tree initial = get_symbol_initial_value
414 (ob->decl_state->symtab_node_encoder, expr);
415 stream_write_tree (ob, initial, ref_p);
416 }
417
418 /* Stream references to early generated DIEs. Keep in sync with the
419 trees handled in dwarf2out_die_ref_for_decl. */
420 if ((DECL_P (expr)
421 && TREE_CODE (expr) != FIELD_DECL
422 && TREE_CODE (expr) != DEBUG_EXPR_DECL
423 && TREE_CODE (expr) != TYPE_DECL)
424 || TREE_CODE (expr) == BLOCK)
425 {
426 const char *sym;
427 unsigned HOST_WIDE_INT off;
428 if (debug_info_level > DINFO_LEVEL_NONE
429 && debug_hooks->die_ref_for_decl (expr, &sym, &off))
430 {
431 streamer_write_string (ob, ob->main_stream, sym, true);
432 streamer_write_uhwi (ob, off);
433 }
434 else
435 streamer_write_string (ob, ob->main_stream, NULL, true);
436 }
437 }
438
439 /* Write a physical representation of tree node EXPR to output block
440 OB. If REF_P is true, the leaves of EXPR are emitted as references
   441    via lto_output_tree_ref.  The tree header is written first, followed
   442    by the tree body and a terminating zero.  */
443
444 static void
445 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
446 {
447 if (!lto_is_streamable (expr))
448 internal_error ("tree code %qs is not supported in LTO streams",
449 get_tree_code_name (TREE_CODE (expr)));
450
451 /* Write the header, containing everything needed to materialize
452 EXPR on the reading side. */
453 streamer_write_tree_header (ob, expr);
454
455 lto_write_tree_1 (ob, expr, ref_p);
456
457 /* Mark the end of EXPR. */
458 streamer_write_zero (ob);
459 }
460
461 /* Emit the physical representation of tree node EXPR to output block OB,
462 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
463 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
464
465 static void
466 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
467 bool ref_p, bool this_ref_p)
468 {
469 unsigned ix;
470
471 gcc_checking_assert (expr != NULL_TREE
472 && !(this_ref_p && tree_is_indexable (expr)));
473
474 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
475 expr, hash, &ix);
476 gcc_assert (!exists_p);
477 if (TREE_CODE (expr) == INTEGER_CST
478 && !TREE_OVERFLOW (expr))
479 {
480 /* Shared INTEGER_CST nodes are special because they need their
481 original type to be materialized by the reader (to implement
482 TYPE_CACHED_VALUES). */
483 streamer_write_integer_cst (ob, expr, ref_p);
484 }
485 else
486 {
487 /* This is the first time we see EXPR, write its fields
488 to OB. */
489 lto_write_tree (ob, expr, ref_p);
490 }
491 }
492
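/* Worklist-based DFS walker used by lto_output_tree.  It discovers the
   strongly connected components (SCCs) of not-yet-streamed trees in the
   style of Tarjan's algorithm (see the dfsnum/low fields below) and streams
   each discovered SCC as an LTO_tree_scc record.  */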
493 class DFS
494 {
495 public:
496 DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
497 bool single_p);
498 ~DFS ();
499
500 struct scc_entry
501 {
502 tree t;
503 hashval_t hash;
504 };
505 vec<scc_entry> sccstack;
506
507 private:
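  /* Tarjan DFS state for a single tree: the DFS preorder number and the
     lowest preorder number reachable from it (the low link).  */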
508 struct sccs
509 {
510 unsigned int dfsnum;
511 unsigned int low;
512 };
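  /* A deferred visit of EXPR, recorded so that the DFS can run iteratively
     on an explicit stack instead of recursing.  */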
513 struct worklist
514 {
515 tree expr;
516 sccs *from_state;
517 sccs *cstate;
518 bool ref_p;
519 bool this_ref_p;
520 };
521
522 static int scc_entry_compare (const void *, const void *);
523
524 void DFS_write_tree_body (struct output_block *ob,
525 tree expr, sccs *expr_state, bool ref_p);
526
527 void DFS_write_tree (struct output_block *ob, sccs *from_state,
528 tree expr, bool ref_p, bool this_ref_p);
529
530 hashval_t
531 hash_scc (struct output_block *ob, unsigned first, unsigned size,
532 bool ref_p, bool this_ref_p);
533
534 hash_map<tree, sccs *> sccstate;
535 vec<worklist> worklist_vec;
536 struct obstack sccstate_obstack;
537 };
538
539 /* Emit the physical representation of tree node EXPR to output block OB,
540 using depth-first search on the subgraph. If THIS_REF_P is true, the
541 leaves of EXPR are emitted as references via lto_output_tree_ref.
542 REF_P is used for streaming siblings of EXPR. If SINGLE_P is true,
543 this is for a rewalk of a single leaf SCC. */
544
545 DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
546 bool single_p)
547 {
548 unsigned int next_dfs_num = 1;
549 sccstack.create (0);
550 gcc_obstack_init (&sccstate_obstack);
551 worklist_vec = vNULL;
552 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
553 while (!worklist_vec.is_empty ())
554 {
555 worklist &w = worklist_vec.last ();
556 expr = w.expr;
557 sccs *from_state = w.from_state;
558 sccs *cstate = w.cstate;
559 ref_p = w.ref_p;
560 this_ref_p = w.this_ref_p;
561 if (cstate == NULL)
562 {
563 sccs **slot = &sccstate.get_or_insert (expr);
564 cstate = *slot;
565 if (cstate)
566 {
567 gcc_checking_assert (from_state);
568 if (cstate->dfsnum < from_state->dfsnum)
569 from_state->low = MIN (cstate->dfsnum, from_state->low);
570 worklist_vec.pop ();
571 continue;
572 }
573
574 scc_entry e = { expr, 0 };
575 /* Not yet visited. DFS recurse and push it onto the stack. */
576 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
577 sccstack.safe_push (e);
578 cstate->dfsnum = next_dfs_num++;
579 cstate->low = cstate->dfsnum;
580 w.cstate = cstate;
581
582 if (TREE_CODE (expr) == INTEGER_CST
583 && !TREE_OVERFLOW (expr))
584 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
585 else
586 {
587 DFS_write_tree_body (ob, expr, cstate, ref_p);
588
589 /* Walk any LTO-specific edges. */
590 if (DECL_P (expr)
591 && TREE_CODE (expr) != FUNCTION_DECL
592 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
593 {
594 /* Handle DECL_INITIAL for symbols. */
595 tree initial
596 = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
597 expr);
598 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
599 }
600 }
601 continue;
602 }
603
604 /* See if we found an SCC. */
605 if (cstate->low == cstate->dfsnum)
606 {
607 unsigned first, size;
608 tree x;
609
610 /* If we are re-walking a single leaf SCC just pop it,
   611          and let the earlier worklist item access the sccstack.  */
612 if (single_p)
613 {
614 worklist_vec.pop ();
615 continue;
616 }
617
618 /* Pop the SCC and compute its size. */
619 first = sccstack.length ();
620 do
621 {
622 x = sccstack[--first].t;
623 }
624 while (x != expr);
625 size = sccstack.length () - first;
626
627 /* No need to compute hashes for LTRANS units, we don't perform
628 any merging there. */
629 hashval_t scc_hash = 0;
630 unsigned scc_entry_len = 0;
631 if (!flag_wpa)
632 {
633 scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);
634
635 /* Put the entries with the least number of collisions first. */
636 unsigned entry_start = 0;
637 scc_entry_len = size + 1;
638 for (unsigned i = 0; i < size;)
639 {
640 unsigned from = i;
641 for (i = i + 1; i < size
642 && (sccstack[first + i].hash
643 == sccstack[first + from].hash); ++i)
644 ;
645 if (i - from < scc_entry_len)
646 {
647 scc_entry_len = i - from;
648 entry_start = from;
649 }
650 }
651 for (unsigned i = 0; i < scc_entry_len; ++i)
652 std::swap (sccstack[first + i],
653 sccstack[first + entry_start + i]);
654
655 /* We already sorted SCC deterministically in hash_scc. */
656
   657           /* Check that we have only one SCC entry candidate.
   658              Naturally we may have conflicts if the hash function is not
   659              strong enough.  Let's see how far this gets.  */
660 gcc_checking_assert (scc_entry_len == 1);
661 }
662
663 /* Write LTO_tree_scc. */
664 streamer_write_record_start (ob, LTO_tree_scc);
665 streamer_write_uhwi (ob, size);
666 streamer_write_uhwi (ob, scc_hash);
667
668 /* Write size-1 SCCs without wrapping them inside SCC bundles.
669 All INTEGER_CSTs need to be handled this way as we need
670 their type to materialize them. Also builtins are handled
671 this way.
   672              ???  We still wrap these in LTO_tree_scc so on the
   673              input side we can properly identify the tree we want
   674              to ultimately return.  */
675 if (size == 1)
676 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
677 else
678 {
679 /* Write the size of the SCC entry candidates. */
680 streamer_write_uhwi (ob, scc_entry_len);
681
682 /* Write all headers and populate the streamer cache. */
683 for (unsigned i = 0; i < size; ++i)
684 {
685 hashval_t hash = sccstack[first+i].hash;
686 tree t = sccstack[first+i].t;
687 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
688 t, hash, NULL);
689 gcc_assert (!exists_p);
690
691 if (!lto_is_streamable (t))
692 internal_error ("tree code %qs is not supported "
693 "in LTO streams",
694 get_tree_code_name (TREE_CODE (t)));
695
696 /* Write the header, containing everything needed to
697 materialize EXPR on the reading side. */
698 streamer_write_tree_header (ob, t);
699 }
700
701 /* Write the bitpacks and tree references. */
702 for (unsigned i = 0; i < size; ++i)
703 {
704 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
705
706 /* Mark the end of the tree. */
707 streamer_write_zero (ob);
708 }
709 }
710
711 /* Finally truncate the vector. */
712 sccstack.truncate (first);
713
714 if (from_state)
715 from_state->low = MIN (from_state->low, cstate->low);
716 worklist_vec.pop ();
717 continue;
718 }
719
720 gcc_checking_assert (from_state);
721 from_state->low = MIN (from_state->low, cstate->low);
722 if (cstate->dfsnum < from_state->dfsnum)
723 from_state->low = MIN (cstate->dfsnum, from_state->low);
724 worklist_vec.pop ();
725 }
726 worklist_vec.release ();
727 }
728
729 DFS::~DFS ()
730 {
731 sccstack.release ();
732 obstack_free (&sccstate_obstack, NULL);
733 }
734
735 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
736 DFS recurse for all tree edges originating from it. */
737
738 void
739 DFS::DFS_write_tree_body (struct output_block *ob,
740 tree expr, sccs *expr_state, bool ref_p)
741 {
742 #define DFS_follow_tree_edge(DEST) \
743 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
744
745 enum tree_code code;
746
747 if (streamer_dump_file)
748 {
749 print_node_brief (streamer_dump_file, " Streaming ",
750 expr, 4);
751 fprintf (streamer_dump_file, " to %s\n",
752 lto_section_name [ob->section_type]);
753 }
754
755 code = TREE_CODE (expr);
756
757 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
758 {
759 if (TREE_CODE (expr) != IDENTIFIER_NODE)
760 DFS_follow_tree_edge (TREE_TYPE (expr));
761 }
762
763 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
764 {
765 unsigned int count = vector_cst_encoded_nelts (expr);
766 for (unsigned int i = 0; i < count; ++i)
767 DFS_follow_tree_edge (VECTOR_CST_ENCODED_ELT (expr, i));
768 }
769
770 if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
771 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
772 DFS_follow_tree_edge (POLY_INT_CST_COEFF (expr, i));
773
774 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
775 {
776 DFS_follow_tree_edge (TREE_REALPART (expr));
777 DFS_follow_tree_edge (TREE_IMAGPART (expr));
778 }
779
780 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
781 {
782 /* Drop names that were created for anonymous entities. */
783 if (DECL_NAME (expr)
784 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
785 && IDENTIFIER_ANON_P (DECL_NAME (expr)))
786 ;
787 else
788 DFS_follow_tree_edge (DECL_NAME (expr));
789 if (TREE_CODE (expr) != TRANSLATION_UNIT_DECL
790 && ! DECL_CONTEXT (expr))
791 DFS_follow_tree_edge ((*all_translation_units)[0]);
792 else
793 DFS_follow_tree_edge (DECL_CONTEXT (expr));
794 }
795
796 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
797 {
798 DFS_follow_tree_edge (DECL_SIZE (expr));
799 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
800
801 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
802 special handling in LTO, it must be handled by streamer hooks. */
803
804 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
805
806 /* We use DECL_ABSTRACT_ORIGIN == error_mark_node to mark
807 declarations which should be eliminated by decl merging. Be sure none
808 leaks to this point. */
809 gcc_assert (DECL_ABSTRACT_ORIGIN (expr) != error_mark_node);
810 DFS_follow_tree_edge (DECL_ABSTRACT_ORIGIN (expr));
811
812 if ((VAR_P (expr)
813 || TREE_CODE (expr) == PARM_DECL)
814 && DECL_HAS_VALUE_EXPR_P (expr))
815 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
816 if (VAR_P (expr)
817 && DECL_HAS_DEBUG_EXPR_P (expr))
818 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
819 }
820
821 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
822 {
823 /* Make sure we don't inadvertently set the assembler name. */
824 if (DECL_ASSEMBLER_NAME_SET_P (expr))
825 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
826 }
827
828 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
829 {
830 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
831 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
832 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
833 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
834 gcc_checking_assert (!DECL_FCONTEXT (expr));
835 }
836
837 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
838 {
839 gcc_checking_assert (DECL_VINDEX (expr) == NULL);
840 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
841 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
842 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
843 }
844
845 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
846 {
847 DFS_follow_tree_edge (TYPE_SIZE (expr));
848 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
849 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
850 DFS_follow_tree_edge (TYPE_NAME (expr));
851 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
852 reconstructed during fixup. */
853 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
854 during fixup. */
855 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
856 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
857 /* TYPE_CANONICAL is re-computed during type merging, so no need
858 to follow it here. */
859 /* Do not stream TYPE_STUB_DECL; it is not needed by LTO but currently
860 it cannot be freed by free_lang_data without triggering ICEs in
861 langhooks. */
862 }
863
864 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
865 {
866 if (TREE_CODE (expr) == ENUMERAL_TYPE)
867 DFS_follow_tree_edge (TYPE_VALUES (expr));
868 else if (TREE_CODE (expr) == ARRAY_TYPE)
869 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
870 else if (RECORD_OR_UNION_TYPE_P (expr))
871 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
872 DFS_follow_tree_edge (t);
873 else if (TREE_CODE (expr) == FUNCTION_TYPE
874 || TREE_CODE (expr) == METHOD_TYPE)
875 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
876
877 if (!POINTER_TYPE_P (expr))
878 DFS_follow_tree_edge (TYPE_MIN_VALUE_RAW (expr));
879 DFS_follow_tree_edge (TYPE_MAX_VALUE_RAW (expr));
880 }
881
882 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
883 {
884 DFS_follow_tree_edge (TREE_PURPOSE (expr));
885 DFS_follow_tree_edge (TREE_VALUE (expr));
886 DFS_follow_tree_edge (TREE_CHAIN (expr));
887 }
888
889 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
890 {
891 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
892 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
893 }
894
895 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
896 {
897 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
898 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
899 DFS_follow_tree_edge (TREE_BLOCK (expr));
900 }
901
902 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
903 {
904 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
905 {
906 /* We would have to stream externals in the block chain as
907 non-references but we should have dropped them in
908 free-lang-data. */
909 gcc_assert (!VAR_OR_FUNCTION_DECL_P (t) || !DECL_EXTERNAL (t));
910 DFS_follow_tree_edge (t);
911 }
912
913 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
914 DFS_follow_tree_edge (BLOCK_ABSTRACT_ORIGIN (expr));
915
916 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
917 information for early inlined BLOCKs so drop it on the floor instead
918 of ICEing in dwarf2out.c. */
919
   920       /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN are not live at LTO
921 streaming time. */
922
   923       /* Do not output BLOCK_SUBBLOCKS.  Instead, on stream-in, this
   924          list is reconstructed from BLOCK_SUPERCONTEXT.  */
925 }
926
927 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
928 {
929 unsigned i;
930 tree t;
931
932 /* Note that the number of BINFO slots has already been emitted in
933 EXPR's header (see streamer_write_tree_header) because this length
934 is needed to build the empty BINFO node on the reader side. */
935 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
936 DFS_follow_tree_edge (t);
937 DFS_follow_tree_edge (BINFO_OFFSET (expr));
938 DFS_follow_tree_edge (BINFO_VTABLE (expr));
939
940 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX,
941 BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
942 by C++ FE only. */
943 }
944
945 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
946 {
947 unsigned i;
948 tree index, value;
949
950 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
951 {
952 DFS_follow_tree_edge (index);
953 DFS_follow_tree_edge (value);
954 }
955 }
956
957 if (code == OMP_CLAUSE)
958 {
959 int i;
960 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
961 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
962 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
963 }
964
965 #undef DFS_follow_tree_edge
966 }
967
968 /* Return a hash value for the tree T.
   969    CACHE holds hash values of trees outside the current SCC.  MAP, if non-NULL,
   970    may hold hash values of trees inside the current SCC.  */
971
972 static hashval_t
973 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
974 {
975 inchash::hash hstate;
976
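/* Mix a reference to SIBLING into HSTATE: zero for a NULL sibling, the
   cached hash for trees already in the streamer cache (outside the current
   SCC), the hash recorded in MAP for trees inside the current SCC, and a
   placeholder constant otherwise.  */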
977 #define visit(SIBLING) \
978 do { \
979 unsigned ix; \
980 if (!SIBLING) \
981 hstate.add_int (0); \
982 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
983 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
984 else if (map) \
985 hstate.add_int (*map->get (SIBLING)); \
986 else \
987 hstate.add_int (1); \
988 } while (0)
989
990 /* Hash TS_BASE. */
991 enum tree_code code = TREE_CODE (t);
992 hstate.add_int (code);
993 if (!TYPE_P (t))
994 {
995 hstate.add_flag (TREE_SIDE_EFFECTS (t));
996 hstate.add_flag (TREE_CONSTANT (t));
997 hstate.add_flag (TREE_READONLY (t));
998 hstate.add_flag (TREE_PUBLIC (t));
999 }
1000 hstate.add_flag (TREE_ADDRESSABLE (t));
1001 hstate.add_flag (TREE_THIS_VOLATILE (t));
1002 if (DECL_P (t))
1003 hstate.add_flag (DECL_UNSIGNED (t));
1004 else if (TYPE_P (t))
1005 hstate.add_flag (TYPE_UNSIGNED (t));
1006 if (TYPE_P (t))
1007 hstate.add_flag (TYPE_ARTIFICIAL (t));
1008 else
1009 hstate.add_flag (TREE_NO_WARNING (t));
1010 hstate.add_flag (TREE_NOTHROW (t));
1011 hstate.add_flag (TREE_STATIC (t));
1012 hstate.add_flag (TREE_PROTECTED (t));
1013 hstate.add_flag (TREE_DEPRECATED (t));
1014 if (code != TREE_BINFO)
1015 hstate.add_flag (TREE_PRIVATE (t));
1016 if (TYPE_P (t))
1017 {
1018 hstate.add_flag (AGGREGATE_TYPE_P (t)
1019 ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
1020 hstate.add_flag (TYPE_ADDR_SPACE (t));
1021 }
1022 else if (code == SSA_NAME)
1023 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
1024 hstate.commit_flag ();
1025
1026 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1027 hstate.add_wide_int (wi::to_widest (t));
1028
1029 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1030 {
1031 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
1032 hstate.add_flag (r.cl);
1033 hstate.add_flag (r.sign);
1034 hstate.add_flag (r.signalling);
1035 hstate.add_flag (r.canonical);
1036 hstate.commit_flag ();
1037 hstate.add_int (r.uexp);
1038 hstate.add (r.sig, sizeof (r.sig));
1039 }
1040
1041 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1042 {
1043 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
1044 hstate.add_int (f.mode);
1045 hstate.add_int (f.data.low);
1046 hstate.add_int (f.data.high);
1047 }
1048
1049 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1050 {
1051 hstate.add_hwi (DECL_MODE (t));
1052 hstate.add_flag (DECL_NONLOCAL (t));
1053 hstate.add_flag (DECL_VIRTUAL_P (t));
1054 hstate.add_flag (DECL_IGNORED_P (t));
1055 hstate.add_flag (DECL_ABSTRACT_P (t));
1056 hstate.add_flag (DECL_ARTIFICIAL (t));
1057 hstate.add_flag (DECL_USER_ALIGN (t));
1058 hstate.add_flag (DECL_PRESERVE_P (t));
1059 hstate.add_flag (DECL_EXTERNAL (t));
1060 hstate.add_flag (DECL_GIMPLE_REG_P (t));
1061 hstate.commit_flag ();
1062 hstate.add_int (DECL_ALIGN (t));
1063 if (code == LABEL_DECL)
1064 {
1065 hstate.add_int (EH_LANDING_PAD_NR (t));
1066 hstate.add_int (LABEL_DECL_UID (t));
1067 }
1068 else if (code == FIELD_DECL)
1069 {
1070 hstate.add_flag (DECL_PACKED (t));
1071 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1072 hstate.add_flag (DECL_PADDING_P (t));
1073 hstate.add_int (DECL_OFFSET_ALIGN (t));
1074 }
1075 else if (code == VAR_DECL)
1076 {
1077 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1078 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1079 }
1080 if (code == RESULT_DECL
1081 || code == PARM_DECL
1082 || code == VAR_DECL)
1083 {
1084 hstate.add_flag (DECL_BY_REFERENCE (t));
1085 if (code == VAR_DECL
1086 || code == PARM_DECL)
1087 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1088 }
1089 hstate.commit_flag ();
1090 }
1091
1092 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1093 hstate.add_int (DECL_REGISTER (t));
1094
1095 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1096 {
1097 hstate.add_flag (DECL_COMMON (t));
1098 hstate.add_flag (DECL_DLLIMPORT_P (t));
1099 hstate.add_flag (DECL_WEAK (t));
1100 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1101 hstate.add_flag (DECL_COMDAT (t));
1102 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1103 hstate.add_int (DECL_VISIBILITY (t));
1104 if (code == VAR_DECL)
1105 {
1106 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1107 hstate.add_flag (DECL_HARD_REGISTER (t));
1108 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1109 }
1110 if (TREE_CODE (t) == FUNCTION_DECL)
1111 {
1112 hstate.add_flag (DECL_FINAL_P (t));
1113 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1114 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1115 }
1116 hstate.commit_flag ();
1117 }
1118
1119 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1120 {
1121 hstate.add_int (DECL_BUILT_IN_CLASS (t));
1122 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1123 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1124 hstate.add_flag (DECL_UNINLINABLE (t));
1125 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1126 hstate.add_flag (DECL_IS_NOVOPS (t));
1127 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1128 hstate.add_flag (DECL_IS_MALLOC (t));
1129 hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
1130 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1131 hstate.add_flag (DECL_STATIC_CHAIN (t));
1132 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1133 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1134 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1135 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1136 hstate.add_flag (DECL_PURE_P (t));
1137 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1138 hstate.commit_flag ();
1139 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1140 hstate.add_int (DECL_FUNCTION_CODE (t));
1141 }
1142
1143 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1144 {
1145 hstate.add_hwi (TYPE_MODE (t));
  1146       /* TYPE_NO_FORCE_BLK is private to stor-layout and needs
1147 no streaming. */
1148 hstate.add_flag (TYPE_PACKED (t));
1149 hstate.add_flag (TYPE_RESTRICT (t));
1150 hstate.add_flag (TYPE_USER_ALIGN (t));
1151 hstate.add_flag (TYPE_READONLY (t));
1152 if (RECORD_OR_UNION_TYPE_P (t))
1153 {
1154 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1155 hstate.add_flag (TYPE_FINAL_P (t));
1156 hstate.add_flag (TYPE_CXX_ODR_P (t));
1157 }
1158 else if (code == ARRAY_TYPE)
1159 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1160 if (code == ARRAY_TYPE || code == INTEGER_TYPE)
1161 hstate.add_flag (TYPE_STRING_FLAG (t));
1162 if (AGGREGATE_TYPE_P (t))
1163 hstate.add_flag (TYPE_TYPELESS_STORAGE (t));
1164 hstate.commit_flag ();
1165 hstate.add_int (TYPE_PRECISION (t));
1166 hstate.add_int (TYPE_ALIGN (t));
1167 hstate.add_int (TYPE_EMPTY_P (t));
1168 }
1169
1170 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1171 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1172 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1173
1174 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1175 /* We don't stream these when passing things to a different target. */
1176 && !lto_stream_offload_p)
1177 hstate.add_hwi (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1178
1179 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1180 hstate.add_hwi (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1181
1182 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1183 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1184
1185 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1186 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1187
1188 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1189 {
1190 if (code != IDENTIFIER_NODE)
1191 visit (TREE_TYPE (t));
1192 }
1193
1194 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1195 {
1196 unsigned int count = vector_cst_encoded_nelts (t);
1197 for (unsigned int i = 0; i < count; ++i)
1198 visit (VECTOR_CST_ENCODED_ELT (t, i));
1199 }
1200
1201 if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
1202 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1203 visit (POLY_INT_CST_COEFF (t, i));
1204
1205 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1206 {
1207 visit (TREE_REALPART (t));
1208 visit (TREE_IMAGPART (t));
1209 }
1210
1211 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1212 {
1213 /* Drop names that were created for anonymous entities. */
1214 if (DECL_NAME (t)
1215 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1216 && IDENTIFIER_ANON_P (DECL_NAME (t)))
1217 ;
1218 else
1219 visit (DECL_NAME (t));
1220 if (DECL_FILE_SCOPE_P (t))
1221 ;
1222 else
1223 visit (DECL_CONTEXT (t));
1224 }
1225
1226 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1227 {
1228 visit (DECL_SIZE (t));
1229 visit (DECL_SIZE_UNIT (t));
1230 visit (DECL_ATTRIBUTES (t));
1231 if ((code == VAR_DECL
1232 || code == PARM_DECL)
1233 && DECL_HAS_VALUE_EXPR_P (t))
1234 visit (DECL_VALUE_EXPR (t));
1235 if (code == VAR_DECL
1236 && DECL_HAS_DEBUG_EXPR_P (t))
1237 visit (DECL_DEBUG_EXPR (t));
1238 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
1239 be able to call get_symbol_initial_value. */
1240 }
1241
1242 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1243 {
1244 if (DECL_ASSEMBLER_NAME_SET_P (t))
1245 visit (DECL_ASSEMBLER_NAME (t));
1246 }
1247
1248 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1249 {
1250 visit (DECL_FIELD_OFFSET (t));
1251 visit (DECL_BIT_FIELD_TYPE (t));
1252 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1253 visit (DECL_FIELD_BIT_OFFSET (t));
1254 }
1255
1256 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1257 {
1258 visit (DECL_FUNCTION_PERSONALITY (t));
1259 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1260 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1261 }
1262
1263 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1264 {
1265 visit (TYPE_SIZE (t));
1266 visit (TYPE_SIZE_UNIT (t));
1267 visit (TYPE_ATTRIBUTES (t));
1268 visit (TYPE_NAME (t));
1269 visit (TYPE_MAIN_VARIANT (t));
1270 if (TYPE_FILE_SCOPE_P (t))
1271 ;
1272 else
1273 visit (TYPE_CONTEXT (t));
1274 }
1275
1276 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1277 {
1278 if (code == ENUMERAL_TYPE)
1279 visit (TYPE_VALUES (t));
1280 else if (code == ARRAY_TYPE)
1281 visit (TYPE_DOMAIN (t));
1282 else if (RECORD_OR_UNION_TYPE_P (t))
1283 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1284 visit (f);
1285 else if (code == FUNCTION_TYPE
1286 || code == METHOD_TYPE)
1287 visit (TYPE_ARG_TYPES (t));
1288 if (!POINTER_TYPE_P (t))
1289 visit (TYPE_MIN_VALUE_RAW (t));
1290 visit (TYPE_MAX_VALUE_RAW (t));
1291 }
1292
1293 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1294 {
1295 visit (TREE_PURPOSE (t));
1296 visit (TREE_VALUE (t));
1297 visit (TREE_CHAIN (t));
1298 }
1299
1300 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1301 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1302 visit (TREE_VEC_ELT (t, i));
1303
1304 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1305 {
1306 hstate.add_hwi (TREE_OPERAND_LENGTH (t));
1307 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1308 visit (TREE_OPERAND (t, i));
1309 }
1310
1311 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1312 {
1313 unsigned i;
1314 tree b;
1315 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1316 visit (b);
1317 visit (BINFO_OFFSET (t));
1318 visit (BINFO_VTABLE (t));
  1319       /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX,
1320 BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
1321 by C++ FE only. */
1322 }
1323
1324 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1325 {
1326 unsigned i;
1327 tree index, value;
1328 hstate.add_hwi (CONSTRUCTOR_NELTS (t));
1329 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1330 {
1331 visit (index);
1332 visit (value);
1333 }
1334 }
1335
1336 if (code == OMP_CLAUSE)
1337 {
1338 int i;
1339 HOST_WIDE_INT val;
1340
1341 hstate.add_hwi (OMP_CLAUSE_CODE (t));
1342 switch (OMP_CLAUSE_CODE (t))
1343 {
1344 case OMP_CLAUSE_DEFAULT:
1345 val = OMP_CLAUSE_DEFAULT_KIND (t);
1346 break;
1347 case OMP_CLAUSE_SCHEDULE:
1348 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1349 break;
1350 case OMP_CLAUSE_DEPEND:
1351 val = OMP_CLAUSE_DEPEND_KIND (t);
1352 break;
1353 case OMP_CLAUSE_MAP:
1354 val = OMP_CLAUSE_MAP_KIND (t);
1355 break;
1356 case OMP_CLAUSE_PROC_BIND:
1357 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1358 break;
1359 case OMP_CLAUSE_REDUCTION:
1360 case OMP_CLAUSE_TASK_REDUCTION:
1361 case OMP_CLAUSE_IN_REDUCTION:
1362 val = OMP_CLAUSE_REDUCTION_CODE (t);
1363 break;
1364 default:
1365 val = 0;
1366 break;
1367 }
1368 hstate.add_hwi (val);
1369 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1370 visit (OMP_CLAUSE_OPERAND (t, i));
1371 visit (OMP_CLAUSE_CHAIN (t));
1372 }
1373
1374 return hstate.end ();
1375
1376 #undef visit
1377 }
1378
1379 /* Compare two SCC entries by their hash value for qsorting them. */
1380
1381 int
1382 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1383 {
1384 const scc_entry *p1 = (const scc_entry *) p1_;
1385 const scc_entry *p2 = (const scc_entry *) p2_;
1386 if (p1->hash < p2->hash)
1387 return -1;
1388 else if (p1->hash > p2->hash)
1389 return 1;
1390 return 0;
1391 }
1392
1393 /* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
1394 THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST. */
1395
1396 hashval_t
1397 DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
1398 bool ref_p, bool this_ref_p)
1399 {
1400 unsigned int last_classes = 0, iterations = 0;
1401
1402 /* Compute hash values for the SCC members. */
1403 for (unsigned i = 0; i < size; ++i)
1404 sccstack[first+i].hash
1405 = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);
1406
1407 if (size == 1)
1408 return sccstack[first].hash;
1409
1410 /* We aim to get unique hash for every tree within SCC and compute hash value
1411 of the whole SCC by combining all values together in a stable (entry-point
1412 independent) order. This guarantees that the same SCC regions within
1413 different translation units will get the same hash values and therefore
1414 will be merged at WPA time.
1415
1416 Often the hashes are already unique. In that case we compute the SCC hash
1417 by combining individual hash values in an increasing order.
1418
  1419      If there are duplicates, we seek at least one tree with a unique hash (and
  1420      among those pick the one with the minimal hash).  Then we obtain a stable
  1421      order by a DFS walk starting from this unique tree and use the index
  1422      within this order to make the individual hash values unique.
1423
  1424      If there is no tree with a unique hash, we iteratively propagate the hash
  1425      values across the internal edges of the SCC.  This usually quickly leads
1426 to unique hashes. Consider, for example, an SCC containing two pointers
1427 that are identical except for the types they point to and assume that
1428 these types are also part of the SCC. The propagation will add the
1429 points-to type information into their hash values. */
1430 do
1431 {
1432 /* Sort the SCC so we can easily check for uniqueness. */
1433 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1434
1435 unsigned int classes = 1;
1436 int firstunique = -1;
1437
1438 /* Find the tree with lowest unique hash (if it exists) and compute
1439 the number of equivalence classes. */
1440 if (sccstack[first].hash != sccstack[first+1].hash)
1441 firstunique = 0;
1442 for (unsigned i = 1; i < size; ++i)
1443 if (sccstack[first+i-1].hash != sccstack[first+i].hash)
1444 {
1445 classes++;
1446 if (firstunique == -1
1447 && (i == size - 1
1448 || sccstack[first+i+1].hash != sccstack[first+i].hash))
1449 firstunique = i;
1450 }
1451
1452 /* If we found a tree with unique hash, stop the iteration. */
1453 if (firstunique != -1
1454 /* Also terminate if we run out of iterations or if the number of
1455 equivalence classes is no longer increasing.
1456 For example a cyclic list of trees that are all equivalent will
  1457              never have a unique entry point; we however do not build such SCCs
  1458              in our IL.  */
1458 in our IL. */
1459 || classes <= last_classes || iterations > 16)
1460 {
1461 hashval_t scc_hash;
1462
1463 /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
1464 starting from FIRSTUNIQUE to obtain a stable order. */
1465 if (classes != size && firstunique != -1)
1466 {
1467 hash_map <tree, hashval_t> map(size*2);
1468
1469 /* Store hash values into a map, so we can associate them with
1470 the reordered SCC. */
1471 for (unsigned i = 0; i < size; ++i)
1472 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1473
1474 DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
1475 true);
1476 gcc_assert (again.sccstack.length () == size);
1477
1478 memcpy (sccstack.address () + first,
1479 again.sccstack.address (),
1480 sizeof (scc_entry) * size);
1481
1482 /* Update hash values of individual members by hashing in the
1483 index within the stable order. This ensures uniqueness.
1484 Also compute the SCC hash by mixing in all hash values in
1485 the stable order we obtained. */
1486 sccstack[first].hash = *map.get (sccstack[first].t);
1487 scc_hash = sccstack[first].hash;
1488 for (unsigned i = 1; i < size; ++i)
1489 {
1490 sccstack[first+i].hash
1491 = iterative_hash_hashval_t (i,
1492 *map.get (sccstack[first+i].t));
1493 scc_hash
1494 = iterative_hash_hashval_t (scc_hash,
1495 sccstack[first+i].hash);
1496 }
1497 }
  1498           /* If we got a unique hash value for each tree, then the sort above
  1499              already ensured an entry-point-independent order.  Only compute the final
1500 SCC hash.
1501
1502 If we failed to find the unique entry point, we go by the same
1503 route. We will eventually introduce unwanted hash conflicts. */
1504 else
1505 {
1506 scc_hash = sccstack[first].hash;
1507 for (unsigned i = 1; i < size; ++i)
1508 scc_hash
1509 = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);
1510
  1511              /* We cannot 100% guarantee that hashes will never conflict in a way
  1512                 that makes it impossible to find a unique entry point.  This however
1513 should be an extremely rare case. ICE for now so possible
1514 issues are found and evaluated. */
1515 gcc_checking_assert (classes == size);
1516 }
1517
1518 /* To avoid conflicts across SCCs, iteratively hash the whole SCC
1519 hash into the hash of each element. */
1520 for (unsigned i = 0; i < size; ++i)
1521 sccstack[first+i].hash
1522 = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
1523 return scc_hash;
1524 }
1525
1526 last_classes = classes;
1527 iterations++;
1528
1529 /* We failed to identify the entry point; propagate hash values across
1530 the edges. */
1531 hash_map <tree, hashval_t> map(size*2);
1532
1533 for (unsigned i = 0; i < size; ++i)
1534 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1535
1536 for (unsigned i = 0; i < size; i++)
1537 sccstack[first+i].hash
1538 = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
1539 }
1540 while (true);
1541 }
1542
1543 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1544 already in the streamer cache. Main routine called for
1545 each visit of EXPR. */
1546
1547 void
1548 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1549 tree expr, bool ref_p, bool this_ref_p)
1550 {
1551 /* Handle special cases. */
1552 if (expr == NULL_TREE)
1553 return;
1554
1555 /* Do not DFS walk into indexable trees. */
1556 if (this_ref_p && tree_is_indexable (expr))
1557 return;
1558
1559 /* Check if we already streamed EXPR. */
1560 if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1561 return;
1562
1563 worklist w;
1564 w.expr = expr;
1565 w.from_state = from_state;
1566 w.cstate = NULL;
1567 w.ref_p = ref_p;
1568 w.this_ref_p = this_ref_p;
1569 worklist_vec.safe_push (w);
1570 }
1571
1572
1573 /* Emit the physical representation of tree node EXPR to output block OB.
1574 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
1575 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1576
1577 void
1578 lto_output_tree (struct output_block *ob, tree expr,
1579 bool ref_p, bool this_ref_p)
1580 {
1581 unsigned ix;
1582 bool existed_p;
1583
1584 if (expr == NULL_TREE)
1585 {
1586 streamer_write_record_start (ob, LTO_null);
1587 return;
1588 }
1589
1590 if (this_ref_p && tree_is_indexable (expr))
1591 {
1592 lto_output_tree_ref (ob, expr);
1593 return;
1594 }
1595
1596 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1597 if (existed_p)
1598 {
1599 /* If a node has already been streamed out, make sure that
1600 we don't write it more than once. Otherwise, the reader
1601 will instantiate two different nodes for the same object. */
1602 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1603 streamer_write_uhwi (ob, ix);
1604 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1605 lto_tree_code_to_tag (TREE_CODE (expr)));
1606 lto_stats.num_pickle_refs_output++;
1607 }
1608 else
1609 {
1610 /* This is the first time we see EXPR, write all reachable
1611 trees to OB. */
1612 static bool in_dfs_walk;
1613
  1614       /* Protect against recursion, which would mean a disconnect between
1615 what tree edges we walk in the DFS walk and what edges
1616 we stream out. */
1617 gcc_assert (!in_dfs_walk);
1618
1619 if (streamer_dump_file)
1620 {
1621 print_node_brief (streamer_dump_file, " Streaming SCC of ",
1622 expr, 4);
1623 fprintf (streamer_dump_file, "\n");
1624 }
1625
1626 /* Start the DFS walk. */
1627 /* Save ob state ... */
1628 /* let's see ... */
1629 in_dfs_walk = true;
1630 DFS (ob, expr, ref_p, this_ref_p, false);
1631 in_dfs_walk = false;
1632
1633 /* Finally append a reference to the tree we were writing.
1634 ??? If expr ended up as a singleton we could have
1635 inlined it here and avoid outputting a reference. */
1636 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1637 gcc_assert (existed_p);
1638 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1639 streamer_write_uhwi (ob, ix);
1640 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1641 lto_tree_code_to_tag (TREE_CODE (expr)));
1642 if (streamer_dump_file)
1643 {
1644 print_node_brief (streamer_dump_file, " Finished SCC of ",
1645 expr, 4);
1646 fprintf (streamer_dump_file, "\n\n");
1647 }
1648 lto_stats.num_pickle_refs_output++;
1649 }
1650 }
1651
1652
1653 /* Output to OB a list of try/catch handlers starting with FIRST. */
1654
1655 static void
1656 output_eh_try_list (struct output_block *ob, eh_catch first)
1657 {
1658 eh_catch n;
1659
1660 for (n = first; n; n = n->next_catch)
1661 {
1662 streamer_write_record_start (ob, LTO_eh_catch);
1663 stream_write_tree (ob, n->type_list, true);
1664 stream_write_tree (ob, n->filter_list, true);
1665 stream_write_tree (ob, n->label, true);
1666 }
1667
1668 streamer_write_record_start (ob, LTO_null);
1669 }
1670
1671
  1672 /* Output EH region R to OB.  R's index and the indices of its outer,
  1673    inner and next-peer regions are streamed so that the reader can
  1674    rebuild the region tree and detect EH region sharing.  */
1675
1676 static void
1677 output_eh_region (struct output_block *ob, eh_region r)
1678 {
1679 enum LTO_tags tag;
1680
1681 if (r == NULL)
1682 {
1683 streamer_write_record_start (ob, LTO_null);
1684 return;
1685 }
1686
1687 if (r->type == ERT_CLEANUP)
1688 tag = LTO_ert_cleanup;
1689 else if (r->type == ERT_TRY)
1690 tag = LTO_ert_try;
1691 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1692 tag = LTO_ert_allowed_exceptions;
1693 else if (r->type == ERT_MUST_NOT_THROW)
1694 tag = LTO_ert_must_not_throw;
1695 else
1696 gcc_unreachable ();
1697
1698 streamer_write_record_start (ob, tag);
1699 streamer_write_hwi (ob, r->index);
1700
1701 if (r->outer)
1702 streamer_write_hwi (ob, r->outer->index);
1703 else
1704 streamer_write_zero (ob);
1705
1706 if (r->inner)
1707 streamer_write_hwi (ob, r->inner->index);
1708 else
1709 streamer_write_zero (ob);
1710
1711 if (r->next_peer)
1712 streamer_write_hwi (ob, r->next_peer->index);
1713 else
1714 streamer_write_zero (ob);
1715
1716 if (r->type == ERT_TRY)
1717 {
1718 output_eh_try_list (ob, r->u.eh_try.first_catch);
1719 }
1720 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1721 {
1722 stream_write_tree (ob, r->u.allowed.type_list, true);
1723 stream_write_tree (ob, r->u.allowed.label, true);
1724 streamer_write_uhwi (ob, r->u.allowed.filter);
1725 }
1726 else if (r->type == ERT_MUST_NOT_THROW)
1727 {
1728 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1729 bitpack_d bp = bitpack_create (ob->main_stream);
1730 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1731 streamer_write_bitpack (&bp);
1732 }
1733
1734 if (r->landing_pads)
1735 streamer_write_hwi (ob, r->landing_pads->index);
1736 else
1737 streamer_write_zero (ob);
1738 }
1739
1740
1741 /* Output landing pad LP to OB. */
1742
1743 static void
1744 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1745 {
1746 if (lp == NULL)
1747 {
1748 streamer_write_record_start (ob, LTO_null);
1749 return;
1750 }
1751
1752 streamer_write_record_start (ob, LTO_eh_landing_pad);
1753 streamer_write_hwi (ob, lp->index);
1754 if (lp->next_lp)
1755 streamer_write_hwi (ob, lp->next_lp->index);
1756 else
1757 streamer_write_zero (ob);
1758
1759 if (lp->region)
1760 streamer_write_hwi (ob, lp->region->index);
1761 else
1762 streamer_write_zero (ob);
1763
1764 stream_write_tree (ob, lp->post_landing_pad, true);
1765 }
1766
1767
1768 /* Output the existing eh_table to OB. */
1769
1770 static void
1771 output_eh_regions (struct output_block *ob, struct function *fn)
1772 {
1773 if (fn->eh && fn->eh->region_tree)
1774 {
1775 unsigned i;
1776 eh_region eh;
1777 eh_landing_pad lp;
1778 tree ttype;
1779
1780 streamer_write_record_start (ob, LTO_eh_table);
1781
1782 /* Emit the index of the root of the EH region tree. */
1783 streamer_write_hwi (ob, fn->eh->region_tree->index);
1784
1785 /* Emit all the EH regions in the region array. */
1786 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1787 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1788 output_eh_region (ob, eh);
1789
1790 /* Emit all landing pads. */
1791 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1792 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1793 output_eh_lp (ob, lp);
1794
1795 /* Emit all the runtime type data. */
1796 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1797 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1798 stream_write_tree (ob, ttype, true);
1799
1800 /* Emit the table of action chains. */
1801 if (targetm.arm_eabi_unwinder)
1802 {
1803 tree t;
1804 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1805 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1806 stream_write_tree (ob, t, true);
1807 }
1808 else
1809 {
1810 uchar c;
1811 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1812 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1813 streamer_write_char_stream (ob->main_stream, c);
1814 }
1815 }
1816
1817 /* The LTO_null either terminates the record or indicates that there
1818 are no eh_records at all. */
1819 streamer_write_record_start (ob, LTO_null);
1820 }
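
/* Summarizing the layout produced above: when FN has EH regions we emit

     LTO_eh_table, the root region index,
     the region count followed by one record per entry of the region array,
     the landing-pad count followed by one record per landing pad,
     the runtime-type count followed by the ttype trees,
     and either the ARM EABI exception-spec trees or the raw byte form of
     the action table,

   and in all cases a trailing LTO_null, which doubles as the marker for
   "no EH table" when the region tree is absent.  */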
1821
1822
1823 /* Output all of the active ssa names to the ssa_names stream. */
1824
1825 static void
1826 output_ssa_names (struct output_block *ob, struct function *fn)
1827 {
1828 unsigned int i, len;
1829
1830 len = vec_safe_length (SSANAMES (fn));
1831 streamer_write_uhwi (ob, len);
1832
1833 for (i = 1; i < len; i++)
1834 {
1835 tree ptr = (*SSANAMES (fn))[i];
1836
1837 if (ptr == NULL_TREE
1838 || SSA_NAME_IN_FREE_LIST (ptr)
1839 || virtual_operand_p (ptr)
1840 /* Simply skip unreleased SSA names. */
1841 || (! SSA_NAME_IS_DEFAULT_DEF (ptr)
1842 && (! SSA_NAME_DEF_STMT (ptr)
1843 || ! gimple_bb (SSA_NAME_DEF_STMT (ptr)))))
1844 continue;
1845
1846 streamer_write_uhwi (ob, i);
1847 streamer_write_char_stream (ob->main_stream,
1848 SSA_NAME_IS_DEFAULT_DEF (ptr));
1849 if (SSA_NAME_VAR (ptr))
1850 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1851 else
1852 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1853 stream_write_tree (ob, TREE_TYPE (ptr), true);
1854 }
1855
1856 streamer_write_zero (ob);
1857 }
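
/* The format written above is: the length of the SSA_NAMES vector, then
   for every streamed name its index (always non-zero), a one-byte
   default-def flag and either SSA_NAME_VAR or, if that is absent, the
   name's type; a zero index terminates the list.  A minimal sketch of a
   consumer, with placeholder reader calls rather than the real
   streamer-in API:

     len = read_uhwi ();
     while ((i = read_uhwi ()) != 0)
       {
         bool is_default_def = read_char ();
         tree var_or_type = read_tree ();
         ... recreate SSA name I from VAR_OR_TYPE ...
       }
*/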
1858
1859
1860
1861 /* Output the cfg. */
1862
1863 static void
1864 output_cfg (struct output_block *ob, struct function *fn)
1865 {
1866 struct lto_output_stream *tmp_stream = ob->main_stream;
1867 basic_block bb;
1868
1869 ob->main_stream = ob->cfg_stream;
1870
1871 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1872 profile_status_for_fn (fn));
1873
1874 /* Output the number of the highest basic block. */
1875 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1876
1877 FOR_ALL_BB_FN (bb, fn)
1878 {
1879 edge_iterator ei;
1880 edge e;
1881
1882 streamer_write_hwi (ob, bb->index);
1883
1884 /* Output the successors and the edge flags. */
1885 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1886 FOR_EACH_EDGE (e, ei, bb->succs)
1887 {
1888 streamer_write_uhwi (ob, e->dest->index);
1889 e->probability.stream_out (ob);
1890 streamer_write_uhwi (ob, e->flags);
1891 }
1892 }
1893
1894 streamer_write_hwi (ob, -1);
1895
1896 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1897 while (bb->next_bb)
1898 {
1899 streamer_write_hwi (ob, bb->next_bb->index);
1900 bb = bb->next_bb;
1901 }
1902
1903 streamer_write_hwi (ob, -1);
1904
1905 /* ??? The cfgloop interface is tied to cfun. */
1906 gcc_assert (cfun == fn);
1907
1908 /* Output the number of loops. */
1909 streamer_write_uhwi (ob, number_of_loops (fn));
1910
1911 /* Output each loop, skipping the tree root which has number zero. */
1912 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1913 {
1914 struct loop *loop = get_loop (fn, i);
1915
1916 /* Write the index of the loop header. That's enough to rebuild
1917 the loop tree on the reader side. Stream -1 for an unused
1918 loop entry. */
1919 if (!loop)
1920 {
1921 streamer_write_hwi (ob, -1);
1922 continue;
1923 }
1924 else
1925 streamer_write_hwi (ob, loop->header->index);
1926
1927 /* Write everything copy_loop_info copies. */
1928 streamer_write_enum (ob->main_stream,
1929 loop_estimation, EST_LAST, loop->estimate_state);
1930 streamer_write_hwi (ob, loop->any_upper_bound);
1931 if (loop->any_upper_bound)
1932 streamer_write_widest_int (ob, loop->nb_iterations_upper_bound);
1933 streamer_write_hwi (ob, loop->any_likely_upper_bound);
1934 if (loop->any_likely_upper_bound)
1935 streamer_write_widest_int (ob, loop->nb_iterations_likely_upper_bound);
1936 streamer_write_hwi (ob, loop->any_estimate);
1937 if (loop->any_estimate)
1938 streamer_write_widest_int (ob, loop->nb_iterations_estimate);
1939
1940 /* Write OMP SIMD related info. */
1941 streamer_write_hwi (ob, loop->safelen);
1942 streamer_write_hwi (ob, loop->unroll);
1943 streamer_write_hwi (ob, loop->owned_clique);
1944 streamer_write_hwi (ob, loop->dont_vectorize);
1945 streamer_write_hwi (ob, loop->force_vectorize);
1946 stream_write_tree (ob, loop->simduid, true);
1947 }
1948
1949 ob->main_stream = tmp_stream;
1950 }
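
/* The CFG stream written above contains, in order: the profile status
   enum, the highest basic-block number, one record per basic block (its
   index, its successor count and, per edge, the destination index, the
   streamed edge probability and the edge flags), a -1 sentinel, the
   block indices in next_bb order followed by another -1, and finally
   the loop tree: the number of loops and, for each loop other than the
   root, either -1 for an unused slot or the loop header's block index
   plus the fields copy_loop_info copies.  */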
1951
1952
1953 /* Create the header in the file using OB. If the section type is for
1954 a function, set FN to the decl for that function. */
1955
1956 void
1957 produce_asm (struct output_block *ob, tree fn)
1958 {
1959 enum lto_section_type section_type = ob->section_type;
1960 struct lto_function_header header;
1961 char *section_name;
1962
1963 if (section_type == LTO_section_function_body)
1964 {
1965 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1966 section_name = lto_get_section_name (section_type, name, NULL);
1967 }
1968 else
1969 section_name = lto_get_section_name (section_type, NULL, NULL);
1970
1971 lto_begin_section (section_name, !flag_wpa);
1972 free (section_name);
1973
1974 /* The entire header stream is computed here. */
1975 memset (&header, 0, sizeof (struct lto_function_header));
1976
1977 if (section_type == LTO_section_function_body)
1978 header.cfg_size = ob->cfg_stream->total_size;
1979 header.main_size = ob->main_stream->total_size;
1980 header.string_size = ob->string_stream->total_size;
1981 lto_write_data (&header, sizeof header);
1982
1983 /* Put all of the gimple and the string table out to the asm file as a
1984 block of text. */
1985 if (section_type == LTO_section_function_body)
1986 lto_write_stream (ob->cfg_stream);
1987 lto_write_stream (ob->main_stream);
1988 lto_write_stream (ob->string_stream);
1989
1990 lto_end_section ();
1991 }
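
/* The section emitted above is thus a struct lto_function_header
   followed by the raw bytes of the cfg stream (for function bodies
   only), the main stream and the string stream; the sizes recorded in
   the header are what allow the reader to split the blob back into its
   parts.  */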
1992
1993
1994 /* Output the base body of struct function FN using output block OB. */
1995
1996 static void
1997 output_struct_function_base (struct output_block *ob, struct function *fn)
1998 {
1999 struct bitpack_d bp;
2000 unsigned i;
2001 tree t;
2002
2003 /* Output the static chain and non-local goto save area. */
2004 stream_write_tree (ob, fn->static_chain_decl, true);
2005 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
2006
2007 /* Output all the local variables in the function. */
2008 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
2009 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
2010 stream_write_tree (ob, t, true);
2011
2012 /* Output current IL state of the function. */
2013 streamer_write_uhwi (ob, fn->curr_properties);
2014
2015 /* Write all the attributes for FN. */
2016 bp = bitpack_create (ob->main_stream);
2017 bp_pack_value (&bp, fn->is_thunk, 1);
2018 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
2019 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
2020 bp_pack_value (&bp, fn->returns_struct, 1);
2021 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
2022 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
2023 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
2024 bp_pack_value (&bp, fn->after_inlining, 1);
2025 bp_pack_value (&bp, fn->stdarg, 1);
2026 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
2027 bp_pack_value (&bp, fn->has_forced_label_in_static, 1);
2028 bp_pack_value (&bp, fn->calls_alloca, 1);
2029 bp_pack_value (&bp, fn->calls_setjmp, 1);
2030 bp_pack_value (&bp, fn->calls_eh_return, 1);
2031 bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
2032 bp_pack_value (&bp, fn->has_simduid_loops, 1);
2033 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
2034 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
2035 bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);
2036
2037 /* Output the function start and end loci. */
2038 stream_output_location (ob, &bp, fn->function_start_locus);
2039 stream_output_location (ob, &bp, fn->function_end_locus);
2040
2041 /* Save the instance discriminator if present. */
2042 int *instance_number_p = NULL;
2043 if (decl_to_instance_map)
2044 instance_number_p = decl_to_instance_map->get (fn->decl);
2045 bp_pack_value (&bp, !!instance_number_p, 1);
2046 if (instance_number_p)
2047 bp_pack_value (&bp, *instance_number_p, sizeof (int) * CHAR_BIT);
2048
2049 streamer_write_bitpack (&bp);
2050 }
2051
2052
2053 /* Collect all leaf BLOCKs beyond ROOT into LEAFS. */
2054
2055 static void
2056 collect_block_tree_leafs (tree root, vec<tree> &leafs)
2057 {
2058 for (root = BLOCK_SUBBLOCKS (root); root; root = BLOCK_CHAIN (root))
2059 if (! BLOCK_SUBBLOCKS (root))
2060 leafs.safe_push (root);
2061 else
2062 collect_block_tree_leafs (root, leafs);
2063 }
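
/* For example, for a block tree

     ROOT
      +-- A
      |    +-- A1
      |    +-- A2
      +-- B

   the leafs collected are A1, A2 and B: every block reachable from ROOT
   that has no BLOCK_SUBBLOCKS of its own.  */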
2064
2065 /* Output the body of function NODE->DECL. */
2066
2067 static void
2068 output_function (struct cgraph_node *node)
2069 {
2070 tree function;
2071 struct function *fn;
2072 basic_block bb;
2073 struct output_block *ob;
2074
2075 if (streamer_dump_file)
2076 fprintf (streamer_dump_file, "\nStreaming body of %s\n",
2077 node->name ());
2078
2079 function = node->decl;
2080 fn = DECL_STRUCT_FUNCTION (function);
2081 ob = create_output_block (LTO_section_function_body);
2082
2083 clear_line_info (ob);
2084 ob->symbol = node;
2085
2086 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
2087
2088 /* Set current_function_decl and cfun. */
2089 push_cfun (fn);
2090
2091 /* Make string 0 be a NULL string. */
2092 streamer_write_char_stream (ob->string_stream, 0);
2093
2094 streamer_write_record_start (ob, LTO_function);
2095
2096 /* Output the result decl and the argument decls. */
2097 stream_write_tree (ob, DECL_RESULT (function), true);
2098 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
2099
2100 /* Output debug args if available. */
2101 vec<tree, va_gc> **debugargs = decl_debug_args_lookup (function);
2102 if (! debugargs)
2103 streamer_write_uhwi (ob, 0);
2104 else
2105 {
2106 streamer_write_uhwi (ob, (*debugargs)->length ());
2107 for (unsigned i = 0; i < (*debugargs)->length (); ++i)
2108 stream_write_tree (ob, (**debugargs)[i], true);
2109 }
2110
2111 /* Output DECL_INITIAL for the function, which contains the tree of
2112 lexical scopes. */
2113 stream_write_tree (ob, DECL_INITIAL (function), true);
2114 /* As we do not recurse into BLOCK_SUBBLOCKS but only BLOCK_SUPERCONTEXT,
2115 collect the block tree leafs and stream those. */
2116 auto_vec<tree> block_tree_leafs;
2117 if (DECL_INITIAL (function))
2118 collect_block_tree_leafs (DECL_INITIAL (function), block_tree_leafs);
2119 streamer_write_uhwi (ob, block_tree_leafs.length ());
2120 for (unsigned i = 0; i < block_tree_leafs.length (); ++i)
2121 stream_write_tree (ob, block_tree_leafs[i], true);
2122
2123 /* We also stream abstract functions, for which we stream only what is
2124 needed for debug info. */
2125 if (gimple_has_body_p (function))
2126 {
2127 /* Fixup loops if required to match discovery done in the reader. */
2128 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
2129
2130 streamer_write_uhwi (ob, 1);
2131 output_struct_function_base (ob, fn);
2132
2133 /* Output all the SSA names used in the function. */
2134 output_ssa_names (ob, fn);
2135
2136 /* Output any exception handling regions. */
2137 output_eh_regions (ob, fn);
2138
2139
2140 /* We will renumber the statements. The code that does this uses
2141 the same ordering that we use for serializing them so we can use
2142 the same code on the other end and not have to write out the
2143 statement numbers. We do not assign UIDs to PHIs here because
2144 virtual PHIs get re-computed on-the-fly which would make numbers
2145 inconsistent. */
2146 set_gimple_stmt_max_uid (cfun, 0);
2147 FOR_ALL_BB_FN (bb, cfun)
2148 {
2149 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2150 gsi_next (&gsi))
2151 {
2152 gphi *stmt = gsi.phi ();
2153
2154 /* Virtual PHIs are not going to be streamed. */
2155 if (!virtual_operand_p (gimple_phi_result (stmt)))
2156 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2157 }
2158 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
2159 gsi_next (&gsi))
2160 {
2161 gimple *stmt = gsi_stmt (gsi);
2162 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2163 }
2164 }
2165 /* To avoid keeping duplicate gimple IDs in the statements, renumber
2166 virtual phis now. */
2167 FOR_ALL_BB_FN (bb, cfun)
2168 {
2169 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2170 gsi_next (&gsi))
2171 {
2172 gphi *stmt = gsi.phi ();
2173 if (virtual_operand_p (gimple_phi_result (stmt)))
2174 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2175 }
2176 }
2177
2178 /* Output the code for the function. */
2179 FOR_ALL_BB_FN (bb, fn)
2180 output_bb (ob, bb, fn);
2181
2182 /* The terminator for this function. */
2183 streamer_write_record_start (ob, LTO_null);
2184
2185 output_cfg (ob, fn);
2186
2187 loop_optimizer_finalize ();
2188 pop_cfun ();
2189 }
2190 else
2191 streamer_write_uhwi (ob, 0);
2192
2193 /* Create a section to hold the pickled output of this function. */
2194 produce_asm (ob, function);
2195
2196 destroy_output_block (ob);
2197 if (streamer_dump_file)
2198 fprintf (streamer_dump_file, "Finished streaming %s\n",
2199 node->name ());
2200 }
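
/* To summarize the body section written above: an LTO_function tag, the
   result and argument decls, the debug args, DECL_INITIAL plus the block
   tree leafs, then a flag saying whether an actual body follows.  If it
   does, the struct function data, the SSA names, the EH regions and the
   basic blocks are streamed, terminated by LTO_null, and the CFG goes to
   its own sub-stream; otherwise only the flag 0 is emitted.  */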
2201
2202 /* Output the initializer (constructor) of variable NODE->DECL. */
2203
2204 static void
2205 output_constructor (struct varpool_node *node)
2206 {
2207 tree var = node->decl;
2208 struct output_block *ob;
2209
2210 if (streamer_dump_file)
2211 fprintf (streamer_dump_file, "\nStreaming constructor of %s\n",
2212 node->name ());
2213
2214 ob = create_output_block (LTO_section_function_body);
2215
2216 clear_line_info (ob);
2217 ob->symbol = node;
2218
2219 /* Make string 0 be a NULL string. */
2220 streamer_write_char_stream (ob->string_stream, 0);
2221
2222 /* Output DECL_INITIAL for the variable, which contains its initial
2223 value. */
2224 stream_write_tree (ob, DECL_INITIAL (var), true);
2225
2226 /* Create a section to hold the pickled output of this function. */
2227 produce_asm (ob, var);
2228
2229 destroy_output_block (ob);
2230 if (streamer_dump_file)
2231 fprintf (streamer_dump_file, "Finished streaming %s\n",
2232 node->name ());
2233 }
2234
2235
2236 /* Emit toplevel asms. */
2237
2238 void
2239 lto_output_toplevel_asms (void)
2240 {
2241 struct output_block *ob;
2242 struct asm_node *can;
2243 char *section_name;
2244 struct lto_simple_header_with_strings header;
2245
2246 if (!symtab->first_asm_symbol ())
2247 return;
2248
2249 ob = create_output_block (LTO_section_asm);
2250
2251 /* Make string 0 be a NULL string. */
2252 streamer_write_char_stream (ob->string_stream, 0);
2253
2254 for (can = symtab->first_asm_symbol (); can; can = can->next)
2255 {
2256 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2257 streamer_write_hwi (ob, can->order);
2258 }
2259
2260 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2261
2262 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
2263 lto_begin_section (section_name, !flag_wpa);
2264 free (section_name);
2265
2266 /* The entire header stream is computed here. */
2267 memset (&header, 0, sizeof (header));
2268
2269 header.main_size = ob->main_stream->total_size;
2270 header.string_size = ob->string_stream->total_size;
2271 lto_write_data (&header, sizeof header);
2272
2273 /* Put all of the gimple and the string table out to the asm file as a
2274 block of text. */
2275 lto_write_stream (ob->main_stream);
2276 lto_write_stream (ob->string_stream);
2277
2278 lto_end_section ();
2279
2280 destroy_output_block (ob);
2281 }
2282
2283
2284 /* Copy the function body or variable constructor of NODE without deserializing. */
2285
2286 static void
2287 copy_function_or_variable (struct symtab_node *node)
2288 {
2289 tree function = node->decl;
2290 struct lto_file_decl_data *file_data = node->lto_file_data;
2291 const char *data;
2292 size_t len;
2293 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2294 char *section_name =
2295 lto_get_section_name (LTO_section_function_body, name, NULL);
2296 size_t i, j;
2297 struct lto_in_decl_state *in_state;
2298 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2299
2300 if (streamer_dump_file)
2301 fprintf (streamer_dump_file, "Copying section for %s\n", name);
2302 lto_begin_section (section_name, false);
2303 free (section_name);
2304
2305 /* We may have renamed the declaration, e.g., a static function. */
2306 name = lto_get_decl_name_mapping (file_data, name);
2307
2308 data = lto_get_raw_section_data (file_data, LTO_section_function_body,
2309 name, &len);
2310 gcc_assert (data);
2311
2312 /* Do a bit copy of the function body. */
2313 lto_write_raw_data (data, len);
2314
2315 /* Copy decls. */
2316 in_state =
2317 lto_get_function_in_decl_state (node->lto_file_data, function);
2318 gcc_assert (in_state);
2319 out_state->compressed = in_state->compressed;
2320
2321 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2322 {
2323 size_t n = vec_safe_length (in_state->streams[i]);
2324 vec<tree, va_gc> *trees = in_state->streams[i];
2325 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2326
2327 /* The out state must have the same indices as the in state.
2328 So just copy the vector. All the encoders in the out state
2329 must be empty when we reach here. */
2330 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2331 encoder->trees.reserve_exact (n);
2332 for (j = 0; j < n; j++)
2333 encoder->trees.safe_push ((*trees)[j]);
2334 }
2335
2336 lto_free_raw_section_data (file_data, LTO_section_function_body, name,
2337 data, len);
2338 lto_end_section ();
2339 }
2340
2341 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2342
2343 static tree
2344 wrap_refs (tree *tp, int *ws, void *)
2345 {
2346 tree t = *tp;
2347 if (handled_component_p (t)
2348 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
2349 && TREE_PUBLIC (TREE_OPERAND (t, 0)))
2350 {
2351 tree decl = TREE_OPERAND (t, 0);
2352 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2353 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2354 build1 (ADDR_EXPR, ptrtype, decl),
2355 build_int_cst (ptrtype, 0));
2356 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2357 *ws = 0;
2358 }
2359 else if (TREE_CODE (t) == CONSTRUCTOR)
2360 ;
2361 else if (!EXPR_P (t))
2362 *ws = 0;
2363 return NULL_TREE;
2364 }
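
/* As an illustration of the transform above: a reference such as
   pub_var.field, where pub_var is a public VAR_DECL, is rewritten to the
   equivalent of

     MEM_REF <&pub_var, 0>.field

   i.e. the variable operand is replaced by a MEM_REF of the variable's
   own type, with the original volatility copied over, so the access type
   stays explicit in the IL once the reference is streamed.  */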
2365
2366 /* Remove functions that are no longer used from offload_funcs, and mark the
2367 remaining ones with DECL_PRESERVE_P. */
2368
2369 static void
2370 prune_offload_funcs (void)
2371 {
2372 if (!offload_funcs)
2373 return;
2374
2375 unsigned ix, ix2;
2376 tree *elem_ptr;
2377 VEC_ORDERED_REMOVE_IF (*offload_funcs, ix, ix2, elem_ptr,
2378 cgraph_node::get (*elem_ptr) == NULL);
2379
2380 tree fn_decl;
2381 FOR_EACH_VEC_ELT (*offload_funcs, ix, fn_decl)
2382 DECL_PRESERVE_P (fn_decl) = 1;
2383 }
2384
2385 /* Produce LTO section that contains global information
2386 about LTO bytecode. */
2387
2388 static void
2389 produce_lto_section ()
2390 {
2391 /* Stream LTO meta section. */
2392 output_block *ob = create_output_block (LTO_section_lto);
2393
2394 char * section_name = lto_get_section_name (LTO_section_lto, NULL, NULL);
2395 lto_begin_section (section_name, false);
2396 free (section_name);
2397
2398 lto_compression compression = ZLIB;
2399
2400 bool slim_object = flag_generate_lto && !flag_fat_lto_objects;
2401 lto_section s
2402 = { LTO_major_version, LTO_minor_version, slim_object, compression, 0 };
2403 lto_write_data (&s, sizeof s);
2404 lto_end_section ();
2405 destroy_output_block (ob);
2406 }
2407
2408 /* Main entry point from the pass manager. */
2409
2410 void
2411 lto_output (void)
2412 {
2413 struct lto_out_decl_state *decl_state;
2414 bitmap output = NULL;
2415 bitmap_obstack output_obstack;
2416 int i, n_nodes;
2417 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2418
2419 prune_offload_funcs ();
2420
2421 if (flag_checking)
2422 {
2423 bitmap_obstack_initialize (&output_obstack);
2424 output = BITMAP_ALLOC (&output_obstack);
2425 }
2426
2427 /* Initialize the streamer. */
2428 lto_streamer_init ();
2429
2430 produce_lto_section ();
2431
2432 n_nodes = lto_symtab_encoder_size (encoder);
2433 /* Process the functions with bodies and the variables with initializers. */
2434 for (i = 0; i < n_nodes; i++)
2435 {
2436 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2437 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2438 {
2439 if (lto_symtab_encoder_encode_body_p (encoder, node)
2440 && !node->alias)
2441 {
2442 if (flag_checking)
2443 gcc_assert (bitmap_set_bit (output, DECL_UID (node->decl)));
2444 decl_state = lto_new_out_decl_state ();
2445 lto_push_out_decl_state (decl_state);
2446 if (gimple_has_body_p (node->decl)
2447 || (!flag_wpa
2448 && flag_incremental_link != INCREMENTAL_LINK_LTO)
2449 /* Thunks have no body but they may be synthesized
2450 at WPA time. */
2451 || DECL_ARGUMENTS (node->decl))
2452 output_function (node);
2453 else
2454 copy_function_or_variable (node);
2455 gcc_assert (lto_get_out_decl_state () == decl_state);
2456 lto_pop_out_decl_state ();
2457 lto_record_function_out_decl_state (node->decl, decl_state);
2458 }
2459 }
2460 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2461 {
2462 /* Wrap symbol references inside the ctor in a type
2463 preserving MEM_REF. */
2464 tree ctor = DECL_INITIAL (node->decl);
2465 if (ctor && !in_lto_p)
2466 walk_tree (&ctor, wrap_refs, NULL, NULL);
2467 if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2468 && lto_symtab_encoder_encode_initializer_p (encoder, node)
2469 && !node->alias)
2470 {
2471 timevar_push (TV_IPA_LTO_CTORS_OUT);
2472 if (flag_checking)
2473 gcc_assert (bitmap_set_bit (output, DECL_UID (node->decl)));
2474 decl_state = lto_new_out_decl_state ();
2475 lto_push_out_decl_state (decl_state);
2476 if (DECL_INITIAL (node->decl) != error_mark_node
2477 || (!flag_wpa
2478 && flag_incremental_link != INCREMENTAL_LINK_LTO))
2479 output_constructor (node);
2480 else
2481 copy_function_or_variable (node);
2482 gcc_assert (lto_get_out_decl_state () == decl_state);
2483 lto_pop_out_decl_state ();
2484 lto_record_function_out_decl_state (node->decl, decl_state);
2485 timevar_pop (TV_IPA_LTO_CTORS_OUT);
2486 }
2487 }
2488 }
2489
2490 /* Emit the callgraph after emitting function bodies. This needs to
2491 be done now to make sure that all the statements in every function
2492 have been renumbered so that edges can be associated with call
2493 statements using the statement UIDs. */
2494 output_symtab ();
2495
2496 output_offload_tables ();
2497
2498 if (flag_checking)
2499 {
2500 BITMAP_FREE (output);
2501 bitmap_obstack_release (&output_obstack);
2502 }
2503 }
2504
2505 /* Write each node encoded by ENCODER to OB, as well as those reachable
2506 from it and required for correct representation of its semantics.
2507 Each node in ENCODER must be a global declaration or a type. A node
2508 is written only once, even if it appears multiple times in the
2509 vector. Certain transitively-reachable nodes, such as those
2510 representing expressions, may be duplicated, but such nodes
2511 must not appear in ENCODER itself. */
2512
2513 static void
2514 write_global_stream (struct output_block *ob,
2515 struct lto_tree_ref_encoder *encoder)
2516 {
2517 tree t;
2518 size_t index;
2519 const size_t size = lto_tree_ref_encoder_size (encoder);
2520
2521 for (index = 0; index < size; index++)
2522 {
2523 t = lto_tree_ref_encoder_get_tree (encoder, index);
2524 if (streamer_dump_file)
2525 {
2526 fprintf (streamer_dump_file, " %i:", (int)index);
2527 print_node_brief (streamer_dump_file, "", t, 4);
2528 fprintf (streamer_dump_file, "\n");
2529 }
2530 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2531 stream_write_tree (ob, t, false);
2532 }
2533 }
2534
2535
2536 /* Write a sequence of indices into the globals vector corresponding
2537 to the trees in ENCODER. These are used by the reader to map the
2538 indices used to refer to global entities within function bodies to
2539 their referents. */
2540
2541 static void
2542 write_global_references (struct output_block *ob,
2543 struct lto_tree_ref_encoder *encoder)
2544 {
2545 tree t;
2546 uint32_t index;
2547 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2548
2549 /* Write size and slot indexes as 32-bit unsigned numbers. */
2550 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2551 data[0] = size;
2552
2553 for (index = 0; index < size; index++)
2554 {
2555 unsigned slot_num;
2556
2557 t = lto_tree_ref_encoder_get_tree (encoder, index);
2558 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2559 gcc_assert (slot_num != (unsigned)-1);
2560 data[index + 1] = slot_num;
2561 }
2562
2563 lto_write_data (data, sizeof (uint32_t) * (size + 1));
2564 free (data);
2565 }
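
/* The data written above is simply SIZE + 1 32-bit unsigned values: the
   number of references followed by, for each tree in the encoder, the
   slot it occupies in the writer cache.  The reader uses this table to
   translate the reference indices used inside bodies into positions in
   the global tree vector.  */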
2566
2567
2568 /* Write all the streams in an lto_out_decl_state STATE using
2569 output block OB. */
2570
2571 void
2572 lto_output_decl_state_streams (struct output_block *ob,
2573 struct lto_out_decl_state *state)
2574 {
2575 int i;
2576
2577 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2578 write_global_stream (ob, &state->streams[i]);
2579 }
2580
2581
2582 /* Write all the references in an lto_out_decl_state STATE using
2583 output block OB. */
2584
2585 void
2586 lto_output_decl_state_refs (struct output_block *ob,
2587 struct lto_out_decl_state *state)
2588 {
2589 unsigned i;
2590 unsigned ref;
2591 tree decl;
2592
2593 /* Write reference to FUNCTION_DECL. If there is no function,
2594 write reference to void_type_node. */
2595 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2596 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2597 gcc_assert (ref != (unsigned)-1);
2598 ref = ref * 2 + (state->compressed ? 1 : 0);
2599 lto_write_data (&ref, sizeof (uint32_t));
2600
2601 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2602 write_global_references (ob, &state->streams[i]);
2603 }
2604
2605
2606 /* Return the written size of STATE. */
2607
2608 static size_t
2609 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2610 {
2611 int i;
2612 size_t size;
2613
2614 size = sizeof (int32_t); /* fn_ref. */
2615 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2616 {
2617 size += sizeof (int32_t); /* vector size. */
2618 size += (lto_tree_ref_encoder_size (&state->streams[i])
2619 * sizeof (int32_t));
2620 }
2621 return size;
2622 }
2623
2624
2625 /* Write symbol T to the current section, using CACHE to find its slot
2626 number. SEEN specifies the symbols we have written so far. */
2627
2628 static void
2629 write_symbol (struct streamer_tree_cache_d *cache,
2630 tree t, hash_set<const char *> *seen, bool alias)
2631 {
2632 const char *name;
2633 enum gcc_plugin_symbol_kind kind;
2634 enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
2635 unsigned slot_num;
2636 uint64_t size;
2637 const char *comdat;
2638 unsigned char c;
2639
2640 gcc_checking_assert (TREE_PUBLIC (t)
2641 && (TREE_CODE (t) != FUNCTION_DECL
2642 || !fndecl_built_in_p (t))
2643 && !DECL_ABSTRACT_P (t)
2644 && (!VAR_P (t) || !DECL_HARD_REGISTER (t)));
2645
2646 gcc_assert (VAR_OR_FUNCTION_DECL_P (t));
2647
2648 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2649
2650 /* This behaves like assemble_name_raw in varasm.c, performing the
2651 same name manipulations that ASM_OUTPUT_LABELREF does. */
2652 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2653
2654 if (seen->add (name))
2655 return;
2656
2657 streamer_tree_cache_lookup (cache, t, &slot_num);
2658 gcc_assert (slot_num != (unsigned)-1);
2659
2660 if (DECL_EXTERNAL (t))
2661 {
2662 if (DECL_WEAK (t))
2663 kind = GCCPK_WEAKUNDEF;
2664 else
2665 kind = GCCPK_UNDEF;
2666 }
2667 else
2668 {
2669 if (DECL_WEAK (t))
2670 kind = GCCPK_WEAKDEF;
2671 else if (DECL_COMMON (t))
2672 kind = GCCPK_COMMON;
2673 else
2674 kind = GCCPK_DEF;
2675
2676 /* When something is defined, it should have a node attached. */
2677 gcc_assert (alias || !VAR_P (t) || varpool_node::get (t)->definition);
2678 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2679 || (cgraph_node::get (t)
2680 && cgraph_node::get (t)->definition));
2681 }
2682
2683 /* Imitate what default_elf_asm_output_external does.
2684 When a symbol is external, we need to output it with DEFAULT visibility
2685 when compiling with -fvisibility=default, but with HIDDEN visibility
2686 when the symbol has the visibility ("hidden") attribute specified.
2687 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2688 right. */
2689
2690 if (DECL_EXTERNAL (t)
2691 && !targetm.binds_local_p (t))
2692 visibility = GCCPV_DEFAULT;
2693 else
2694 switch (DECL_VISIBILITY (t))
2695 {
2696 case VISIBILITY_DEFAULT:
2697 visibility = GCCPV_DEFAULT;
2698 break;
2699 case VISIBILITY_PROTECTED:
2700 visibility = GCCPV_PROTECTED;
2701 break;
2702 case VISIBILITY_HIDDEN:
2703 visibility = GCCPV_HIDDEN;
2704 break;
2705 case VISIBILITY_INTERNAL:
2706 visibility = GCCPV_INTERNAL;
2707 break;
2708 }
2709
2710 if (kind == GCCPK_COMMON
2711 && DECL_SIZE_UNIT (t)
2712 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2713 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2714 else
2715 size = 0;
2716
2717 if (DECL_ONE_ONLY (t))
2718 comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
2719 else
2720 comdat = "";
2721
2722 lto_write_data (name, strlen (name) + 1);
2723 lto_write_data (comdat, strlen (comdat) + 1);
2724 c = (unsigned char) kind;
2725 lto_write_data (&c, 1);
2726 c = (unsigned char) visibility;
2727 lto_write_data (&c, 1);
2728 lto_write_data (&size, 8);
2729 lto_write_data (&slot_num, 4);
2730 }
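
/* Each entry written above is therefore: the NUL-terminated assembler
   name, the NUL-terminated comdat group name (empty when the symbol is
   not one-only), one byte for the symbol kind, one byte for the
   visibility, an 8-byte size (non-zero only for commons) and the 4-byte
   slot number of the decl in the writer cache.  This is the format the
   linker-plugin side is expected to parse.  */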
2731
2732 /* Write an IL symbol table to OB. The symbols written are those
2733 encoded in OB's symtab node encoder. */
2734
2735 static void
2736 produce_symtab (struct output_block *ob)
2737 {
2738 struct streamer_tree_cache_d *cache = ob->writer_cache;
2739 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2740 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2741 lto_symtab_encoder_iterator lsei;
2742
2743 lto_begin_section (section_name, false);
2744 free (section_name);
2745
2746 hash_set<const char *> seen;
2747
2748 /* Write the symbol table.
2749 First write everything defined and then all declarations.
2750 This is necessary to handle cases where we have duplicated symbols. */
2751 for (lsei = lsei_start (encoder);
2752 !lsei_end_p (lsei); lsei_next (&lsei))
2753 {
2754 symtab_node *node = lsei_node (lsei);
2755
2756 if (DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
2757 continue;
2758 write_symbol (cache, node->decl, &seen, false);
2759 }
2760 for (lsei = lsei_start (encoder);
2761 !lsei_end_p (lsei); lsei_next (&lsei))
2762 {
2763 symtab_node *node = lsei_node (lsei);
2764
2765 if (!DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
2766 continue;
2767 write_symbol (cache, node->decl, &seen, false);
2768 }
2769
2770 lto_end_section ();
2771 }
2772
2773
2774 /* Init the streamer_mode_table for output, where we collect info on what
2775 machine_mode values have been streamed. */
2776 void
2777 lto_output_init_mode_table (void)
2778 {
2779 memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
2780 }
2781
2782
2783 /* Write the mode table. */
2784 static void
2785 lto_write_mode_table (void)
2786 {
2787 struct output_block *ob;
2788 ob = create_output_block (LTO_section_mode_table);
2789 bitpack_d bp = bitpack_create (ob->main_stream);
2790
2791 /* Ensure that for GET_MODE_INNER (m) != m we also have
2792 the inner mode marked. */
2793 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2794 if (streamer_mode_table[i])
2795 {
2796 machine_mode m = (machine_mode) i;
2797 machine_mode inner_m = GET_MODE_INNER (m);
2798 if (inner_m != m)
2799 streamer_mode_table[(int) inner_m] = 1;
2800 }
2801 /* First stream modes that have GET_MODE_INNER (m) == m,
2802 so that we can refer to them afterwards. */
2803 for (int pass = 0; pass < 2; pass++)
2804 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2805 if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
2806 {
2807 machine_mode m = (machine_mode) i;
2808 if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
2809 continue;
2810 bp_pack_value (&bp, m, 8);
2811 bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
2812 bp_pack_poly_value (&bp, GET_MODE_SIZE (m), 16);
2813 bp_pack_poly_value (&bp, GET_MODE_PRECISION (m), 16);
2814 bp_pack_value (&bp, GET_MODE_INNER (m), 8);
2815 bp_pack_poly_value (&bp, GET_MODE_NUNITS (m), 16);
2816 switch (GET_MODE_CLASS (m))
2817 {
2818 case MODE_FRACT:
2819 case MODE_UFRACT:
2820 case MODE_ACCUM:
2821 case MODE_UACCUM:
2822 bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
2823 bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
2824 break;
2825 case MODE_FLOAT:
2826 case MODE_DECIMAL_FLOAT:
2827 bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
2828 break;
2829 default:
2830 break;
2831 }
2832 bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
2833 }
2834 bp_pack_value (&bp, VOIDmode, 8);
2835
2836 streamer_write_bitpack (&bp);
2837
2838 char *section_name
2839 = lto_get_section_name (LTO_section_mode_table, NULL, NULL);
2840 lto_begin_section (section_name, !flag_wpa);
2841 free (section_name);
2842
2843 /* The entire header stream is computed here. */
2844 struct lto_simple_header_with_strings header;
2845 memset (&header, 0, sizeof (header));
2846
2847 header.main_size = ob->main_stream->total_size;
2848 header.string_size = ob->string_stream->total_size;
2849 lto_write_data (&header, sizeof header);
2850
2851 /* Put all of the gimple and the string table out to the asm file as a
2852 block of text. */
2853 lto_write_stream (ob->main_stream);
2854 lto_write_stream (ob->string_stream);
2855
2856 lto_end_section ();
2857 destroy_output_block (ob);
2858 }
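
/* Each mode record packed above consists of the 8-bit mode number, its
   mode class, size, precision, inner mode and number of units, the
   class-specific extras (ibit/fbit for fixed-point classes, the real
   format name for float classes) and the mode's name; an 8-bit VOIDmode
   value terminates the table.  Inner modes are streamed in the first
   pass so later records can refer to them.  */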
2859
2860
2861 /* This pass is run after all of the functions are serialized and all
2862 of the IPA passes have written their serialized forms. This pass
2863 causes the vector of all of the global decls and types used from
2864 this file to be written into a section that can then be read back in
2865 to recover these on the other side. */
2866
2867 void
2868 produce_asm_for_decls (void)
2869 {
2870 struct lto_out_decl_state *out_state;
2871 struct lto_out_decl_state *fn_out_state;
2872 struct lto_decl_header header;
2873 char *section_name;
2874 struct output_block *ob;
2875 unsigned idx, num_fns;
2876 size_t decl_state_size;
2877 int32_t num_decl_states;
2878
2879 ob = create_output_block (LTO_section_decls);
2880
2881 memset (&header, 0, sizeof (struct lto_decl_header));
2882
2883 section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2884 lto_begin_section (section_name, !flag_wpa);
2885 free (section_name);
2886
2887 /* Make string 0 be a NULL string. */
2888 streamer_write_char_stream (ob->string_stream, 0);
2889
2890 gcc_assert (!alias_pairs);
2891
2892 /* Get rid of the global decl state hash tables to save some memory. */
2893 out_state = lto_get_out_decl_state ();
2894 for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
2895 if (out_state->streams[i].tree_hash_table)
2896 {
2897 delete out_state->streams[i].tree_hash_table;
2898 out_state->streams[i].tree_hash_table = NULL;
2899 }
2900
2901 /* Write the global symbols. */
2902 if (streamer_dump_file)
2903 fprintf (streamer_dump_file, "Outputting global stream\n");
2904 lto_output_decl_state_streams (ob, out_state);
2905 num_fns = lto_function_decl_states.length ();
2906 for (idx = 0; idx < num_fns; idx++)
2907 {
2908 fn_out_state =
2909 lto_function_decl_states[idx];
2910 if (streamer_dump_file)
2911 fprintf (streamer_dump_file, "Outputting stream for %s\n",
2912 IDENTIFIER_POINTER
2913 (DECL_ASSEMBLER_NAME (fn_out_state->fn_decl)));
2914 lto_output_decl_state_streams (ob, fn_out_state);
2915 }
2916
2917 /* Currently not used. This field would allow us to preallocate
2918 the globals vector, so that it need not be resized as it is extended. */
2919 header.num_nodes = -1;
2920
2921 /* Compute the total size of all decl out states. */
2922 decl_state_size = sizeof (int32_t);
2923 decl_state_size += lto_out_decl_state_written_size (out_state);
2924 for (idx = 0; idx < num_fns; idx++)
2925 {
2926 fn_out_state =
2927 lto_function_decl_states[idx];
2928 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2929 }
2930 header.decl_state_size = decl_state_size;
2931
2932 header.main_size = ob->main_stream->total_size;
2933 header.string_size = ob->string_stream->total_size;
2934
2935 lto_write_data (&header, sizeof header);
2936
2937 /* Write the main out-decl state, followed by out-decl states of
2938 functions. */
2939 num_decl_states = num_fns + 1;
2940 lto_write_data (&num_decl_states, sizeof (num_decl_states));
2941 lto_output_decl_state_refs (ob, out_state);
2942 for (idx = 0; idx < num_fns; idx++)
2943 {
2944 fn_out_state = lto_function_decl_states[idx];
2945 lto_output_decl_state_refs (ob, fn_out_state);
2946 }
2947
2948 lto_write_stream (ob->main_stream);
2949 lto_write_stream (ob->string_stream);
2950
2951 lto_end_section ();
2952
2953 /* Write the symbol table. It is used by the linker to determine
2954 dependencies, and thus we can skip it for WPA. */
2955 if (!flag_wpa)
2956 produce_symtab (ob);
2957
2958 /* Write command line opts. */
2959 lto_write_options ();
2960
2961 /* Deallocate memory and clean up. */
2962 for (idx = 0; idx < num_fns; idx++)
2963 {
2964 fn_out_state =
2965 lto_function_decl_states[idx];
2966 lto_delete_out_decl_state (fn_out_state);
2967 }
2968 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2969 lto_function_decl_states.release ();
2970 destroy_output_block (ob);
2971 if (lto_stream_offload_p)
2972 lto_write_mode_table ();
2973 }