gcc/lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2020 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-streamer.h"
34 #include "alias.h"
35 #include "stor-layout.h"
36 #include "gimple-iterator.h"
37 #include "except.h"
38 #include "lto-symtab.h"
39 #include "cgraph.h"
40 #include "cfgloop.h"
41 #include "builtins.h"
42 #include "gomp-constants.h"
43 #include "debug.h"
44 #include "omp-offload.h"
45 #include "print-tree.h"
46 #include "tree-dfa.h"
47 #include "file-prefix-map.h" /* remap_debug_filename() */
48 #include "output.h"
49
50
51 static void lto_write_tree (struct output_block*, tree, bool);
52
53 /* Clear the line info stored in output block OB. */
54
55 static void
56 clear_line_info (struct output_block *ob)
57 {
58 ob->current_file = NULL;
59 ob->current_line = 0;
60 ob->current_col = 0;
61 ob->current_sysp = false;
62 }
63
64
65 /* Create the output block and return it. SECTION_TYPE is
66 LTO_section_function_body or LTO_section_static_initializer. */
67
68 struct output_block *
69 create_output_block (enum lto_section_type section_type)
70 {
71 struct output_block *ob = XCNEW (struct output_block);
72 if (streamer_dump_file)
73 fprintf (streamer_dump_file, "Creating output block for %s\n",
74 lto_section_name [section_type]);
75
76 ob->section_type = section_type;
77 ob->decl_state = lto_get_out_decl_state ();
78 ob->main_stream = XCNEW (struct lto_output_stream);
79 ob->string_stream = XCNEW (struct lto_output_stream);
80 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
81
82 if (section_type == LTO_section_function_body)
83 ob->cfg_stream = XCNEW (struct lto_output_stream);
84
85 clear_line_info (ob);
86
87 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
88 gcc_obstack_init (&ob->obstack);
89
90 return ob;
91 }
92
93
94 /* Destroy the output block OB. */
95
96 void
97 destroy_output_block (struct output_block *ob)
98 {
99 enum lto_section_type section_type = ob->section_type;
100
101 delete ob->string_hash_table;
102 ob->string_hash_table = NULL;
103
104 free (ob->main_stream);
105 free (ob->string_stream);
106 if (section_type == LTO_section_function_body)
107 free (ob->cfg_stream);
108
109 streamer_tree_cache_delete (ob->writer_cache);
110 obstack_free (&ob->obstack, NULL);
111
112 free (ob);
113 }
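/* Editorial usage sketch (added for exposition; not part of the original
   source).  A writer typically pairs the two routines above like this:

     struct output_block *ob = create_output_block (LTO_section_function_body);
     ...append records via the streamer_write_* routines, e.g.
        streamer_write_record_start (ob, LTO_eh_table); ...
     destroy_output_block (ob);

   with the accumulated main/string/cfg streams flushed into an LTO section
   in between by the section-producing code later in this file (not shown
   here).  */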
114
115
116 /* Look up NODE in the type table and write the index for it to OB. */
117
118 static void
119 output_type_ref (struct output_block *ob, tree node)
120 {
121 streamer_write_record_start (ob, LTO_type_ref);
122 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
123 }
124
125 /* Wrapper around variably_modified_type_p avoiding type modification
126 during WPA streaming. */
127
128 static bool
129 lto_variably_modified_type_p (tree type)
130 {
131 return (in_lto_p
132 ? TYPE_LANG_FLAG_0 (TYPE_MAIN_VARIANT (type))
133 : variably_modified_type_p (type, NULL_TREE));
134 }
135
136
137 /* Return true if tree node T is written to various tables. For these
138 nodes, we sometimes want to write their physical representation
139 (via lto_output_tree), and sometimes we need to emit an index
140 reference into a table (via lto_output_tree_ref). */
141
142 static bool
143 tree_is_indexable (tree t)
144 {
145 /* Parameters and return values of functions of variably modified types
146 must go to global stream, because they may be used in the type
147 definition. */
148 if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
149 && DECL_CONTEXT (t))
150 return lto_variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)));
151 /* IMPORTED_DECL is put into BLOCK and thus it never can be shared.
152 We should no longer need to stream it. */
153 else if (TREE_CODE (t) == IMPORTED_DECL)
154 gcc_unreachable ();
155 else if (TREE_CODE (t) == LABEL_DECL)
156 return FORCED_LABEL (t) || DECL_NONLOCAL (t);
157 else if (((VAR_P (t) && !TREE_STATIC (t))
158 || TREE_CODE (t) == TYPE_DECL
159 || TREE_CODE (t) == CONST_DECL
160 || TREE_CODE (t) == NAMELIST_DECL)
161 && decl_function_context (t))
162 return false;
163 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
164 return false;
165 /* Variably modified types need to be streamed alongside function
166 bodies because they can refer to local entities. Together with
167 them we have to localize their members as well.
168 ??? In theory that includes non-FIELD_DECLs as well. */
169 else if (TYPE_P (t)
170 && lto_variably_modified_type_p (t))
171 return false;
172 else if (TREE_CODE (t) == FIELD_DECL
173 && lto_variably_modified_type_p (DECL_CONTEXT (t)))
174 return false;
175 else
176 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
177 }
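/* Editorial note (added for exposition): "indexable" trees are those
   entered into the global decl/type tables and later referenced by index
   via lto_output_tree_ref, while non-indexable trees are pickled by value
   into whatever stream uses them.  The special cases above keep
   function-local entities and variably modified types out of the global
   tables because they must be streamed next to the function body that
   defines them.  */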
178
179
180 /* Output info about new location into bitpack BP.
181 After outputting bitpack, lto_output_location_data has
182 to be done to output actual data. */
183
184 void
185 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
186 location_t loc)
187 {
188 expanded_location xloc;
189
190 loc = LOCATION_LOCUS (loc);
191 bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
192 loc < RESERVED_LOCATION_COUNT
193 ? loc : RESERVED_LOCATION_COUNT);
194 if (loc < RESERVED_LOCATION_COUNT)
195 return;
196
197 xloc = expand_location (loc);
198
199 bp_pack_value (bp, ob->current_file != xloc.file, 1);
200 bp_pack_value (bp, ob->current_line != xloc.line, 1);
201 bp_pack_value (bp, ob->current_col != xloc.column, 1);
202
203 if (ob->current_file != xloc.file)
204 {
205 bp_pack_string (ob, bp, remap_debug_filename (xloc.file), true);
206 bp_pack_value (bp, xloc.sysp, 1);
207 }
208 ob->current_file = xloc.file;
209 ob->current_sysp = xloc.sysp;
210
211 if (ob->current_line != xloc.line)
212 bp_pack_var_len_unsigned (bp, xloc.line);
213 ob->current_line = xloc.line;
214
215 if (ob->current_col != xloc.column)
216 bp_pack_var_len_unsigned (bp, xloc.column);
217 ob->current_col = xloc.column;
218 }
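/* Editorial note (added for exposition): lto_output_location delta-encodes
   against the last location written to OB.  For instance, two consecutive
   statements on the same line of the same file cost the second one only the
   three "did it change?" bits; the file name (with its system-header flag),
   the line and the column are re-emitted only when they change.  The reader
   must track the same current file/line/column state to decode this.  */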
219
220
221 /* If EXPR is an indexable tree node, output a reference to it to
222 output block OB. Otherwise, output the physical representation of
223 EXPR to OB. */
224
225 static void
226 lto_output_tree_ref (struct output_block *ob, tree expr)
227 {
228 enum tree_code code;
229
230 if (TYPE_P (expr))
231 {
232 output_type_ref (ob, expr);
233 return;
234 }
235
236 code = TREE_CODE (expr);
237 switch (code)
238 {
239 case SSA_NAME:
240 streamer_write_record_start (ob, LTO_ssa_name_ref);
241 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
242 break;
243
244 case FIELD_DECL:
245 streamer_write_record_start (ob, LTO_field_decl_ref);
246 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
247 break;
248
249 case FUNCTION_DECL:
250 streamer_write_record_start (ob, LTO_function_decl_ref);
251 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
252 break;
253
254 case VAR_DECL:
255 case DEBUG_EXPR_DECL:
256 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
257 /* FALLTHRU */
258 case PARM_DECL:
259 streamer_write_record_start (ob, LTO_global_decl_ref);
260 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
261 break;
262
263 case CONST_DECL:
264 streamer_write_record_start (ob, LTO_const_decl_ref);
265 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
266 break;
267
268 case IMPORTED_DECL:
269 gcc_assert (decl_function_context (expr) == NULL);
270 streamer_write_record_start (ob, LTO_imported_decl_ref);
271 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
272 break;
273
274 case TYPE_DECL:
275 streamer_write_record_start (ob, LTO_type_decl_ref);
276 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
277 break;
278
279 case NAMELIST_DECL:
280 streamer_write_record_start (ob, LTO_namelist_decl_ref);
281 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
282 break;
283
284 case NAMESPACE_DECL:
285 streamer_write_record_start (ob, LTO_namespace_decl_ref);
286 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
287 break;
288
289 case LABEL_DECL:
290 streamer_write_record_start (ob, LTO_label_decl_ref);
291 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
292 break;
293
294 case RESULT_DECL:
295 streamer_write_record_start (ob, LTO_result_decl_ref);
296 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
297 break;
298
299 case TRANSLATION_UNIT_DECL:
300 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
301 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
302 break;
303
304 default:
305 /* No other node is indexable, so it should have been handled by
306 lto_output_tree. */
307 gcc_unreachable ();
308 }
309 }
310
311
312 /* Return true if EXPR is a tree node that can be written to disk. */
313
314 static inline bool
315 lto_is_streamable (tree expr)
316 {
317 enum tree_code code = TREE_CODE (expr);
318
319 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
320 name version in lto_output_tree_ref (see output_ssa_names). */
321 return !is_lang_specific (expr)
322 && code != SSA_NAME
323 && code != LANG_TYPE
324 && code != MODIFY_EXPR
325 && code != INIT_EXPR
326 && code != TARGET_EXPR
327 && code != BIND_EXPR
328 && code != WITH_CLEANUP_EXPR
329 && code != STATEMENT_LIST
330 && (code == CASE_LABEL_EXPR
331 || code == DECL_EXPR
332 || TREE_CODE_CLASS (code) != tcc_statement);
333 }
334
335 /* Very rough estimate of the streaming size of the initializer. If we
336 ignored the presence of strings, we could simply count the number of
337 non-indexable tree nodes and the number of references to indexable nodes.
338 Strings, however, may be very large and we do not want to dump them into the global stream.
339
340 Walk the initializer, subtracting estimated sizes from the budget in DATA; stop once the budget goes negative. */
341
342 static tree
343 subtract_estimated_size (tree *tp, int *ws, void *data)
344 {
345 long *sum = (long *)data;
346 if (tree_is_indexable (*tp))
347 {
348 /* Indexable tree is one reference to global stream.
349 Guess it may be about 4 bytes. */
350 *sum -= 4;
351 *ws = 0;
352 }
353 /* String table entry + base of tree node needs to be streamed. */
354 if (TREE_CODE (*tp) == STRING_CST)
355 *sum -= TREE_STRING_LENGTH (*tp) + 8;
356 else
357 {
358 /* Identifiers are also variable length but should not appear
359 naked in a constructor. */
360 gcc_checking_assert (TREE_CODE (*tp) != IDENTIFIER_NODE);
361 /* We do not really attempt to work out the size of a pickled tree, as
362 it is very variable. Make the estimate bigger than a reference. */
363 *sum -= 16;
364 }
365 if (*sum < 0)
366 return *tp;
367 return NULL_TREE;
368 }
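/* Editorial worked example (added for exposition; not part of the original
   source): per the callback above, a STRING_CST costs its length plus 8,
   any other node costs 16 (plus 4 more if it is also indexable).  With the
   30-byte budget that get_symbol_initial_value below passes in DATA, a
   plain INTEGER_CST initializer (16) stays within budget, while a
   100-character string literal (108) exhausts it: walk_tree returns the
   offending node and the caller falls back to error_mark_node.  */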
369
370
371 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
372
373 static tree
374 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
375 {
376 gcc_checking_assert (DECL_P (expr)
377 && TREE_CODE (expr) != FUNCTION_DECL
378 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
379
380 /* Handle DECL_INITIAL for symbols. */
381 tree initial = DECL_INITIAL (expr);
382 if (VAR_P (expr)
383 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
384 && !DECL_IN_CONSTANT_POOL (expr)
385 && initial)
386 {
387 varpool_node *vnode;
388 /* Extra section needs about 30 bytes; do not produce it for simple
389 scalar values. */
390 if (!(vnode = varpool_node::get (expr))
391 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
392 initial = error_mark_node;
393 if (initial != error_mark_node)
394 {
395 long max_size = 30;
396 if (walk_tree (&initial, subtract_estimated_size, (void *)&max_size,
397 NULL))
398 initial = error_mark_node;
399 }
400 }
401
402 return initial;
403 }
404
405
406 /* Write a physical representation of tree node EXPR to output block
407 OB. If REF_P is true, the leaves of EXPR are emitted as references
408 via lto_output_tree_ref. EXPR must already have been entered into
409 the writer cache by the caller. */
410
411 static void
412 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
413 {
414 /* Pack all the non-pointer fields in EXPR into a bitpack and write
415 the resulting bitpack. */
416 streamer_write_tree_bitfields (ob, expr);
417
418 /* Write all the pointer fields in EXPR. */
419 streamer_write_tree_body (ob, expr, ref_p);
420
421 /* Write any LTO-specific data to OB. */
422 if (DECL_P (expr)
423 && TREE_CODE (expr) != FUNCTION_DECL
424 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
425 {
426 /* Handle DECL_INITIAL for symbols. */
427 tree initial = get_symbol_initial_value
428 (ob->decl_state->symtab_node_encoder, expr);
429 stream_write_tree (ob, initial, ref_p);
430 }
431
432 /* Stream references to early generated DIEs. Keep in sync with the
433 trees handled in dwarf2out_die_ref_for_decl. */
434 if ((DECL_P (expr)
435 && TREE_CODE (expr) != FIELD_DECL
436 && TREE_CODE (expr) != DEBUG_EXPR_DECL
437 && TREE_CODE (expr) != TYPE_DECL)
438 || TREE_CODE (expr) == BLOCK)
439 {
440 const char *sym;
441 unsigned HOST_WIDE_INT off;
442 if (debug_info_level > DINFO_LEVEL_NONE
443 && debug_hooks->die_ref_for_decl (expr, &sym, &off))
444 {
445 streamer_write_string (ob, ob->main_stream, sym, true);
446 streamer_write_uhwi (ob, off);
447 }
448 else
449 streamer_write_string (ob, ob->main_stream, NULL, true);
450 }
451 }
452
453 /* Write a physical representation of tree node EXPR to output block
454 OB. If REF_P is true, the leaves of EXPR are emitted as references
455 via lto_output_tree_ref. EXPR must already have been entered into
456 the writer cache by the caller. */
457
458 static void
459 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
460 {
461 if (!lto_is_streamable (expr))
462 internal_error ("tree code %qs is not supported in LTO streams",
463 get_tree_code_name (TREE_CODE (expr)));
464
465 /* Write the header, containing everything needed to materialize
466 EXPR on the reading side. */
467 streamer_write_tree_header (ob, expr);
468
469 lto_write_tree_1 (ob, expr, ref_p);
470
471 /* Mark the end of EXPR. */
472 streamer_write_zero (ob);
473 }
474
475 /* Emit the physical representation of tree node EXPR to output block OB.
476 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
477 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
478
479 static void
480 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
481 bool ref_p, bool this_ref_p)
482 {
483 unsigned ix;
484
485 gcc_checking_assert (expr != NULL_TREE
486 && !(this_ref_p && tree_is_indexable (expr)));
487
488 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
489 expr, hash, &ix);
490 gcc_assert (!exists_p);
491 if (TREE_CODE (expr) == INTEGER_CST
492 && !TREE_OVERFLOW (expr))
493 {
494 /* Shared INTEGER_CST nodes are special because they need their
495 original type to be materialized by the reader (to implement
496 TYPE_CACHED_VALUES). */
497 streamer_write_integer_cst (ob, expr, ref_p);
498 }
499 else
500 {
501 /* This is the first time we see EXPR, write its fields
502 to OB. */
503 lto_write_tree (ob, expr, ref_p);
504 }
505 }
506
507 class DFS
508 {
509 public:
510 DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
511 bool single_p);
512 ~DFS ();
513
514 struct scc_entry
515 {
516 tree t;
517 hashval_t hash;
518 };
519 auto_vec<scc_entry,32> sccstack;
520
521 private:
522 struct sccs
523 {
524 unsigned int dfsnum;
525 unsigned int low;
526 };
527 struct worklist
528 {
529 tree expr;
530 sccs *from_state;
531 sccs *cstate;
532 bool ref_p;
533 bool this_ref_p;
534 };
535
536 static int scc_entry_compare (const void *, const void *);
537
538 void DFS_write_tree_body (struct output_block *ob,
539 tree expr, sccs *expr_state, bool ref_p);
540
541 void DFS_write_tree (struct output_block *ob, sccs *from_state,
542 tree expr, bool ref_p, bool this_ref_p);
543
544 hashval_t
545 hash_scc (struct output_block *ob, unsigned first, unsigned size,
546 bool ref_p, bool this_ref_p);
547
548 hash_map<tree, sccs *> sccstate;
549 auto_vec<worklist, 32> worklist_vec;
550 struct obstack sccstate_obstack;
551 };
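/* Editorial note (added for exposition): this class is an iterative,
   worklist-based variant of Tarjan's SCC algorithm.  Every tree reachable
   from the root gets an sccs record holding its DFS number and low link;
   when cstate->low == cstate->dfsnum in the constructor below, the trees
   pushed onto sccstack since that point form one strongly connected
   component and are streamed together as an LTO_tree_scc record.  */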
552
553 /* Emit the physical representation of tree node EXPR to output block OB,
554 using depth-first search on the subgraph. If THIS_REF_P is true, the
555 leaves of EXPR are emitted as references via lto_output_tree_ref.
556 REF_P is used for streaming siblings of EXPR. If SINGLE_P is true,
557 this is for a rewalk of a single leaf SCC. */
558
559 DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
560 bool single_p)
561 {
562 unsigned int next_dfs_num = 1;
563 gcc_obstack_init (&sccstate_obstack);
564 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
565 while (!worklist_vec.is_empty ())
566 {
567 worklist &w = worklist_vec.last ();
568 expr = w.expr;
569 sccs *from_state = w.from_state;
570 sccs *cstate = w.cstate;
571 ref_p = w.ref_p;
572 this_ref_p = w.this_ref_p;
573 if (cstate == NULL)
574 {
575 sccs **slot = &sccstate.get_or_insert (expr);
576 cstate = *slot;
577 if (cstate)
578 {
579 gcc_checking_assert (from_state);
580 if (cstate->dfsnum < from_state->dfsnum)
581 from_state->low = MIN (cstate->dfsnum, from_state->low);
582 worklist_vec.pop ();
583 continue;
584 }
585
586 scc_entry e = { expr, 0 };
587 /* Not yet visited. DFS recurse and push it onto the stack. */
588 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
589 sccstack.safe_push (e);
590 cstate->dfsnum = next_dfs_num++;
591 cstate->low = cstate->dfsnum;
592 w.cstate = cstate;
593
594 if (TREE_CODE (expr) == INTEGER_CST
595 && !TREE_OVERFLOW (expr))
596 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
597 else
598 {
599 DFS_write_tree_body (ob, expr, cstate, ref_p);
600
601 /* Walk any LTO-specific edges. */
602 if (DECL_P (expr)
603 && TREE_CODE (expr) != FUNCTION_DECL
604 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
605 {
606 /* Handle DECL_INITIAL for symbols. */
607 tree initial
608 = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
609 expr);
610 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
611 }
612 }
613 continue;
614 }
615
616 /* See if we found an SCC. */
617 if (cstate->low == cstate->dfsnum)
618 {
619 unsigned first, size;
620 tree x;
621
622 /* If we are re-walking a single leaf SCC, just pop it and
623 let the earlier worklist item access the sccstack. */
624 if (single_p)
625 {
626 worklist_vec.pop ();
627 continue;
628 }
629
630 /* Pop the SCC and compute its size. */
631 first = sccstack.length ();
632 do
633 {
634 x = sccstack[--first].t;
635 }
636 while (x != expr);
637 size = sccstack.length () - first;
638
639 /* No need to compute hashes for LTRANS units, we don't perform
640 any merging there. */
641 hashval_t scc_hash = 0;
642 unsigned scc_entry_len = 0;
643 if (!flag_wpa)
644 {
645 scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);
646
647 /* Put the entries with the least number of collisions first. */
648 unsigned entry_start = 0;
649 scc_entry_len = size + 1;
650 for (unsigned i = 0; i < size;)
651 {
652 unsigned from = i;
653 for (i = i + 1; i < size
654 && (sccstack[first + i].hash
655 == sccstack[first + from].hash); ++i)
656 ;
657 if (i - from < scc_entry_len)
658 {
659 scc_entry_len = i - from;
660 entry_start = from;
661 }
662 }
663 for (unsigned i = 0; i < scc_entry_len; ++i)
664 std::swap (sccstack[first + i],
665 sccstack[first + entry_start + i]);
666
667 /* We already sorted SCC deterministically in hash_scc. */
668
669 /* Check that we have only one SCC entry candidate.
670 Naturally we may have conflicts if the hash function is not
671 strong enough. Let's see how far this gets. */
672 gcc_checking_assert (scc_entry_len == 1);
673 }
674
675 /* Write LTO_tree_scc. */
676 streamer_write_record_start (ob, LTO_tree_scc);
677 streamer_write_uhwi (ob, size);
678 streamer_write_uhwi (ob, scc_hash);
679
680 /* Write size-1 SCCs without wrapping them inside SCC bundles.
681 All INTEGER_CSTs need to be handled this way as we need
682 their type to materialize them. Also builtins are handled
683 this way.
684 ??? We still wrap these in LTO_tree_scc so at the
685 input side we can properly identify the tree we want
686 to ultimately return. */
687 if (size == 1)
688 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
689 else
690 {
691 /* Write the size of the SCC entry candidates. */
692 streamer_write_uhwi (ob, scc_entry_len);
693
694 /* Write all headers and populate the streamer cache. */
695 for (unsigned i = 0; i < size; ++i)
696 {
697 hashval_t hash = sccstack[first+i].hash;
698 tree t = sccstack[first+i].t;
699 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
700 t, hash, NULL);
701 gcc_assert (!exists_p);
702
703 if (!lto_is_streamable (t))
704 internal_error ("tree code %qs is not supported "
705 "in LTO streams",
706 get_tree_code_name (TREE_CODE (t)));
707
708 /* Write the header, containing everything needed to
709 materialize EXPR on the reading side. */
710 streamer_write_tree_header (ob, t);
711 }
712
713 /* Write the bitpacks and tree references. */
714 for (unsigned i = 0; i < size; ++i)
715 {
716 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
717
718 /* Mark the end of the tree. */
719 streamer_write_zero (ob);
720 }
721 }
722
723 /* Finally truncate the vector. */
724 sccstack.truncate (first);
725
726 if (from_state)
727 from_state->low = MIN (from_state->low, cstate->low);
728 worklist_vec.pop ();
729 continue;
730 }
731
732 gcc_checking_assert (from_state);
733 from_state->low = MIN (from_state->low, cstate->low);
734 if (cstate->dfsnum < from_state->dfsnum)
735 from_state->low = MIN (cstate->dfsnum, from_state->low);
736 worklist_vec.pop ();
737 }
738 }
739
740 DFS::~DFS ()
741 {
742 obstack_free (&sccstate_obstack, NULL);
743 }
744
745 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
746 DFS recurse for all tree edges originating from it. */
747
748 void
749 DFS::DFS_write_tree_body (struct output_block *ob,
750 tree expr, sccs *expr_state, bool ref_p)
751 {
752 #define DFS_follow_tree_edge(DEST) \
753 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
754
755 enum tree_code code;
756
757 if (streamer_dump_file)
758 {
759 print_node_brief (streamer_dump_file, " Streaming ",
760 expr, 4);
761 fprintf (streamer_dump_file, " to %s\n",
762 lto_section_name [ob->section_type]);
763 }
764
765 code = TREE_CODE (expr);
766
767 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
768 {
769 if (TREE_CODE (expr) != IDENTIFIER_NODE)
770 DFS_follow_tree_edge (TREE_TYPE (expr));
771 }
772
773 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
774 {
775 unsigned int count = vector_cst_encoded_nelts (expr);
776 for (unsigned int i = 0; i < count; ++i)
777 DFS_follow_tree_edge (VECTOR_CST_ENCODED_ELT (expr, i));
778 }
779
780 if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
781 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
782 DFS_follow_tree_edge (POLY_INT_CST_COEFF (expr, i));
783
784 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
785 {
786 DFS_follow_tree_edge (TREE_REALPART (expr));
787 DFS_follow_tree_edge (TREE_IMAGPART (expr));
788 }
789
790 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
791 {
792 /* Drop names that were created for anonymous entities. */
793 if (DECL_NAME (expr)
794 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
795 && IDENTIFIER_ANON_P (DECL_NAME (expr)))
796 ;
797 else
798 DFS_follow_tree_edge (DECL_NAME (expr));
799 if (TREE_CODE (expr) != TRANSLATION_UNIT_DECL
800 && ! DECL_CONTEXT (expr))
801 DFS_follow_tree_edge ((*all_translation_units)[0]);
802 else
803 DFS_follow_tree_edge (DECL_CONTEXT (expr));
804 }
805
806 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
807 {
808 DFS_follow_tree_edge (DECL_SIZE (expr));
809 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
810
811 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
812 special handling in LTO, it must be handled by streamer hooks. */
813
814 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
815
816 /* We use DECL_ABSTRACT_ORIGIN == error_mark_node to mark
817 declarations which should be eliminated by decl merging. Be sure none
818 leaks to this point. */
819 gcc_assert (DECL_ABSTRACT_ORIGIN (expr) != error_mark_node);
820 DFS_follow_tree_edge (DECL_ABSTRACT_ORIGIN (expr));
821
822 if ((VAR_P (expr)
823 || TREE_CODE (expr) == PARM_DECL)
824 && DECL_HAS_VALUE_EXPR_P (expr))
825 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
826 if (VAR_P (expr)
827 && DECL_HAS_DEBUG_EXPR_P (expr))
828 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
829 }
830
831 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
832 {
833 /* Make sure we don't inadvertently set the assembler name. */
834 if (DECL_ASSEMBLER_NAME_SET_P (expr))
835 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
836 }
837
838 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
839 {
840 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
841 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
842 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
843 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
844 gcc_checking_assert (!DECL_FCONTEXT (expr));
845 }
846
847 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
848 {
849 gcc_checking_assert (DECL_VINDEX (expr) == NULL);
850 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
851 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
852 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
853 }
854
855 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
856 {
857 DFS_follow_tree_edge (TYPE_SIZE (expr));
858 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
859 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
860 DFS_follow_tree_edge (TYPE_NAME (expr));
861 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
862 reconstructed during fixup. */
863 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
864 during fixup. */
865 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
866 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
867 /* TYPE_CANONICAL is re-computed during type merging, so no need
868 to follow it here. */
869 /* Do not stream TYPE_STUB_DECL; it is not needed by LTO but currently
870 it cannot be freed by free_lang_data without triggering ICEs in
871 langhooks. */
872 }
873
874 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
875 {
876 if (TREE_CODE (expr) == ENUMERAL_TYPE)
877 DFS_follow_tree_edge (TYPE_VALUES (expr));
878 else if (TREE_CODE (expr) == ARRAY_TYPE)
879 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
880 else if (RECORD_OR_UNION_TYPE_P (expr))
881 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
882 DFS_follow_tree_edge (t);
883 else if (TREE_CODE (expr) == FUNCTION_TYPE
884 || TREE_CODE (expr) == METHOD_TYPE)
885 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
886
887 if (!POINTER_TYPE_P (expr))
888 DFS_follow_tree_edge (TYPE_MIN_VALUE_RAW (expr));
889 DFS_follow_tree_edge (TYPE_MAX_VALUE_RAW (expr));
890 }
891
892 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
893 {
894 DFS_follow_tree_edge (TREE_PURPOSE (expr));
895 DFS_follow_tree_edge (TREE_VALUE (expr));
896 DFS_follow_tree_edge (TREE_CHAIN (expr));
897 }
898
899 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
900 {
901 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
902 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
903 }
904
905 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
906 {
907 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
908 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
909 DFS_follow_tree_edge (TREE_BLOCK (expr));
910 }
911
912 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
913 {
914 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
915 {
916 /* We would have to stream externals in the block chain as
917 non-references but we should have dropped them in
918 free-lang-data. */
919 gcc_assert (!VAR_OR_FUNCTION_DECL_P (t) || !DECL_EXTERNAL (t));
920 DFS_follow_tree_edge (t);
921 }
922
923 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
924 DFS_follow_tree_edge (BLOCK_ABSTRACT_ORIGIN (expr));
925
926 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
927 information for early inlined BLOCKs so drop it on the floor instead
928 of ICEing in dwarf2out.c. */
929
930 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
931 streaming time. */
932
933 /* Do not output BLOCK_SUBBLOCKS. Instead on streaming-in this
934 list is re-constructed from BLOCK_SUPERCONTEXT. */
935 }
936
937 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
938 {
939 unsigned i;
940 tree t;
941
942 /* Note that the number of BINFO slots has already been emitted in
943 EXPR's header (see streamer_write_tree_header) because this length
944 is needed to build the empty BINFO node on the reader side. */
945 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
946 DFS_follow_tree_edge (t);
947 DFS_follow_tree_edge (BINFO_OFFSET (expr));
948 DFS_follow_tree_edge (BINFO_VTABLE (expr));
949
950 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX,
951 BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
952 by C++ FE only. */
953 }
954
955 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
956 {
957 unsigned i;
958 tree index, value;
959
960 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
961 {
962 DFS_follow_tree_edge (index);
963 DFS_follow_tree_edge (value);
964 }
965 }
966
967 if (code == OMP_CLAUSE)
968 {
969 int i;
970 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
971 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
972 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
973 }
974
975 #undef DFS_follow_tree_edge
976 }
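/* Editorial note (added for exposition): DFS_write_tree_body must follow
   exactly the tree edges that streamer_write_tree_body later streams, so
   that every tree reachable from an SCC member is already in the writer
   cache when its reference is emitted; lto_output_tree guards against a
   mismatch with its in_dfs_walk assertion.  */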
977
978 /* Return a hash value for the tree T.
979 CACHE holds hash values of trees outside the current SCC. MAP, if non-NULL,
980 may hold hash values of trees inside the current SCC. */
981
982 static hashval_t
983 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
984 {
985 inchash::hash hstate;
986
987 #define visit(SIBLING) \
988 do { \
989 unsigned ix; \
990 if (!SIBLING) \
991 hstate.add_int (0); \
992 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
993 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
994 else if (map) \
995 hstate.add_int (*map->get (SIBLING)); \
996 else \
997 hstate.add_int (1); \
998 } while (0)
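/* Editorial note (added for exposition): visit() mixes the hash of a
   referenced tree into HSTATE.  Trees already streamed contribute their
   cached hash, trees inside the SCC being hashed contribute the tentative
   hash recorded in MAP, and on the first pass (MAP == NULL) every in-SCC
   reference contributes the constant 1, so that pass reflects only each
   node's local properties.  */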
999
1000 /* Hash TS_BASE. */
1001 enum tree_code code = TREE_CODE (t);
1002 hstate.add_int (code);
1003 if (!TYPE_P (t))
1004 {
1005 hstate.add_flag (TREE_SIDE_EFFECTS (t));
1006 hstate.add_flag (TREE_CONSTANT (t));
1007 hstate.add_flag (TREE_READONLY (t));
1008 hstate.add_flag (TREE_PUBLIC (t));
1009 }
1010 hstate.add_flag (TREE_ADDRESSABLE (t));
1011 hstate.add_flag (TREE_THIS_VOLATILE (t));
1012 if (DECL_P (t))
1013 hstate.add_flag (DECL_UNSIGNED (t));
1014 else if (TYPE_P (t))
1015 hstate.add_flag (TYPE_UNSIGNED (t));
1016 if (TYPE_P (t))
1017 hstate.add_flag (TYPE_ARTIFICIAL (t));
1018 else
1019 hstate.add_flag (TREE_NO_WARNING (t));
1020 hstate.add_flag (TREE_NOTHROW (t));
1021 hstate.add_flag (TREE_STATIC (t));
1022 hstate.add_flag (TREE_PROTECTED (t));
1023 hstate.add_flag (TREE_DEPRECATED (t));
1024 if (code != TREE_BINFO)
1025 hstate.add_flag (TREE_PRIVATE (t));
1026 if (TYPE_P (t))
1027 {
1028 hstate.add_flag (AGGREGATE_TYPE_P (t)
1029 ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
1030 hstate.add_flag (TYPE_ADDR_SPACE (t));
1031 }
1032 else if (code == SSA_NAME)
1033 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
1034 hstate.commit_flag ();
1035
1036 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1037 hstate.add_wide_int (wi::to_widest (t));
1038
1039 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1040 {
1041 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
1042 hstate.add_flag (r.cl);
1043 hstate.add_flag (r.sign);
1044 hstate.add_flag (r.signalling);
1045 hstate.add_flag (r.canonical);
1046 hstate.commit_flag ();
1047 hstate.add_int (r.uexp);
1048 hstate.add (r.sig, sizeof (r.sig));
1049 }
1050
1051 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1052 {
1053 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
1054 hstate.add_int (f.mode);
1055 hstate.add_int (f.data.low);
1056 hstate.add_int (f.data.high);
1057 }
1058
1059 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1060 {
1061 hstate.add_hwi (DECL_MODE (t));
1062 hstate.add_flag (DECL_NONLOCAL (t));
1063 hstate.add_flag (DECL_VIRTUAL_P (t));
1064 hstate.add_flag (DECL_IGNORED_P (t));
1065 hstate.add_flag (DECL_ABSTRACT_P (t));
1066 hstate.add_flag (DECL_ARTIFICIAL (t));
1067 hstate.add_flag (DECL_USER_ALIGN (t));
1068 hstate.add_flag (DECL_PRESERVE_P (t));
1069 hstate.add_flag (DECL_EXTERNAL (t));
1070 hstate.add_flag (DECL_NOT_GIMPLE_REG_P (t));
1071 hstate.commit_flag ();
1072 hstate.add_int (DECL_ALIGN (t));
1073 if (code == LABEL_DECL)
1074 {
1075 hstate.add_int (EH_LANDING_PAD_NR (t));
1076 hstate.add_int (LABEL_DECL_UID (t));
1077 }
1078 else if (code == FIELD_DECL)
1079 {
1080 hstate.add_flag (DECL_PACKED (t));
1081 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1082 hstate.add_flag (DECL_PADDING_P (t));
1083 hstate.add_flag (DECL_FIELD_ABI_IGNORED (t));
1084 hstate.add_int (DECL_OFFSET_ALIGN (t));
1085 }
1086 else if (code == VAR_DECL)
1087 {
1088 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1089 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1090 }
1091 if (code == RESULT_DECL
1092 || code == PARM_DECL
1093 || code == VAR_DECL)
1094 {
1095 hstate.add_flag (DECL_BY_REFERENCE (t));
1096 if (code == VAR_DECL
1097 || code == PARM_DECL)
1098 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1099 }
1100 hstate.commit_flag ();
1101 }
1102
1103 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1104 hstate.add_int (DECL_REGISTER (t));
1105
1106 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1107 {
1108 hstate.add_flag (DECL_COMMON (t));
1109 hstate.add_flag (DECL_DLLIMPORT_P (t));
1110 hstate.add_flag (DECL_WEAK (t));
1111 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1112 hstate.add_flag (DECL_COMDAT (t));
1113 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1114 hstate.add_int (DECL_VISIBILITY (t));
1115 if (code == VAR_DECL)
1116 {
1117 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1118 hstate.add_flag (DECL_HARD_REGISTER (t));
1119 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1120 }
1121 if (TREE_CODE (t) == FUNCTION_DECL)
1122 {
1123 hstate.add_flag (DECL_FINAL_P (t));
1124 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1125 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1126 }
1127 hstate.commit_flag ();
1128 }
1129
1130 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1131 {
1132 hstate.add_int (DECL_BUILT_IN_CLASS (t));
1133 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1134 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1135 hstate.add_flag (FUNCTION_DECL_DECL_TYPE (t));
1136 hstate.add_flag (DECL_UNINLINABLE (t));
1137 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1138 hstate.add_flag (DECL_IS_NOVOPS (t));
1139 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1140 hstate.add_flag (DECL_IS_MALLOC (t));
1141 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1142 hstate.add_flag (DECL_STATIC_CHAIN (t));
1143 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1144 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1145 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1146 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1147 hstate.add_flag (DECL_PURE_P (t));
1148 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1149 hstate.commit_flag ();
1150 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1151 hstate.add_int (DECL_UNCHECKED_FUNCTION_CODE (t));
1152 }
1153
1154 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1155 {
1156 hstate.add_hwi (TYPE_MODE (t));
1157 /* TYPE_NO_FORCE_BLK is private to stor-layout and needs
1158 no streaming. */
1159 hstate.add_flag (TYPE_PACKED (t));
1160 hstate.add_flag (TYPE_RESTRICT (t));
1161 hstate.add_flag (TYPE_USER_ALIGN (t));
1162 hstate.add_flag (TYPE_READONLY (t));
1163 if (RECORD_OR_UNION_TYPE_P (t))
1164 {
1165 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1166 hstate.add_flag (TYPE_FINAL_P (t));
1167 hstate.add_flag (TYPE_CXX_ODR_P (t));
1168 }
1169 else if (code == ARRAY_TYPE)
1170 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1171 if (code == ARRAY_TYPE || code == INTEGER_TYPE)
1172 hstate.add_flag (TYPE_STRING_FLAG (t));
1173 if (AGGREGATE_TYPE_P (t))
1174 hstate.add_flag (TYPE_TYPELESS_STORAGE (t));
1175 hstate.commit_flag ();
1176 hstate.add_int (TYPE_PRECISION (t));
1177 hstate.add_int (TYPE_ALIGN (t));
1178 hstate.add_int (TYPE_EMPTY_P (t));
1179 }
1180
1181 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1182 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1183 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1184
1185 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1186 /* We don't stream these when passing things to a different target. */
1187 && !lto_stream_offload_p)
1188 hstate.add_hwi (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1189
1190 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1191 hstate.add_hwi (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1192
1193 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1194 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1195
1196 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1197 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1198
1199 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1200 {
1201 if (code != IDENTIFIER_NODE)
1202 visit (TREE_TYPE (t));
1203 }
1204
1205 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1206 {
1207 unsigned int count = vector_cst_encoded_nelts (t);
1208 for (unsigned int i = 0; i < count; ++i)
1209 visit (VECTOR_CST_ENCODED_ELT (t, i));
1210 }
1211
1212 if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
1213 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1214 visit (POLY_INT_CST_COEFF (t, i));
1215
1216 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1217 {
1218 visit (TREE_REALPART (t));
1219 visit (TREE_IMAGPART (t));
1220 }
1221
1222 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1223 {
1224 /* Drop names that were created for anonymous entities. */
1225 if (DECL_NAME (t)
1226 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1227 && IDENTIFIER_ANON_P (DECL_NAME (t)))
1228 ;
1229 else
1230 visit (DECL_NAME (t));
1231 if (DECL_FILE_SCOPE_P (t))
1232 ;
1233 else
1234 visit (DECL_CONTEXT (t));
1235 }
1236
1237 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1238 {
1239 visit (DECL_SIZE (t));
1240 visit (DECL_SIZE_UNIT (t));
1241 visit (DECL_ATTRIBUTES (t));
1242 if ((code == VAR_DECL
1243 || code == PARM_DECL)
1244 && DECL_HAS_VALUE_EXPR_P (t))
1245 visit (DECL_VALUE_EXPR (t));
1246 if (code == VAR_DECL
1247 && DECL_HAS_DEBUG_EXPR_P (t))
1248 visit (DECL_DEBUG_EXPR (t));
1249 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
1250 be able to call get_symbol_initial_value. */
1251 }
1252
1253 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1254 {
1255 if (DECL_ASSEMBLER_NAME_SET_P (t))
1256 visit (DECL_ASSEMBLER_NAME (t));
1257 }
1258
1259 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1260 {
1261 visit (DECL_FIELD_OFFSET (t));
1262 visit (DECL_BIT_FIELD_TYPE (t));
1263 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1264 visit (DECL_FIELD_BIT_OFFSET (t));
1265 }
1266
1267 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1268 {
1269 visit (DECL_FUNCTION_PERSONALITY (t));
1270 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1271 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1272 }
1273
1274 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1275 {
1276 visit (TYPE_SIZE (t));
1277 visit (TYPE_SIZE_UNIT (t));
1278 visit (TYPE_ATTRIBUTES (t));
1279 visit (TYPE_NAME (t));
1280 visit (TYPE_MAIN_VARIANT (t));
1281 if (TYPE_FILE_SCOPE_P (t))
1282 ;
1283 else
1284 visit (TYPE_CONTEXT (t));
1285 }
1286
1287 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1288 {
1289 if (code == ENUMERAL_TYPE)
1290 visit (TYPE_VALUES (t));
1291 else if (code == ARRAY_TYPE)
1292 visit (TYPE_DOMAIN (t));
1293 else if (RECORD_OR_UNION_TYPE_P (t))
1294 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1295 visit (f);
1296 else if (code == FUNCTION_TYPE
1297 || code == METHOD_TYPE)
1298 visit (TYPE_ARG_TYPES (t));
1299 if (!POINTER_TYPE_P (t))
1300 visit (TYPE_MIN_VALUE_RAW (t));
1301 visit (TYPE_MAX_VALUE_RAW (t));
1302 }
1303
1304 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1305 {
1306 visit (TREE_PURPOSE (t));
1307 visit (TREE_VALUE (t));
1308 visit (TREE_CHAIN (t));
1309 }
1310
1311 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1312 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1313 visit (TREE_VEC_ELT (t, i));
1314
1315 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1316 {
1317 hstate.add_hwi (TREE_OPERAND_LENGTH (t));
1318 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1319 visit (TREE_OPERAND (t, i));
1320 }
1321
1322 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1323 {
1324 unsigned i;
1325 tree b;
1326 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1327 visit (b);
1328 visit (BINFO_OFFSET (t));
1329 visit (BINFO_VTABLE (t));
1330 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1331 BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
1332 by C++ FE only. */
1333 }
1334
1335 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1336 {
1337 unsigned i;
1338 tree index, value;
1339 hstate.add_hwi (CONSTRUCTOR_NELTS (t));
1340 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1341 {
1342 visit (index);
1343 visit (value);
1344 }
1345 }
1346
1347 if (code == OMP_CLAUSE)
1348 {
1349 int i;
1350 HOST_WIDE_INT val;
1351
1352 hstate.add_hwi (OMP_CLAUSE_CODE (t));
1353 switch (OMP_CLAUSE_CODE (t))
1354 {
1355 case OMP_CLAUSE_DEFAULT:
1356 val = OMP_CLAUSE_DEFAULT_KIND (t);
1357 break;
1358 case OMP_CLAUSE_SCHEDULE:
1359 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1360 break;
1361 case OMP_CLAUSE_DEPEND:
1362 val = OMP_CLAUSE_DEPEND_KIND (t);
1363 break;
1364 case OMP_CLAUSE_MAP:
1365 val = OMP_CLAUSE_MAP_KIND (t);
1366 break;
1367 case OMP_CLAUSE_PROC_BIND:
1368 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1369 break;
1370 case OMP_CLAUSE_REDUCTION:
1371 case OMP_CLAUSE_TASK_REDUCTION:
1372 case OMP_CLAUSE_IN_REDUCTION:
1373 val = OMP_CLAUSE_REDUCTION_CODE (t);
1374 break;
1375 default:
1376 val = 0;
1377 break;
1378 }
1379 hstate.add_hwi (val);
1380 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1381 visit (OMP_CLAUSE_OPERAND (t, i));
1382 visit (OMP_CLAUSE_CHAIN (t));
1383 }
1384
1385 return hstate.end ();
1386
1387 #undef visit
1388 }
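/* Editorial note (added for exposition): this hash exists so that identical
   SCCs produced by different translation units get identical hash values
   and can be unified at WPA time; when new fields start being streamed,
   they should normally be reflected here as well so the hash keeps
   distinguishing trees that the streamer distinguishes.  */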
1389
1390 /* Compare two SCC entries by their hash value for qsorting them. */
1391
1392 int
1393 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1394 {
1395 const scc_entry *p1 = (const scc_entry *) p1_;
1396 const scc_entry *p2 = (const scc_entry *) p2_;
1397 if (p1->hash < p2->hash)
1398 return -1;
1399 else if (p1->hash > p2->hash)
1400 return 1;
1401 return 0;
1402 }
1403
1404 /* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
1405 THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST. */
1406
1407 hashval_t
1408 DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
1409 bool ref_p, bool this_ref_p)
1410 {
1411 unsigned int last_classes = 0, iterations = 0;
1412
1413 /* Compute hash values for the SCC members. */
1414 for (unsigned i = 0; i < size; ++i)
1415 sccstack[first+i].hash
1416 = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);
1417
1418 if (size == 1)
1419 return sccstack[first].hash;
1420
1421 /* We aim to get a unique hash for every tree within the SCC and to compute
1422 the hash value of the whole SCC by combining all values together in a stable
1423 (entry-point independent) order. This guarantees that the same SCC regions
1424 within different translation units will get the same hash values and
1425 therefore will be merged at WPA time.
1426
1427 Often the hashes are already unique. In that case we compute the SCC hash
1428 by combining individual hash values in an increasing order.
1429
1430 If there are duplicates, we seek at least one tree with unique hash (and
1431 pick one with minimal hash and this property). Then we obtain a stable
1432 order by DFS walk starting from this unique tree and then use the index
1433 within this order to make individual hash values unique.
1434
1435 If there is no tree with unique hash, we iteratively propagate the hash
1436 values across the internal edges of SCC. This usually quickly leads
1437 to unique hashes. Consider, for example, an SCC containing two pointers
1438 that are identical except for the types they point to and assume that
1439 these types are also part of the SCC. The propagation will add the
1440 points-to type information into their hash values. */
1441 do
1442 {
1443 /* Sort the SCC so we can easily check for uniqueness. */
1444 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1445
1446 unsigned int classes = 1;
1447 int firstunique = -1;
1448
1449 /* Find the tree with lowest unique hash (if it exists) and compute
1450 the number of equivalence classes. */
1451 if (sccstack[first].hash != sccstack[first+1].hash)
1452 firstunique = 0;
1453 for (unsigned i = 1; i < size; ++i)
1454 if (sccstack[first+i-1].hash != sccstack[first+i].hash)
1455 {
1456 classes++;
1457 if (firstunique == -1
1458 && (i == size - 1
1459 || sccstack[first+i+1].hash != sccstack[first+i].hash))
1460 firstunique = i;
1461 }
1462
1463 /* If we found a tree with unique hash, stop the iteration. */
1464 if (firstunique != -1
1465 /* Also terminate if we run out of iterations or if the number of
1466 equivalence classes is no longer increasing.
1467 For example a cyclic list of trees that are all equivalent will
1468 never have a unique entry point; however, we do not build such SCCs
1469 in our IL. */
1470 || classes <= last_classes || iterations > 16)
1471 {
1472 hashval_t scc_hash;
1473
1474 /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
1475 starting from FIRSTUNIQUE to obtain a stable order. */
1476 if (classes != size && firstunique != -1)
1477 {
1478 hash_map <tree, hashval_t> map(size*2);
1479
1480 /* Store hash values into a map, so we can associate them with
1481 the reordered SCC. */
1482 for (unsigned i = 0; i < size; ++i)
1483 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1484
1485 DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
1486 true);
1487 gcc_assert (again.sccstack.length () == size);
1488
1489 memcpy (sccstack.address () + first,
1490 again.sccstack.address (),
1491 sizeof (scc_entry) * size);
1492
1493 /* Update hash values of individual members by hashing in the
1494 index within the stable order. This ensures uniqueness.
1495 Also compute the SCC hash by mixing in all hash values in
1496 the stable order we obtained. */
1497 sccstack[first].hash = *map.get (sccstack[first].t);
1498 scc_hash = sccstack[first].hash;
1499 for (unsigned i = 1; i < size; ++i)
1500 {
1501 sccstack[first+i].hash
1502 = iterative_hash_hashval_t (i,
1503 *map.get (sccstack[first+i].t));
1504 scc_hash
1505 = iterative_hash_hashval_t (scc_hash,
1506 sccstack[first+i].hash);
1507 }
1508 }
1509 /* If we got a unique hash value for each tree, then the sort already
1510 ensured an entry-point independent order. Only compute the final
1511 SCC hash.
1512
1513 If we failed to find a unique entry point, we go by the same
1514 route. We will eventually introduce unwanted hash conflicts. */
1515 else
1516 {
1517 scc_hash = sccstack[first].hash;
1518 for (unsigned i = 1; i < size; ++i)
1519 scc_hash
1520 = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);
1521
1522 /* We cannot 100% guarantee that hash collisions won't make it
1523 impossible to find a unique entry point. This however
1524 should be an extremely rare case. ICE for now so possible
1525 issues are found and evaluated. */
1526 gcc_checking_assert (classes == size);
1527 }
1528
1529 /* To avoid conflicts across SCCs, iteratively hash the whole SCC
1530 hash into the hash of each element. */
1531 for (unsigned i = 0; i < size; ++i)
1532 sccstack[first+i].hash
1533 = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
1534 return scc_hash;
1535 }
1536
1537 last_classes = classes;
1538 iterations++;
1539
1540 /* We failed to identify the entry point; propagate hash values across
1541 the edges. */
1542 hash_map <tree, hashval_t> map(size*2);
1543
1544 for (unsigned i = 0; i < size; ++i)
1545 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1546
1547 for (unsigned i = 0; i < size; i++)
1548 sccstack[first+i].hash
1549 = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
1550 }
1551 while (true);
1552 }
1553
1554 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1555 already in the streamer cache. Main routine called for
1556 each visit of EXPR. */
1557
1558 void
1559 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1560 tree expr, bool ref_p, bool this_ref_p)
1561 {
1562 /* Handle special cases. */
1563 if (expr == NULL_TREE)
1564 return;
1565
1566 /* Do not DFS walk into indexable trees. */
1567 if (this_ref_p && tree_is_indexable (expr))
1568 return;
1569
1570 /* Check if we already streamed EXPR. */
1571 if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1572 return;
1573
1574 worklist w;
1575 w.expr = expr;
1576 w.from_state = from_state;
1577 w.cstate = NULL;
1578 w.ref_p = ref_p;
1579 w.this_ref_p = this_ref_p;
1580 worklist_vec.safe_push (w);
1581 }
1582
1583
1584 /* Emit the physical representation of tree node EXPR to output block OB.
1585 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
1586 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1587
1588 void
1589 lto_output_tree (struct output_block *ob, tree expr,
1590 bool ref_p, bool this_ref_p)
1591 {
1592 unsigned ix;
1593 bool existed_p;
1594
1595 if (expr == NULL_TREE)
1596 {
1597 streamer_write_record_start (ob, LTO_null);
1598 return;
1599 }
1600
1601 if (this_ref_p && tree_is_indexable (expr))
1602 {
1603 lto_output_tree_ref (ob, expr);
1604 return;
1605 }
1606
1607 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1608 if (existed_p)
1609 {
1610 /* If a node has already been streamed out, make sure that
1611 we don't write it more than once. Otherwise, the reader
1612 will instantiate two different nodes for the same object. */
1613 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1614 streamer_write_uhwi (ob, ix);
1615 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1616 lto_tree_code_to_tag (TREE_CODE (expr)));
1617 lto_stats.num_pickle_refs_output++;
1618 }
1619 else
1620 {
1621 /* This is the first time we see EXPR, write all reachable
1622 trees to OB. */
1623 static bool in_dfs_walk;
1624
1625 /* Protect against recursion which means disconnect between
1626 what tree edges we walk in the DFS walk and what edges
1627 we stream out. */
1628 gcc_assert (!in_dfs_walk);
1629
1630 if (streamer_dump_file)
1631 {
1632 print_node_brief (streamer_dump_file, " Streaming SCC of ",
1633 expr, 4);
1634 fprintf (streamer_dump_file, "\n");
1635 }
1636
1637 /* Start the DFS walk. */
1638 /* Save ob state ... */
1639 /* let's see ... */
1640 in_dfs_walk = true;
1641 DFS (ob, expr, ref_p, this_ref_p, false);
1642 in_dfs_walk = false;
1643
1644 /* Finally append a reference to the tree we were writing.
1645 ??? If expr ended up as a singleton we could have
1646 inlined it here and avoid outputting a reference. */
1647 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1648 gcc_assert (existed_p);
1649 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1650 streamer_write_uhwi (ob, ix);
1651 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1652 lto_tree_code_to_tag (TREE_CODE (expr)));
1653 if (streamer_dump_file)
1654 {
1655 print_node_brief (streamer_dump_file, " Finished SCC of ",
1656 expr, 4);
1657 fprintf (streamer_dump_file, "\n\n");
1658 }
1659 lto_stats.num_pickle_refs_output++;
1660 }
1661 }
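/* Editorial sketch (added for exposition) of the record shapes one
   lto_output_tree call can emit, as implied by the code above:

     LTO_null                                   EXPR == NULL_TREE
     <type/decl reference record>               indexable EXPR
     LTO_tree_pickle_reference ix tag           EXPR already in the cache
     LTO_tree_scc ... LTO_tree_pickle_reference ix tag
                                                first time EXPR is seen

   In the last case the DFS walk may emit several LTO_tree_scc records (one
   per SCC discovered), and the trailing pickle reference tells the reader
   which cache slot now holds EXPR itself.  */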
1662
1663
1664 /* Output to OB a list of try/catch handlers starting with FIRST. */
1665
1666 static void
1667 output_eh_try_list (struct output_block *ob, eh_catch first)
1668 {
1669 eh_catch n;
1670
1671 for (n = first; n; n = n->next_catch)
1672 {
1673 streamer_write_record_start (ob, LTO_eh_catch);
1674 stream_write_tree (ob, n->type_list, true);
1675 stream_write_tree (ob, n->filter_list, true);
1676 stream_write_tree (ob, n->label, true);
1677 }
1678
1679 streamer_write_record_start (ob, LTO_null);
1680 }
1681
1682
1683 /* Output EH region R to output block OB. R may be NULL, in which case
1684 an LTO_null record is emitted. The indices of R and of its neighbours
1685 in the region tree are written so the reader can rebuild the tree. */
1686
1687 static void
1688 output_eh_region (struct output_block *ob, eh_region r)
1689 {
1690 enum LTO_tags tag;
1691
1692 if (r == NULL)
1693 {
1694 streamer_write_record_start (ob, LTO_null);
1695 return;
1696 }
1697
1698 if (r->type == ERT_CLEANUP)
1699 tag = LTO_ert_cleanup;
1700 else if (r->type == ERT_TRY)
1701 tag = LTO_ert_try;
1702 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1703 tag = LTO_ert_allowed_exceptions;
1704 else if (r->type == ERT_MUST_NOT_THROW)
1705 tag = LTO_ert_must_not_throw;
1706 else
1707 gcc_unreachable ();
1708
1709 streamer_write_record_start (ob, tag);
1710 streamer_write_hwi (ob, r->index);
1711
1712 if (r->outer)
1713 streamer_write_hwi (ob, r->outer->index);
1714 else
1715 streamer_write_zero (ob);
1716
1717 if (r->inner)
1718 streamer_write_hwi (ob, r->inner->index);
1719 else
1720 streamer_write_zero (ob);
1721
1722 if (r->next_peer)
1723 streamer_write_hwi (ob, r->next_peer->index);
1724 else
1725 streamer_write_zero (ob);
1726
1727 if (r->type == ERT_TRY)
1728 {
1729 output_eh_try_list (ob, r->u.eh_try.first_catch);
1730 }
1731 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1732 {
1733 stream_write_tree (ob, r->u.allowed.type_list, true);
1734 stream_write_tree (ob, r->u.allowed.label, true);
1735 streamer_write_uhwi (ob, r->u.allowed.filter);
1736 }
1737 else if (r->type == ERT_MUST_NOT_THROW)
1738 {
1739 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1740 bitpack_d bp = bitpack_create (ob->main_stream);
1741 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1742 streamer_write_bitpack (&bp);
1743 }
1744
1745 if (r->landing_pads)
1746 streamer_write_hwi (ob, r->landing_pads->index);
1747 else
1748 streamer_write_zero (ob);
1749 }
1750
1751
1752 /* Output landing pad LP to OB. */
1753
1754 static void
1755 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1756 {
1757 if (lp == NULL)
1758 {
1759 streamer_write_record_start (ob, LTO_null);
1760 return;
1761 }
1762
1763 streamer_write_record_start (ob, LTO_eh_landing_pad);
1764 streamer_write_hwi (ob, lp->index);
1765 if (lp->next_lp)
1766 streamer_write_hwi (ob, lp->next_lp->index);
1767 else
1768 streamer_write_zero (ob);
1769
1770 if (lp->region)
1771 streamer_write_hwi (ob, lp->region->index);
1772 else
1773 streamer_write_zero (ob);
1774
1775 stream_write_tree (ob, lp->post_landing_pad, true);
1776 }
1777
1778
1779 /* Output the existing eh_table to OB. */
1780
1781 static void
1782 output_eh_regions (struct output_block *ob, struct function *fn)
1783 {
1784 if (fn->eh && fn->eh->region_tree)
1785 {
1786 unsigned i;
1787 eh_region eh;
1788 eh_landing_pad lp;
1789 tree ttype;
1790
1791 streamer_write_record_start (ob, LTO_eh_table);
1792
1793 /* Emit the index of the root of the EH region tree. */
1794 streamer_write_hwi (ob, fn->eh->region_tree->index);
1795
1796 /* Emit all the EH regions in the region array. */
1797 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1798 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1799 output_eh_region (ob, eh);
1800
1801 /* Emit all landing pads. */
1802 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1803 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1804 output_eh_lp (ob, lp);
1805
1806 /* Emit all the runtime type data. */
1807 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1808 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1809 stream_write_tree (ob, ttype, true);
1810
1811 /* Emit the table of action chains. */
1812 if (targetm.arm_eabi_unwinder)
1813 {
1814 tree t;
1815 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1816 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1817 stream_write_tree (ob, t, true);
1818 }
1819 else
1820 {
1821 uchar c;
1822 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1823 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1824 streamer_write_char_stream (ob->main_stream, c);
1825 }
1826 }
1827
1828 /* The LTO_null either terminates the record or indicates that there
1829 are no eh_records at all. */
1830 streamer_write_record_start (ob, LTO_null);
1831 }
1832
1833
1834 /* Output all of the active ssa names to the ssa_names stream. */
1835
1836 static void
1837 output_ssa_names (struct output_block *ob, struct function *fn)
1838 {
1839 unsigned int i, len;
1840
1841 len = vec_safe_length (SSANAMES (fn));
1842 streamer_write_uhwi (ob, len);
1843
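/* Walk the non-virtual, live SSA names; slot 0 is never streamed.  Each
   record is the name's index, a default-def flag and either its variable
   or its type; a zero index terminates the list.  */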
1844 for (i = 1; i < len; i++)
1845 {
1846 tree ptr = (*SSANAMES (fn))[i];
1847
1848 if (ptr == NULL_TREE
1849 || SSA_NAME_IN_FREE_LIST (ptr)
1850 || virtual_operand_p (ptr)
1851 /* Simply skip unreleased SSA names. */
1852 || (! SSA_NAME_IS_DEFAULT_DEF (ptr)
1853 && (! SSA_NAME_DEF_STMT (ptr)
1854 || ! gimple_bb (SSA_NAME_DEF_STMT (ptr)))))
1855 continue;
1856
1857 streamer_write_uhwi (ob, i);
1858 streamer_write_char_stream (ob->main_stream,
1859 SSA_NAME_IS_DEFAULT_DEF (ptr));
1860 if (SSA_NAME_VAR (ptr))
1861 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1862 else
1863 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1864 stream_write_tree (ob, TREE_TYPE (ptr), true);
1865 }
1866
1867 streamer_write_zero (ob);
1868 }
1869
1870
1871
1872 /* Output the cfg. */
1873
1874 static void
1875 output_cfg (struct output_block *ob, struct function *fn)
1876 {
1877 struct lto_output_stream *tmp_stream = ob->main_stream;
1878 basic_block bb;
1879
1880 ob->main_stream = ob->cfg_stream;
1881
1882 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1883 profile_status_for_fn (fn));
1884
1885 /* Output the number of the highest basic block. */
1886 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1887
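/* For each basic block emit its index, the number of successor edges
   and, for every successor edge, the destination block index, the edge
   probability and the edge flags.  The list of blocks is terminated
   by -1.  */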
1888 FOR_ALL_BB_FN (bb, fn)
1889 {
1890 edge_iterator ei;
1891 edge e;
1892
1893 streamer_write_hwi (ob, bb->index);
1894
1895 /* Output the successors and the edge flags. */
1896 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1897 FOR_EACH_EDGE (e, ei, bb->succs)
1898 {
1899 streamer_write_uhwi (ob, e->dest->index);
1900 e->probability.stream_out (ob);
1901 streamer_write_uhwi (ob, e->flags);
1902 }
1903 }
1904
1905 streamer_write_hwi (ob, -1);
1906
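/* Stream the basic block order by walking the next_bb chain from the
   entry block; the chain is again terminated by -1.  */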
1907 bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
1908 while (bb->next_bb)
1909 {
1910 streamer_write_hwi (ob, bb->next_bb->index);
1911 bb = bb->next_bb;
1912 }
1913
1914 streamer_write_hwi (ob, -1);
1915
1916 /* Output the number of loops. */
1917 streamer_write_uhwi (ob, number_of_loops (fn));
1918
1919 /* Output each loop, skipping the tree root which has number zero. */
1920 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1921 {
1922 class loop *loop = get_loop (fn, i);
1923
1924 /* Write the index of the loop header. That's enough to rebuild
1925 the loop tree on the reader side. Stream -1 for an unused
1926 loop entry. */
1927 if (!loop)
1928 {
1929 streamer_write_hwi (ob, -1);
1930 continue;
1931 }
1932 else
1933 streamer_write_hwi (ob, loop->header->index);
1934
1935 /* Write everything copy_loop_info copies. */
1936 streamer_write_enum (ob->main_stream,
1937 loop_estimation, EST_LAST, loop->estimate_state);
1938 streamer_write_hwi (ob, loop->any_upper_bound);
1939 if (loop->any_upper_bound)
1940 streamer_write_widest_int (ob, loop->nb_iterations_upper_bound);
1941 streamer_write_hwi (ob, loop->any_likely_upper_bound);
1942 if (loop->any_likely_upper_bound)
1943 streamer_write_widest_int (ob, loop->nb_iterations_likely_upper_bound);
1944 streamer_write_hwi (ob, loop->any_estimate);
1945 if (loop->any_estimate)
1946 streamer_write_widest_int (ob, loop->nb_iterations_estimate);
1947
1948 /* Write OMP SIMD related info. */
1949 streamer_write_hwi (ob, loop->safelen);
1950 streamer_write_hwi (ob, loop->unroll);
1951 streamer_write_hwi (ob, loop->owned_clique);
1952 streamer_write_hwi (ob, loop->dont_vectorize);
1953 streamer_write_hwi (ob, loop->force_vectorize);
1954 streamer_write_hwi (ob, loop->finite_p);
1955 stream_write_tree (ob, loop->simduid, true);
1956 }
1957
1958 ob->main_stream = tmp_stream;
1959 }
1960
1961
1962 /* Create the header in the file using OB.  If the section type is for
1963    a function, FN is the decl of that function.  */
1964
1965 void
1966 produce_asm (struct output_block *ob, tree fn)
1967 {
1968 enum lto_section_type section_type = ob->section_type;
1969 struct lto_function_header header;
1970 char *section_name;
1971
1972 if (section_type == LTO_section_function_body)
1973 {
1974 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1975 section_name = lto_get_section_name (section_type, name,
1976 symtab_node::get (fn)->order,
1977 NULL);
1978 }
1979 else
1980 section_name = lto_get_section_name (section_type, NULL, 0, NULL);
1981
1982 lto_begin_section (section_name, !flag_wpa);
1983 free (section_name);
1984
1985 /* The entire header stream is computed here. */
1986 memset (&header, 0, sizeof (struct lto_function_header));
1987
1988 if (section_type == LTO_section_function_body)
1989 header.cfg_size = ob->cfg_stream->total_size;
1990 header.main_size = ob->main_stream->total_size;
1991 header.string_size = ob->string_stream->total_size;
1992 lto_write_data (&header, sizeof header);
1993
1994 /* Put all of the gimple and the string table out to the asm file as a
1995 block of text. */
1996 if (section_type == LTO_section_function_body)
1997 lto_write_stream (ob->cfg_stream);
1998 lto_write_stream (ob->main_stream);
1999 lto_write_stream (ob->string_stream);
2000
2001 lto_end_section ();
2002 }
2003
2004
2005 /* Output the base body of struct function FN using output block OB. */
2006
2007 static void
2008 output_struct_function_base (struct output_block *ob, struct function *fn)
2009 {
2010 struct bitpack_d bp;
2011 unsigned i;
2012 tree t;
2013
2014 /* Output the static chain and non-local goto save area. */
2015 stream_write_tree (ob, fn->static_chain_decl, true);
2016 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
2017
2018 /* Output all the local variables in the function. */
2019 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
2020 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
2021 stream_write_tree (ob, t, true);
2022
2023 /* Output current IL state of the function. */
2024 streamer_write_uhwi (ob, fn->curr_properties);
2025
2026 /* Write all the attributes for FN. */
2027 bp = bitpack_create (ob->main_stream);
2028 bp_pack_value (&bp, fn->is_thunk, 1);
2029 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
2030 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
2031 bp_pack_value (&bp, fn->returns_struct, 1);
2032 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
2033 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
2034 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
2035 bp_pack_value (&bp, fn->after_inlining, 1);
2036 bp_pack_value (&bp, fn->stdarg, 1);
2037 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
2038 bp_pack_value (&bp, fn->has_forced_label_in_static, 1);
2039 bp_pack_value (&bp, fn->calls_alloca, 1);
2040 bp_pack_value (&bp, fn->calls_setjmp, 1);
2041 bp_pack_value (&bp, fn->calls_eh_return, 1);
2042 bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
2043 bp_pack_value (&bp, fn->has_simduid_loops, 1);
2044 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
2045 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
2046 bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);
2047
2048 /* Output the function start and end loci. */
2049 stream_output_location (ob, &bp, fn->function_start_locus);
2050 stream_output_location (ob, &bp, fn->function_end_locus);
2051
2052 /* Save the instance discriminator if present. */
2053 int *instance_number_p = NULL;
2054 if (decl_to_instance_map)
2055 instance_number_p = decl_to_instance_map->get (fn->decl);
2056 bp_pack_value (&bp, !!instance_number_p, 1);
2057 if (instance_number_p)
2058 bp_pack_value (&bp, *instance_number_p, sizeof (int) * CHAR_BIT);
2059
2060 streamer_write_bitpack (&bp);
2061 }
2062
2063
2064 /* Collect all leaf BLOCKs beyond ROOT into LEAFS. */
2065
2066 static void
2067 collect_block_tree_leafs (tree root, vec<tree> &leafs)
2068 {
2069 for (root = BLOCK_SUBBLOCKS (root); root; root = BLOCK_CHAIN (root))
2070 if (! BLOCK_SUBBLOCKS (root))
2071 leafs.safe_push (root);
2072 else
2073 collect_block_tree_leafs (root, leafs);
2074 }
2075
2076 /* This performs function body modifications that are needed for streaming
2077 to work. */
2078
2079 void
2080 lto_prepare_function_for_streaming (struct cgraph_node *node)
2081 {
2082 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2083 basic_block bb;
2084
2085 if (number_of_loops (fn))
2086 {
2087 push_cfun (fn);
2088 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
2089 loop_optimizer_finalize ();
2090 pop_cfun ();
2091 }
2092 /* We will renumber the statements. The code that does this uses
2093 the same ordering that we use for serializing them so we can use
2094 the same code on the other end and not have to write out the
2095 statement numbers. We do not assign UIDs to PHIs here because
2096 virtual PHIs get re-computed on-the-fly which would make numbers
2097 inconsistent. */
2098 set_gimple_stmt_max_uid (fn, 0);
2099 FOR_ALL_BB_FN (bb, fn)
2100 {
2101 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2102 gsi_next (&gsi))
2103 {
2104 gphi *stmt = gsi.phi ();
2105
2106 /* Virtual PHIs are not going to be streamed. */
2107 if (!virtual_operand_p (gimple_phi_result (stmt)))
2108 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fn));
2109 }
2110 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
2111 gsi_next (&gsi))
2112 {
2113 gimple *stmt = gsi_stmt (gsi);
2114 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fn));
2115 }
2116 }
2117 /* To avoid keeping duplicate gimple IDs in the statements, renumber
2118 virtual phis now. */
2119 FOR_ALL_BB_FN (bb, fn)
2120 {
2121 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2122 gsi_next (&gsi))
2123 {
2124 gphi *stmt = gsi.phi ();
2125 if (virtual_operand_p (gimple_phi_result (stmt)))
2126 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fn));
2127 }
2128 }
2129
2130 }
2131
2132 /* Output the body of function NODE->DECL. */
2133
2134 static void
2135 output_function (struct cgraph_node *node)
2136 {
2137 tree function;
2138 struct function *fn;
2139 basic_block bb;
2140 struct output_block *ob;
2141
2142 if (streamer_dump_file)
2143 fprintf (streamer_dump_file, "\nStreaming body of %s\n",
2144 node->dump_name ());
2145
2146 function = node->decl;
2147 fn = DECL_STRUCT_FUNCTION (function);
2148 ob = create_output_block (LTO_section_function_body);
2149
2150 clear_line_info (ob);
2151 ob->symbol = node;
2152
2153 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
2154
2155 /* Make string 0 be a NULL string. */
2156 streamer_write_char_stream (ob->string_stream, 0);
2157
2158 streamer_write_record_start (ob, LTO_function);
2159
2160 /* Output decls for the result and the arguments. */
2161 stream_write_tree (ob, DECL_RESULT (function), true);
2162 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
2163
2164 /* Output debug args if available. */
2165 vec<tree, va_gc> **debugargs = decl_debug_args_lookup (function);
2166 if (! debugargs)
2167 streamer_write_uhwi (ob, 0);
2168 else
2169 {
2170 streamer_write_uhwi (ob, (*debugargs)->length ());
2171 for (unsigned i = 0; i < (*debugargs)->length (); ++i)
2172 stream_write_tree (ob, (**debugargs)[i], true);
2173 }
2174
2175 /* Output DECL_INITIAL for the function, which contains the tree of
2176 lexical scopes. */
2177 stream_write_tree (ob, DECL_INITIAL (function), true);
2178 /* As we do not recurse into BLOCK_SUBBLOCKS but only follow
2179 BLOCK_SUPERCONTEXT, collect the block tree leaves and stream those. */
2180 auto_vec<tree> block_tree_leafs;
2181 if (DECL_INITIAL (function))
2182 collect_block_tree_leafs (DECL_INITIAL (function), block_tree_leafs);
2183 streamer_write_uhwi (ob, block_tree_leafs.length ());
2184 for (unsigned i = 0; i < block_tree_leafs.length (); ++i)
2185 stream_write_tree (ob, block_tree_leafs[i], true);
2186
2187 /* We also stream abstract functions, for which we stream only the parts
2188 needed for debug info. */
2189 if (gimple_has_body_p (function))
2190 {
2191 streamer_write_uhwi (ob, 1);
2192 output_struct_function_base (ob, fn);
2193
2194 /* Output all the SSA names used in the function. */
2195 output_ssa_names (ob, fn);
2196
2197 /* Output any exception handling regions. */
2198 output_eh_regions (ob, fn);
2199
2200 /* Output the code for the function. */
2201 FOR_ALL_BB_FN (bb, fn)
2202 output_bb (ob, bb, fn);
2203
2204 /* The terminator for this function. */
2205 streamer_write_record_start (ob, LTO_null);
2206
2207 output_cfg (ob, fn);
2208 }
2209 else
2210 streamer_write_uhwi (ob, 0);
2211
2212 /* Create a section to hold the pickled output of this function. */
2213 produce_asm (ob, function);
2214
2215 destroy_output_block (ob);
2216 if (streamer_dump_file)
2217 fprintf (streamer_dump_file, "Finished streaming %s\n",
2218 node->dump_name ());
2219 }
2220
2221 /* Output the initializer of variable NODE->DECL. */
2222
2223 static void
2224 output_constructor (struct varpool_node *node)
2225 {
2226 tree var = node->decl;
2227 struct output_block *ob;
2228
2229 if (streamer_dump_file)
2230 fprintf (streamer_dump_file, "\nStreaming constructor of %s\n",
2231 node->dump_name ());
2232
2233 timevar_push (TV_IPA_LTO_CTORS_OUT);
2234 ob = create_output_block (LTO_section_function_body);
2235
2236 clear_line_info (ob);
2237 ob->symbol = node;
2238
2239 /* Make string 0 be a NULL string. */
2240 streamer_write_char_stream (ob->string_stream, 0);
2241
2242 /* Output DECL_INITIAL of the variable, which holds the constructor we
2243 are streaming. */
2244 stream_write_tree (ob, DECL_INITIAL (var), true);
2245
2246 /* Create a section to hold the pickled output of this variable. */
2247 produce_asm (ob, var);
2248
2249 destroy_output_block (ob);
2250 if (streamer_dump_file)
2251 fprintf (streamer_dump_file, "Finished streaming %s\n",
2252 node->dump_name ());
2253 timevar_pop (TV_IPA_LTO_CTORS_OUT);
2254 }
2255
2256
2257 /* Emit toplevel asms. */
2258
2259 void
2260 lto_output_toplevel_asms (void)
2261 {
2262 struct output_block *ob;
2263 struct asm_node *can;
2264 char *section_name;
2265 struct lto_simple_header_with_strings header;
2266
2267 if (!symtab->first_asm_symbol ())
2268 return;
2269
2270 ob = create_output_block (LTO_section_asm);
2271
2272 /* Make string 0 be a NULL string. */
2273 streamer_write_char_stream (ob->string_stream, 0);
2274
2275 for (can = symtab->first_asm_symbol (); can; can = can->next)
2276 {
2277 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2278 streamer_write_hwi (ob, can->order);
2279 }
2280
2281 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2282
2283 section_name = lto_get_section_name (LTO_section_asm, NULL, 0, NULL);
2284 lto_begin_section (section_name, !flag_wpa);
2285 free (section_name);
2286
2287 /* The entire header stream is computed here. */
2288 memset (&header, 0, sizeof (header));
2289
2290 header.main_size = ob->main_stream->total_size;
2291 header.string_size = ob->string_stream->total_size;
2292 lto_write_data (&header, sizeof header);
2293
2294 /* Put all of the gimple and the string table out to the asm file as a
2295 block of text. */
2296 lto_write_stream (ob->main_stream);
2297 lto_write_stream (ob->string_stream);
2298
2299 lto_end_section ();
2300
2301 destroy_output_block (ob);
2302 }
2303
2304
2305 /* Copy the function body or variable constructor of NODE without deserializing. */
2306
2307 static void
2308 copy_function_or_variable (struct symtab_node *node)
2309 {
2310 tree function = node->decl;
2311 struct lto_file_decl_data *file_data = node->lto_file_data;
2312 const char *data;
2313 size_t len;
2314 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2315 char *section_name =
2316 lto_get_section_name (LTO_section_function_body, name, node->order, NULL);
2317 size_t i, j;
2318 struct lto_in_decl_state *in_state;
2319 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2320
2321 if (streamer_dump_file)
2322 fprintf (streamer_dump_file, "Copying section for %s\n", name);
2323 lto_begin_section (section_name, false);
2324 free (section_name);
2325
2326 /* We may have renamed the declaration, e.g., a static function. */
2327 name = lto_get_decl_name_mapping (file_data, name);
2328
2329 data = lto_get_raw_section_data (file_data, LTO_section_function_body,
2330 name, node->order - file_data->order_base,
2331 &len);
2332 gcc_assert (data);
2333
2334 /* Do a bit copy of the function body. */
2335 lto_write_raw_data (data, len);
2336
2337 /* Copy decls. */
2338 in_state =
2339 lto_get_function_in_decl_state (node->lto_file_data, function);
2340 gcc_assert (in_state);
2341 out_state->compressed = in_state->compressed;
2342
2343 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2344 {
2345 size_t n = vec_safe_length (in_state->streams[i]);
2346 vec<tree, va_gc> *trees = in_state->streams[i];
2347 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2348
2349 /* The out state must have the same indices as the in state,
2350 so just copy the vector. All the encoders in the out state
2351 must be empty when we reach here. */
2352 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2353 encoder->trees.reserve_exact (n);
2354 for (j = 0; j < n; j++)
2355 encoder->trees.safe_push ((*trees)[j]);
2356 }
2357
2358 lto_free_raw_section_data (file_data, LTO_section_function_body, name,
2359 data, len);
2360 lto_end_section ();
2361 }
2362
2363 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2364
2365 static tree
2366 wrap_refs (tree *tp, int *ws, void *)
2367 {
2368 tree t = *tp;
2369 if (handled_component_p (t)
2370 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
2371 && TREE_PUBLIC (TREE_OPERAND (t, 0)))
2372 {
2373 tree decl = TREE_OPERAND (t, 0);
2374 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2375 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2376 build1 (ADDR_EXPR, ptrtype, decl),
2377 build_int_cst (ptrtype, 0));
2378 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2379 *ws = 0;
2380 }
2381 else if (TREE_CODE (t) == CONSTRUCTOR)
2382 ;
2383 else if (!EXPR_P (t))
2384 *ws = 0;
2385 return NULL_TREE;
2386 }
2387
2388 /* Remove functions that are no longer used from offload_funcs, and mark the
2389 remaining ones with DECL_PRESERVE_P. */
2390
2391 static void
2392 prune_offload_funcs (void)
2393 {
2394 if (!offload_funcs)
2395 return;
2396
2397 unsigned ix, ix2;
2398 tree *elem_ptr;
2399 VEC_ORDERED_REMOVE_IF (*offload_funcs, ix, ix2, elem_ptr,
2400 cgraph_node::get (*elem_ptr) == NULL);
2401
2402 tree fn_decl;
2403 FOR_EACH_VEC_ELT (*offload_funcs, ix, fn_decl)
2404 DECL_PRESERVE_P (fn_decl) = 1;
2405 }
2406
2407 /* Produce LTO section that contains global information
2408 about LTO bytecode. */
2409
2410 static void
2411 produce_lto_section ()
2412 {
2413 /* Stream LTO meta section. */
2414 output_block *ob = create_output_block (LTO_section_lto);
2415
2416 char * section_name = lto_get_section_name (LTO_section_lto, NULL, 0, NULL);
2417 lto_begin_section (section_name, false);
2418 free (section_name);
2419
2420 #ifdef HAVE_ZSTD_H
2421 lto_compression compression = ZSTD;
2422 #else
2423 lto_compression compression = ZLIB;
2424 #endif
2425
2426 bool slim_object = flag_generate_lto && !flag_fat_lto_objects;
2427 lto_section s
2428 = { LTO_major_version, LTO_minor_version, slim_object, 0 };
2429 s.set_compression (compression);
2430 lto_write_data (&s, sizeof s);
2431 lto_end_section ();
2432 destroy_output_block (ob);
2433 }
2434
2435 /* Compare symbols to get them sorted by file (to optimize streaming). */
2436
2437 static int
2438 cmp_symbol_files (const void *pn1, const void *pn2)
2439 {
2440 const symtab_node *n1 = *(const symtab_node * const *)pn1;
2441 const symtab_node *n2 = *(const symtab_node * const *)pn2;
2442
2443 int file_order1 = n1->lto_file_data ? n1->lto_file_data->order : -1;
2444 int file_order2 = n2->lto_file_data ? n2->lto_file_data->order : -1;
2445
2446 /* Order files the same way they appeared on the command line to reduce
2447 seeking while copying sections. */
2448 if (file_order1 != file_order2)
2449 return file_order1 - file_order2;
2450
2451 /* Order within static library. */
2452 if (n1->lto_file_data && n1->lto_file_data->id != n2->lto_file_data->id)
2453 {
2454 if (n1->lto_file_data->id > n2->lto_file_data->id)
2455 return 1;
2456 if (n1->lto_file_data->id < n2->lto_file_data->id)
2457 return -1;
2458 }
2459
2460 /* And finally order by the definition order. */
2461 return n1->order - n2->order;
2462 }
2463
2464 /* Main entry point from the pass manager. */
2465
2466 void
2467 lto_output (void)
2468 {
2469 struct lto_out_decl_state *decl_state;
2470 bitmap output = NULL;
2471 bitmap_obstack output_obstack;
2472 unsigned int i, n_nodes;
2473 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2474 auto_vec<symtab_node *> symbols_to_copy;
2475
2476 prune_offload_funcs ();
2477
2478 if (flag_checking)
2479 {
2480 bitmap_obstack_initialize (&output_obstack);
2481 output = BITMAP_ALLOC (&output_obstack);
2482 }
2483
2484 /* Initialize the streamer. */
2485 lto_streamer_init ();
2486
2487 produce_lto_section ();
2488
2489 n_nodes = lto_symtab_encoder_size (encoder);
2490 /* Prepare the vector of symbols to output and then sort it to optimize
2491 section copying. */
2492 for (i = 0; i < n_nodes; i++)
2493 {
2494 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2495 if (snode->alias)
2496 continue;
2497 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2498 {
2499 if (lto_symtab_encoder_encode_body_p (encoder, node))
2500 symbols_to_copy.safe_push (node);
2501 }
2502 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2503 {
2504 /* Wrap symbol references inside the ctor in a type
2505 preserving MEM_REF. */
2506 tree ctor = DECL_INITIAL (node->decl);
2507 if (ctor && !in_lto_p)
2508 walk_tree (&ctor, wrap_refs, NULL, NULL);
2509 if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2510 && lto_symtab_encoder_encode_initializer_p (encoder, node))
2511 symbols_to_copy.safe_push (node);
2512 }
2513 }
2514 symbols_to_copy.qsort (cmp_symbol_files);
2515 for (i = 0; i < symbols_to_copy.length (); i++)
2516 {
2517 symtab_node *snode = symbols_to_copy[i];
2518 cgraph_node *cnode;
2519 varpool_node *vnode;
2520
2521 if (flag_checking)
2522 gcc_assert (bitmap_set_bit (output, DECL_UID (snode->decl)));
2523
2524 decl_state = lto_new_out_decl_state ();
2525 lto_push_out_decl_state (decl_state);
2526
2527 if ((cnode = dyn_cast <cgraph_node *> (snode))
2528 && (gimple_has_body_p (cnode->decl)
2529 || (!flag_wpa
2530 && flag_incremental_link != INCREMENTAL_LINK_LTO)
2531 /* Thunks have no body but they may be synthesized
2532 at WPA time. */
2533 || DECL_ARGUMENTS (cnode->decl)))
2534 output_function (cnode);
2535 else if ((vnode = dyn_cast <varpool_node *> (snode))
2536 && (DECL_INITIAL (vnode->decl) != error_mark_node
2537 || (!flag_wpa
2538 && flag_incremental_link != INCREMENTAL_LINK_LTO)))
2539 output_constructor (vnode);
2540 else
2541 copy_function_or_variable (snode);
2542 gcc_assert (lto_get_out_decl_state () == decl_state);
2543 lto_pop_out_decl_state ();
2544 lto_record_function_out_decl_state (snode->decl, decl_state);
2545 }
2546
2547 /* Emit the callgraph after emitting function bodies. This needs to
2548 be done now to make sure that all the statements in every function
2549 have been renumbered so that edges can be associated with call
2550 statements using the statement UIDs. */
2551 output_symtab ();
2552
2553 output_offload_tables ();
2554
2555 if (flag_checking)
2556 {
2557 BITMAP_FREE (output);
2558 bitmap_obstack_release (&output_obstack);
2559 }
2560 }
2561
2562 /* Write each node encoded by ENCODER to OB, as well as those reachable
2563 from it and required for correct representation of its semantics.
2564 Each node in ENCODER must be a global declaration or a type. A node
2565 is written only once, even if it appears multiple times in the
2566 vector. Certain transitively-reachable nodes, such as those
2567 representing expressions, may be duplicated, but such nodes
2568 must not appear in ENCODER itself. */
2569
2570 static void
2571 write_global_stream (struct output_block *ob,
2572 struct lto_tree_ref_encoder *encoder)
2573 {
2574 tree t;
2575 size_t index;
2576 const size_t size = lto_tree_ref_encoder_size (encoder);
2577
2578 for (index = 0; index < size; index++)
2579 {
2580 t = lto_tree_ref_encoder_get_tree (encoder, index);
2581 if (streamer_dump_file)
2582 {
2583 fprintf (streamer_dump_file, " %i:", (int)index);
2584 print_node_brief (streamer_dump_file, "", t, 4);
2585 fprintf (streamer_dump_file, "\n");
2586 }
2587 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2588 stream_write_tree (ob, t, false);
2589 }
2590 }
2591
2592
2593 /* Write a sequence of indices into the globals vector corresponding
2594 to the trees in ENCODER. These are used by the reader to map the
2595 indices used to refer to global entities within function bodies to
2596 their referents. */
2597
2598 static void
2599 write_global_references (struct output_block *ob,
2600 struct lto_tree_ref_encoder *encoder)
2601 {
2602 tree t;
2603 uint32_t index;
2604 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2605
2606 /* Write size and slot indexes as 32-bit unsigned numbers. */
2607 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2608 data[0] = size;
2609
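/* data[1 .. size] receive the writer-cache slot of each encoded tree;
   data[0] already holds the count.  */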
2610 for (index = 0; index < size; index++)
2611 {
2612 unsigned slot_num;
2613
2614 t = lto_tree_ref_encoder_get_tree (encoder, index);
2615 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2616 gcc_assert (slot_num != (unsigned)-1);
2617 data[index + 1] = slot_num;
2618 }
2619
2620 lto_write_data (data, sizeof (uint32_t) * (size + 1));
2621 free (data);
2622 }
2623
2624
2625 /* Write all the streams in an lto_out_decl_state STATE using
2626 output block OB. */
2627
2628 void
2629 lto_output_decl_state_streams (struct output_block *ob,
2630 struct lto_out_decl_state *state)
2631 {
2632 int i;
2633
2634 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2635 write_global_stream (ob, &state->streams[i]);
2636 }
2637
2638
2639 /* Write all the references in an lto_out_decl_state STATE using
2640 output block OB. */
2641
2642 void
2643 lto_output_decl_state_refs (struct output_block *ob,
2644 struct lto_out_decl_state *state)
2645 {
2646 unsigned i;
2647 unsigned ref;
2648 tree decl;
2649
2650 /* Write a reference to the FUNCTION_DECL. If there is no function,
2651 write a reference to void_type_node instead. */
2652 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2653 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2654 gcc_assert (ref != (unsigned)-1);
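/* The low bit of the streamed word carries the 'compressed' flag; the
   remaining bits hold the writer-cache slot.  */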
2655 ref = ref * 2 + (state->compressed ? 1 : 0);
2656 lto_write_data (&ref, sizeof (uint32_t));
2657
2658 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2659 write_global_references (ob, &state->streams[i]);
2660 }
2661
2662
2663 /* Return the written size of STATE. */
2664
2665 static size_t
2666 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2667 {
2668 int i;
2669 size_t size;
2670
2671 size = sizeof (int32_t); /* fn_ref. */
2672 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2673 {
2674 size += sizeof (int32_t); /* vector size. */
2675 size += (lto_tree_ref_encoder_size (&state->streams[i])
2676 * sizeof (int32_t));
2677 }
2678 return size;
2679 }
2680
2681
2682 /* Write symbol T to the currently open section, using CACHE to find its
2683 slot. SEEN specifies symbols already written; ALIAS marks aliases. */
2684
2685 static void
2686 write_symbol (struct streamer_tree_cache_d *cache,
2687 tree t, hash_set<const char *> *seen, bool alias)
2688 {
2689 const char *name;
2690 enum gcc_plugin_symbol_kind kind;
2691 enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
2692 unsigned slot_num;
2693 uint64_t size;
2694 const char *comdat;
2695 unsigned char c;
2696
2697 gcc_assert (VAR_OR_FUNCTION_DECL_P (t));
2698
2699 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2700
2701 /* This behaves like assemble_name_raw in varasm.c, performing the
2702 same name manipulations that ASM_OUTPUT_LABELREF does. */
2703 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2704
2705 if (seen->add (name))
2706 return;
2707
2708 streamer_tree_cache_lookup (cache, t, &slot_num);
2709 gcc_assert (slot_num != (unsigned)-1);
2710
2711 if (DECL_EXTERNAL (t))
2712 {
2713 if (DECL_WEAK (t))
2714 kind = GCCPK_WEAKUNDEF;
2715 else
2716 kind = GCCPK_UNDEF;
2717 }
2718 else
2719 {
2720 if (DECL_WEAK (t))
2721 kind = GCCPK_WEAKDEF;
2722 else if (DECL_COMMON (t))
2723 kind = GCCPK_COMMON;
2724 else
2725 kind = GCCPK_DEF;
2726
2727 /* When something is defined, it should have a node attached. */
2728 gcc_assert (alias || !VAR_P (t) || varpool_node::get (t)->definition);
2729 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2730 || (cgraph_node::get (t)
2731 && cgraph_node::get (t)->definition));
2732 }
2733
2734 /* Imitate what default_elf_asm_output_external does.
2735 When a symbol is external, we need to output it with DEFAULT visibility
2736 when compiling with -fvisibility=default, but with HIDDEN visibility
2737 when the symbol has attribute ((visibility ("hidden"))) specified.
2738 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2739 right. */
2740
2741 if (DECL_EXTERNAL (t)
2742 && !targetm.binds_local_p (t))
2743 visibility = GCCPV_DEFAULT;
2744 else
2745 switch (DECL_VISIBILITY (t))
2746 {
2747 case VISIBILITY_DEFAULT:
2748 visibility = GCCPV_DEFAULT;
2749 break;
2750 case VISIBILITY_PROTECTED:
2751 visibility = GCCPV_PROTECTED;
2752 break;
2753 case VISIBILITY_HIDDEN:
2754 visibility = GCCPV_HIDDEN;
2755 break;
2756 case VISIBILITY_INTERNAL:
2757 visibility = GCCPV_INTERNAL;
2758 break;
2759 }
2760
2761 if (kind == GCCPK_COMMON
2762 && DECL_SIZE_UNIT (t)
2763 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2764 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2765 else
2766 size = 0;
2767
2768 if (DECL_ONE_ONLY (t))
2769 comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
2770 else
2771 comdat = "";
2772
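/* The symbol record layout is: NUL-terminated assembler name,
   NUL-terminated comdat group, one byte each for kind and visibility,
   an 8-byte size and a 4-byte cache slot number.  */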
2773 lto_write_data (name, strlen (name) + 1);
2774 lto_write_data (comdat, strlen (comdat) + 1);
2775 c = (unsigned char) kind;
2776 lto_write_data (&c, 1);
2777 c = (unsigned char) visibility;
2778 lto_write_data (&c, 1);
2779 lto_write_data (&size, 8);
2780 lto_write_data (&slot_num, 4);
2781 }
2782
2783 /* Write extension information for symbols (symbol type, section flags). */
2784
2785 static void
2786 write_symbol_extension_info (tree t)
2787 {
2788 unsigned char c;
2789 c = (TREE_CODE (t) == VAR_DECL
2790 ? GCCST_VARIABLE : GCCST_FUNCTION);
2791 lto_write_data (&c, 1);
2792 unsigned char section_kind = 0;
2793 if (TREE_CODE (t) == VAR_DECL)
2794 {
2795 section *s = get_variable_section (t, false);
2796 if (s->common.flags & SECTION_BSS)
2797 section_kind |= GCCSSK_BSS;
2798 }
2799 lto_write_data (&section_kind, 1);
2800 }
2801
2802 /* Write an IL symbol table to OB. The symbols written are those in the
2803 symtab node encoder of OB's decl state. */
2804
2805 static unsigned int
2806 produce_symtab (struct output_block *ob)
2807 {
2808 unsigned int streamed_symbols = 0;
2809 struct streamer_tree_cache_d *cache = ob->writer_cache;
2810 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, 0, NULL);
2811 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2812 lto_symtab_encoder_iterator lsei;
2813
2814 lto_begin_section (section_name, false);
2815 free (section_name);
2816
2817 hash_set<const char *> seen;
2818
2819 /* Write the symbol table.
2820 First write everything defined and then all declarations.
2821 This is necessary to handle cases where we have duplicated symbols. */
2822 for (lsei = lsei_start (encoder);
2823 !lsei_end_p (lsei); lsei_next (&lsei))
2824 {
2825 symtab_node *node = lsei_node (lsei);
2826
2827 if (DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
2828 continue;
2829 write_symbol (cache, node->decl, &seen, false);
2830 ++streamed_symbols;
2831 }
2832 for (lsei = lsei_start (encoder);
2833 !lsei_end_p (lsei); lsei_next (&lsei))
2834 {
2835 symtab_node *node = lsei_node (lsei);
2836
2837 if (!DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
2838 continue;
2839 write_symbol (cache, node->decl, &seen, false);
2840 ++streamed_symbols;
2841 }
2842
2843 lto_end_section ();
2844
2845 return streamed_symbols;
2846 }
2847
2848 /* Symtab extension version. */
2849 #define LTO_SYMTAB_EXTENSION_VERSION 1
2850
2851 /* Write an IL symbol table extension to OB. PREVIOUS_STREAMED_SYMBOLS is
2852 the number of symbols produce_symtab wrote, used as a consistency check. */
2853
2854 static void
2855 produce_symtab_extension (struct output_block *ob,
2856 unsigned int previous_streamed_symbols)
2857 {
2858 unsigned int streamed_symbols = 0;
2859 char *section_name = lto_get_section_name (LTO_section_symtab_extension,
2860 NULL, 0, NULL);
2861 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2862 lto_symtab_encoder_iterator lsei;
2863
2864 lto_begin_section (section_name, false);
2865 free (section_name);
2866
2867 unsigned char version = LTO_SYMTAB_EXTENSION_VERSION;
2868 lto_write_data (&version, 1);
2869
2870 /* Write the symbol table.
2871 First write everything defined and then all declarations.
2872 This is necessary to handle cases where we have duplicated symbols. */
2873 for (lsei = lsei_start (encoder);
2874 !lsei_end_p (lsei); lsei_next (&lsei))
2875 {
2876 symtab_node *node = lsei_node (lsei);
2877
2878 if (DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
2879 continue;
2880 write_symbol_extension_info (node->decl);
2881 ++streamed_symbols;
2882 }
2883 for (lsei = lsei_start (encoder);
2884 !lsei_end_p (lsei); lsei_next (&lsei))
2885 {
2886 symtab_node *node = lsei_node (lsei);
2887
2888 if (!DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
2889 continue;
2890 write_symbol_extension_info (node->decl);
2891 ++streamed_symbols;
2892 }
2893
2894 gcc_assert (previous_streamed_symbols == streamed_symbols);
2895 lto_end_section ();
2896 }
2897
2898
2899 /* Init the streamer_mode_table for output, where we collect info on what
2900 machine_mode values have been streamed. */
2901 void
2902 lto_output_init_mode_table (void)
2903 {
2904 memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
2905 }
2906
2907
2908 /* Write the mode table. */
2909 static void
2910 lto_write_mode_table (void)
2911 {
2912 struct output_block *ob;
2913 ob = create_output_block (LTO_section_mode_table);
2914 bitpack_d bp = bitpack_create (ob->main_stream);
2915
2916 /* Ensure that for GET_MODE_INNER (m) != m we also have
2917 the inner mode marked. */
2918 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2919 if (streamer_mode_table[i])
2920 {
2921 machine_mode m = (machine_mode) i;
2922 machine_mode inner_m = GET_MODE_INNER (m);
2923 if (inner_m != m)
2924 streamer_mode_table[(int) inner_m] = 1;
2925 }
2926 /* First stream modes that have GET_MODE_INNER (m) == m,
2927 so that we can refer to them afterwards. */
2928 for (int pass = 0; pass < 2; pass++)
2929 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2930 if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
2931 {
2932 machine_mode m = (machine_mode) i;
2933 if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
2934 continue;
2935 bp_pack_value (&bp, m, 8);
2936 bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
2937 bp_pack_poly_value (&bp, GET_MODE_SIZE (m), 16);
2938 bp_pack_poly_value (&bp, GET_MODE_PRECISION (m), 16);
2939 bp_pack_value (&bp, GET_MODE_INNER (m), 8);
2940 bp_pack_poly_value (&bp, GET_MODE_NUNITS (m), 16);
2941 switch (GET_MODE_CLASS (m))
2942 {
2943 case MODE_FRACT:
2944 case MODE_UFRACT:
2945 case MODE_ACCUM:
2946 case MODE_UACCUM:
2947 bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
2948 bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
2949 break;
2950 case MODE_FLOAT:
2951 case MODE_DECIMAL_FLOAT:
2952 bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
2953 break;
2954 default:
2955 break;
2956 }
2957 bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
2958 }
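/* The mode table is terminated by streaming VOIDmode.  */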
2959 bp_pack_value (&bp, VOIDmode, 8);
2960
2961 streamer_write_bitpack (&bp);
2962
2963 char *section_name
2964 = lto_get_section_name (LTO_section_mode_table, NULL, 0, NULL);
2965 lto_begin_section (section_name, !flag_wpa);
2966 free (section_name);
2967
2968 /* The entire header stream is computed here. */
2969 struct lto_simple_header_with_strings header;
2970 memset (&header, 0, sizeof (header));
2971
2972 header.main_size = ob->main_stream->total_size;
2973 header.string_size = ob->string_stream->total_size;
2974 lto_write_data (&header, sizeof header);
2975
2976 /* Put all of the gimple and the string table out to the asm file as a
2977 block of text. */
2978 lto_write_stream (ob->main_stream);
2979 lto_write_stream (ob->string_stream);
2980
2981 lto_end_section ();
2982 destroy_output_block (ob);
2983 }
2984
2985
2986 /* This pass is run after all of the functions are serialized and all
2987 of the IPA passes have written their serialized forms. This pass
2988 causes the vector of all of the global decls and types used from
2989 this file to be written into a section that can then be read back in
2990 to recover these on the other side. */
2991
2992 void
2993 produce_asm_for_decls (void)
2994 {
2995 struct lto_out_decl_state *out_state;
2996 struct lto_out_decl_state *fn_out_state;
2997 struct lto_decl_header header;
2998 char *section_name;
2999 struct output_block *ob;
3000 unsigned idx, num_fns;
3001 size_t decl_state_size;
3002 int32_t num_decl_states;
3003
3004 ob = create_output_block (LTO_section_decls);
3005
3006 memset (&header, 0, sizeof (struct lto_decl_header));
3007
3008 section_name = lto_get_section_name (LTO_section_decls, NULL, 0, NULL);
3009 lto_begin_section (section_name, !flag_wpa);
3010 free (section_name);
3011
3012 /* Make string 0 be a NULL string. */
3013 streamer_write_char_stream (ob->string_stream, 0);
3014
3015 gcc_assert (!alias_pairs);
3016
3017 /* Get rid of the global decl state hash tables to save some memory. */
3018 out_state = lto_get_out_decl_state ();
3019 for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
3020 if (out_state->streams[i].tree_hash_table)
3021 {
3022 delete out_state->streams[i].tree_hash_table;
3023 out_state->streams[i].tree_hash_table = NULL;
3024 }
3025
3026 /* Write the global symbols. */
3027 if (streamer_dump_file)
3028 fprintf (streamer_dump_file, "Outputting global stream\n");
3029 lto_output_decl_state_streams (ob, out_state);
3030 num_fns = lto_function_decl_states.length ();
3031 for (idx = 0; idx < num_fns; idx++)
3032 {
3033 fn_out_state =
3034 lto_function_decl_states[idx];
3035 if (streamer_dump_file)
3036 fprintf (streamer_dump_file, "Outputting stream for %s\n",
3037 IDENTIFIER_POINTER
3038 (DECL_ASSEMBLER_NAME (fn_out_state->fn_decl)));
3039 lto_output_decl_state_streams (ob, fn_out_state);
3040 }
3041
3042 /* Currently not used. This field would allow us to preallocate
3043 the globals vector, so that it need not be resized as it is extended. */
3044 header.num_nodes = -1;
3045
3046 /* Compute the total size of all decl out states. */
3047 decl_state_size = sizeof (int32_t);
3048 decl_state_size += lto_out_decl_state_written_size (out_state);
3049 for (idx = 0; idx < num_fns; idx++)
3050 {
3051 fn_out_state =
3052 lto_function_decl_states[idx];
3053 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
3054 }
3055 header.decl_state_size = decl_state_size;
3056
3057 header.main_size = ob->main_stream->total_size;
3058 header.string_size = ob->string_stream->total_size;
3059
3060 lto_write_data (&header, sizeof header);
3061
3062 /* Write the main out-decl state, followed by out-decl states of
3063 functions. */
3064 num_decl_states = num_fns + 1;
3065 lto_write_data (&num_decl_states, sizeof (num_decl_states));
3066 lto_output_decl_state_refs (ob, out_state);
3067 for (idx = 0; idx < num_fns; idx++)
3068 {
3069 fn_out_state = lto_function_decl_states[idx];
3070 lto_output_decl_state_refs (ob, fn_out_state);
3071 }
3072
3073 lto_write_stream (ob->main_stream);
3074 lto_write_stream (ob->string_stream);
3075
3076 lto_end_section ();
3077
3078 /* Write the symbol table. It is used by the linker to determine
3079 dependencies, and thus we can skip it for WPA. */
3080 if (!flag_wpa)
3081 {
3082 unsigned int streamed_symbols = produce_symtab (ob);
3083 produce_symtab_extension (ob, streamed_symbols);
3084 }
3085
3086 /* Write command line opts. */
3087 lto_write_options ();
3088
3089 /* Deallocate memory and clean up. */
3090 for (idx = 0; idx < num_fns; idx++)
3091 {
3092 fn_out_state =
3093 lto_function_decl_states[idx];
3094 lto_delete_out_decl_state (fn_out_state);
3095 }
3096 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
3097 lto_function_decl_states.release ();
3098 destroy_output_block (ob);
3099 if (lto_stream_offload_p)
3100 lto_write_mode_table ();
3101 }