1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2015 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "hash-set.h"
28 #include "vec.h"
29 #include "input.h"
30 #include "alias.h"
31 #include "symtab.h"
32 #include "inchash.h"
33 #include "tree.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "stringpool.h"
37 #include "hashtab.h"
38 #include "hard-reg-set.h"
39 #include "function.h"
40 #include "rtl.h"
41 #include "flags.h"
42 #include "statistics.h"
43 #include "insn-config.h"
44 #include "expmed.h"
45 #include "dojump.h"
46 #include "explow.h"
47 #include "calls.h"
48 #include "emit-rtl.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "params.h"
53 #include "predict.h"
54 #include "dominance.h"
55 #include "cfg.h"
56 #include "basic-block.h"
57 #include "tree-ssa-alias.h"
58 #include "internal-fn.h"
59 #include "gimple-expr.h"
60 #include "is-a.h"
61 #include "gimple.h"
62 #include "gimple-iterator.h"
63 #include "gimple-ssa.h"
64 #include "tree-ssanames.h"
65 #include "tree-pass.h"
66 #include "diagnostic-core.h"
67 #include "except.h"
68 #include "lto-symtab.h"
69 #include "hash-map.h"
70 #include "plugin-api.h"
71 #include "ipa-ref.h"
72 #include "cgraph.h"
73 #include "lto-streamer.h"
74 #include "data-streamer.h"
75 #include "gimple-streamer.h"
76 #include "tree-streamer.h"
77 #include "streamer-hooks.h"
78 #include "cfgloop.h"
79 #include "builtins.h"
80 #include "gomp-constants.h"
81
82
83 static void lto_write_tree (struct output_block*, tree, bool);
84
  85 /* Clear the line info stored in OB.  */
86
87 static void
88 clear_line_info (struct output_block *ob)
89 {
90 ob->current_file = NULL;
91 ob->current_line = 0;
92 ob->current_col = 0;
93 }
94
95
96 /* Create the output block and return it. SECTION_TYPE is
  97    LTO_section_function_body or LTO_section_static_initializer.  */
98
99 struct output_block *
100 create_output_block (enum lto_section_type section_type)
101 {
102 struct output_block *ob = XCNEW (struct output_block);
103
104 ob->section_type = section_type;
105 ob->decl_state = lto_get_out_decl_state ();
106 ob->main_stream = XCNEW (struct lto_output_stream);
107 ob->string_stream = XCNEW (struct lto_output_stream);
108 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
109
110 if (section_type == LTO_section_function_body)
111 ob->cfg_stream = XCNEW (struct lto_output_stream);
112
113 clear_line_info (ob);
114
115 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
116 gcc_obstack_init (&ob->obstack);
117
118 return ob;
119 }
120
121
122 /* Destroy the output block OB. */
123
124 void
125 destroy_output_block (struct output_block *ob)
126 {
127 enum lto_section_type section_type = ob->section_type;
128
129 delete ob->string_hash_table;
130 ob->string_hash_table = NULL;
131
132 free (ob->main_stream);
133 free (ob->string_stream);
134 if (section_type == LTO_section_function_body)
135 free (ob->cfg_stream);
136
137 streamer_tree_cache_delete (ob->writer_cache);
138 obstack_free (&ob->obstack, NULL);
139
140 free (ob);
141 }
142
143
144 /* Look up NODE in the type table and write the index for it to OB. */
145
146 static void
147 output_type_ref (struct output_block *ob, tree node)
148 {
149 streamer_write_record_start (ob, LTO_type_ref);
150 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
151 }
152
153
154 /* Return true if tree node T is written to various tables. For these
 155    nodes, we sometimes want to write their physical representation
156 (via lto_output_tree), and sometimes we need to emit an index
157 reference into a table (via lto_output_tree_ref). */
158
159 static bool
160 tree_is_indexable (tree t)
161 {
162 /* Parameters and return values of functions of variably modified types
163 must go to global stream, because they may be used in the type
164 definition. */
165 if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
166 && DECL_CONTEXT (t))
167 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
168 /* IMPORTED_DECL is put into BLOCK and thus it never can be shared. */
169 else if (TREE_CODE (t) == IMPORTED_DECL)
170 return false;
171 else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
172 || TREE_CODE (t) == TYPE_DECL
173 || TREE_CODE (t) == CONST_DECL
174 || TREE_CODE (t) == NAMELIST_DECL)
175 && decl_function_context (t))
176 return false;
177 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
178 return false;
179 /* Variably modified types need to be streamed alongside function
180 bodies because they can refer to local entities. Together with
181 them we have to localize their members as well.
182 ??? In theory that includes non-FIELD_DECLs as well. */
183 else if (TYPE_P (t)
184 && variably_modified_type_p (t, NULL_TREE))
185 return false;
186 else if (TREE_CODE (t) == FIELD_DECL
187 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
188 return false;
189 else
190 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
191 }
192
193
194 /* Output info about new location into bitpack BP.
195 After outputting bitpack, lto_output_location_data has
196 to be done to output actual data. */
197
198 void
199 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
200 location_t loc)
201 {
202 expanded_location xloc;
203
204 loc = LOCATION_LOCUS (loc);
205 bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
206 if (loc == UNKNOWN_LOCATION)
207 return;
208
209 xloc = expand_location (loc);
210
211 bp_pack_value (bp, ob->current_file != xloc.file, 1);
212 bp_pack_value (bp, ob->current_line != xloc.line, 1);
213 bp_pack_value (bp, ob->current_col != xloc.column, 1);
214
215 if (ob->current_file != xloc.file)
216 bp_pack_string (ob, bp, xloc.file, true);
217 ob->current_file = xloc.file;
218
219 if (ob->current_line != xloc.line)
220 bp_pack_var_len_unsigned (bp, xloc.line);
221 ob->current_line = xloc.line;
222
223 if (ob->current_col != xloc.column)
224 bp_pack_var_len_unsigned (bp, xloc.column);
225 ob->current_col = xloc.column;
226 }
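/* A minimal standalone sketch of the same delta scheme, for illustration
   only (it is not part of this file and all names in it are hypothetical):
   three "changed" bits are written first, then only the components that
   actually changed since the previous location.  */
#if 0
struct sketch_loc_state
{
  const char *file;   /* Interned file name, compared by pointer as in
                         lto_output_location.  */
  int line;
  int col;
};

static void
sketch_write_location (FILE *out, struct sketch_loc_state *st,
                       const char *file, int line, int col)
{
  int file_changed = st->file != file;
  int line_changed = st->line != line;
  int col_changed = st->col != col;

  /* The three "changed" bits.  */
  fprintf (out, "%d%d%d", file_changed, line_changed, col_changed);

  /* Only the changed components follow.  */
  if (file_changed)
    fprintf (out, " %s", file);
  if (line_changed)
    fprintf (out, " %d", line);
  if (col_changed)
    fprintf (out, " %d", col);
  fputc ('\n', out);

  st->file = file;
  st->line = line;
  st->col = col;
}
#endif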
227
228
229 /* If EXPR is an indexable tree node, output a reference to it to
230 output block OB. Otherwise, output the physical representation of
231 EXPR to OB. */
232
233 static void
234 lto_output_tree_ref (struct output_block *ob, tree expr)
235 {
236 enum tree_code code;
237
238 if (TYPE_P (expr))
239 {
240 output_type_ref (ob, expr);
241 return;
242 }
243
244 code = TREE_CODE (expr);
245 switch (code)
246 {
247 case SSA_NAME:
248 streamer_write_record_start (ob, LTO_ssa_name_ref);
249 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
250 break;
251
252 case FIELD_DECL:
253 streamer_write_record_start (ob, LTO_field_decl_ref);
254 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
255 break;
256
257 case FUNCTION_DECL:
258 streamer_write_record_start (ob, LTO_function_decl_ref);
259 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
260 break;
261
262 case VAR_DECL:
263 case DEBUG_EXPR_DECL:
264 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
265 case PARM_DECL:
266 streamer_write_record_start (ob, LTO_global_decl_ref);
267 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
268 break;
269
270 case CONST_DECL:
271 streamer_write_record_start (ob, LTO_const_decl_ref);
272 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
273 break;
274
275 case IMPORTED_DECL:
276 gcc_assert (decl_function_context (expr) == NULL);
277 streamer_write_record_start (ob, LTO_imported_decl_ref);
278 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
279 break;
280
281 case TYPE_DECL:
282 streamer_write_record_start (ob, LTO_type_decl_ref);
283 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
284 break;
285
286 case NAMELIST_DECL:
287 streamer_write_record_start (ob, LTO_namelist_decl_ref);
288 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
289 break;
290
291 case NAMESPACE_DECL:
292 streamer_write_record_start (ob, LTO_namespace_decl_ref);
293 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
294 break;
295
296 case LABEL_DECL:
297 streamer_write_record_start (ob, LTO_label_decl_ref);
298 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
299 break;
300
301 case RESULT_DECL:
302 streamer_write_record_start (ob, LTO_result_decl_ref);
303 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
304 break;
305
306 case TRANSLATION_UNIT_DECL:
307 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
308 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
309 break;
310
311 default:
312 /* No other node is indexable, so it should have been handled by
313 lto_output_tree. */
314 gcc_unreachable ();
315 }
316 }
317
318
319 /* Return true if EXPR is a tree node that can be written to disk. */
320
321 static inline bool
322 lto_is_streamable (tree expr)
323 {
324 enum tree_code code = TREE_CODE (expr);
325
326 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
327 name version in lto_output_tree_ref (see output_ssa_names). */
328 return !is_lang_specific (expr)
329 && code != SSA_NAME
330 && code != CALL_EXPR
331 && code != LANG_TYPE
332 && code != MODIFY_EXPR
333 && code != INIT_EXPR
334 && code != TARGET_EXPR
335 && code != BIND_EXPR
336 && code != WITH_CLEANUP_EXPR
337 && code != STATEMENT_LIST
338 && (code == CASE_LABEL_EXPR
339 || code == DECL_EXPR
340 || TREE_CODE_CLASS (code) != tcc_statement);
341 }
342
343
 344 /* For EXPR, look up and return what we want to stream as its DECL_INITIAL.  */
345
346 static tree
347 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
348 {
349 gcc_checking_assert (DECL_P (expr)
350 && TREE_CODE (expr) != FUNCTION_DECL
351 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
352
353 /* Handle DECL_INITIAL for symbols. */
354 tree initial = DECL_INITIAL (expr);
355 if (TREE_CODE (expr) == VAR_DECL
356 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
357 && !DECL_IN_CONSTANT_POOL (expr)
358 && initial)
359 {
360 varpool_node *vnode;
361 /* Extra section needs about 30 bytes; do not produce it for simple
362 scalar values. */
363 if (TREE_CODE (DECL_INITIAL (expr)) == CONSTRUCTOR
364 || !(vnode = varpool_node::get (expr))
365 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
366 initial = error_mark_node;
367 }
368
369 return initial;
370 }
371
372
373 /* Write a physical representation of tree node EXPR to output block
374 OB. If REF_P is true, the leaves of EXPR are emitted as references
 375    via lto_output_tree_ref.  */
377
378 static void
379 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
380 {
381 /* Pack all the non-pointer fields in EXPR into a bitpack and write
382 the resulting bitpack. */
383 streamer_write_tree_bitfields (ob, expr);
384
385 /* Write all the pointer fields in EXPR. */
386 streamer_write_tree_body (ob, expr, ref_p);
387
388 /* Write any LTO-specific data to OB. */
389 if (DECL_P (expr)
390 && TREE_CODE (expr) != FUNCTION_DECL
391 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
392 {
393 /* Handle DECL_INITIAL for symbols. */
394 tree initial = get_symbol_initial_value
395 (ob->decl_state->symtab_node_encoder, expr);
396 stream_write_tree (ob, initial, ref_p);
397 }
398 }
399
400 /* Write a physical representation of tree node EXPR to output block
401 OB. If REF_P is true, the leaves of EXPR are emitted as references
 402    via lto_output_tree_ref.  */
404
405 static void
406 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
407 {
408 if (!lto_is_streamable (expr))
409 internal_error ("tree code %qs is not supported in LTO streams",
410 get_tree_code_name (TREE_CODE (expr)));
411
412 /* Write the header, containing everything needed to materialize
413 EXPR on the reading side. */
414 streamer_write_tree_header (ob, expr);
415
416 lto_write_tree_1 (ob, expr, ref_p);
417
418 /* Mark the end of EXPR. */
419 streamer_write_zero (ob);
420 }
421
422 /* Emit the physical representation of tree node EXPR to output block
423 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
424 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
425
426 static void
427 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
428 bool ref_p, bool this_ref_p)
429 {
430 unsigned ix;
431
432 gcc_checking_assert (expr != NULL_TREE
433 && !(this_ref_p && tree_is_indexable (expr)));
434
435 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
436 expr, hash, &ix);
437 gcc_assert (!exists_p);
438 if (streamer_handle_as_builtin_p (expr))
439 {
440 /* MD and NORMAL builtins do not need to be written out
441 completely as they are always instantiated by the
442 compiler on startup. The only builtins that need to
443 be written out are BUILT_IN_FRONTEND. For all other
444 builtins, we simply write the class and code. */
445 streamer_write_builtin (ob, expr);
446 }
447 else if (TREE_CODE (expr) == INTEGER_CST
448 && !TREE_OVERFLOW (expr))
449 {
450 /* Shared INTEGER_CST nodes are special because they need their
451 original type to be materialized by the reader (to implement
452 TYPE_CACHED_VALUES). */
453 streamer_write_integer_cst (ob, expr, ref_p);
454 }
455 else
456 {
457 /* This is the first time we see EXPR, write its fields
458 to OB. */
459 lto_write_tree (ob, expr, ref_p);
460 }
461 }
462
463 class DFS
464 {
465 public:
466 DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
467 bool single_p);
468 ~DFS ();
469
470 struct scc_entry
471 {
472 tree t;
473 hashval_t hash;
474 };
475 vec<scc_entry> sccstack;
476
477 private:
478 struct sccs
479 {
480 unsigned int dfsnum;
481 unsigned int low;
482 };
483 struct worklist
484 {
485 tree expr;
486 sccs *from_state;
487 sccs *cstate;
488 bool ref_p;
489 bool this_ref_p;
490 };
491
492 static int scc_entry_compare (const void *, const void *);
493
494 void DFS_write_tree_body (struct output_block *ob,
495 tree expr, sccs *expr_state, bool ref_p);
496
497 void DFS_write_tree (struct output_block *ob, sccs *from_state,
498 tree expr, bool ref_p, bool this_ref_p);
499
500 hashval_t
501 hash_scc (struct output_block *ob, unsigned first, unsigned size);
502
503 hash_map<tree, sccs *> sccstate;
504 vec<worklist> worklist_vec;
505 struct obstack sccstate_obstack;
506 };
507
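/* The constructor below runs an iterative (worklist-based) variant of
   Tarjan's SCC algorithm: sccs::dfsnum and sccs::low play the role of
   Tarjan's index and lowlink, worklist_vec stands in for the recursion
   stack, and an SCC is complete once cstate->low == cstate->dfsnum, at
   which point its members are popped off sccstack and streamed as a
   single LTO_tree_scc record.  */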
508 DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
509 bool single_p)
510 {
511 unsigned int next_dfs_num = 1;
512 sccstack.create (0);
513 gcc_obstack_init (&sccstate_obstack);
514 worklist_vec = vNULL;
515 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
516 while (!worklist_vec.is_empty ())
517 {
518 worklist &w = worklist_vec.last ();
519 expr = w.expr;
520 sccs *from_state = w.from_state;
521 sccs *cstate = w.cstate;
522 ref_p = w.ref_p;
523 this_ref_p = w.this_ref_p;
524 if (cstate == NULL)
525 {
526 sccs **slot = &sccstate.get_or_insert (expr);
527 cstate = *slot;
528 if (cstate)
529 {
530 gcc_checking_assert (from_state);
531 if (cstate->dfsnum < from_state->dfsnum)
532 from_state->low = MIN (cstate->dfsnum, from_state->low);
533 worklist_vec.pop ();
534 continue;
535 }
536
537 scc_entry e = { expr, 0 };
538 /* Not yet visited. DFS recurse and push it onto the stack. */
539 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
540 sccstack.safe_push (e);
541 cstate->dfsnum = next_dfs_num++;
542 cstate->low = cstate->dfsnum;
543 w.cstate = cstate;
544
545 if (streamer_handle_as_builtin_p (expr))
546 ;
547 else if (TREE_CODE (expr) == INTEGER_CST
548 && !TREE_OVERFLOW (expr))
549 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
550 else
551 {
552 DFS_write_tree_body (ob, expr, cstate, ref_p);
553
554 /* Walk any LTO-specific edges. */
555 if (DECL_P (expr)
556 && TREE_CODE (expr) != FUNCTION_DECL
557 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
558 {
559 /* Handle DECL_INITIAL for symbols. */
560 tree initial
561 = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
562 expr);
563 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
564 }
565 }
566 continue;
567 }
568
569 /* See if we found an SCC. */
570 if (cstate->low == cstate->dfsnum)
571 {
572 unsigned first, size;
573 tree x;
574
 575          /* If we are re-walking a single leaf-SCC, just pop it and
 576             let the earlier worklist item access the sccstack.  */
577 if (single_p)
578 {
579 worklist_vec.pop ();
580 continue;
581 }
582
583 /* Pop the SCC and compute its size. */
584 first = sccstack.length ();
585 do
586 {
587 x = sccstack[--first].t;
588 }
589 while (x != expr);
590 size = sccstack.length () - first;
591
592 /* No need to compute hashes for LTRANS units, we don't perform
593 any merging there. */
594 hashval_t scc_hash = 0;
595 unsigned scc_entry_len = 0;
596 if (!flag_wpa)
597 {
598 scc_hash = hash_scc (ob, first, size);
599
600 /* Put the entries with the least number of collisions first. */
601 unsigned entry_start = 0;
602 scc_entry_len = size + 1;
603 for (unsigned i = 0; i < size;)
604 {
605 unsigned from = i;
606 for (i = i + 1; i < size
607 && (sccstack[first + i].hash
608 == sccstack[first + from].hash); ++i)
609 ;
610 if (i - from < scc_entry_len)
611 {
612 scc_entry_len = i - from;
613 entry_start = from;
614 }
615 }
616 for (unsigned i = 0; i < scc_entry_len; ++i)
617 {
618 scc_entry tem = sccstack[first + i];
619 sccstack[first + i] = sccstack[first + entry_start + i];
620 sccstack[first + entry_start + i] = tem;
621 }
622
623 if (scc_entry_len == 1)
624 ; /* We already sorted SCC deterministically in hash_scc. */
625 else
626 /* Check that we have only one SCC.
 627                  Naturally we may have conflicts if the hash function is not
 628                  strong enough.  Let's see how far this gets.  */
629 {
630 #ifdef ENABLE_CHECKING
631 gcc_unreachable ();
632 #endif
633 }
634 }
635
636 /* Write LTO_tree_scc. */
637 streamer_write_record_start (ob, LTO_tree_scc);
638 streamer_write_uhwi (ob, size);
639 streamer_write_uhwi (ob, scc_hash);
640
641 /* Write size-1 SCCs without wrapping them inside SCC bundles.
642 All INTEGER_CSTs need to be handled this way as we need
643 their type to materialize them. Also builtins are handled
644 this way.
645 ??? We still wrap these in LTO_tree_scc so at the
646 input side we can properly identify the tree we want
 647             to ultimately return.  */
648 if (size == 1)
649 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
650 else
651 {
652 /* Write the size of the SCC entry candidates. */
653 streamer_write_uhwi (ob, scc_entry_len);
654
655 /* Write all headers and populate the streamer cache. */
656 for (unsigned i = 0; i < size; ++i)
657 {
658 hashval_t hash = sccstack[first+i].hash;
659 tree t = sccstack[first+i].t;
660 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
661 t, hash, NULL);
662 gcc_assert (!exists_p);
663
664 if (!lto_is_streamable (t))
665 internal_error ("tree code %qs is not supported "
666 "in LTO streams",
667 get_tree_code_name (TREE_CODE (t)));
668
669 gcc_checking_assert (!streamer_handle_as_builtin_p (t));
670
671 /* Write the header, containing everything needed to
672 materialize EXPR on the reading side. */
673 streamer_write_tree_header (ob, t);
674 }
675
676 /* Write the bitpacks and tree references. */
677 for (unsigned i = 0; i < size; ++i)
678 {
679 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
680
681 /* Mark the end of the tree. */
682 streamer_write_zero (ob);
683 }
684 }
685
686 /* Finally truncate the vector. */
687 sccstack.truncate (first);
688
689 if (from_state)
690 from_state->low = MIN (from_state->low, cstate->low);
691 worklist_vec.pop ();
692 continue;
693 }
694
695 gcc_checking_assert (from_state);
696 from_state->low = MIN (from_state->low, cstate->low);
697 if (cstate->dfsnum < from_state->dfsnum)
698 from_state->low = MIN (cstate->dfsnum, from_state->low);
699 worklist_vec.pop ();
700 }
701 worklist_vec.release ();
702 }
703
704 DFS::~DFS ()
705 {
706 sccstack.release ();
707 obstack_free (&sccstate_obstack, NULL);
708 }
709
710 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
711 DFS recurse for all tree edges originating from it. */
712
713 void
714 DFS::DFS_write_tree_body (struct output_block *ob,
715 tree expr, sccs *expr_state, bool ref_p)
716 {
717 #define DFS_follow_tree_edge(DEST) \
718 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
719
720 enum tree_code code;
721
722 code = TREE_CODE (expr);
723
724 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
725 {
726 if (TREE_CODE (expr) != IDENTIFIER_NODE)
727 DFS_follow_tree_edge (TREE_TYPE (expr));
728 }
729
730 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
731 {
732 for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
733 DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
734 }
735
736 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
737 {
738 DFS_follow_tree_edge (TREE_REALPART (expr));
739 DFS_follow_tree_edge (TREE_IMAGPART (expr));
740 }
741
742 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
743 {
744 /* Drop names that were created for anonymous entities. */
745 if (DECL_NAME (expr)
746 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
747 && ANON_AGGRNAME_P (DECL_NAME (expr)))
748 ;
749 else
750 DFS_follow_tree_edge (DECL_NAME (expr));
751 DFS_follow_tree_edge (DECL_CONTEXT (expr));
752 }
753
754 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
755 {
756 DFS_follow_tree_edge (DECL_SIZE (expr));
757 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
758
759 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
760 special handling in LTO, it must be handled by streamer hooks. */
761
762 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
763
764 /* Do not follow DECL_ABSTRACT_ORIGIN. We cannot handle debug information
765 for early inlining so drop it on the floor instead of ICEing in
766 dwarf2out.c. */
767
768 if ((TREE_CODE (expr) == VAR_DECL
769 || TREE_CODE (expr) == PARM_DECL)
770 && DECL_HAS_VALUE_EXPR_P (expr))
771 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
772 if (TREE_CODE (expr) == VAR_DECL)
773 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
774 }
775
776 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
777 {
778 if (TREE_CODE (expr) == TYPE_DECL)
779 DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
780 }
781
782 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
783 {
784 /* Make sure we don't inadvertently set the assembler name. */
785 if (DECL_ASSEMBLER_NAME_SET_P (expr))
786 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
787 }
788
789 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
790 {
791 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
792 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
793 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
794 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
795 DFS_follow_tree_edge (DECL_FCONTEXT (expr));
796 }
797
798 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
799 {
800 DFS_follow_tree_edge (DECL_VINDEX (expr));
801 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
802 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
803 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
804 }
805
806 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
807 {
808 DFS_follow_tree_edge (TYPE_SIZE (expr));
809 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
810 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
811 DFS_follow_tree_edge (TYPE_NAME (expr));
812 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
813 reconstructed during fixup. */
814 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
815 during fixup. */
816 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
817 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
818 /* TYPE_CANONICAL is re-computed during type merging, so no need
819 to follow it here. */
820 DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
821 }
822
823 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
824 {
825 if (TREE_CODE (expr) == ENUMERAL_TYPE)
826 DFS_follow_tree_edge (TYPE_VALUES (expr));
827 else if (TREE_CODE (expr) == ARRAY_TYPE)
828 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
829 else if (RECORD_OR_UNION_TYPE_P (expr))
830 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
831 DFS_follow_tree_edge (t);
832 else if (TREE_CODE (expr) == FUNCTION_TYPE
833 || TREE_CODE (expr) == METHOD_TYPE)
834 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
835
836 if (!POINTER_TYPE_P (expr))
837 DFS_follow_tree_edge (TYPE_MINVAL (expr));
838 DFS_follow_tree_edge (TYPE_MAXVAL (expr));
839 if (RECORD_OR_UNION_TYPE_P (expr))
840 DFS_follow_tree_edge (TYPE_BINFO (expr));
841 }
842
843 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
844 {
845 DFS_follow_tree_edge (TREE_PURPOSE (expr));
846 DFS_follow_tree_edge (TREE_VALUE (expr));
847 DFS_follow_tree_edge (TREE_CHAIN (expr));
848 }
849
850 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
851 {
852 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
853 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
854 }
855
856 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
857 {
858 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
859 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
860 DFS_follow_tree_edge (TREE_BLOCK (expr));
861 }
862
863 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
864 {
865 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
866 if (VAR_OR_FUNCTION_DECL_P (t)
867 && DECL_EXTERNAL (t))
868 /* We have to stream externals in the block chain as
869 non-references. See also
870 tree-streamer-out.c:streamer_write_chain. */
871 DFS_write_tree (ob, expr_state, t, ref_p, false);
872 else
873 DFS_follow_tree_edge (t);
874
875 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
876
877 /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
878 handle - those that represent inlined function scopes.
 879          For the rest, drop them on the floor instead of ICEing
880 in dwarf2out.c. */
881 if (inlined_function_outer_scope_p (expr))
882 {
883 tree ultimate_origin = block_ultimate_origin (expr);
884 DFS_follow_tree_edge (ultimate_origin);
885 }
886 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
887 information for early inlined BLOCKs so drop it on the floor instead
888 of ICEing in dwarf2out.c. */
889
890 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
891 streaming time. */
892
893 /* Do not output BLOCK_SUBBLOCKS. Instead on streaming-in this
894 list is re-constructed from BLOCK_SUPERCONTEXT. */
895 }
896
897 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
898 {
899 unsigned i;
900 tree t;
901
902 /* Note that the number of BINFO slots has already been emitted in
903 EXPR's header (see streamer_write_tree_header) because this length
904 is needed to build the empty BINFO node on the reader side. */
905 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
906 DFS_follow_tree_edge (t);
907 DFS_follow_tree_edge (BINFO_OFFSET (expr));
908 DFS_follow_tree_edge (BINFO_VTABLE (expr));
909 DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));
910
911 /* The number of BINFO_BASE_ACCESSES has already been emitted in
912 EXPR's bitfield section. */
913 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
914 DFS_follow_tree_edge (t);
915
916 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
917 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
918 }
919
920 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
921 {
922 unsigned i;
923 tree index, value;
924
925 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
926 {
927 DFS_follow_tree_edge (index);
928 DFS_follow_tree_edge (value);
929 }
930 }
931
932 if (code == OMP_CLAUSE)
933 {
934 int i;
935 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
936 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
937 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
938 }
939
940 #undef DFS_follow_tree_edge
941 }
942
943 /* Return a hash value for the tree T.
944 CACHE holds hash values of trees outside current SCC. MAP, if non-NULL,
 945    may hold hash values of trees inside the current SCC.  */
946
947 static hashval_t
948 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
949 {
950 inchash::hash hstate;
951
952 #define visit(SIBLING) \
953 do { \
954 unsigned ix; \
955 if (!SIBLING) \
956 hstate.add_int (0); \
957 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
958 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
959 else if (map) \
960 hstate.add_int (*map->get (SIBLING)); \
961 else \
962 hstate.add_int (1); \
963 } while (0)
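/* The three non-trivial cases above match the SCC hashing scheme: trees
   already outside the current SCC contribute their final cached hash;
   trees inside the current SCC contribute their provisional hash from MAP
   while hash values are being propagated across SCC edges (see hash_scc);
   and on the first pass, when MAP is NULL, every intra-SCC edge contributes
   the same constant so the initial hash does not depend on the entry
   point.  */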
964
965 /* Hash TS_BASE. */
966 enum tree_code code = TREE_CODE (t);
967 hstate.add_int (code);
968 if (!TYPE_P (t))
969 {
970 hstate.add_flag (TREE_SIDE_EFFECTS (t));
971 hstate.add_flag (TREE_CONSTANT (t));
972 hstate.add_flag (TREE_READONLY (t));
973 hstate.add_flag (TREE_PUBLIC (t));
974 }
975 hstate.add_flag (TREE_ADDRESSABLE (t));
976 hstate.add_flag (TREE_THIS_VOLATILE (t));
977 if (DECL_P (t))
978 hstate.add_flag (DECL_UNSIGNED (t));
979 else if (TYPE_P (t))
980 hstate.add_flag (TYPE_UNSIGNED (t));
981 if (TYPE_P (t))
982 hstate.add_flag (TYPE_ARTIFICIAL (t));
983 else
984 hstate.add_flag (TREE_NO_WARNING (t));
985 hstate.add_flag (TREE_NOTHROW (t));
986 hstate.add_flag (TREE_STATIC (t));
987 hstate.add_flag (TREE_PROTECTED (t));
988 hstate.add_flag (TREE_DEPRECATED (t));
989 if (code != TREE_BINFO)
990 hstate.add_flag (TREE_PRIVATE (t));
991 if (TYPE_P (t))
992 {
993 hstate.add_flag (TYPE_SATURATING (t));
994 hstate.add_flag (TYPE_ADDR_SPACE (t));
995 }
996 else if (code == SSA_NAME)
997 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
998 hstate.commit_flag ();
999
1000 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1001 {
1002 int i;
1003 hstate.add_wide_int (TREE_INT_CST_NUNITS (t));
1004 hstate.add_wide_int (TREE_INT_CST_EXT_NUNITS (t));
1005 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1006 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
1007 }
1008
1009 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1010 {
1011 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
1012 hstate.add_flag (r.cl);
1013 hstate.add_flag (r.sign);
1014 hstate.add_flag (r.signalling);
1015 hstate.add_flag (r.canonical);
1016 hstate.commit_flag ();
1017 hstate.add_int (r.uexp);
1018 hstate.add (r.sig, sizeof (r.sig));
1019 }
1020
1021 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1022 {
1023 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
1024 hstate.add_int (f.mode);
1025 hstate.add_int (f.data.low);
1026 hstate.add_int (f.data.high);
1027 }
1028
1029 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1030 {
1031 hstate.add_wide_int (DECL_MODE (t));
1032 hstate.add_flag (DECL_NONLOCAL (t));
1033 hstate.add_flag (DECL_VIRTUAL_P (t));
1034 hstate.add_flag (DECL_IGNORED_P (t));
1035 hstate.add_flag (DECL_ABSTRACT_P (t));
1036 hstate.add_flag (DECL_ARTIFICIAL (t));
1037 hstate.add_flag (DECL_USER_ALIGN (t));
1038 hstate.add_flag (DECL_PRESERVE_P (t));
1039 hstate.add_flag (DECL_EXTERNAL (t));
1040 hstate.add_flag (DECL_GIMPLE_REG_P (t));
1041 hstate.commit_flag ();
1042 hstate.add_int (DECL_ALIGN (t));
1043 if (code == LABEL_DECL)
1044 {
1045 hstate.add_int (EH_LANDING_PAD_NR (t));
1046 hstate.add_int (LABEL_DECL_UID (t));
1047 }
1048 else if (code == FIELD_DECL)
1049 {
1050 hstate.add_flag (DECL_PACKED (t));
1051 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1052 hstate.add_int (DECL_OFFSET_ALIGN (t));
1053 }
1054 else if (code == VAR_DECL)
1055 {
1056 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1057 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1058 }
1059 if (code == RESULT_DECL
1060 || code == PARM_DECL
1061 || code == VAR_DECL)
1062 {
1063 hstate.add_flag (DECL_BY_REFERENCE (t));
1064 if (code == VAR_DECL
1065 || code == PARM_DECL)
1066 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1067 }
1068 hstate.commit_flag ();
1069 }
1070
1071 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1072 hstate.add_int (DECL_REGISTER (t));
1073
1074 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1075 {
1076 hstate.add_flag (DECL_COMMON (t));
1077 hstate.add_flag (DECL_DLLIMPORT_P (t));
1078 hstate.add_flag (DECL_WEAK (t));
1079 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1080 hstate.add_flag (DECL_COMDAT (t));
1081 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1082 hstate.add_int (DECL_VISIBILITY (t));
1083 if (code == VAR_DECL)
1084 {
1085 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1086 hstate.add_flag (DECL_HARD_REGISTER (t));
1087 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1088 }
1089 if (TREE_CODE (t) == FUNCTION_DECL)
1090 {
1091 hstate.add_flag (DECL_FINAL_P (t));
1092 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1093 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1094 }
1095 hstate.commit_flag ();
1096 }
1097
1098 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1099 {
1100 hstate.add_int (DECL_BUILT_IN_CLASS (t));
1101 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1102 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1103 hstate.add_flag (DECL_UNINLINABLE (t));
1104 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1105 hstate.add_flag (DECL_IS_NOVOPS (t));
1106 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1107 hstate.add_flag (DECL_IS_MALLOC (t));
1108 hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
1109 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1110 hstate.add_flag (DECL_STATIC_CHAIN (t));
1111 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1112 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1113 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1114 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1115 hstate.add_flag (DECL_PURE_P (t));
1116 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1117 hstate.commit_flag ();
1118 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1119 hstate.add_int (DECL_FUNCTION_CODE (t));
1120 }
1121
1122 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1123 {
1124 hstate.add_wide_int (TYPE_MODE (t));
1125 hstate.add_flag (TYPE_STRING_FLAG (t));
1126 hstate.add_flag (TYPE_NO_FORCE_BLK (t));
1127 hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
1128 hstate.add_flag (TYPE_PACKED (t));
1129 hstate.add_flag (TYPE_RESTRICT (t));
1130 hstate.add_flag (TYPE_USER_ALIGN (t));
1131 hstate.add_flag (TYPE_READONLY (t));
1132 if (RECORD_OR_UNION_TYPE_P (t))
1133 {
1134 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1135 hstate.add_flag (TYPE_FINAL_P (t));
1136 }
1137 else if (code == ARRAY_TYPE)
1138 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1139 hstate.commit_flag ();
1140 hstate.add_int (TYPE_PRECISION (t));
1141 hstate.add_int (TYPE_ALIGN (t));
1142 hstate.add_int ((TYPE_ALIAS_SET (t) == 0
1143 || (!in_lto_p
1144 && get_alias_set (t) == 0))
1145 ? 0 : -1);
1146 }
1147
1148 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1149 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1150 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1151
1152 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1153 /* We don't stream these when passing things to a different target. */
1154 && !lto_stream_offload_p)
1155 hstate.add_wide_int (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1156
1157 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1158 hstate.add_wide_int (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1159
1160 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1161 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1162
1163 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1164 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1165
1166 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1167 {
1168 if (code != IDENTIFIER_NODE)
1169 visit (TREE_TYPE (t));
1170 }
1171
1172 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1173 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
1174 visit (VECTOR_CST_ELT (t, i));
1175
1176 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1177 {
1178 visit (TREE_REALPART (t));
1179 visit (TREE_IMAGPART (t));
1180 }
1181
1182 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1183 {
1184 /* Drop names that were created for anonymous entities. */
1185 if (DECL_NAME (t)
1186 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1187 && ANON_AGGRNAME_P (DECL_NAME (t)))
1188 ;
1189 else
1190 visit (DECL_NAME (t));
1191 if (DECL_FILE_SCOPE_P (t))
1192 ;
1193 else
1194 visit (DECL_CONTEXT (t));
1195 }
1196
1197 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1198 {
1199 visit (DECL_SIZE (t));
1200 visit (DECL_SIZE_UNIT (t));
1201 visit (DECL_ATTRIBUTES (t));
1202 if ((code == VAR_DECL
1203 || code == PARM_DECL)
1204 && DECL_HAS_VALUE_EXPR_P (t))
1205 visit (DECL_VALUE_EXPR (t));
1206 if (code == VAR_DECL
1207 && DECL_HAS_DEBUG_EXPR_P (t))
1208 visit (DECL_DEBUG_EXPR (t));
1209 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
1210 be able to call get_symbol_initial_value. */
1211 }
1212
1213 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1214 {
1215 if (code == TYPE_DECL)
1216 visit (DECL_ORIGINAL_TYPE (t));
1217 }
1218
1219 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1220 {
1221 if (DECL_ASSEMBLER_NAME_SET_P (t))
1222 visit (DECL_ASSEMBLER_NAME (t));
1223 }
1224
1225 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1226 {
1227 visit (DECL_FIELD_OFFSET (t));
1228 visit (DECL_BIT_FIELD_TYPE (t));
1229 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1230 visit (DECL_FIELD_BIT_OFFSET (t));
1231 visit (DECL_FCONTEXT (t));
1232 }
1233
1234 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1235 {
1236 visit (DECL_VINDEX (t));
1237 visit (DECL_FUNCTION_PERSONALITY (t));
1238 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1239 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1240 }
1241
1242 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1243 {
1244 visit (TYPE_SIZE (t));
1245 visit (TYPE_SIZE_UNIT (t));
1246 visit (TYPE_ATTRIBUTES (t));
1247 visit (TYPE_NAME (t));
1248 visit (TYPE_MAIN_VARIANT (t));
1249 if (TYPE_FILE_SCOPE_P (t))
1250 ;
1251 else
1252 visit (TYPE_CONTEXT (t));
1253 visit (TYPE_STUB_DECL (t));
1254 }
1255
1256 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1257 {
1258 if (code == ENUMERAL_TYPE)
1259 visit (TYPE_VALUES (t));
1260 else if (code == ARRAY_TYPE)
1261 visit (TYPE_DOMAIN (t));
1262 else if (RECORD_OR_UNION_TYPE_P (t))
1263 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1264 visit (f);
1265 else if (code == FUNCTION_TYPE
1266 || code == METHOD_TYPE)
1267 visit (TYPE_ARG_TYPES (t));
1268 if (!POINTER_TYPE_P (t))
1269 visit (TYPE_MINVAL (t));
1270 visit (TYPE_MAXVAL (t));
1271 if (RECORD_OR_UNION_TYPE_P (t))
1272 visit (TYPE_BINFO (t));
1273 }
1274
1275 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1276 {
1277 visit (TREE_PURPOSE (t));
1278 visit (TREE_VALUE (t));
1279 visit (TREE_CHAIN (t));
1280 }
1281
1282 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1283 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1284 visit (TREE_VEC_ELT (t, i));
1285
1286 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1287 {
1288 hstate.add_wide_int (TREE_OPERAND_LENGTH (t));
1289 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1290 visit (TREE_OPERAND (t, i));
1291 }
1292
1293 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1294 {
1295 unsigned i;
1296 tree b;
1297 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1298 visit (b);
1299 visit (BINFO_OFFSET (t));
1300 visit (BINFO_VTABLE (t));
1301 visit (BINFO_VPTR_FIELD (t));
1302 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1303 visit (b);
1304 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1305 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1306 }
1307
1308 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1309 {
1310 unsigned i;
1311 tree index, value;
1312 hstate.add_wide_int (CONSTRUCTOR_NELTS (t));
1313 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1314 {
1315 visit (index);
1316 visit (value);
1317 }
1318 }
1319
1320 if (code == OMP_CLAUSE)
1321 {
1322 int i;
1323 HOST_WIDE_INT val;
1324
1325 hstate.add_wide_int (OMP_CLAUSE_CODE (t));
1326 switch (OMP_CLAUSE_CODE (t))
1327 {
1328 case OMP_CLAUSE_DEFAULT:
1329 val = OMP_CLAUSE_DEFAULT_KIND (t);
1330 break;
1331 case OMP_CLAUSE_SCHEDULE:
1332 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1333 break;
1334 case OMP_CLAUSE_DEPEND:
1335 val = OMP_CLAUSE_DEPEND_KIND (t);
1336 break;
1337 case OMP_CLAUSE_MAP:
1338 val = OMP_CLAUSE_MAP_KIND (t);
1339 break;
1340 case OMP_CLAUSE_PROC_BIND:
1341 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1342 break;
1343 case OMP_CLAUSE_REDUCTION:
1344 val = OMP_CLAUSE_REDUCTION_CODE (t);
1345 break;
1346 default:
1347 val = 0;
1348 break;
1349 }
1350 hstate.add_wide_int (val);
1351 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1352 visit (OMP_CLAUSE_OPERAND (t, i));
1353 visit (OMP_CLAUSE_CHAIN (t));
1354 }
1355
1356 return hstate.end ();
1357
1358 #undef visit
1359 }
1360
1361 /* Compare two SCC entries by their hash value for qsorting them. */
1362
1363 int
1364 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1365 {
1366 const scc_entry *p1 = (const scc_entry *) p1_;
1367 const scc_entry *p2 = (const scc_entry *) p2_;
1368 if (p1->hash < p2->hash)
1369 return -1;
1370 else if (p1->hash > p2->hash)
1371 return 1;
1372 return 0;
1373 }
1374
1375 /* Return a hash value for the SCC on the SCC stack from FIRST with
1376 size SIZE. */
1377
1378 hashval_t
1379 DFS::hash_scc (struct output_block *ob,
1380 unsigned first, unsigned size)
1381 {
1382 unsigned int last_classes = 0, iterations = 0;
1383
1384 /* Compute hash values for the SCC members. */
1385 for (unsigned i = 0; i < size; ++i)
1386 sccstack[first+i].hash = hash_tree (ob->writer_cache, NULL,
1387 sccstack[first+i].t);
1388
1389 if (size == 1)
1390 return sccstack[first].hash;
1391
1392   /* We aim to get a unique hash for every tree within the SCC and compute the hash
1393      of the whole SCC by combining all values together in a stable (entry point
1394      independent) order.  This guarantees that the same SCC regions within
1395      different translation units will get the same hash values and therefore
1396      will be merged at WPA time.
1397
1398      Often the hashes are already unique.  In that case we compute the SCC hash
1399      by combining individual hash values in increasing order.
1400
1401      If there are duplicates we seek at least one tree with a unique hash (and
1402      pick the one with the minimal hash among those).  Then we obtain a stable
1403      order by a DFS walk starting from this unique tree and then use the index
1404      within this order to make individual hash values unique.
1405
1406      If there is no tree with a unique hash, we iteratively propagate the hash
1407      values across the internal edges of the SCC.  This usually quickly leads
1408      to unique hashes.  Consider, for example, an SCC containing two pointers
1409      that are identical except for the type they point to, and assume that these
1410      types are also part of the SCC.
1411      The propagation will add the points-to type information into their hash
1412      values.  */
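  /* Concretely, with two pointer types P1 -> T1 and P2 -> T2 in one SCC,
     where T1 and T2 already hash differently but P1 and P2 collide on the
     first pass, one propagation round folds the hash of T1 into P1 and the
     hash of T2 into P2, the collision disappears, and the next sort finds
     a unique entry point.  */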
1413 do
1414 {
1415       /* Sort the SCC so we can easily check for uniqueness.  */
1416 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1417
1418 unsigned int classes = 1;
1419 int firstunique = -1;
1420
1421       /* Find the tree with the lowest unique hash (if it exists) and compute
1422          the number of equivalence classes.  */
1423 if (sccstack[first].hash != sccstack[first+1].hash)
1424 firstunique = 0;
1425 for (unsigned i = 1; i < size; ++i)
1426 if (sccstack[first+i-1].hash != sccstack[first+i].hash)
1427 {
1428 classes++;
1429 if (firstunique == -1
1430 && (i == size - 1
1431 || sccstack[first+i+1].hash != sccstack[first+i].hash))
1432 firstunique = i;
1433 }
1434
1435       /* If we found a tree with a unique hash, stop the iteration.  */
1436 if (firstunique != -1
1437 /* Also terminate if we run out of iterations or if the number of
1438 equivalence classes is no longer increasing.
1439              For example, a cyclic list of trees that are all equivalent will
1440              never have a unique entry point; we, however, do not build such SCCs
1441 in our IL. */
1442 || classes <= last_classes || iterations > 16)
1443 {
1444 hashval_t scc_hash;
1445
1446 /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
1447              starting from FIRSTUNIQUE to obtain a stable order.  */
1448 if (classes != size && firstunique != -1)
1449 {
1450 hash_map <tree, hashval_t> map(size*2);
1451
1452 /* Store hash values into a map, so we can associate them with
1453 reordered SCC. */
1454 for (unsigned i = 0; i < size; ++i)
1455 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1456
1457 DFS again (ob, sccstack[first+firstunique].t, false, false, true);
1458 gcc_assert (again.sccstack.length () == size);
1459
1460 memcpy (sccstack.address () + first,
1461 again.sccstack.address (),
1462 sizeof (scc_entry) * size);
1463
1464 /* Update hash values of individual members by hashing in the
1465 index within the stable order. This ensures uniqueness.
1466 Also compute the scc_hash by mixing in all hash values in the
1467 stable order we obtained. */
1468 sccstack[first].hash = *map.get (sccstack[first].t);
1469 scc_hash = sccstack[first].hash;
1470 for (unsigned i = 1; i < size; ++i)
1471 {
1472 sccstack[first+i].hash
1473 = iterative_hash_hashval_t (i,
1474 *map.get (sccstack[first+i].t));
1475 scc_hash = iterative_hash_hashval_t (scc_hash,
1476 sccstack[first+i].hash);
1477 }
1478 }
1479           /* If we got unique hash values for each tree, then the sort already
1480              ensured an entry point independent order.  Only compute the final
1481              SCC hash.
1482
1483              If we failed to find a unique entry point, we go by the same
1484              route.  We will eventually introduce unwanted hash conflicts.  */
1485 else
1486 {
1487 scc_hash = sccstack[first].hash;
1488 for (unsigned i = 1; i < size; ++i)
1489 scc_hash = iterative_hash_hashval_t (scc_hash,
1490 sccstack[first+i].hash);
1491              /* We cannot 100% guarantee that the hash will not conflict
1492                 in a way such that the unique hash is not found.  This, however,
1493                 should be an extremely rare situation.  ICE for now so possible
1494                 issues are found and evaluated.  */
1495 gcc_checking_assert (classes == size);
1496 }
1497
1498 /* To avoid conflicts across SCCs iteratively hash the whole SCC
1499 hash into the hash of each of the elements. */
1500 for (unsigned i = 0; i < size; ++i)
1501 sccstack[first+i].hash
1502 = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
1503 return scc_hash;
1504 }
1505
1506 last_classes = classes;
1507 iterations++;
1508
1509 /* We failed to identify the entry point; propagate hash values across
1510 the edges. */
1511 {
1512 hash_map <tree, hashval_t> map(size*2);
1513 for (unsigned i = 0; i < size; ++i)
1514 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1515
1516 for (unsigned i = 0; i < size; i++)
1517 sccstack[first+i].hash = hash_tree (ob->writer_cache, &map,
1518 sccstack[first+i].t);
1519 }
1520 }
1521 while (true);
1522 }
1523
1524 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1525 already in the streamer cache. Main routine called for
1526 each visit of EXPR. */
1527
1528 void
1529 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1530 tree expr, bool ref_p, bool this_ref_p)
1531 {
1532 /* Handle special cases. */
1533 if (expr == NULL_TREE)
1534 return;
1535
1536 /* Do not DFS walk into indexable trees. */
1537 if (this_ref_p && tree_is_indexable (expr))
1538 return;
1539
1540 /* Check if we already streamed EXPR. */
1541 if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1542 return;
1543
1544 worklist w;
1545 w.expr = expr;
1546 w.from_state = from_state;
1547 w.cstate = NULL;
1548 w.ref_p = ref_p;
1549 w.this_ref_p = this_ref_p;
1550 worklist_vec.safe_push (w);
1551 }
1552
1553
1554 /* Emit the physical representation of tree node EXPR to output block
1555 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
1556 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1557
1558 void
1559 lto_output_tree (struct output_block *ob, tree expr,
1560 bool ref_p, bool this_ref_p)
1561 {
1562 unsigned ix;
1563 bool existed_p;
1564
1565 if (expr == NULL_TREE)
1566 {
1567 streamer_write_record_start (ob, LTO_null);
1568 return;
1569 }
1570
1571 if (this_ref_p && tree_is_indexable (expr))
1572 {
1573 lto_output_tree_ref (ob, expr);
1574 return;
1575 }
1576
1577 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1578 if (existed_p)
1579 {
1580 /* If a node has already been streamed out, make sure that
1581 we don't write it more than once. Otherwise, the reader
1582 will instantiate two different nodes for the same object. */
1583 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1584 streamer_write_uhwi (ob, ix);
1585 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1586 lto_tree_code_to_tag (TREE_CODE (expr)));
1587 lto_stats.num_pickle_refs_output++;
1588 }
1589 else
1590 {
1591 /* This is the first time we see EXPR, write all reachable
1592 trees to OB. */
1593 static bool in_dfs_walk;
1594
1595       /* Protect against recursion, which would mean a disconnect between
1596          the tree edges we walk in the DFS walk and the edges
1597          we stream out.  */
1598 gcc_assert (!in_dfs_walk);
1599
1600 /* Start the DFS walk. */
1601 /* Save ob state ... */
1602 /* let's see ... */
1603 in_dfs_walk = true;
1604 DFS (ob, expr, ref_p, this_ref_p, false);
1605 in_dfs_walk = false;
1606
1607 /* Finally append a reference to the tree we were writing.
1608 ??? If expr ended up as a singleton we could have
1609          inlined it here and avoided outputting a reference.  */
1610 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1611 gcc_assert (existed_p);
1612 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1613 streamer_write_uhwi (ob, ix);
1614 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1615 lto_tree_code_to_tag (TREE_CODE (expr)));
1616 lto_stats.num_pickle_refs_output++;
1617 }
1618 }
1619
1620
1621 /* Output to OB a list of try/catch handlers starting with FIRST. */
1622
1623 static void
1624 output_eh_try_list (struct output_block *ob, eh_catch first)
1625 {
1626 eh_catch n;
1627
1628 for (n = first; n; n = n->next_catch)
1629 {
1630 streamer_write_record_start (ob, LTO_eh_catch);
1631 stream_write_tree (ob, n->type_list, true);
1632 stream_write_tree (ob, n->filter_list, true);
1633 stream_write_tree (ob, n->label, true);
1634 }
1635
1636 streamer_write_record_start (ob, LTO_null);
1637 }
1638
1639
1640 /* Output EH region R to OB, or an LTO_null record if R is NULL.  */
1643
1644 static void
1645 output_eh_region (struct output_block *ob, eh_region r)
1646 {
1647 enum LTO_tags tag;
1648
1649 if (r == NULL)
1650 {
1651 streamer_write_record_start (ob, LTO_null);
1652 return;
1653 }
1654
1655 if (r->type == ERT_CLEANUP)
1656 tag = LTO_ert_cleanup;
1657 else if (r->type == ERT_TRY)
1658 tag = LTO_ert_try;
1659 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1660 tag = LTO_ert_allowed_exceptions;
1661 else if (r->type == ERT_MUST_NOT_THROW)
1662 tag = LTO_ert_must_not_throw;
1663 else
1664 gcc_unreachable ();
1665
1666 streamer_write_record_start (ob, tag);
1667 streamer_write_hwi (ob, r->index);
1668
1669 if (r->outer)
1670 streamer_write_hwi (ob, r->outer->index);
1671 else
1672 streamer_write_zero (ob);
1673
1674 if (r->inner)
1675 streamer_write_hwi (ob, r->inner->index);
1676 else
1677 streamer_write_zero (ob);
1678
1679 if (r->next_peer)
1680 streamer_write_hwi (ob, r->next_peer->index);
1681 else
1682 streamer_write_zero (ob);
1683
1684 if (r->type == ERT_TRY)
1685 {
1686 output_eh_try_list (ob, r->u.eh_try.first_catch);
1687 }
1688 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1689 {
1690 stream_write_tree (ob, r->u.allowed.type_list, true);
1691 stream_write_tree (ob, r->u.allowed.label, true);
1692 streamer_write_uhwi (ob, r->u.allowed.filter);
1693 }
1694 else if (r->type == ERT_MUST_NOT_THROW)
1695 {
1696 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1697 bitpack_d bp = bitpack_create (ob->main_stream);
1698 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1699 streamer_write_bitpack (&bp);
1700 }
1701
1702 if (r->landing_pads)
1703 streamer_write_hwi (ob, r->landing_pads->index);
1704 else
1705 streamer_write_zero (ob);
1706 }
1707
1708
1709 /* Output landing pad LP to OB. */
1710
1711 static void
1712 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1713 {
1714 if (lp == NULL)
1715 {
1716 streamer_write_record_start (ob, LTO_null);
1717 return;
1718 }
1719
1720 streamer_write_record_start (ob, LTO_eh_landing_pad);
1721 streamer_write_hwi (ob, lp->index);
1722 if (lp->next_lp)
1723 streamer_write_hwi (ob, lp->next_lp->index);
1724 else
1725 streamer_write_zero (ob);
1726
1727 if (lp->region)
1728 streamer_write_hwi (ob, lp->region->index);
1729 else
1730 streamer_write_zero (ob);
1731
1732 stream_write_tree (ob, lp->post_landing_pad, true);
1733 }
1734
1735
1736 /* Output the existing eh_table to OB. */
1737
1738 static void
1739 output_eh_regions (struct output_block *ob, struct function *fn)
1740 {
1741 if (fn->eh && fn->eh->region_tree)
1742 {
1743 unsigned i;
1744 eh_region eh;
1745 eh_landing_pad lp;
1746 tree ttype;
1747
1748 streamer_write_record_start (ob, LTO_eh_table);
1749
1750 /* Emit the index of the root of the EH region tree. */
1751 streamer_write_hwi (ob, fn->eh->region_tree->index);
1752
1753 /* Emit all the EH regions in the region array. */
1754 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1755 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1756 output_eh_region (ob, eh);
1757
1758 /* Emit all landing pads. */
1759 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1760 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1761 output_eh_lp (ob, lp);
1762
1763 /* Emit all the runtime type data. */
1764 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1765 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1766 stream_write_tree (ob, ttype, true);
1767
1768 /* Emit the table of action chains. */
1769 if (targetm.arm_eabi_unwinder)
1770 {
1771 tree t;
1772 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1773 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1774 stream_write_tree (ob, t, true);
1775 }
1776 else
1777 {
1778 uchar c;
1779 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1780 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1781 streamer_write_char_stream (ob->main_stream, c);
1782 }
1783 }
1784
1785 /* The LTO_null either terminates the record or indicates that there
1786 are no eh_records at all. */
1787 streamer_write_record_start (ob, LTO_null);
1788 }
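/* The complete eh_table record emitted above is therefore: LTO_eh_table,
   the index of the region tree root, the length of region_array followed
   by one record per region, the length of lp_array followed by one record
   per landing pad, the length of ttype_data followed by the runtime type
   trees, the length of the action-chain data followed by its elements,
   and a terminating LTO_null (which is all that is written when the
   function has no EH table).  */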
1789
1790
1791 /* Output all of the active ssa names to the ssa_names stream. */
1792
1793 static void
1794 output_ssa_names (struct output_block *ob, struct function *fn)
1795 {
1796 unsigned int i, len;
1797
1798 len = vec_safe_length (SSANAMES (fn));
1799 streamer_write_uhwi (ob, len);
1800
1801 for (i = 1; i < len; i++)
1802 {
1803 tree ptr = (*SSANAMES (fn))[i];
1804
1805 if (ptr == NULL_TREE
1806 || SSA_NAME_IN_FREE_LIST (ptr)
1807 || virtual_operand_p (ptr))
1808 continue;
1809
1810 streamer_write_uhwi (ob, i);
1811 streamer_write_char_stream (ob->main_stream,
1812 SSA_NAME_IS_DEFAULT_DEF (ptr));
1813 if (SSA_NAME_VAR (ptr))
1814 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1815 else
1816 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1817 stream_write_tree (ob, TREE_TYPE (ptr), true);
1818 }
1819
1820 streamer_write_zero (ob);
1821 }
1822
1823
1824 /* Output a wide-int. */
1825
1826 static void
1827 streamer_write_wi (struct output_block *ob,
1828 const widest_int &w)
1829 {
1830 int len = w.get_len ();
1831
1832 streamer_write_uhwi (ob, w.get_precision ());
1833 streamer_write_uhwi (ob, len);
1834 for (int i = 0; i < len; i++)
1835 streamer_write_hwi (ob, w.elt (i));
1836 }
1837
1838
1839 /* Output the CFG of FN to OB's cfg stream. */
1840
1841 static void
1842 output_cfg (struct output_block *ob, struct function *fn)
1843 {
1844 struct lto_output_stream *tmp_stream = ob->main_stream;
1845 basic_block bb;
1846
1847 ob->main_stream = ob->cfg_stream;
1848
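/* The CFG goes to its own stream: the profile status and the highest basic
   block number come first, then each block with its successor edges, the
   basic block chain order, and finally the loop tree. */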
1849 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1850 profile_status_for_fn (fn));
1851
1852 /* Output the number of the highest basic block. */
1853 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1854
1855 FOR_ALL_BB_FN (bb, fn)
1856 {
1857 edge_iterator ei;
1858 edge e;
1859
1860 streamer_write_hwi (ob, bb->index);
1861
1862 /* Output the successors and the edge flags. */
1863 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1864 FOR_EACH_EDGE (e, ei, bb->succs)
1865 {
1866 streamer_write_uhwi (ob, e->dest->index);
1867 streamer_write_hwi (ob, e->probability);
1868 streamer_write_gcov_count (ob, e->count);
1869 streamer_write_uhwi (ob, e->flags);
1870 }
1871 }
1872
1873 streamer_write_hwi (ob, -1);
1874
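/* Stream the order of the basic block chain (the next_bb links), terminated
   by -1, so the reader can restore the original block layout. */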
1875 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1876 while (bb->next_bb)
1877 {
1878 streamer_write_hwi (ob, bb->next_bb->index);
1879 bb = bb->next_bb;
1880 }
1881
1882 streamer_write_hwi (ob, -1);
1883
1884 /* ??? The cfgloop interface is tied to cfun. */
1885 gcc_assert (cfun == fn);
1886
1887 /* Output the number of loops. */
1888 streamer_write_uhwi (ob, number_of_loops (fn));
1889
1890 /* Output each loop, skipping the tree root which has number zero. */
1891 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1892 {
1893 struct loop *loop = get_loop (fn, i);
1894
1895 /* Write the index of the loop header. That's enough to rebuild
1896 the loop tree on the reader side. Stream -1 for an unused
1897 loop entry. */
1898 if (!loop)
1899 {
1900 streamer_write_hwi (ob, -1);
1901 continue;
1902 }
1903 else
1904 streamer_write_hwi (ob, loop->header->index);
1905
1906 /* Write everything copy_loop_info copies. */
1907 streamer_write_enum (ob->main_stream,
1908 loop_estimation, EST_LAST, loop->estimate_state);
1909 streamer_write_hwi (ob, loop->any_upper_bound);
1910 if (loop->any_upper_bound)
1911 streamer_write_wi (ob, loop->nb_iterations_upper_bound);
1912 streamer_write_hwi (ob, loop->any_estimate);
1913 if (loop->any_estimate)
1914 streamer_write_wi (ob, loop->nb_iterations_estimate);
1915
1916 /* Write OMP SIMD related info. */
1917 streamer_write_hwi (ob, loop->safelen);
1918 streamer_write_hwi (ob, loop->dont_vectorize);
1919 streamer_write_hwi (ob, loop->force_vectorize);
1920 stream_write_tree (ob, loop->simduid, true);
1921 }
1922
1923 ob->main_stream = tmp_stream;
1924 }
1925
1926
1927 /* Create the section header in the file using OB and write out its streams.
1928    If the section type is for a function body, FN must be that function's decl. */
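/* The section written here consists of an lto_function_header recording the
   stream sizes, followed by the CFG stream (for function bodies only), the
   main stream and the string stream. */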
1929
1930 void
1931 produce_asm (struct output_block *ob, tree fn)
1932 {
1933 enum lto_section_type section_type = ob->section_type;
1934 struct lto_function_header header;
1935 char *section_name;
1936
1937 if (section_type == LTO_section_function_body)
1938 {
1939 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1940 section_name = lto_get_section_name (section_type, name, NULL);
1941 }
1942 else
1943 section_name = lto_get_section_name (section_type, NULL, NULL);
1944
1945 lto_begin_section (section_name, !flag_wpa);
1946 free (section_name);
1947
1948 /* The entire header stream is computed here. */
1949 memset (&header, 0, sizeof (struct lto_function_header));
1950
1951 /* Write the header. */
1952 header.major_version = LTO_major_version;
1953 header.minor_version = LTO_minor_version;
1954
1955 if (section_type == LTO_section_function_body)
1956 header.cfg_size = ob->cfg_stream->total_size;
1957 header.main_size = ob->main_stream->total_size;
1958 header.string_size = ob->string_stream->total_size;
1959 lto_write_data (&header, sizeof header);
1960
1961 /* Put all of the gimple and the string table out to the asm file as a
1962 block of text. */
1963 if (section_type == LTO_section_function_body)
1964 lto_write_stream (ob->cfg_stream);
1965 lto_write_stream (ob->main_stream);
1966 lto_write_stream (ob->string_stream);
1967
1968 lto_end_section ();
1969 }
1970
1971
1972 /* Output the base body of struct function FN using output block OB. */
1973
1974 static void
1975 output_struct_function_base (struct output_block *ob, struct function *fn)
1976 {
1977 struct bitpack_d bp;
1978 unsigned i;
1979 tree t;
1980
1981 /* Output the static chain and non-local goto save area. */
1982 stream_write_tree (ob, fn->static_chain_decl, true);
1983 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
1984
1985 /* Output all the local variables in the function. */
1986 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
1987 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
1988 stream_write_tree (ob, t, true);
1989
1990 /* Output current IL state of the function. */
1991 streamer_write_uhwi (ob, fn->curr_properties);
1992
1993 /* Write all the attributes for FN. */
1994 bp = bitpack_create (ob->main_stream);
1995 bp_pack_value (&bp, fn->is_thunk, 1);
1996 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
1997 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
1998 bp_pack_value (&bp, fn->returns_struct, 1);
1999 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
2000 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
2001 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
2002 bp_pack_value (&bp, fn->after_inlining, 1);
2003 bp_pack_value (&bp, fn->stdarg, 1);
2004 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
2005 bp_pack_value (&bp, fn->calls_alloca, 1);
2006 bp_pack_value (&bp, fn->calls_setjmp, 1);
2007 bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
2008 bp_pack_value (&bp, fn->has_simduid_loops, 1);
2009 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
2010 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
2011 bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);
2012
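/* Note that the loci are packed into the same bitpack as the flags above;
   the whole bitpack is only written out below. */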
2013 /* Output the function start and end loci. */
2014 stream_output_location (ob, &bp, fn->function_start_locus);
2015 stream_output_location (ob, &bp, fn->function_end_locus);
2016
2017 streamer_write_bitpack (&bp);
2018 }
2019
2020
2021 /* Output the body of function NODE->DECL. */
2022
2023 static void
2024 output_function (struct cgraph_node *node)
2025 {
2026 tree function;
2027 struct function *fn;
2028 basic_block bb;
2029 struct output_block *ob;
2030
2031 function = node->decl;
2032 fn = DECL_STRUCT_FUNCTION (function);
2033 ob = create_output_block (LTO_section_function_body);
2034
2035 clear_line_info (ob);
2036 ob->symbol = node;
2037
2038 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
2039
2040 /* Set current_function_decl and cfun. */
2041 push_cfun (fn);
2042
2043 /* Make string 0 be a NULL string. */
2044 streamer_write_char_stream (ob->string_stream, 0);
2045
2046 streamer_write_record_start (ob, LTO_function);
2047
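/* The function record streamed below is: the result decl, the argument
   chain, DECL_INITIAL, then a flag that is 1 when GIMPLE follows (the
   function base data, the SSA names, the EH regions, the basic blocks
   terminated by LTO_null, and the CFG) or 0 for an abstract function
   streamed only for debug info. */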
2048 /* Output the result decl and the argument decls. */
2049 stream_write_tree (ob, DECL_RESULT (function), true);
2050 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
2051
2052 /* Output DECL_INITIAL for the function, which contains the tree of
2053 lexical scopes. */
2054 stream_write_tree (ob, DECL_INITIAL (function), true);
2055
2056 /* We also stream abstract functions, for which we stream only what is
2057 needed for debug info. */
2058 if (gimple_has_body_p (function))
2059 {
2060 streamer_write_uhwi (ob, 1);
2061 output_struct_function_base (ob, fn);
2062
2063 /* Output all the SSA names used in the function. */
2064 output_ssa_names (ob, fn);
2065
2066 /* Output any exception handling regions. */
2067 output_eh_regions (ob, fn);
2068
2069
2070 /* We will renumber the statements. The code that does this uses
2071 the same ordering that we use for serializing them so we can use
2072 the same code on the other end and not have to write out the
2073 statement numbers. We do not assign UIDs to PHIs here because
2074 virtual PHIs get re-computed on-the-fly which would make numbers
2075 inconsistent. */
2076 set_gimple_stmt_max_uid (cfun, 0);
2077 FOR_ALL_BB_FN (bb, cfun)
2078 {
2079 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2080 gsi_next (&gsi))
2081 {
2082 gphi *stmt = gsi.phi ();
2083
2084 /* Virtual PHIs are not going to be streamed. */
2085 if (!virtual_operand_p (gimple_phi_result (stmt)))
2086 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2087 }
2088 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
2089 gsi_next (&gsi))
2090 {
2091 gimple stmt = gsi_stmt (gsi);
2092 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2093 }
2094 }
2095 /* To avoid keeping duplicate gimple IDs in the statements, renumber
2096 virtual phis now. */
2097 FOR_ALL_BB_FN (bb, cfun)
2098 {
2099 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2100 gsi_next (&gsi))
2101 {
2102 gphi *stmt = gsi.phi ();
2103 if (virtual_operand_p (gimple_phi_result (stmt)))
2104 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2105 }
2106 }
2107
2108 /* Output the code for the function. */
2109 FOR_ALL_BB_FN (bb, fn)
2110 output_bb (ob, bb, fn);
2111
2112 /* The terminator for this function. */
2113 streamer_write_record_start (ob, LTO_null);
2114
2115 output_cfg (ob, fn);
2116
2117 pop_cfun ();
2118 }
2119 else
2120 streamer_write_uhwi (ob, 0);
2121
2122 /* Create a section to hold the pickled output of this function. */
2123 produce_asm (ob, function);
2124
2125 destroy_output_block (ob);
2126 }
2127
2128 /* Output the initializer (constructor) of variable NODE->DECL. */
2129
2130 static void
2131 output_constructor (struct varpool_node *node)
2132 {
2133 tree var = node->decl;
2134 struct output_block *ob;
2135
2136 ob = create_output_block (LTO_section_function_body);
2137
2138 clear_line_info (ob);
2139 ob->symbol = node;
2140
2141 /* Make string 0 be a NULL string. */
2142 streamer_write_char_stream (ob->string_stream, 0);
2143
2144 /* Output DECL_INITIAL of the variable, which holds the constructor
2145 (initializer) being streamed. */
2146 stream_write_tree (ob, DECL_INITIAL (var), true);
2147
2148 /* Create a section to hold the pickled output of this initializer. */
2149 produce_asm (ob, var);
2150
2151 destroy_output_block (ob);
2152 }
2153
2154
2155 /* Emit toplevel asms. */
2156
2157 void
2158 lto_output_toplevel_asms (void)
2159 {
2160 struct output_block *ob;
2161 struct asm_node *can;
2162 char *section_name;
2163 struct lto_simple_header_with_strings header;
2164
2165 if (!symtab->first_asm_symbol ())
2166 return;
2167
2168 ob = create_output_block (LTO_section_asm);
2169
2170 /* Make string 0 be a NULL string. */
2171 streamer_write_char_stream (ob->string_stream, 0);
2172
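/* Each toplevel asm is streamed as its string followed by its symbol order;
   a NULL string terminates the list. */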
2173 for (can = symtab->first_asm_symbol (); can; can = can->next)
2174 {
2175 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2176 streamer_write_hwi (ob, can->order);
2177 }
2178
2179 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2180
2181 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
2182 lto_begin_section (section_name, !flag_wpa);
2183 free (section_name);
2184
2185 /* The entire header stream is computed here. */
2186 memset (&header, 0, sizeof (header));
2187
2188 /* Write the header. */
2189 header.major_version = LTO_major_version;
2190 header.minor_version = LTO_minor_version;
2191
2192 header.main_size = ob->main_stream->total_size;
2193 header.string_size = ob->string_stream->total_size;
2194 lto_write_data (&header, sizeof header);
2195
2196 /* Put all of the gimple and the string table out to the asm file as a
2197 block of text. */
2198 lto_write_stream (ob->main_stream);
2199 lto_write_stream (ob->string_stream);
2200
2201 lto_end_section ();
2202
2203 destroy_output_block (ob);
2204 }
2205
2206
2207 /* Copy the function body or variable constructor of NODE without deserializing. */
2208
2209 static void
2210 copy_function_or_variable (struct symtab_node *node)
2211 {
2212 tree function = node->decl;
2213 struct lto_file_decl_data *file_data = node->lto_file_data;
2214 const char *data;
2215 size_t len;
2216 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2217 char *section_name =
2218 lto_get_section_name (LTO_section_function_body, name, NULL);
2219 size_t i, j;
2220 struct lto_in_decl_state *in_state;
2221 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2222
2223 lto_begin_section (section_name, !flag_wpa);
2224 free (section_name);
2225
2226 /* We may have renamed the declaration, e.g., a static function. */
2227 name = lto_get_decl_name_mapping (file_data, name);
2228
2229 data = lto_get_section_data (file_data, LTO_section_function_body,
2230 name, &len);
2231 gcc_assert (data);
2232
2233 /* Do a bit copy of the function body. */
2234 lto_write_data (data, len);
2235
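/* Because the body was copied bit-for-bit above, the reader will resolve
   decl references using the same stream indices as the input file, so the
   input decl streams must be replicated into the output state below. */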
2236 /* Copy decls. */
2237 in_state =
2238 lto_get_function_in_decl_state (node->lto_file_data, function);
2239 gcc_assert (in_state);
2240
2241 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2242 {
2243 size_t n = vec_safe_length (in_state->streams[i]);
2244 vec<tree, va_gc> *trees = in_state->streams[i];
2245 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2246
2247 /* The out state must have the same indices as the in state, so just
2248 copy the vector.  All the encoders in the out state must still be
2249 empty when we reach here. */
2250 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2251 encoder->trees.reserve_exact (n);
2252 for (j = 0; j < n; j++)
2253 encoder->trees.safe_push ((*trees)[j]);
2254 }
2255
2256 lto_free_section_data (file_data, LTO_section_function_body, name,
2257 data, len);
2258 lto_end_section ();
2259 }
2260
2261 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2262
2263 static tree
2264 wrap_refs (tree *tp, int *ws, void *)
2265 {
2266 tree t = *tp;
2267 if (handled_component_p (t)
2268 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL)
2269 {
2270 tree decl = TREE_OPERAND (t, 0);
2271 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2272 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2273 build1 (ADDR_EXPR, ptrtype, decl),
2274 build_int_cst (ptrtype, 0));
2275 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2276 *ws = 0;
2277 }
2278 else if (TREE_CODE (t) == CONSTRUCTOR)
2279 ;
2280 else if (!EXPR_P (t))
2281 *ws = 0;
2282 return NULL_TREE;
2283 }
2284
2285 /* Main entry point from the pass manager. */
2286
2287 void
2288 lto_output (void)
2289 {
2290 struct lto_out_decl_state *decl_state;
2291 #ifdef ENABLE_CHECKING
2292 bitmap output = lto_bitmap_alloc ();
2293 #endif
2294 int i, n_nodes;
2295 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2296
2297 /* Initialize the streamer. */
2298 lto_streamer_init ();
2299
2300 n_nodes = lto_symtab_encoder_size (encoder);
2301 /* Process the encoded symtab nodes, streaming bodies and initializers. */
2302 for (i = 0; i < n_nodes; i++)
2303 {
2304 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2305 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2306 {
2307 if (lto_symtab_encoder_encode_body_p (encoder, node)
2308 && !node->alias)
2309 {
2310 #ifdef ENABLE_CHECKING
2311 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2312 bitmap_set_bit (output, DECL_UID (node->decl));
2313 #endif
2314 decl_state = lto_new_out_decl_state ();
2315 lto_push_out_decl_state (decl_state);
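/* Re-stream the body from GIMPLE when it is in memory, when not running
   WPA, or when the node may be a thunk to be synthesized at WPA time;
   otherwise copy the already-streamed body verbatim from the input file. */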
2316 if (gimple_has_body_p (node->decl) || !flag_wpa
2317 /* Thunks have no body but they may be synthesized
2318 at WPA time. */
2319 || DECL_ARGUMENTS (node->decl))
2320 output_function (node);
2321 else
2322 copy_function_or_variable (node);
2323 gcc_assert (lto_get_out_decl_state () == decl_state);
2324 lto_pop_out_decl_state ();
2325 lto_record_function_out_decl_state (node->decl, decl_state);
2326 }
2327 }
2328 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2329 {
2330 /* Wrap symbol references inside the ctor in a type
2331 preserving MEM_REF. */
2332 tree ctor = DECL_INITIAL (node->decl);
2333 if (ctor && !in_lto_p)
2334 walk_tree (&ctor, wrap_refs, NULL, NULL);
2335 if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2336 && lto_symtab_encoder_encode_initializer_p (encoder, node)
2337 && !node->alias)
2338 {
2339 timevar_push (TV_IPA_LTO_CTORS_OUT);
2340 #ifdef ENABLE_CHECKING
2341 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2342 bitmap_set_bit (output, DECL_UID (node->decl));
2343 #endif
2344 decl_state = lto_new_out_decl_state ();
2345 lto_push_out_decl_state (decl_state);
2346 if (DECL_INITIAL (node->decl) != error_mark_node
2347 || !flag_wpa)
2348 output_constructor (node);
2349 else
2350 copy_function_or_variable (node);
2351 gcc_assert (lto_get_out_decl_state () == decl_state);
2352 lto_pop_out_decl_state ();
2353 lto_record_function_out_decl_state (node->decl, decl_state);
2354 timevar_pop (TV_IPA_LTO_CTORS_OUT);
2355 }
2356 }
2357 }
2358
2359 /* Emit the callgraph after emitting function bodies. This needs to
2360 be done now to make sure that all the statements in every function
2361 have been renumbered so that edges can be associated with call
2362 statements using the statement UIDs. */
2363 output_symtab ();
2364
2365 output_offload_tables ();
2366
2367 #ifdef ENABLE_CHECKING
2368 lto_bitmap_free (output);
2369 #endif
2370 }
2371
2372 /* Write each node encoded by ENCODER to OB, as well as those reachable
2373 from it and required for correct representation of its semantics.
2374 Each node in ENCODER must be a global declaration or a type. A node
2375 is written only once, even if it appears multiple times in the
2376 vector. Certain transitively-reachable nodes, such as those
2377 representing expressions, may be duplicated, but such nodes
2378 must not appear in ENCODER itself. */
2379
2380 static void
2381 write_global_stream (struct output_block *ob,
2382 struct lto_tree_ref_encoder *encoder)
2383 {
2384 tree t;
2385 size_t index;
2386 const size_t size = lto_tree_ref_encoder_size (encoder);
2387
2388 for (index = 0; index < size; index++)
2389 {
2390 t = lto_tree_ref_encoder_get_tree (encoder, index);
2391 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2392 stream_write_tree (ob, t, false);
2393 }
2394 }
2395
2396
2397 /* Write a sequence of indices into the globals vector corresponding
2398 to the trees in ENCODER. These are used by the reader to map the
2399 indices used to refer to global entities within function bodies to
2400 their referents. */
2401
2402 static void
2403 write_global_references (struct output_block *ob,
2404 struct lto_tree_ref_encoder *encoder)
2405 {
2406 tree t;
2407 uint32_t index;
2408 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2409
2410 /* Write size and slot indexes as 32-bit unsigned numbers. */
2411 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2412 data[0] = size;
2413
2414 for (index = 0; index < size; index++)
2415 {
2416 uint32_t slot_num;
2417
2418 t = lto_tree_ref_encoder_get_tree (encoder, index);
2419 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2420 gcc_assert (slot_num != (unsigned)-1);
2421 data[index + 1] = slot_num;
2422 }
2423
2424 lto_write_data (data, sizeof (int32_t) * (size + 1));
2425 free (data);
2426 }
2427
2428
2429 /* Write all the streams in an lto_out_decl_state STATE using
2430 output block OB. */
2431
2432 void
2433 lto_output_decl_state_streams (struct output_block *ob,
2434 struct lto_out_decl_state *state)
2435 {
2436 int i;
2437
2438 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2439 write_global_stream (ob, &state->streams[i]);
2440 }
2441
2442
2443 /* Write all the references in an lto_out_decl_state STATE using
2444 output block OB. */
2445
2446 void
2447 lto_output_decl_state_refs (struct output_block *ob,
2448 struct lto_out_decl_state *state)
2449 {
2450 unsigned i;
2451 uint32_t ref;
2452 tree decl;
2453
2454 /* Write a reference to FUNCTION_DECL.  If there is no function,
2455 write a reference to void_type_node. */
2456 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2457 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2458 gcc_assert (ref != (unsigned)-1);
2459 lto_write_data (&ref, sizeof (uint32_t));
2460
2461 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2462 write_global_references (ob, &state->streams[i]);
2463 }
2464
2465
2466 /* Return the written size of STATE. */
2467
2468 static size_t
2469 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2470 {
2471 int i;
2472 size_t size;
2473
2474 size = sizeof (int32_t); /* fn_ref. */
2475 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2476 {
2477 size += sizeof (int32_t); /* vector size. */
2478 size += (lto_tree_ref_encoder_size (&state->streams[i])
2479 * sizeof (int32_t));
2480 }
2481 return size;
2482 }
2483
2484
2485 /* Write symbol T to the current LTO section, using CACHE to find its slot.
2486 SEEN holds the names written so far; ALIAS is true if T is an alias. */
2487
2488 static void
2489 write_symbol (struct streamer_tree_cache_d *cache,
2490 tree t, hash_set<const char *> *seen, bool alias)
2491 {
2492 const char *name;
2493 enum gcc_plugin_symbol_kind kind;
2494 enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
2495 unsigned slot_num;
2496 uint64_t size;
2497 const char *comdat;
2498 unsigned char c;
2499
2500 /* None of the following kinds of symbols are needed in the
2501 symbol table. */
2502 if (!TREE_PUBLIC (t)
2503 || is_builtin_fn (t)
2504 || DECL_ABSTRACT_P (t)
2505 || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
2506 return;
2507 gcc_assert (TREE_CODE (t) != RESULT_DECL);
2508
2509 gcc_assert (TREE_CODE (t) == VAR_DECL
2510 || TREE_CODE (t) == FUNCTION_DECL);
2511
2512 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2513
2514 /* This behaves like assemble_name_raw in varasm.c, performing the
2515 same name manipulations that ASM_OUTPUT_LABELREF does. */
2516 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2517
2518 if (seen->add (name))
2519 return;
2520
2521 streamer_tree_cache_lookup (cache, t, &slot_num);
2522 gcc_assert (slot_num != (unsigned)-1);
2523
2524 if (DECL_EXTERNAL (t))
2525 {
2526 if (DECL_WEAK (t))
2527 kind = GCCPK_WEAKUNDEF;
2528 else
2529 kind = GCCPK_UNDEF;
2530 }
2531 else
2532 {
2533 if (DECL_WEAK (t))
2534 kind = GCCPK_WEAKDEF;
2535 else if (DECL_COMMON (t))
2536 kind = GCCPK_COMMON;
2537 else
2538 kind = GCCPK_DEF;
2539
2540 /* When something is defined, it should have a node attached. */
2541 gcc_assert (alias || TREE_CODE (t) != VAR_DECL
2542 || varpool_node::get (t)->definition);
2543 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2544 || (cgraph_node::get (t)
2545 && cgraph_node::get (t)->definition));
2546 }
2547
2548 /* Imitate what default_elf_asm_output_external does.
2549 When a symbol is external, we need to output it with DEFAULT visibility
2550 when compiling with -fvisibility=default, but with HIDDEN visibility
2551 when the symbol has attribute ((visibility ("hidden"))) specified.
2552 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2553 right. */
2554
2555 if (DECL_EXTERNAL (t)
2556 && !targetm.binds_local_p (t))
2557 visibility = GCCPV_DEFAULT;
2558 else
2559 switch (DECL_VISIBILITY (t))
2560 {
2561 case VISIBILITY_DEFAULT:
2562 visibility = GCCPV_DEFAULT;
2563 break;
2564 case VISIBILITY_PROTECTED:
2565 visibility = GCCPV_PROTECTED;
2566 break;
2567 case VISIBILITY_HIDDEN:
2568 visibility = GCCPV_HIDDEN;
2569 break;
2570 case VISIBILITY_INTERNAL:
2571 visibility = GCCPV_INTERNAL;
2572 break;
2573 }
2574
2575 if (kind == GCCPK_COMMON
2576 && DECL_SIZE_UNIT (t)
2577 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2578 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2579 else
2580 size = 0;
2581
2582 if (DECL_ONE_ONLY (t))
2583 comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
2584 else
2585 comdat = "";
2586
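/* The plugin symbol table entry format: the NUL-terminated assembler name,
   the NUL-terminated comdat group, one byte each for the symbol kind and
   visibility, 8 bytes of size and 4 bytes of slot number. */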
2587 lto_write_data (name, strlen (name) + 1);
2588 lto_write_data (comdat, strlen (comdat) + 1);
2589 c = (unsigned char) kind;
2590 lto_write_data (&c, 1);
2591 c = (unsigned char) visibility;
2592 lto_write_data (&c, 1);
2593 lto_write_data (&size, 8);
2594 lto_write_data (&slot_num, 4);
2595 }
2596
2597 /* Return true if NODE should appear in the plugin symbol table. */
2598
2599 bool
2600 output_symbol_p (symtab_node *node)
2601 {
2602 struct cgraph_node *cnode;
2603 if (!node->real_symbol_p ())
2604 return false;
2605 /* We keep external functions in the symtab for the sake of inlining
2606 and devirtualization.  We do not want to see them in the symbol table
2607 as references unless they are really used. */
2608 cnode = dyn_cast <cgraph_node *> (node);
2609 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2610 && cnode->callers)
2611 return true;
2612
2613 /* Ignore all references from the initializers of external variables - they
2614 are not really part of the compilation unit until they are used by folding.
2615 Some symbols, like references to external construction vtables, cannot be
2616 referred to at all.  We decide this in can_refer_decl_in_current_unit_p. */
2617 if (!node->definition || DECL_EXTERNAL (node->decl))
2618 {
2619 int i;
2620 struct ipa_ref *ref;
2621 for (i = 0; node->iterate_referring (i, ref); i++)
2622 {
2623 if (ref->use == IPA_REF_ALIAS)
2624 continue;
2625 if (is_a <cgraph_node *> (ref->referring))
2626 return true;
2627 if (!DECL_EXTERNAL (ref->referring->decl))
2628 return true;
2629 }
2630 return false;
2631 }
2632 return true;
2633 }
2634
2635
2636 /* Write an IL symbol table to OB, covering the symbols encoded in
2637 OB's decl state symtab node encoder. */
2638
2639 static void
2640 produce_symtab (struct output_block *ob)
2641 {
2642 struct streamer_tree_cache_d *cache = ob->writer_cache;
2643 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2644 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2645 lto_symtab_encoder_iterator lsei;
2646
2647 lto_begin_section (section_name, false);
2648 free (section_name);
2649
2650 hash_set<const char *> seen;
2651
2652 /* Write the symbol table.
2653 First write everything defined and then all declarations.
2654 This is necessary to handle cases where we have duplicated symbols. */
2655 for (lsei = lsei_start (encoder);
2656 !lsei_end_p (lsei); lsei_next (&lsei))
2657 {
2658 symtab_node *node = lsei_node (lsei);
2659
2660 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2661 continue;
2662 write_symbol (cache, node->decl, &seen, false);
2663 }
2664 for (lsei = lsei_start (encoder);
2665 !lsei_end_p (lsei); lsei_next (&lsei))
2666 {
2667 symtab_node *node = lsei_node (lsei);
2668
2669 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2670 continue;
2671 write_symbol (cache, node->decl, &seen, false);
2672 }
2673
2674 lto_end_section ();
2675 }
2676
2677
2678 /* Initialize the streamer_mode_table for output; it collects which
2679 machine_mode values have been streamed. */
2680 void
2681 lto_output_init_mode_table (void)
2682 {
2683 memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
2684 }
2685
2686
2687 /* Write the mode table. */
2688 static void
2689 lto_write_mode_table (void)
2690 {
2691 struct output_block *ob;
2692 ob = create_output_block (LTO_section_mode_table);
2693 bitpack_d bp = bitpack_create (ob->main_stream);
2694
2695 /* Ensure that for every mode with GET_MODE_INNER (m) != VOIDmode the
2696 inner mode is marked as well. */
2697 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2698 if (streamer_mode_table[i])
2699 {
2700 machine_mode m = (machine_mode) i;
2701 if (GET_MODE_INNER (m) != VOIDmode)
2702 streamer_mode_table[(int) GET_MODE_INNER (m)] = 1;
2703 }
2704 /* First stream modes that have GET_MODE_INNER (m) == VOIDmode,
2705 so that we can refer to them afterwards. */
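/* Each streamed mode records its value, class, size, precision, inner mode
   and nunits, plus the IBIT/FBIT values for fixed-point modes or the real
   format name for float modes, followed by the mode name; a VOIDmode value
   terminates the table. */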
2706 for (int pass = 0; pass < 2; pass++)
2707 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2708 if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
2709 {
2710 machine_mode m = (machine_mode) i;
2711 if ((GET_MODE_INNER (m) == VOIDmode) ^ (pass == 0))
2712 continue;
2713 bp_pack_value (&bp, m, 8);
2714 bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
2715 bp_pack_value (&bp, GET_MODE_SIZE (m), 8);
2716 bp_pack_value (&bp, GET_MODE_PRECISION (m), 16);
2717 bp_pack_value (&bp, GET_MODE_INNER (m), 8);
2718 bp_pack_value (&bp, GET_MODE_NUNITS (m), 8);
2719 switch (GET_MODE_CLASS (m))
2720 {
2721 case MODE_FRACT:
2722 case MODE_UFRACT:
2723 case MODE_ACCUM:
2724 case MODE_UACCUM:
2725 bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
2726 bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
2727 break;
2728 case MODE_FLOAT:
2729 case MODE_DECIMAL_FLOAT:
2730 bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
2731 break;
2732 default:
2733 break;
2734 }
2735 bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
2736 }
2737 bp_pack_value (&bp, VOIDmode, 8);
2738
2739 streamer_write_bitpack (&bp);
2740
2741 char *section_name
2742 = lto_get_section_name (LTO_section_mode_table, NULL, NULL);
2743 lto_begin_section (section_name, !flag_wpa);
2744 free (section_name);
2745
2746 /* The entire header stream is computed here. */
2747 struct lto_simple_header_with_strings header;
2748 memset (&header, 0, sizeof (header));
2749
2750 /* Write the header. */
2751 header.major_version = LTO_major_version;
2752 header.minor_version = LTO_minor_version;
2753
2754 header.main_size = ob->main_stream->total_size;
2755 header.string_size = ob->string_stream->total_size;
2756 lto_write_data (&header, sizeof header);
2757
2758 /* Put all of the gimple and the string table out to the asm file as a
2759 block of text. */
2760 lto_write_stream (ob->main_stream);
2761 lto_write_stream (ob->string_stream);
2762
2763 lto_end_section ();
2764 destroy_output_block (ob);
2765 }
2766
2767
2768 /* This pass is run after all of the functions are serialized and all
2769 of the IPA passes have written their serialized forms.  This pass
2770 causes the vector of all of the global decls and types used from
2771 this file to be written into a section that can then be read back
2772 in to recover these on the other side. */
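/* The decls section consists of an lto_decl_header, the number of decl
   states, the reference tables for the global decl state and for each
   per-function state, and finally the main and string streams holding the
   pickled global trees. */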
2773
2774 void
2775 produce_asm_for_decls (void)
2776 {
2777 struct lto_out_decl_state *out_state;
2778 struct lto_out_decl_state *fn_out_state;
2779 struct lto_decl_header header;
2780 char *section_name;
2781 struct output_block *ob;
2782 unsigned idx, num_fns;
2783 size_t decl_state_size;
2784 int32_t num_decl_states;
2785
2786 ob = create_output_block (LTO_section_decls);
2787
2788 memset (&header, 0, sizeof (struct lto_decl_header));
2789
2790 section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2791 lto_begin_section (section_name, !flag_wpa);
2792 free (section_name);
2793
2794 /* Make string 0 be a NULL string. */
2795 streamer_write_char_stream (ob->string_stream, 0);
2796
2797 gcc_assert (!alias_pairs);
2798
2799 /* Get rid of the global decl state hash tables to save some memory. */
2800 out_state = lto_get_out_decl_state ();
2801 for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
2802 if (out_state->streams[i].tree_hash_table)
2803 {
2804 delete out_state->streams[i].tree_hash_table;
2805 out_state->streams[i].tree_hash_table = NULL;
2806 }
2807
2808 /* Write the global symbols. */
2809 lto_output_decl_state_streams (ob, out_state);
2810 num_fns = lto_function_decl_states.length ();
2811 for (idx = 0; idx < num_fns; idx++)
2812 {
2813 fn_out_state =
2814 lto_function_decl_states[idx];
2815 lto_output_decl_state_streams (ob, fn_out_state);
2816 }
2817
2818 header.major_version = LTO_major_version;
2819 header.minor_version = LTO_minor_version;
2820
2821 /* Currently not used. This field would allow us to preallocate
2822 the globals vector, so that it need not be resized as it is extended. */
2823 header.num_nodes = -1;
2824
2825 /* Compute the total size of all decl out states. */
2826 decl_state_size = sizeof (int32_t);
2827 decl_state_size += lto_out_decl_state_written_size (out_state);
2828 for (idx = 0; idx < num_fns; idx++)
2829 {
2830 fn_out_state =
2831 lto_function_decl_states[idx];
2832 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2833 }
2834 header.decl_state_size = decl_state_size;
2835
2836 header.main_size = ob->main_stream->total_size;
2837 header.string_size = ob->string_stream->total_size;
2838
2839 lto_write_data (&header, sizeof header);
2840
2841 /* Write the main out-decl state, followed by out-decl states of
2842 functions. */
2843 num_decl_states = num_fns + 1;
2844 lto_write_data (&num_decl_states, sizeof (num_decl_states));
2845 lto_output_decl_state_refs (ob, out_state);
2846 for (idx = 0; idx < num_fns; idx++)
2847 {
2848 fn_out_state = lto_function_decl_states[idx];
2849 lto_output_decl_state_refs (ob, fn_out_state);
2850 }
2851
2852 lto_write_stream (ob->main_stream);
2853 lto_write_stream (ob->string_stream);
2854
2855 lto_end_section ();
2856
2857 /* Write the symbol table.  It is used by the linker to determine
2858 dependencies, and thus we can skip it for WPA. */
2859 if (!flag_wpa)
2860 produce_symtab (ob);
2861
2862 /* Write command line opts. */
2863 lto_write_options ();
2864
2865 /* Deallocate memory and clean up. */
2866 for (idx = 0; idx < num_fns; idx++)
2867 {
2868 fn_out_state =
2869 lto_function_decl_states[idx];
2870 lto_delete_out_decl_state (fn_out_state);
2871 }
2872 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2873 lto_function_decl_states.release ();
2874 destroy_output_block (ob);
2875 if (lto_stream_offload_p)
2876 lto_write_mode_table ();
2877 }