]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/lto-streamer-out.c
re PR target/79671 (mapnik miscompilation on armv7hl since r235622)
[thirdparty/gcc.git] / gcc / lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2017 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-streamer.h"
34 #include "alias.h"
35 #include "stor-layout.h"
36 #include "gimple-iterator.h"
37 #include "except.h"
38 #include "lto-symtab.h"
39 #include "cgraph.h"
40 #include "cfgloop.h"
41 #include "builtins.h"
42 #include "gomp-constants.h"
43
44
45 static void lto_write_tree (struct output_block*, tree, bool);
46
47 /* Clear the line info stored in DATA_IN. */
48
49 static void
50 clear_line_info (struct output_block *ob)
51 {
52 ob->current_file = NULL;
53 ob->current_line = 0;
54 ob->current_col = 0;
55 ob->current_sysp = false;
56 }
57
58
59 /* Create the output block and return it. SECTION_TYPE is
60 LTO_section_function_body or LTO_static_initializer. */
61
62 struct output_block *
63 create_output_block (enum lto_section_type section_type)
64 {
65 struct output_block *ob = XCNEW (struct output_block);
66
67 ob->section_type = section_type;
68 ob->decl_state = lto_get_out_decl_state ();
69 ob->main_stream = XCNEW (struct lto_output_stream);
70 ob->string_stream = XCNEW (struct lto_output_stream);
71 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
72
73 if (section_type == LTO_section_function_body)
74 ob->cfg_stream = XCNEW (struct lto_output_stream);
75
76 clear_line_info (ob);
77
78 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
79 gcc_obstack_init (&ob->obstack);
80
81 return ob;
82 }
83
84
85 /* Destroy the output block OB. */
86
87 void
88 destroy_output_block (struct output_block *ob)
89 {
90 enum lto_section_type section_type = ob->section_type;
91
92 delete ob->string_hash_table;
93 ob->string_hash_table = NULL;
94
95 free (ob->main_stream);
96 free (ob->string_stream);
97 if (section_type == LTO_section_function_body)
98 free (ob->cfg_stream);
99
100 streamer_tree_cache_delete (ob->writer_cache);
101 obstack_free (&ob->obstack, NULL);
102
103 free (ob);
104 }
105
106
/* Look up NODE in the type table and write the index for it to OB:
   an LTO_type_ref record tag followed by NODE's index in OB's
   decl-state type table.  */

static void
output_type_ref (struct output_block *ob, tree node)
{
  streamer_write_record_start (ob, LTO_type_ref);
  lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
}
115
116
/* Return true if tree node T is written to various tables.  For these
   nodes, we sometimes want to write their physical representation
   (via lto_output_tree), and sometimes we need to emit an index
   reference into a table (via lto_output_tree_ref).  The order of the
   checks below matters: earlier, more specific cases override the
   generic TYPE_P/DECL_P/SSA_NAME fallback at the end.  */

static bool
tree_is_indexable (tree t)
{
  /* Parameters and return values of functions of variably modified types
     must go to global stream, because they may be used in the type
     definition.  */
  if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
      && DECL_CONTEXT (t))
    return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
  /* IMPORTED_DECL is put into BLOCK and thus it never can be shared.  */
  else if (TREE_CODE (t) == IMPORTED_DECL)
    return false;
  /* Non-static local variables and type/const/namelist decls with a
     function context are streamed with the function body rather than
     indexed.  */
  else if (((VAR_P (t) && !TREE_STATIC (t))
	    || TREE_CODE (t) == TYPE_DECL
	    || TREE_CODE (t) == CONST_DECL
	    || TREE_CODE (t) == NAMELIST_DECL)
	   && decl_function_context (t))
    return false;
  else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
    return false;
  /* Variably modified types need to be streamed alongside function
     bodies because they can refer to local entities.  Together with
     them we have to localize their members as well.
     ??? In theory that includes non-FIELD_DECLs as well.  */
  else if (TYPE_P (t)
	   && variably_modified_type_p (t, NULL_TREE))
    return false;
  else if (TREE_CODE (t) == FIELD_DECL
	   && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
    return false;
  else
    /* Everything else: types, decls and SSA names are indexable.  */
    return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
}
155
156
/* Output info about new location into bitpack BP.
   After outputting bitpack, lto_output_location_data has
   to be done to output actual data.

   Locations are delta-encoded against the state cached in OB
   (current_file/current_line/current_col): one bit per field records
   whether it changed, and only changed fields are streamed.  */

void
lto_output_location (struct output_block *ob, struct bitpack_d *bp,
		     location_t loc)
{
  expanded_location xloc;

  loc = LOCATION_LOCUS (loc);
  /* Reserved locations (values below RESERVED_LOCATION_COUNT) are
     streamed as their own small code; anything else gets the
     RESERVED_LOCATION_COUNT marker followed by the expanded data.  */
  bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
			loc < RESERVED_LOCATION_COUNT
			? loc : RESERVED_LOCATION_COUNT);
  if (loc < RESERVED_LOCATION_COUNT)
    return;

  xloc = expand_location (loc);

  /* One "changed" bit per field, in a fixed order the reader relies
     on.  */
  bp_pack_value (bp, ob->current_file != xloc.file, 1);
  bp_pack_value (bp, ob->current_line != xloc.line, 1);
  bp_pack_value (bp, ob->current_col != xloc.column, 1);

  if (ob->current_file != xloc.file)
    {
      bp_pack_string (ob, bp, xloc.file, true);
      /* The system-header flag is only streamed together with a new
	 file name.  */
      bp_pack_value (bp, xloc.sysp, 1);
    }
  ob->current_file = xloc.file;
  ob->current_sysp = xloc.sysp;

  if (ob->current_line != xloc.line)
    bp_pack_var_len_unsigned (bp, xloc.line);
  ob->current_line = xloc.line;

  if (ob->current_col != xloc.column)
    bp_pack_var_len_unsigned (bp, xloc.column);
  ob->current_col = xloc.column;
}
196
197
/* If EXPR is an indexable tree node, output a reference to it to
   output block OB.  Otherwise, output the physical representation of
   EXPR to OB.

   Each case writes a record tag identifying the kind of reference,
   followed by EXPR's index in the matching table of OB's decl state.  */

static void
lto_output_tree_ref (struct output_block *ob, tree expr)
{
  enum tree_code code;

  /* All types go through the type table.  */
  if (TYPE_P (expr))
    {
      output_type_ref (ob, expr);
      return;
    }

  code = TREE_CODE (expr);
  switch (code)
    {
    case SSA_NAME:
      /* SSA names are referenced by version number only.  */
      streamer_write_record_start (ob, LTO_ssa_name_ref);
      streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
      break;

    case FIELD_DECL:
      streamer_write_record_start (ob, LTO_field_decl_ref);
      lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case FUNCTION_DECL:
      streamer_write_record_start (ob, LTO_function_decl_ref);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case VAR_DECL:
    case DEBUG_EXPR_DECL:
      /* Only globals (or function-scope statics) may be referenced;
	 other locals are streamed with the function body.  */
      gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
      /* FALLTHRU */
    case PARM_DECL:
      streamer_write_record_start (ob, LTO_global_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case CONST_DECL:
      streamer_write_record_start (ob, LTO_const_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case IMPORTED_DECL:
      gcc_assert (decl_function_context (expr) == NULL);
      streamer_write_record_start (ob, LTO_imported_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TYPE_DECL:
      streamer_write_record_start (ob, LTO_type_decl_ref);
      lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMELIST_DECL:
      streamer_write_record_start (ob, LTO_namelist_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMESPACE_DECL:
      streamer_write_record_start (ob, LTO_namespace_decl_ref);
      lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case LABEL_DECL:
      streamer_write_record_start (ob, LTO_label_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case RESULT_DECL:
      streamer_write_record_start (ob, LTO_result_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TRANSLATION_UNIT_DECL:
      streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    default:
      /* No other node is indexable, so it should have been handled by
	 lto_output_tree.  */
      gcc_unreachable ();
    }
}
287
288
289 /* Return true if EXPR is a tree node that can be written to disk. */
290
291 static inline bool
292 lto_is_streamable (tree expr)
293 {
294 enum tree_code code = TREE_CODE (expr);
295
296 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
297 name version in lto_output_tree_ref (see output_ssa_names). */
298 return !is_lang_specific (expr)
299 && code != SSA_NAME
300 && code != CALL_EXPR
301 && code != LANG_TYPE
302 && code != MODIFY_EXPR
303 && code != INIT_EXPR
304 && code != TARGET_EXPR
305 && code != BIND_EXPR
306 && code != WITH_CLEANUP_EXPR
307 && code != STATEMENT_LIST
308 && (code == CASE_LABEL_EXPR
309 || code == DECL_EXPR
310 || TREE_CODE_CLASS (code) != tcc_statement);
311 }
312
/* Very rough estimate of streaming size of the initializer.  If we ignored
   presence of strings, we could simply just count number of non-indexable
   tree nodes and number of references to indexable nodes.  Strings however
   may be very large and we do not want to dump them into the global stream.

   walk_tree callback: DATA points to a remaining byte budget which is
   decremented for each node; return the current node (stopping the
   walk) once the budget goes negative.  */

static tree
subtract_estimated_size (tree *tp, int *ws, void *data)
{
  long *sum = (long *)data;
  if (tree_is_indexable (*tp))
    {
      /* Indexable tree is one reference to global stream.
	 Guess it may be about 4 bytes.  */
      *sum -= 4;
      /* Do not walk into a subtree that is streamed by reference.  */
      *ws = 0;
    }
  /* String table entry + base of tree node needs to be streamed.
     NOTE(review): an indexable node also reaches the -16 branch below
     and is thus charged 20 bytes total; presumably a deliberate
     over-estimate, but worth confirming.  */
  if (TREE_CODE (*tp) == STRING_CST)
    *sum -= TREE_STRING_LENGTH (*tp) + 8;
  else
    {
      /* Identifiers are also variable length but should not appear
	 naked in constructor.  */
      gcc_checking_assert (TREE_CODE (*tp) != IDENTIFIER_NODE);
      /* We do not really make attempt to work out size of pickled tree, as
	 it is very variable.  Make it bigger than the reference.  */
      *sum -= 16;
    }
  if (*sum < 0)
    return *tp;
  return NULL_TREE;
}
347
348
/* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL.

   The real DECL_INITIAL is returned only for variables whose
   initializer the ENCODER wants and whose estimated streamed size is
   small; otherwise error_mark_node is returned in its place.  */

static tree
get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
{
  gcc_checking_assert (DECL_P (expr)
		       && TREE_CODE (expr) != FUNCTION_DECL
		       && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);

  /* Handle DECL_INITIAL for symbols.  */
  tree initial = DECL_INITIAL (expr);
  if (VAR_P (expr)
      && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
      && !DECL_IN_CONSTANT_POOL (expr)
      && initial)
    {
      varpool_node *vnode;
      /* Extra section needs about 30 bytes; do not produce it for simple
	 scalar values.  */
      if (!(vnode = varpool_node::get (expr))
	  || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
	initial = error_mark_node;
      if (initial != error_mark_node)
	{
	  long max_size = 30;
	  /* walk_tree returns non-NULL once the size budget is
	     exhausted; drop the initializer in that case.  */
	  if (walk_tree (&initial, subtract_estimated_size, (void *)&max_size,
			 NULL))
	    initial = error_mark_node;
	}
    }

  return initial;
}
382
383
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  The tree header for EXPR is assumed to
   have been written already (see lto_write_tree and the SCC writer).  */

static void
lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
{
  /* Pack all the non-pointer fields in EXPR into a bitpack and write
     the resulting bitpack.  */
  streamer_write_tree_bitfields (ob, expr);

  /* Write all the pointer fields in EXPR.  */
  streamer_write_tree_body (ob, expr, ref_p);

  /* Write any LTO-specific data to OB.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    {
      /* Handle DECL_INITIAL for symbols.  */
      tree initial = get_symbol_initial_value
			 (ob->decl_state->symtab_node_encoder, expr);
      stream_write_tree (ob, initial, ref_p);
    }
}
410
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  EXPR must pass lto_is_streamable;
   otherwise an internal error is raised.  */

static void
lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
{
  if (!lto_is_streamable (expr))
    internal_error ("tree code %qs is not supported in LTO streams",
		    get_tree_code_name (TREE_CODE (expr)));

  /* Write the header, containing everything needed to materialize
     EXPR on the reading side.  */
  streamer_write_tree_header (ob, expr);

  lto_write_tree_1 (ob, expr, ref_p);

  /* Mark the end of EXPR.  */
  streamer_write_zero (ob);
}
432
/* Emit the physical representation of tree node EXPR to output block OB,
   If THIS_REF_P is true, the leaves of EXPR are emitted as references via
   lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.
   HASH is the hash value to record for EXPR in the writer cache; EXPR
   must not already be present in the cache.  */

static void
lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
		   bool ref_p, bool this_ref_p)
{
  unsigned ix;

  gcc_checking_assert (expr != NULL_TREE
		       && !(this_ref_p && tree_is_indexable (expr)));

  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
					      expr, hash, &ix);
  gcc_assert (!exists_p);
  if (TREE_CODE (expr) == INTEGER_CST
      && !TREE_OVERFLOW (expr))
    {
      /* Shared INTEGER_CST nodes are special because they need their
	 original type to be materialized by the reader (to implement
	 TYPE_CACHED_VALUES).  */
      streamer_write_integer_cst (ob, expr, ref_p);
    }
  else
    {
      /* This is the first time we see EXPR, write its fields
	 to OB.  */
      lto_write_tree (ob, expr, ref_p);
    }
}
464
/* Worker class implementing the depth-first SCC walk used when
   streaming trees.  Constructing a DFS object performs the walk and
   streams completed SCCs; the SCC stack is exposed via SCCSTACK.  */

class DFS
{
public:
  DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
       bool single_p);
  ~DFS ();

  /* A tree together with the hash value computed for it during the
     walk.  */
  struct scc_entry
  {
    tree t;
    hashval_t hash;
  };
  /* Stack of trees belonging to the currently open SCCs.  */
  vec<scc_entry> sccstack;

private:
  /* Per-tree state of the SCC algorithm: DFS pre-order number and low
     link.  */
  struct sccs
  {
    unsigned int dfsnum;
    unsigned int low;
  };
  /* One pending edge of the explicit (non-recursive) DFS walk.  */
  struct worklist
  {
    tree expr;
    sccs *from_state;
    sccs *cstate;
    bool ref_p;
    bool this_ref_p;
  };

  static int scc_entry_compare (const void *, const void *);

  void DFS_write_tree_body (struct output_block *ob,
			    tree expr, sccs *expr_state, bool ref_p);

  void DFS_write_tree (struct output_block *ob, sccs *from_state,
		       tree expr, bool ref_p, bool this_ref_p);

  hashval_t
  hash_scc (struct output_block *ob, unsigned first, unsigned size,
	    bool ref_p, bool this_ref_p);

  /* Map from tree to its walk state.  */
  hash_map<tree, sccs *> sccstate;
  /* Explicit DFS walk stack.  */
  vec<worklist> worklist_vec;
  /* Arena all SCCS records are allocated from.  */
  struct obstack sccstate_obstack;
};
510
/* Emit the physical representation of tree node EXPR to output block OB,
   using depth-first search on the subgraph.  If THIS_REF_P is true, the
   leaves of EXPR are emitted as references via lto_output_tree_ref.
   REF_P is used for streaming siblings of EXPR.  If SINGLE_P is true,
   this is for a rewalk of a single leaf SCC.

   This is an iterative implementation of Tarjan's SCC algorithm over
   the tree graph: WORKLIST_VEC simulates the recursion stack and
   completed components are popped from SCCSTACK and streamed as
   LTO_tree_scc records.  */

DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
	  bool single_p)
{
  unsigned int next_dfs_num = 1;
  sccstack.create (0);
  gcc_obstack_init (&sccstate_obstack);
  worklist_vec = vNULL;
  DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
  while (!worklist_vec.is_empty ())
    {
      worklist &w = worklist_vec.last ();
      expr = w.expr;
      sccs *from_state = w.from_state;
      sccs *cstate = w.cstate;
      ref_p = w.ref_p;
      this_ref_p = w.this_ref_p;
      if (cstate == NULL)
	{
	  /* First visit of this worklist entry.  */
	  sccs **slot = &sccstate.get_or_insert (expr);
	  cstate = *slot;
	  if (cstate)
	    {
	      /* EXPR was already reached on another path; only update
		 the low link of the referrer.  */
	      gcc_checking_assert (from_state);
	      if (cstate->dfsnum < from_state->dfsnum)
		from_state->low = MIN (cstate->dfsnum, from_state->low);
	      worklist_vec.pop ();
	      continue;
	    }

	  scc_entry e = { expr, 0 };
	  /* Not yet visited.  DFS recurse and push it onto the stack.  */
	  *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
	  sccstack.safe_push (e);
	  cstate->dfsnum = next_dfs_num++;
	  cstate->low = cstate->dfsnum;
	  w.cstate = cstate;

	  if (TREE_CODE (expr) == INTEGER_CST
	      && !TREE_OVERFLOW (expr))
	    /* INTEGER_CSTs only need their type walked; they are
	       streamed via streamer_write_integer_cst.  */
	    DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
	  else
	    {
	      DFS_write_tree_body (ob, expr, cstate, ref_p);

	      /* Walk any LTO-specific edges.  */
	      if (DECL_P (expr)
		  && TREE_CODE (expr) != FUNCTION_DECL
		  && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
		{
		  /* Handle DECL_INITIAL for symbols.  */
		  tree initial
		    = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
						expr);
		  DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
		}
	    }
	  continue;
	}

      /* Second visit: all children of EXPR have been processed.
	 See if we found an SCC.  */
      if (cstate->low == cstate->dfsnum)
	{
	  unsigned first, size;
	  tree x;

	  /* If we are re-walking a single leaf SCC just pop it,
	     let earlier worklist item access the sccstack.  */
	  if (single_p)
	    {
	      worklist_vec.pop ();
	      continue;
	    }

	  /* Pop the SCC and compute its size.  */
	  first = sccstack.length ();
	  do
	    {
	      x = sccstack[--first].t;
	    }
	  while (x != expr);
	  size = sccstack.length () - first;

	  /* No need to compute hashes for LTRANS units, we don't perform
	     any merging there.  */
	  hashval_t scc_hash = 0;
	  unsigned scc_entry_len = 0;
	  if (!flag_wpa)
	    {
	      scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);

	      /* Put the entries with the least number of collisions first.  */
	      unsigned entry_start = 0;
	      scc_entry_len = size + 1;
	      for (unsigned i = 0; i < size;)
		{
		  unsigned from = i;
		  for (i = i + 1; i < size
		       && (sccstack[first + i].hash
			   == sccstack[first + from].hash); ++i)
		    ;
		  if (i - from < scc_entry_len)
		    {
		      scc_entry_len = i - from;
		      entry_start = from;
		    }
		}
	      for (unsigned i = 0; i < scc_entry_len; ++i)
		std::swap (sccstack[first + i],
			   sccstack[first + entry_start + i]);

	      /* We already sorted SCC deterministically in hash_scc.  */

	      /* Check that we have only one SCC.
		 Naturally we may have conflicts if hash function is not
		 strong enough.  Lets see how far this gets.  */
	      gcc_checking_assert (scc_entry_len == 1);
	    }

	  /* Write LTO_tree_scc.  */
	  streamer_write_record_start (ob, LTO_tree_scc);
	  streamer_write_uhwi (ob, size);
	  streamer_write_uhwi (ob, scc_hash);

	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
	     All INTEGER_CSTs need to be handled this way as we need
	     their type to materialize them.  Also builtins are handled
	     this way.
	     ??? We still wrap these in LTO_tree_scc so at the
	     input side we can properly identify the tree we want
	     to ultimately return.  */
	  if (size == 1)
	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
	  else
	    {
	      /* Write the size of the SCC entry candidates.  */
	      streamer_write_uhwi (ob, scc_entry_len);

	      /* Write all headers and populate the streamer cache.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  hashval_t hash = sccstack[first+i].hash;
		  tree t = sccstack[first+i].t;
		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
							      t, hash, NULL);
		  gcc_assert (!exists_p);

		  if (!lto_is_streamable (t))
		    internal_error ("tree code %qs is not supported "
				    "in LTO streams",
				    get_tree_code_name (TREE_CODE (t)));

		  /* Write the header, containing everything needed to
		     materialize EXPR on the reading side.  */
		  streamer_write_tree_header (ob, t);
		}

	      /* Write the bitpacks and tree references.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

		  /* Mark the end of the tree.  */
		  streamer_write_zero (ob);
		}
	    }

	  /* Finally truncate the vector.  */
	  sccstack.truncate (first);

	  /* Propagate our low link to the referrer and pop.  */
	  if (from_state)
	    from_state->low = MIN (from_state->low, cstate->low);
	  worklist_vec.pop ();
	  continue;
	}

      /* Not an SCC root: propagate low-link information upwards.  */
      gcc_checking_assert (from_state);
      from_state->low = MIN (from_state->low, cstate->low);
      if (cstate->dfsnum < from_state->dfsnum)
	from_state->low = MIN (cstate->dfsnum, from_state->low);
      worklist_vec.pop ();
    }
  worklist_vec.release ();
}
700
701 DFS::~DFS ()
702 {
703 sccstack.release ();
704 obstack_free (&sccstate_obstack, NULL);
705 }
706
/* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
   DFS recurse for all tree edges originating from it.  This must
   visit every pointer field that the streamer will later write, in
   the same structure-by-structure order.  */

void
DFS::DFS_write_tree_body (struct output_block *ob,
			  tree expr, sccs *expr_state, bool ref_p)
{
#define DFS_follow_tree_edge(DEST) \
  DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)

  enum tree_code code;

  code = TREE_CODE (expr);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (TREE_CODE (expr) != IDENTIFIER_NODE)
	DFS_follow_tree_edge (TREE_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
	DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      DFS_follow_tree_edge (TREE_REALPART (expr));
      DFS_follow_tree_edge (TREE_IMAGPART (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (expr)
	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
	  && anon_aggrname_p (DECL_NAME (expr)))
	;
      else
	DFS_follow_tree_edge (DECL_NAME (expr));
      DFS_follow_tree_edge (DECL_CONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      DFS_follow_tree_edge (DECL_SIZE (expr));
      DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));

      /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
	 special handling in LTO, it must be handled by streamer hooks.  */

      DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));

      /* Do not follow DECL_ABSTRACT_ORIGIN.  We cannot handle debug information
	 for early inlining so drop it on the floor instead of ICEing in
	 dwarf2out.c.
	 We however use DECL_ABSTRACT_ORIGIN == error_mark_node to mark
	 declarations which should be eliminated by decl merging.  Be sure none
	 leaks to this point.  */
      gcc_assert (DECL_ABSTRACT_ORIGIN (expr) != error_mark_node);

      if ((VAR_P (expr)
	   || TREE_CODE (expr) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (expr))
	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
      if (VAR_P (expr))
	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (TREE_CODE (expr) == TYPE_DECL)
	DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (expr))
	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
      DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
      DFS_follow_tree_edge (DECL_FCONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      DFS_follow_tree_edge (DECL_VINDEX (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      DFS_follow_tree_edge (TYPE_SIZE (expr));
      DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
      DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
      DFS_follow_tree_edge (TYPE_NAME (expr));
      /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
	 reconstructed during fixup.  */
      /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
	 during fixup.  */
      DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
      DFS_follow_tree_edge (TYPE_CONTEXT (expr));
      /* TYPE_CANONICAL is re-computed during type merging, so no need
	 to follow it here.  */
      DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	DFS_follow_tree_edge (TYPE_VALUES (expr));
      else if (TREE_CODE (expr) == ARRAY_TYPE)
	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
      else if (RECORD_OR_UNION_TYPE_P (expr))
	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
	  DFS_follow_tree_edge (t);
      else if (TREE_CODE (expr) == FUNCTION_TYPE
	       || TREE_CODE (expr) == METHOD_TYPE)
	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));

      if (!POINTER_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_MINVAL (expr));
      DFS_follow_tree_edge (TYPE_MAXVAL (expr));
      if (RECORD_OR_UNION_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_BINFO (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      DFS_follow_tree_edge (TREE_PURPOSE (expr));
      DFS_follow_tree_edge (TREE_VALUE (expr));
      DFS_follow_tree_edge (TREE_CHAIN (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    {
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
      DFS_follow_tree_edge (TREE_BLOCK (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    {
      for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
	if (VAR_OR_FUNCTION_DECL_P (t)
	    && DECL_EXTERNAL (t))
	  /* We have to stream externals in the block chain as
	     non-references.  See also
	     tree-streamer-out.c:streamer_write_chain.  */
	  DFS_write_tree (ob, expr_state, t, ref_p, false);
	else
	  DFS_follow_tree_edge (t);

      DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));

      /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
	 handle - those that represent inlined function scopes.
	 For the rest, drop them on the floor instead of ICEing
	 in dwarf2out.c, but keep the notion of whether the block
	 is an inlined block by referring to itself for the sake of
	 tree_nonartificial_location.  */
      if (inlined_function_outer_scope_p (expr))
	{
	  tree ultimate_origin = block_ultimate_origin (expr);
	  DFS_follow_tree_edge (ultimate_origin);
	}
      else if (BLOCK_ABSTRACT_ORIGIN (expr))
	DFS_follow_tree_edge (expr);
      /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
	 information for early inlined BLOCKs so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
	 streaming time.  */

      /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;

      /* Note that the number of BINFO slots has already been emitted in
	 EXPR's header (see streamer_write_tree_header) because this length
	 is needed to build the empty BINFO node on the reader side.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
	DFS_follow_tree_edge (t);
      DFS_follow_tree_edge (BINFO_OFFSET (expr));
      DFS_follow_tree_edge (BINFO_VTABLE (expr));
      DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));

      /* The number of BINFO_BASE_ACCESSES has already been emitted in
	 EXPR's bitfield section.  */
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
	DFS_follow_tree_edge (t);

      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
	{
	  DFS_follow_tree_edge (index);
	  DFS_follow_tree_edge (value);
	}
    }

  if (code == OMP_CLAUSE)
    {
      int i;
      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
	DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
      DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
    }

#undef DFS_follow_tree_edge
}
947
948 /* Return a hash value for the tree T.
949 CACHE holds hash values of trees outside current SCC. MAP, if non-NULL,
950 may hold hash values if trees inside current SCC. */
951
952 static hashval_t
953 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
954 {
955 inchash::hash hstate;
956
957 #define visit(SIBLING) \
958 do { \
959 unsigned ix; \
960 if (!SIBLING) \
961 hstate.add_int (0); \
962 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
963 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
964 else if (map) \
965 hstate.add_int (*map->get (SIBLING)); \
966 else \
967 hstate.add_int (1); \
968 } while (0)
969
970 /* Hash TS_BASE. */
971 enum tree_code code = TREE_CODE (t);
972 hstate.add_int (code);
973 if (!TYPE_P (t))
974 {
975 hstate.add_flag (TREE_SIDE_EFFECTS (t));
976 hstate.add_flag (TREE_CONSTANT (t));
977 hstate.add_flag (TREE_READONLY (t));
978 hstate.add_flag (TREE_PUBLIC (t));
979 }
980 hstate.add_flag (TREE_ADDRESSABLE (t));
981 hstate.add_flag (TREE_THIS_VOLATILE (t));
982 if (DECL_P (t))
983 hstate.add_flag (DECL_UNSIGNED (t));
984 else if (TYPE_P (t))
985 hstate.add_flag (TYPE_UNSIGNED (t));
986 if (TYPE_P (t))
987 hstate.add_flag (TYPE_ARTIFICIAL (t));
988 else
989 hstate.add_flag (TREE_NO_WARNING (t));
990 hstate.add_flag (TREE_NOTHROW (t));
991 hstate.add_flag (TREE_STATIC (t));
992 hstate.add_flag (TREE_PROTECTED (t));
993 hstate.add_flag (TREE_DEPRECATED (t));
994 if (code != TREE_BINFO)
995 hstate.add_flag (TREE_PRIVATE (t));
996 if (TYPE_P (t))
997 {
998 hstate.add_flag (AGGREGATE_TYPE_P (t)
999 ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
1000 hstate.add_flag (TYPE_ADDR_SPACE (t));
1001 }
1002 else if (code == SSA_NAME)
1003 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
1004 hstate.commit_flag ();
1005
1006 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1007 {
1008 int i;
1009 hstate.add_wide_int (TREE_INT_CST_NUNITS (t));
1010 hstate.add_wide_int (TREE_INT_CST_EXT_NUNITS (t));
1011 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1012 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
1013 }
1014
1015 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1016 {
1017 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
1018 hstate.add_flag (r.cl);
1019 hstate.add_flag (r.sign);
1020 hstate.add_flag (r.signalling);
1021 hstate.add_flag (r.canonical);
1022 hstate.commit_flag ();
1023 hstate.add_int (r.uexp);
1024 hstate.add (r.sig, sizeof (r.sig));
1025 }
1026
1027 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1028 {
1029 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
1030 hstate.add_int (f.mode);
1031 hstate.add_int (f.data.low);
1032 hstate.add_int (f.data.high);
1033 }
1034
1035 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1036 {
1037 hstate.add_wide_int (DECL_MODE (t));
1038 hstate.add_flag (DECL_NONLOCAL (t));
1039 hstate.add_flag (DECL_VIRTUAL_P (t));
1040 hstate.add_flag (DECL_IGNORED_P (t));
1041 hstate.add_flag (DECL_ABSTRACT_P (t));
1042 hstate.add_flag (DECL_ARTIFICIAL (t));
1043 hstate.add_flag (DECL_USER_ALIGN (t));
1044 hstate.add_flag (DECL_PRESERVE_P (t));
1045 hstate.add_flag (DECL_EXTERNAL (t));
1046 hstate.add_flag (DECL_GIMPLE_REG_P (t));
1047 hstate.commit_flag ();
1048 hstate.add_int (DECL_ALIGN (t));
1049 if (code == LABEL_DECL)
1050 {
1051 hstate.add_int (EH_LANDING_PAD_NR (t));
1052 hstate.add_int (LABEL_DECL_UID (t));
1053 }
1054 else if (code == FIELD_DECL)
1055 {
1056 hstate.add_flag (DECL_PACKED (t));
1057 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1058 hstate.add_int (DECL_OFFSET_ALIGN (t));
1059 }
1060 else if (code == VAR_DECL)
1061 {
1062 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1063 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1064 }
1065 if (code == RESULT_DECL
1066 || code == PARM_DECL
1067 || code == VAR_DECL)
1068 {
1069 hstate.add_flag (DECL_BY_REFERENCE (t));
1070 if (code == VAR_DECL
1071 || code == PARM_DECL)
1072 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1073 }
1074 hstate.commit_flag ();
1075 }
1076
1077 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1078 hstate.add_int (DECL_REGISTER (t));
1079
1080 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1081 {
1082 hstate.add_flag (DECL_COMMON (t));
1083 hstate.add_flag (DECL_DLLIMPORT_P (t));
1084 hstate.add_flag (DECL_WEAK (t));
1085 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1086 hstate.add_flag (DECL_COMDAT (t));
1087 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1088 hstate.add_int (DECL_VISIBILITY (t));
1089 if (code == VAR_DECL)
1090 {
1091 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1092 hstate.add_flag (DECL_HARD_REGISTER (t));
1093 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1094 }
1095 if (TREE_CODE (t) == FUNCTION_DECL)
1096 {
1097 hstate.add_flag (DECL_FINAL_P (t));
1098 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1099 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1100 }
1101 hstate.commit_flag ();
1102 }
1103
1104 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1105 {
1106 hstate.add_int (DECL_BUILT_IN_CLASS (t));
1107 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1108 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1109 hstate.add_flag (DECL_UNINLINABLE (t));
1110 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1111 hstate.add_flag (DECL_IS_NOVOPS (t));
1112 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1113 hstate.add_flag (DECL_IS_MALLOC (t));
1114 hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
1115 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1116 hstate.add_flag (DECL_STATIC_CHAIN (t));
1117 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1118 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1119 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1120 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1121 hstate.add_flag (DECL_PURE_P (t));
1122 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1123 hstate.commit_flag ();
1124 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1125 hstate.add_int (DECL_FUNCTION_CODE (t));
1126 }
1127
1128 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1129 {
1130 hstate.add_wide_int (TYPE_MODE (t));
1131 hstate.add_flag (TYPE_STRING_FLAG (t));
1132 /* TYPE_NO_FORCE_BLK is private to stor-layout and need
1133 no streaming. */
1134 hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
1135 hstate.add_flag (TYPE_PACKED (t));
1136 hstate.add_flag (TYPE_RESTRICT (t));
1137 hstate.add_flag (TYPE_USER_ALIGN (t));
1138 hstate.add_flag (TYPE_READONLY (t));
1139 if (RECORD_OR_UNION_TYPE_P (t))
1140 {
1141 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1142 hstate.add_flag (TYPE_FINAL_P (t));
1143 }
1144 else if (code == ARRAY_TYPE)
1145 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1146 if (AGGREGATE_TYPE_P (t))
1147 hstate.add_flag (TYPE_TYPELESS_STORAGE (t));
1148 hstate.commit_flag ();
1149 hstate.add_int (TYPE_PRECISION (t));
1150 hstate.add_int (TYPE_ALIGN (t));
1151 }
1152
1153 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1154 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1155 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1156
1157 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1158 /* We don't stream these when passing things to a different target. */
1159 && !lto_stream_offload_p)
1160 hstate.add_wide_int (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1161
1162 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1163 hstate.add_wide_int (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1164
1165 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1166 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1167
1168 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1169 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1170
1171 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1172 {
1173 if (code != IDENTIFIER_NODE)
1174 visit (TREE_TYPE (t));
1175 }
1176
1177 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1178 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
1179 visit (VECTOR_CST_ELT (t, i));
1180
1181 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1182 {
1183 visit (TREE_REALPART (t));
1184 visit (TREE_IMAGPART (t));
1185 }
1186
1187 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1188 {
1189 /* Drop names that were created for anonymous entities. */
1190 if (DECL_NAME (t)
1191 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1192 && anon_aggrname_p (DECL_NAME (t)))
1193 ;
1194 else
1195 visit (DECL_NAME (t));
1196 if (DECL_FILE_SCOPE_P (t))
1197 ;
1198 else
1199 visit (DECL_CONTEXT (t));
1200 }
1201
1202 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1203 {
1204 visit (DECL_SIZE (t));
1205 visit (DECL_SIZE_UNIT (t));
1206 visit (DECL_ATTRIBUTES (t));
1207 if ((code == VAR_DECL
1208 || code == PARM_DECL)
1209 && DECL_HAS_VALUE_EXPR_P (t))
1210 visit (DECL_VALUE_EXPR (t));
1211 if (code == VAR_DECL
1212 && DECL_HAS_DEBUG_EXPR_P (t))
1213 visit (DECL_DEBUG_EXPR (t));
1214 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
1215 be able to call get_symbol_initial_value. */
1216 }
1217
1218 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1219 {
1220 if (code == TYPE_DECL)
1221 visit (DECL_ORIGINAL_TYPE (t));
1222 }
1223
1224 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1225 {
1226 if (DECL_ASSEMBLER_NAME_SET_P (t))
1227 visit (DECL_ASSEMBLER_NAME (t));
1228 }
1229
1230 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1231 {
1232 visit (DECL_FIELD_OFFSET (t));
1233 visit (DECL_BIT_FIELD_TYPE (t));
1234 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1235 visit (DECL_FIELD_BIT_OFFSET (t));
1236 visit (DECL_FCONTEXT (t));
1237 }
1238
1239 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1240 {
1241 visit (DECL_VINDEX (t));
1242 visit (DECL_FUNCTION_PERSONALITY (t));
1243 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1244 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1245 }
1246
1247 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1248 {
1249 visit (TYPE_SIZE (t));
1250 visit (TYPE_SIZE_UNIT (t));
1251 visit (TYPE_ATTRIBUTES (t));
1252 visit (TYPE_NAME (t));
1253 visit (TYPE_MAIN_VARIANT (t));
1254 if (TYPE_FILE_SCOPE_P (t))
1255 ;
1256 else
1257 visit (TYPE_CONTEXT (t));
1258 visit (TYPE_STUB_DECL (t));
1259 }
1260
1261 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1262 {
1263 if (code == ENUMERAL_TYPE)
1264 visit (TYPE_VALUES (t));
1265 else if (code == ARRAY_TYPE)
1266 visit (TYPE_DOMAIN (t));
1267 else if (RECORD_OR_UNION_TYPE_P (t))
1268 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1269 visit (f);
1270 else if (code == FUNCTION_TYPE
1271 || code == METHOD_TYPE)
1272 visit (TYPE_ARG_TYPES (t));
1273 if (!POINTER_TYPE_P (t))
1274 visit (TYPE_MINVAL (t));
1275 visit (TYPE_MAXVAL (t));
1276 if (RECORD_OR_UNION_TYPE_P (t))
1277 visit (TYPE_BINFO (t));
1278 }
1279
1280 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1281 {
1282 visit (TREE_PURPOSE (t));
1283 visit (TREE_VALUE (t));
1284 visit (TREE_CHAIN (t));
1285 }
1286
1287 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1288 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1289 visit (TREE_VEC_ELT (t, i));
1290
1291 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1292 {
1293 hstate.add_wide_int (TREE_OPERAND_LENGTH (t));
1294 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1295 visit (TREE_OPERAND (t, i));
1296 }
1297
1298 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1299 {
1300 unsigned i;
1301 tree b;
1302 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1303 visit (b);
1304 visit (BINFO_OFFSET (t));
1305 visit (BINFO_VTABLE (t));
1306 visit (BINFO_VPTR_FIELD (t));
1307 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1308 visit (b);
1309 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1310 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1311 }
1312
1313 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1314 {
1315 unsigned i;
1316 tree index, value;
1317 hstate.add_wide_int (CONSTRUCTOR_NELTS (t));
1318 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1319 {
1320 visit (index);
1321 visit (value);
1322 }
1323 }
1324
1325 if (code == OMP_CLAUSE)
1326 {
1327 int i;
1328 HOST_WIDE_INT val;
1329
1330 hstate.add_wide_int (OMP_CLAUSE_CODE (t));
1331 switch (OMP_CLAUSE_CODE (t))
1332 {
1333 case OMP_CLAUSE_DEFAULT:
1334 val = OMP_CLAUSE_DEFAULT_KIND (t);
1335 break;
1336 case OMP_CLAUSE_SCHEDULE:
1337 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1338 break;
1339 case OMP_CLAUSE_DEPEND:
1340 val = OMP_CLAUSE_DEPEND_KIND (t);
1341 break;
1342 case OMP_CLAUSE_MAP:
1343 val = OMP_CLAUSE_MAP_KIND (t);
1344 break;
1345 case OMP_CLAUSE_PROC_BIND:
1346 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1347 break;
1348 case OMP_CLAUSE_REDUCTION:
1349 val = OMP_CLAUSE_REDUCTION_CODE (t);
1350 break;
1351 default:
1352 val = 0;
1353 break;
1354 }
1355 hstate.add_wide_int (val);
1356 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1357 visit (OMP_CLAUSE_OPERAND (t, i));
1358 visit (OMP_CLAUSE_CHAIN (t));
1359 }
1360
1361 return hstate.end ();
1362
1363 #undef visit
1364 }
1365
1366 /* Compare two SCC entries by their hash value for qsorting them. */
1367
1368 int
1369 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1370 {
1371 const scc_entry *p1 = (const scc_entry *) p1_;
1372 const scc_entry *p2 = (const scc_entry *) p2_;
1373 if (p1->hash < p2->hash)
1374 return -1;
1375 else if (p1->hash > p2->hash)
1376 return 1;
1377 return 0;
1378 }
1379
/* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
   THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST.
   As a side effect the hash stored in each sccstack entry is made unique
   within the SCC (when possible) and mixed with the whole-SCC hash.  */

hashval_t
DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
	       bool ref_p, bool this_ref_p)
{
  unsigned int last_classes = 0, iterations = 0;

  /* Compute hash values for the SCC members.  The NULL map argument means
     hash_tree hashes references to SCC-internal neighbors as a constant
     placeholder on this first pass.  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash
      = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);

  if (size == 1)
    return sccstack[first].hash;

  /* We aim to get unique hash for every tree within SCC and compute hash value
     of the whole SCC by combining all values together in a stable (entry-point
     independent) order.  This guarantees that the same SCC regions within
     different translation units will get the same hash values and therefore
     will be merged at WPA time.

     Often the hashes are already unique.  In that case we compute the SCC hash
     by combining individual hash values in an increasing order.

     If there are duplicates, we seek at least one tree with unique hash (and
     pick one with minimal hash and this property).  Then we obtain a stable
     order by DFS walk starting from this unique tree and then use the index
     within this order to make individual hash values unique.

     If there is no tree with unique hash, we iteratively propagate the hash
     values across the internal edges of SCC.  This usually quickly leads
     to unique hashes.  Consider, for example, an SCC containing two pointers
     that are identical except for the types they point to and assume that
     these types are also part of the SCC.  The propagation will add the
     points-to type information into their hash values.  */
  do
    {
      /* Sort the SCC so we can easily check for uniqueness: after sorting,
	 equal hashes are adjacent.  */
      qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);

      unsigned int classes = 1;
      int firstunique = -1;

      /* Find the tree with lowest unique hash (if it exists) and compute
	 the number of equivalence classes.  */
      if (sccstack[first].hash != sccstack[first+1].hash)
	firstunique = 0;
      for (unsigned i = 1; i < size; ++i)
	if (sccstack[first+i-1].hash != sccstack[first+i].hash)
	  {
	    classes++;
	    /* Entry I starts a new class; it is unique if the next entry
	       (when it exists) has a different hash as well.  */
	    if (firstunique == -1
		&& (i == size - 1
		    || sccstack[first+i+1].hash != sccstack[first+i].hash))
	      firstunique = i;
	  }

      /* If we found a tree with unique hash, stop the iteration.  */
      if (firstunique != -1
	  /* Also terminate if we run out of iterations or if the number of
	     equivalence classes is no longer increasing.
	     For example a cyclic list of trees that are all equivalent will
	     never have unique entry point; we however do not build such SCCs
	     in our IL.  */
	  || classes <= last_classes || iterations > 16)
	{
	  hashval_t scc_hash;

	  /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
	     starting from FIRSTUNIQUE to obtain a stable order.  */
	  if (classes != size && firstunique != -1)
	    {
	      hash_map <tree, hashval_t> map(size*2);

	      /* Store hash values into a map, so we can associate them with
		 the reordered SCC.  */
	      for (unsigned i = 0; i < size; ++i)
		map.put (sccstack[first+i].t, sccstack[first+i].hash);

	      /* Re-walk the SCC from the unique entry point; the single_p
		 argument (true) makes the walk stay within this SCC.  */
	      DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
			 true);
	      gcc_assert (again.sccstack.length () == size);

	      /* Replace our SCC slice with the entries in DFS order.  */
	      memcpy (sccstack.address () + first,
		      again.sccstack.address (),
		      sizeof (scc_entry) * size);

	      /* Update hash values of individual members by hashing in the
		 index within the stable order.  This ensures uniqueness.
		 Also compute the SCC hash by mixing in all hash values in
		 the stable order we obtained.  */
	      sccstack[first].hash = *map.get (sccstack[first].t);
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		{
		  sccstack[first+i].hash
		    = iterative_hash_hashval_t (i,
						*map.get (sccstack[first+i].t));
		  scc_hash
		    = iterative_hash_hashval_t (scc_hash,
						sccstack[first+i].hash);
		}
	    }
	  /* If we got a unique hash value for each tree, then sort already
	     ensured entry-point independent order.  Only compute the final
	     SCC hash.

	     If we failed to find the unique entry point, we go by the same
	     route.  We will eventually introduce unwanted hash conflicts.  */
	  else
	    {
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		scc_hash
		  = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);

	      /* We cannot 100% guarantee that the hash won't conflict so as
		 to make it impossible to find a unique hash.  This however
		 should be an extremely rare case.  ICE for now so possible
		 issues are found and evaluated.  */
	      gcc_checking_assert (classes == size);
	    }

	  /* To avoid conflicts across SCCs, iteratively hash the whole SCC
	     hash into the hash of each element.  */
	  for (unsigned i = 0; i < size; ++i)
	    sccstack[first+i].hash
	      = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
	  return scc_hash;
	}

      last_classes = classes;
      iterations++;

      /* We failed to identify the entry point; propagate hash values across
	 the edges.  Passing MAP to hash_tree makes it mix in the current
	 hashes of SCC-internal neighbors instead of a placeholder.  */
      hash_map <tree, hashval_t> map(size*2);

      for (unsigned i = 0; i < size; ++i)
	map.put (sccstack[first+i].t, sccstack[first+i].hash);

      for (unsigned i = 0; i < size; i++)
	sccstack[first+i].hash
	  = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
    }
  while (true);
}
1529
1530 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1531 already in the streamer cache. Main routine called for
1532 each visit of EXPR. */
1533
1534 void
1535 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1536 tree expr, bool ref_p, bool this_ref_p)
1537 {
1538 /* Handle special cases. */
1539 if (expr == NULL_TREE)
1540 return;
1541
1542 /* Do not DFS walk into indexable trees. */
1543 if (this_ref_p && tree_is_indexable (expr))
1544 return;
1545
1546 /* Check if we already streamed EXPR. */
1547 if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1548 return;
1549
1550 worklist w;
1551 w.expr = expr;
1552 w.from_state = from_state;
1553 w.cstate = NULL;
1554 w.ref_p = ref_p;
1555 w.this_ref_p = this_ref_p;
1556 worklist_vec.safe_push (w);
1557 }
1558
1559
1560 /* Emit the physical representation of tree node EXPR to output block OB.
1561 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
1562 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1563
1564 void
1565 lto_output_tree (struct output_block *ob, tree expr,
1566 bool ref_p, bool this_ref_p)
1567 {
1568 unsigned ix;
1569 bool existed_p;
1570
1571 if (expr == NULL_TREE)
1572 {
1573 streamer_write_record_start (ob, LTO_null);
1574 return;
1575 }
1576
1577 if (this_ref_p && tree_is_indexable (expr))
1578 {
1579 lto_output_tree_ref (ob, expr);
1580 return;
1581 }
1582
1583 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1584 if (existed_p)
1585 {
1586 /* If a node has already been streamed out, make sure that
1587 we don't write it more than once. Otherwise, the reader
1588 will instantiate two different nodes for the same object. */
1589 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1590 streamer_write_uhwi (ob, ix);
1591 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1592 lto_tree_code_to_tag (TREE_CODE (expr)));
1593 lto_stats.num_pickle_refs_output++;
1594 }
1595 else
1596 {
1597 /* This is the first time we see EXPR, write all reachable
1598 trees to OB. */
1599 static bool in_dfs_walk;
1600
1601 /* Protect against recursion which means disconnect between
1602 what tree edges we walk in the DFS walk and what edges
1603 we stream out. */
1604 gcc_assert (!in_dfs_walk);
1605
1606 /* Start the DFS walk. */
1607 /* Save ob state ... */
1608 /* let's see ... */
1609 in_dfs_walk = true;
1610 DFS (ob, expr, ref_p, this_ref_p, false);
1611 in_dfs_walk = false;
1612
1613 /* Finally append a reference to the tree we were writing.
1614 ??? If expr ended up as a singleton we could have
1615 inlined it here and avoid outputting a reference. */
1616 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1617 gcc_assert (existed_p);
1618 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1619 streamer_write_uhwi (ob, ix);
1620 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1621 lto_tree_code_to_tag (TREE_CODE (expr)));
1622 lto_stats.num_pickle_refs_output++;
1623 }
1624 }
1625
1626
1627 /* Output to OB a list of try/catch handlers starting with FIRST. */
1628
1629 static void
1630 output_eh_try_list (struct output_block *ob, eh_catch first)
1631 {
1632 eh_catch n;
1633
1634 for (n = first; n; n = n->next_catch)
1635 {
1636 streamer_write_record_start (ob, LTO_eh_catch);
1637 stream_write_tree (ob, n->type_list, true);
1638 stream_write_tree (ob, n->filter_list, true);
1639 stream_write_tree (ob, n->label, true);
1640 }
1641
1642 streamer_write_record_start (ob, LTO_null);
1643 }
1644
1645
1646 /* Output EH region R in function FN to OB. CURR_RN is the slot index
1647 that is being emitted in FN->EH->REGION_ARRAY. This is used to
1648 detect EH region sharing. */
1649
1650 static void
1651 output_eh_region (struct output_block *ob, eh_region r)
1652 {
1653 enum LTO_tags tag;
1654
1655 if (r == NULL)
1656 {
1657 streamer_write_record_start (ob, LTO_null);
1658 return;
1659 }
1660
1661 if (r->type == ERT_CLEANUP)
1662 tag = LTO_ert_cleanup;
1663 else if (r->type == ERT_TRY)
1664 tag = LTO_ert_try;
1665 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1666 tag = LTO_ert_allowed_exceptions;
1667 else if (r->type == ERT_MUST_NOT_THROW)
1668 tag = LTO_ert_must_not_throw;
1669 else
1670 gcc_unreachable ();
1671
1672 streamer_write_record_start (ob, tag);
1673 streamer_write_hwi (ob, r->index);
1674
1675 if (r->outer)
1676 streamer_write_hwi (ob, r->outer->index);
1677 else
1678 streamer_write_zero (ob);
1679
1680 if (r->inner)
1681 streamer_write_hwi (ob, r->inner->index);
1682 else
1683 streamer_write_zero (ob);
1684
1685 if (r->next_peer)
1686 streamer_write_hwi (ob, r->next_peer->index);
1687 else
1688 streamer_write_zero (ob);
1689
1690 if (r->type == ERT_TRY)
1691 {
1692 output_eh_try_list (ob, r->u.eh_try.first_catch);
1693 }
1694 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1695 {
1696 stream_write_tree (ob, r->u.allowed.type_list, true);
1697 stream_write_tree (ob, r->u.allowed.label, true);
1698 streamer_write_uhwi (ob, r->u.allowed.filter);
1699 }
1700 else if (r->type == ERT_MUST_NOT_THROW)
1701 {
1702 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1703 bitpack_d bp = bitpack_create (ob->main_stream);
1704 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1705 streamer_write_bitpack (&bp);
1706 }
1707
1708 if (r->landing_pads)
1709 streamer_write_hwi (ob, r->landing_pads->index);
1710 else
1711 streamer_write_zero (ob);
1712 }
1713
1714
1715 /* Output landing pad LP to OB. */
1716
1717 static void
1718 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1719 {
1720 if (lp == NULL)
1721 {
1722 streamer_write_record_start (ob, LTO_null);
1723 return;
1724 }
1725
1726 streamer_write_record_start (ob, LTO_eh_landing_pad);
1727 streamer_write_hwi (ob, lp->index);
1728 if (lp->next_lp)
1729 streamer_write_hwi (ob, lp->next_lp->index);
1730 else
1731 streamer_write_zero (ob);
1732
1733 if (lp->region)
1734 streamer_write_hwi (ob, lp->region->index);
1735 else
1736 streamer_write_zero (ob);
1737
1738 stream_write_tree (ob, lp->post_landing_pad, true);
1739 }
1740
1741
1742 /* Output the existing eh_table to OB. */
1743
1744 static void
1745 output_eh_regions (struct output_block *ob, struct function *fn)
1746 {
1747 if (fn->eh && fn->eh->region_tree)
1748 {
1749 unsigned i;
1750 eh_region eh;
1751 eh_landing_pad lp;
1752 tree ttype;
1753
1754 streamer_write_record_start (ob, LTO_eh_table);
1755
1756 /* Emit the index of the root of the EH region tree. */
1757 streamer_write_hwi (ob, fn->eh->region_tree->index);
1758
1759 /* Emit all the EH regions in the region array. */
1760 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1761 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1762 output_eh_region (ob, eh);
1763
1764 /* Emit all landing pads. */
1765 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1766 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1767 output_eh_lp (ob, lp);
1768
1769 /* Emit all the runtime type data. */
1770 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1771 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1772 stream_write_tree (ob, ttype, true);
1773
1774 /* Emit the table of action chains. */
1775 if (targetm.arm_eabi_unwinder)
1776 {
1777 tree t;
1778 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1779 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1780 stream_write_tree (ob, t, true);
1781 }
1782 else
1783 {
1784 uchar c;
1785 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1786 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1787 streamer_write_char_stream (ob->main_stream, c);
1788 }
1789 }
1790
1791 /* The LTO_null either terminates the record or indicates that there
1792 are no eh_records at all. */
1793 streamer_write_record_start (ob, LTO_null);
1794 }
1795
1796
1797 /* Output all of the active ssa names to the ssa_names stream. */
1798
1799 static void
1800 output_ssa_names (struct output_block *ob, struct function *fn)
1801 {
1802 unsigned int i, len;
1803
1804 len = vec_safe_length (SSANAMES (fn));
1805 streamer_write_uhwi (ob, len);
1806
1807 for (i = 1; i < len; i++)
1808 {
1809 tree ptr = (*SSANAMES (fn))[i];
1810
1811 if (ptr == NULL_TREE
1812 || SSA_NAME_IN_FREE_LIST (ptr)
1813 || virtual_operand_p (ptr)
1814 /* Simply skip unreleased SSA names. */
1815 || (! SSA_NAME_IS_DEFAULT_DEF (ptr)
1816 && (! SSA_NAME_DEF_STMT (ptr)
1817 || ! gimple_bb (SSA_NAME_DEF_STMT (ptr)))))
1818 continue;
1819
1820 streamer_write_uhwi (ob, i);
1821 streamer_write_char_stream (ob->main_stream,
1822 SSA_NAME_IS_DEFAULT_DEF (ptr));
1823 if (SSA_NAME_VAR (ptr))
1824 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1825 else
1826 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1827 stream_write_tree (ob, TREE_TYPE (ptr), true);
1828 }
1829
1830 streamer_write_zero (ob);
1831 }
1832
1833
1834
/* Output the cfg of FN to OB's CFG stream: profile status, basic blocks
   with their successor edges, the block chain order, and the loop tree
   with per-loop bounds and OMP SIMD info.  */

static void
output_cfg (struct output_block *ob, struct function *fn)
{
  struct lto_output_stream *tmp_stream = ob->main_stream;
  basic_block bb;

  /* Temporarily redirect all writes below to the dedicated CFG stream;
     the original main stream is restored at the end.  */
  ob->main_stream = ob->cfg_stream;

  streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
		       profile_status_for_fn (fn));

  /* Output the number of the highest basic block.  */
  streamer_write_uhwi (ob, last_basic_block_for_fn (fn));

  FOR_ALL_BB_FN (bb, fn)
    {
      edge_iterator ei;
      edge e;

      streamer_write_hwi (ob, bb->index);

      /* Output the successors and the edge flags.  */
      streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  streamer_write_uhwi (ob, e->dest->index);
	  streamer_write_hwi (ob, e->probability);
	  streamer_write_gcov_count (ob, e->count);
	  streamer_write_uhwi (ob, e->flags);
	}
    }

  /* -1 terminates the list of blocks.  */
  streamer_write_hwi (ob, -1);

  /* Stream the linear next_bb chain so the reader can restore block
     order.  NOTE(review): this walks ENTRY_BLOCK_PTR_FOR_FN (cfun)
     while the rest of the function uses FN; the gcc_assert below
     guarantees the two are the same here.  */
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  while (bb->next_bb)
    {
      streamer_write_hwi (ob, bb->next_bb->index);
      bb = bb->next_bb;
    }

  /* -1 terminates the chain.  */
  streamer_write_hwi (ob, -1);

  /* ??? The cfgloop interface is tied to cfun.  */
  gcc_assert (cfun == fn);

  /* Output the number of loops.  */
  streamer_write_uhwi (ob, number_of_loops (fn));

  /* Output each loop, skipping the tree root which has number zero.  */
  for (unsigned i = 1; i < number_of_loops (fn); ++i)
    {
      struct loop *loop = get_loop (fn, i);

      /* Write the index of the loop header.  That's enough to rebuild
	 the loop tree on the reader side.  Stream -1 for an unused
	 loop entry.  */
      if (!loop)
	{
	  streamer_write_hwi (ob, -1);
	  continue;
	}
      else
	streamer_write_hwi (ob, loop->header->index);

      /* Write everything copy_loop_info copies: estimate state and the
	 various iteration-count bounds, each guarded by its validity
	 flag.  */
      streamer_write_enum (ob->main_stream,
			   loop_estimation, EST_LAST, loop->estimate_state);
      streamer_write_hwi (ob, loop->any_upper_bound);
      if (loop->any_upper_bound)
	streamer_write_widest_int (ob, loop->nb_iterations_upper_bound);
      streamer_write_hwi (ob, loop->any_likely_upper_bound);
      if (loop->any_likely_upper_bound)
	streamer_write_widest_int (ob, loop->nb_iterations_likely_upper_bound);
      streamer_write_hwi (ob, loop->any_estimate);
      if (loop->any_estimate)
	streamer_write_widest_int (ob, loop->nb_iterations_estimate);

      /* Write OMP SIMD related info.  */
      streamer_write_hwi (ob, loop->safelen);
      streamer_write_hwi (ob, loop->dont_vectorize);
      streamer_write_hwi (ob, loop->force_vectorize);
      stream_write_tree (ob, loop->simduid, true);
    }

  /* Restore the original main stream.  */
  ob->main_stream = tmp_stream;
}
1924
1925
1926 /* Create the header in the file using OB. If the section type is for
1927 a function, set FN to the decl for that function. */
1928
1929 void
1930 produce_asm (struct output_block *ob, tree fn)
1931 {
1932 enum lto_section_type section_type = ob->section_type;
1933 struct lto_function_header header;
1934 char *section_name;
1935
1936 if (section_type == LTO_section_function_body)
1937 {
1938 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1939 section_name = lto_get_section_name (section_type, name, NULL);
1940 }
1941 else
1942 section_name = lto_get_section_name (section_type, NULL, NULL);
1943
1944 lto_begin_section (section_name, !flag_wpa);
1945 free (section_name);
1946
1947 /* The entire header is stream computed here. */
1948 memset (&header, 0, sizeof (struct lto_function_header));
1949
1950 /* Write the header. */
1951 header.major_version = LTO_major_version;
1952 header.minor_version = LTO_minor_version;
1953
1954 if (section_type == LTO_section_function_body)
1955 header.cfg_size = ob->cfg_stream->total_size;
1956 header.main_size = ob->main_stream->total_size;
1957 header.string_size = ob->string_stream->total_size;
1958 lto_write_data (&header, sizeof header);
1959
1960 /* Put all of the gimple and the string table out the asm file as a
1961 block of text. */
1962 if (section_type == LTO_section_function_body)
1963 lto_write_stream (ob->cfg_stream);
1964 lto_write_stream (ob->main_stream);
1965 lto_write_stream (ob->string_stream);
1966
1967 lto_end_section ();
1968 }
1969
1970
/* Output the base body of struct function FN using output block OB:
   static chain, local decls, IL properties, a bitpack of function-wide
   flags, and the function's start/end source locations.  The field
   order here is the wire format and must match the reader.  */

static void
output_struct_function_base (struct output_block *ob, struct function *fn)
{
  struct bitpack_d bp;
  unsigned i;
  tree t;

  /* Output the static chain and non-local goto save area.  */
  stream_write_tree (ob, fn->static_chain_decl, true);
  stream_write_tree (ob, fn->nonlocal_goto_save_area, true);

  /* Output all the local variables in the function, preceded by
     their count.  */
  streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
  FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
    stream_write_tree (ob, t, true);

  /* Output current IL state of the function.  */
  streamer_write_uhwi (ob, fn->curr_properties);

  /* Write all the attributes for FN as single-bit (or, for the va_list
     sizes and last_clique, multi-bit) bitpack values.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, fn->is_thunk, 1);
  bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
  bp_pack_value (&bp, fn->returns_pcc_struct, 1);
  bp_pack_value (&bp, fn->returns_struct, 1);
  bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
  bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
  bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
  bp_pack_value (&bp, fn->after_inlining, 1);
  bp_pack_value (&bp, fn->stdarg, 1);
  bp_pack_value (&bp, fn->has_nonlocal_label, 1);
  bp_pack_value (&bp, fn->has_forced_label_in_static, 1);
  bp_pack_value (&bp, fn->calls_alloca, 1);
  bp_pack_value (&bp, fn->calls_setjmp, 1);
  bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
  bp_pack_value (&bp, fn->has_simduid_loops, 1);
  bp_pack_value (&bp, fn->va_list_fpr_size, 8);
  bp_pack_value (&bp, fn->va_list_gpr_size, 8);
  bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);

  /* Output the function start and end loci (streamed into the same
     bitpack).  */
  stream_output_location (ob, &bp, fn->function_start_locus);
  stream_output_location (ob, &bp, fn->function_end_locus);

  streamer_write_bitpack (&bp);
}
2019
2020
2021 /* Collect all leaf BLOCKs beyond ROOT into LEAFS. */
2022
2023 static void
2024 collect_block_tree_leafs (tree root, vec<tree> &leafs)
2025 {
2026 for (root = BLOCK_SUBBLOCKS (root); root; root = BLOCK_CHAIN (root))
2027 if (! BLOCK_SUBBLOCKS (root))
2028 leafs.safe_push (root);
2029 else
2030 collect_block_tree_leafs (BLOCK_SUBBLOCKS (root), leafs);
2031 }
2032
/* Output the body of function NODE->DECL.  Streams the result decl,
   arguments, the lexical-scope BLOCK tree, and — when the function has
   a GIMPLE body — the function base, SSA names, EH regions, basic
   blocks and CFG into an LTO_section_function_body section.  */

static void
output_function (struct cgraph_node *node)
{
  tree function;
  struct function *fn;
  basic_block bb;
  struct output_block *ob;

  function = node->decl;
  fn = DECL_STRUCT_FUNCTION (function);
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->symbol = node;

  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  /* Set current_function_decl and cfun.  */
  push_cfun (fn);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  streamer_write_record_start (ob, LTO_function);

  /* Output decls for parameters and args.  */
  stream_write_tree (ob, DECL_RESULT (function), true);
  streamer_write_chain (ob, DECL_ARGUMENTS (function), true);

  /* Output DECL_INITIAL for the function, which contains the tree of
     lexical scopes.  */
  stream_write_tree (ob, DECL_INITIAL (function), true);
  /* As we do not recurse into BLOCK_SUBBLOCKS but only BLOCK_SUPERCONTEXT
     collect block tree leafs and stream those.  */
  auto_vec<tree> block_tree_leafs;
  if (DECL_INITIAL (function))
    collect_block_tree_leafs (DECL_INITIAL (function), block_tree_leafs);
  streamer_write_uhwi (ob, block_tree_leafs.length ());
  for (unsigned i = 0; i < block_tree_leafs.length (); ++i)
    stream_write_tree (ob, block_tree_leafs[i], true);

  /* We also stream abstract functions where we stream only stuff needed for
     debug info.  */
  if (gimple_has_body_p (function))
    {
      /* Marker telling the reader a full body follows.  */
      streamer_write_uhwi (ob, 1);
      output_struct_function_base (ob, fn);

      /* Output all the SSA names used in the function.  */
      output_ssa_names (ob, fn);

      /* Output any exception handling regions.  */
      output_eh_regions (ob, fn);


      /* We will renumber the statements.  The code that does this uses
	 the same ordering that we use for serializing them so we can use
	 the same code on the other end and not have to write out the
	 statement numbers.  We do not assign UIDs to PHIs here because
	 virtual PHIs get re-computed on-the-fly which would make numbers
	 inconsistent.  */
      set_gimple_stmt_max_uid (cfun, 0);
      FOR_ALL_BB_FN (bb, cfun)
	{
	  /* Non-virtual PHIs first, in streaming order.  */
	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gphi *stmt = gsi.phi ();

	      /* Virtual PHIs are not going to be streamed.  */
	      if (!virtual_operand_p (gimple_phi_result (stmt)))
		gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	  /* Then all regular statements of the block.  */
	  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gimple *stmt = gsi_stmt (gsi);
	      gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}
      /* To avoid keeping duplicate gimple IDs in the statements, renumber
	 virtual phis now.  */
      FOR_ALL_BB_FN (bb, cfun)
	{
	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gphi *stmt = gsi.phi ();
	      if (virtual_operand_p (gimple_phi_result (stmt)))
		gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}

      /* Output the code for the function.  */
      FOR_ALL_BB_FN (bb, fn)
	output_bb (ob, bb, fn);

      /* The terminator for this function.  */
      streamer_write_record_start (ob, LTO_null);

      output_cfg (ob, fn);

      pop_cfun ();
    }
  else
    /* Marker telling the reader no body follows (abstract function).  */
    streamer_write_uhwi (ob, 0);

  /* Create a section to hold the pickled output of this function.  */
  produce_asm (ob, function);

  destroy_output_block (ob);
}
2147
2148 /* Output the body of function NODE->DECL. */
2149
2150 static void
2151 output_constructor (struct varpool_node *node)
2152 {
2153 tree var = node->decl;
2154 struct output_block *ob;
2155
2156 ob = create_output_block (LTO_section_function_body);
2157
2158 clear_line_info (ob);
2159 ob->symbol = node;
2160
2161 /* Make string 0 be a NULL string. */
2162 streamer_write_char_stream (ob->string_stream, 0);
2163
2164 /* Output DECL_INITIAL for the function, which contains the tree of
2165 lexical scopes. */
2166 stream_write_tree (ob, DECL_INITIAL (var), true);
2167
2168 /* Create a section to hold the pickled output of this function. */
2169 produce_asm (ob, var);
2170
2171 destroy_output_block (ob);
2172 }
2173
2174
2175 /* Emit toplevel asms. */
2176
2177 void
2178 lto_output_toplevel_asms (void)
2179 {
2180 struct output_block *ob;
2181 struct asm_node *can;
2182 char *section_name;
2183 struct lto_simple_header_with_strings header;
2184
2185 if (!symtab->first_asm_symbol ())
2186 return;
2187
2188 ob = create_output_block (LTO_section_asm);
2189
2190 /* Make string 0 be a NULL string. */
2191 streamer_write_char_stream (ob->string_stream, 0);
2192
2193 for (can = symtab->first_asm_symbol (); can; can = can->next)
2194 {
2195 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2196 streamer_write_hwi (ob, can->order);
2197 }
2198
2199 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2200
2201 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
2202 lto_begin_section (section_name, !flag_wpa);
2203 free (section_name);
2204
2205 /* The entire header stream is computed here. */
2206 memset (&header, 0, sizeof (header));
2207
2208 /* Write the header. */
2209 header.major_version = LTO_major_version;
2210 header.minor_version = LTO_minor_version;
2211
2212 header.main_size = ob->main_stream->total_size;
2213 header.string_size = ob->string_stream->total_size;
2214 lto_write_data (&header, sizeof header);
2215
2216 /* Put all of the gimple and the string table out the asm file as a
2217 block of text. */
2218 lto_write_stream (ob->main_stream);
2219 lto_write_stream (ob->string_stream);
2220
2221 lto_end_section ();
2222
2223 destroy_output_block (ob);
2224 }
2225
2226
2227 /* Copy the function body or variable constructor of NODE without deserializing. */
2228
2229 static void
2230 copy_function_or_variable (struct symtab_node *node)
2231 {
2232 tree function = node->decl;
2233 struct lto_file_decl_data *file_data = node->lto_file_data;
2234 const char *data;
2235 size_t len;
2236 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2237 char *section_name =
2238 lto_get_section_name (LTO_section_function_body, name, NULL);
2239 size_t i, j;
2240 struct lto_in_decl_state *in_state;
2241 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2242
2243 lto_begin_section (section_name, false);
2244 free (section_name);
2245
2246 /* We may have renamed the declaration, e.g., a static function. */
2247 name = lto_get_decl_name_mapping (file_data, name);
2248
2249 data = lto_get_raw_section_data (file_data, LTO_section_function_body,
2250 name, &len);
2251 gcc_assert (data);
2252
2253 /* Do a bit copy of the function body. */
2254 lto_write_raw_data (data, len);
2255
2256 /* Copy decls. */
2257 in_state =
2258 lto_get_function_in_decl_state (node->lto_file_data, function);
2259 out_state->compressed = in_state->compressed;
2260 gcc_assert (in_state);
2261
2262 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2263 {
2264 size_t n = vec_safe_length (in_state->streams[i]);
2265 vec<tree, va_gc> *trees = in_state->streams[i];
2266 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2267
2268 /* The out state must have the same indices and the in state.
2269 So just copy the vector. All the encoders in the in state
2270 must be empty where we reach here. */
2271 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2272 encoder->trees.reserve_exact (n);
2273 for (j = 0; j < n; j++)
2274 encoder->trees.safe_push ((*trees)[j]);
2275 }
2276
2277 lto_free_raw_section_data (file_data, LTO_section_function_body, name,
2278 data, len);
2279 lto_end_section ();
2280 }
2281
2282 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2283
2284 static tree
2285 wrap_refs (tree *tp, int *ws, void *)
2286 {
2287 tree t = *tp;
2288 if (handled_component_p (t)
2289 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
2290 && TREE_PUBLIC (TREE_OPERAND (t, 0)))
2291 {
2292 tree decl = TREE_OPERAND (t, 0);
2293 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2294 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2295 build1 (ADDR_EXPR, ptrtype, decl),
2296 build_int_cst (ptrtype, 0));
2297 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2298 *ws = 0;
2299 }
2300 else if (TREE_CODE (t) == CONSTRUCTOR)
2301 ;
2302 else if (!EXPR_P (t))
2303 *ws = 0;
2304 return NULL_TREE;
2305 }
2306
2307 /* Main entry point from the pass manager. */
2308
2309 void
2310 lto_output (void)
2311 {
2312 struct lto_out_decl_state *decl_state;
2313 bitmap output = NULL;
2314 int i, n_nodes;
2315 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2316
2317 if (flag_checking)
2318 output = lto_bitmap_alloc ();
2319
2320 /* Initialize the streamer. */
2321 lto_streamer_init ();
2322
2323 n_nodes = lto_symtab_encoder_size (encoder);
2324 /* Process only the functions with bodies. */
2325 for (i = 0; i < n_nodes; i++)
2326 {
2327 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2328 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2329 {
2330 if (lto_symtab_encoder_encode_body_p (encoder, node)
2331 && !node->alias
2332 && (!node->thunk.thunk_p || !node->thunk.add_pointer_bounds_args))
2333 {
2334 if (flag_checking)
2335 {
2336 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2337 bitmap_set_bit (output, DECL_UID (node->decl));
2338 }
2339 decl_state = lto_new_out_decl_state ();
2340 lto_push_out_decl_state (decl_state);
2341 if (gimple_has_body_p (node->decl) || !flag_wpa
2342 /* Thunks have no body but they may be synthetized
2343 at WPA time. */
2344 || DECL_ARGUMENTS (node->decl))
2345 output_function (node);
2346 else
2347 copy_function_or_variable (node);
2348 gcc_assert (lto_get_out_decl_state () == decl_state);
2349 lto_pop_out_decl_state ();
2350 lto_record_function_out_decl_state (node->decl, decl_state);
2351 }
2352 }
2353 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2354 {
2355 /* Wrap symbol references inside the ctor in a type
2356 preserving MEM_REF. */
2357 tree ctor = DECL_INITIAL (node->decl);
2358 if (ctor && !in_lto_p)
2359 walk_tree (&ctor, wrap_refs, NULL, NULL);
2360 if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2361 && lto_symtab_encoder_encode_initializer_p (encoder, node)
2362 && !node->alias)
2363 {
2364 timevar_push (TV_IPA_LTO_CTORS_OUT);
2365 if (flag_checking)
2366 {
2367 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2368 bitmap_set_bit (output, DECL_UID (node->decl));
2369 }
2370 decl_state = lto_new_out_decl_state ();
2371 lto_push_out_decl_state (decl_state);
2372 if (DECL_INITIAL (node->decl) != error_mark_node
2373 || !flag_wpa)
2374 output_constructor (node);
2375 else
2376 copy_function_or_variable (node);
2377 gcc_assert (lto_get_out_decl_state () == decl_state);
2378 lto_pop_out_decl_state ();
2379 lto_record_function_out_decl_state (node->decl, decl_state);
2380 timevar_pop (TV_IPA_LTO_CTORS_OUT);
2381 }
2382 }
2383 }
2384
2385 /* Emit the callgraph after emitting function bodies. This needs to
2386 be done now to make sure that all the statements in every function
2387 have been renumbered so that edges can be associated with call
2388 statements using the statement UIDs. */
2389 output_symtab ();
2390
2391 output_offload_tables ();
2392
2393 #if CHECKING_P
2394 lto_bitmap_free (output);
2395 #endif
2396 }
2397
2398 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2399 from it and required for correct representation of its semantics.
2400 Each node in ENCODER must be a global declaration or a type. A node
2401 is written only once, even if it appears multiple times in the
2402 vector. Certain transitively-reachable nodes, such as those
2403 representing expressions, may be duplicated, but such nodes
2404 must not appear in ENCODER itself. */
2405
2406 static void
2407 write_global_stream (struct output_block *ob,
2408 struct lto_tree_ref_encoder *encoder)
2409 {
2410 tree t;
2411 size_t index;
2412 const size_t size = lto_tree_ref_encoder_size (encoder);
2413
2414 for (index = 0; index < size; index++)
2415 {
2416 t = lto_tree_ref_encoder_get_tree (encoder, index);
2417 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2418 stream_write_tree (ob, t, false);
2419 }
2420 }
2421
2422
2423 /* Write a sequence of indices into the globals vector corresponding
2424 to the trees in ENCODER. These are used by the reader to map the
2425 indices used to refer to global entities within function bodies to
2426 their referents. */
2427
2428 static void
2429 write_global_references (struct output_block *ob,
2430 struct lto_tree_ref_encoder *encoder)
2431 {
2432 tree t;
2433 uint32_t index;
2434 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2435
2436 /* Write size and slot indexes as 32-bit unsigned numbers. */
2437 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2438 data[0] = size;
2439
2440 for (index = 0; index < size; index++)
2441 {
2442 unsigned slot_num;
2443
2444 t = lto_tree_ref_encoder_get_tree (encoder, index);
2445 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2446 gcc_assert (slot_num != (unsigned)-1);
2447 data[index + 1] = slot_num;
2448 }
2449
2450 lto_write_data (data, sizeof (int32_t) * (size + 1));
2451 free (data);
2452 }
2453
2454
2455 /* Write all the streams in an lto_out_decl_state STATE using
2456 output block OB and output stream OUT_STREAM. */
2457
2458 void
2459 lto_output_decl_state_streams (struct output_block *ob,
2460 struct lto_out_decl_state *state)
2461 {
2462 int i;
2463
2464 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2465 write_global_stream (ob, &state->streams[i]);
2466 }
2467
2468
2469 /* Write all the references in an lto_out_decl_state STATE using
2470 output block OB and output stream OUT_STREAM. */
2471
2472 void
2473 lto_output_decl_state_refs (struct output_block *ob,
2474 struct lto_out_decl_state *state)
2475 {
2476 unsigned i;
2477 unsigned ref;
2478 tree decl;
2479
2480 /* Write reference to FUNCTION_DECL. If there is not function,
2481 write reference to void_type_node. */
2482 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2483 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2484 gcc_assert (ref != (unsigned)-1);
2485 ref = ref * 2 + (state->compressed ? 1 : 0);
2486 lto_write_data (&ref, sizeof (uint32_t));
2487
2488 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2489 write_global_references (ob, &state->streams[i]);
2490 }
2491
2492
2493 /* Return the written size of STATE. */
2494
2495 static size_t
2496 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2497 {
2498 int i;
2499 size_t size;
2500
2501 size = sizeof (int32_t); /* fn_ref. */
2502 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2503 {
2504 size += sizeof (int32_t); /* vector size. */
2505 size += (lto_tree_ref_encoder_size (&state->streams[i])
2506 * sizeof (int32_t));
2507 }
2508 return size;
2509 }
2510
2511
/* Write symbol T into STREAM in CACHE.  SEEN specifies symbols we wrote
   so far; ALIAS is true when T is written on behalf of an alias.
   Emits one plugin symbol-table record: NUL-terminated name, comdat
   group, kind byte, visibility byte, 8-byte size, 4-byte slot.  */

static void
write_symbol (struct streamer_tree_cache_d *cache,
	      tree t, hash_set<const char *> *seen, bool alias)
{
  const char *name;
  enum gcc_plugin_symbol_kind kind;
  enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
  unsigned slot_num;
  uint64_t size;
  const char *comdat;
  unsigned char c;

  /* None of the following kinds of symbols are needed in the
     symbol table.  */
  if (!TREE_PUBLIC (t)
      || is_builtin_fn (t)
      || DECL_ABSTRACT_P (t)
      || (VAR_P (t) && DECL_HARD_REGISTER (t)))
    return;

  gcc_assert (VAR_OR_FUNCTION_DECL_P (t));

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));

  /* This behaves like assemble_name_raw in varasm.c, performing the
     same name manipulations that ASM_OUTPUT_LABELREF does.  */
  name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));

  /* hash_set::add returns true when NAME was already present, so each
     distinct name is emitted at most once.  */
  if (seen->add (name))
    return;

  streamer_tree_cache_lookup (cache, t, &slot_num);
  gcc_assert (slot_num != (unsigned)-1);

  /* Classify the symbol: undefined kinds for externals, defined kinds
     otherwise.  */
  if (DECL_EXTERNAL (t))
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKUNDEF;
      else
	kind = GCCPK_UNDEF;
    }
  else
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKDEF;
      else if (DECL_COMMON (t))
	kind = GCCPK_COMMON;
      else
	kind = GCCPK_DEF;

      /* When something is defined, it should have node attached.  */
      gcc_assert (alias || !VAR_P (t) || varpool_node::get (t)->definition);
      gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
		  || (cgraph_node::get (t)
		      && cgraph_node::get (t)->definition));
    }

  /* Imitate what default_elf_asm_output_external do.
     When symbol is external, we need to output it with DEFAULT visibility
     when compiling with -fvisibility=default, while with HIDDEN visibility
     when symbol has attribute (visibility("hidden")) specified.
     targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
     right.  */

  if (DECL_EXTERNAL (t)
      && !targetm.binds_local_p (t))
    visibility = GCCPV_DEFAULT;
  else
    switch (DECL_VISIBILITY (t))
      {
      case VISIBILITY_DEFAULT:
	visibility = GCCPV_DEFAULT;
	break;
      case VISIBILITY_PROTECTED:
	visibility = GCCPV_PROTECTED;
	break;
      case VISIBILITY_HIDDEN:
	visibility = GCCPV_HIDDEN;
	break;
      case VISIBILITY_INTERNAL:
	visibility = GCCPV_INTERNAL;
	break;
      }

  /* Only common symbols carry a size; it is needed to pick the
     largest common at link time.  */
  if (kind == GCCPK_COMMON
      && DECL_SIZE_UNIT (t)
      && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
    size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
  else
    size = 0;

  if (DECL_ONE_ONLY (t))
    comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
  else
    comdat = "";

  /* Emit the record in the fixed on-disk order: name, comdat, kind,
     visibility, size, slot number.  */
  lto_write_data (name, strlen (name) + 1);
  lto_write_data (comdat, strlen (comdat) + 1);
  c = (unsigned char) kind;
  lto_write_data (&c, 1);
  c = (unsigned char) visibility;
  lto_write_data (&c, 1);
  lto_write_data (&size, 8);
  lto_write_data (&slot_num, 4);
}
2620
2621 /* Return true if NODE should appear in the plugin symbol table. */
2622
2623 bool
2624 output_symbol_p (symtab_node *node)
2625 {
2626 struct cgraph_node *cnode;
2627 if (!node->real_symbol_p ())
2628 return false;
2629 /* We keep external functions in symtab for sake of inlining
2630 and devirtualization. We do not want to see them in symbol table as
2631 references unless they are really used. */
2632 cnode = dyn_cast <cgraph_node *> (node);
2633 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2634 && cnode->callers)
2635 return true;
2636
2637 /* Ignore all references from external vars initializers - they are not really
2638 part of the compilation unit until they are used by folding. Some symbols,
2639 like references to external construction vtables can not be referred to at all.
2640 We decide this at can_refer_decl_in_current_unit_p. */
2641 if (!node->definition || DECL_EXTERNAL (node->decl))
2642 {
2643 int i;
2644 struct ipa_ref *ref;
2645 for (i = 0; node->iterate_referring (i, ref); i++)
2646 {
2647 if (ref->use == IPA_REF_ALIAS)
2648 continue;
2649 if (is_a <cgraph_node *> (ref->referring))
2650 return true;
2651 if (!DECL_EXTERNAL (ref->referring->decl))
2652 return true;
2653 }
2654 return false;
2655 }
2656 return true;
2657 }
2658
2659
2660 /* Write an IL symbol table to OB.
2661 SET and VSET are cgraph/varpool node sets we are outputting. */
2662
2663 static void
2664 produce_symtab (struct output_block *ob)
2665 {
2666 struct streamer_tree_cache_d *cache = ob->writer_cache;
2667 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2668 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2669 lto_symtab_encoder_iterator lsei;
2670
2671 lto_begin_section (section_name, false);
2672 free (section_name);
2673
2674 hash_set<const char *> seen;
2675
2676 /* Write the symbol table.
2677 First write everything defined and then all declarations.
2678 This is necessary to handle cases where we have duplicated symbols. */
2679 for (lsei = lsei_start (encoder);
2680 !lsei_end_p (lsei); lsei_next (&lsei))
2681 {
2682 symtab_node *node = lsei_node (lsei);
2683
2684 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2685 continue;
2686 write_symbol (cache, node->decl, &seen, false);
2687 }
2688 for (lsei = lsei_start (encoder);
2689 !lsei_end_p (lsei); lsei_next (&lsei))
2690 {
2691 symtab_node *node = lsei_node (lsei);
2692
2693 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2694 continue;
2695 write_symbol (cache, node->decl, &seen, false);
2696 }
2697
2698 lto_end_section ();
2699 }
2700
2701
2702 /* Init the streamer_mode_table for output, where we collect info on what
2703 machine_mode values have been streamed. */
2704 void
2705 lto_output_init_mode_table (void)
2706 {
2707 memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
2708 }
2709
2710
/* Write the mode table.  For every machine_mode recorded in
   streamer_mode_table, streams the properties (class, size, precision,
   inner mode, nunits, and for float/fixed-point modes the format info)
   needed by the reader to remap mode numbers.  Only called when
   streaming for offload targets (see produce_asm_for_decls).  */
static void
lto_write_mode_table (void)
{
  struct output_block *ob;
  ob = create_output_block (LTO_section_mode_table);
  bitpack_d bp = bitpack_create (ob->main_stream);

  /* Ensure that for GET_MODE_INNER (m) != m we have
     also the inner mode marked.  */
  for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
    if (streamer_mode_table[i])
      {
	machine_mode m = (machine_mode) i;
	machine_mode inner_m = GET_MODE_INNER (m);
	if (inner_m != m)
	  streamer_mode_table[(int) inner_m] = 1;
      }
  /* First stream modes that have GET_MODE_INNER (m) == m,
     so that we can refer to them afterwards.  */
  for (int pass = 0; pass < 2; pass++)
    for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
      if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
	{
	  machine_mode m = (machine_mode) i;
	  /* Pass 0 emits self-inner modes, pass 1 the composite ones.  */
	  if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
	    continue;
	  bp_pack_value (&bp, m, 8);
	  bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
	  bp_pack_value (&bp, GET_MODE_SIZE (m), 8);
	  bp_pack_value (&bp, GET_MODE_PRECISION (m), 16);
	  bp_pack_value (&bp, GET_MODE_INNER (m), 8);
	  bp_pack_value (&bp, GET_MODE_NUNITS (m), 8);
	  switch (GET_MODE_CLASS (m))
	    {
	    case MODE_FRACT:
	    case MODE_UFRACT:
	    case MODE_ACCUM:
	    case MODE_UACCUM:
	      /* Fixed-point modes additionally carry their integral
		 and fractional bit counts.  */
	      bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
	      bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
	      break;
	    case MODE_FLOAT:
	    case MODE_DECIMAL_FLOAT:
	      /* Float modes are identified by their real format name.  */
	      bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
	      break;
	    default:
	      break;
	    }
	  bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
	}
  /* Terminate the table with VOIDmode.  */
  bp_pack_value (&bp, VOIDmode, 8);

  streamer_write_bitpack (&bp);

  char *section_name
    = lto_get_section_name (LTO_section_mode_table, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header stream is computed here.  */
  struct lto_simple_header_with_strings header;
  memset (&header, 0, sizeof (header));

  /* Write the header.  */
  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();
  destroy_output_block (ob);
}
2791
2792
/* This pass is run after all of the functions are serialized and all
   of the IPA passes have written their serialized forms.  This pass
   causes the vector of all of the global decls and types used from
   this file to be written in to a section that can then be read in to
   recover these on other side.  */

void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  gcc_assert (!alias_pairs);

  /* Get rid of the global decl state hash tables to save some memory.  */
  out_state = lto_get_out_decl_state ();
  for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
    if (out_state->streams[i].tree_hash_table)
      {
	delete out_state->streams[i].tree_hash_table;
	out_state->streams[i].tree_hash_table = NULL;
      }

  /* Write the global symbols.  The global out state first, then the
     per-function out states in order.  */
  lto_output_decl_state_streams (ob, out_state);
  num_fns = lto_function_decl_states.length ();
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended.  */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states.  One 32-bit word
     for the state count, plus the written size of each state.  */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  lto_write_data (&header, sizeof header);

  /* Write the main out-decl state, followed by out-decl states of
     functions.  */
  num_decl_states = num_fns + 1;
  lto_write_data (&num_decl_states, sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state = lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, fn_out_state);
    }

  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by linker to determine dependencies
     and thus we can skip it for WPA.  */
  if (!flag_wpa)
    produce_symtab (ob);

  /* Write command line opts.  */
  lto_write_options ();

  /* Deallocate memory and clean up.  */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
  /* The mode table is only emitted when streaming for offload targets,
     which may number machine modes differently.  */
  if (lto_stream_offload_p)
    lto_write_mode_table ();
}