gcc/lto-streamer-in.c
1 /* Read the GIMPLE representation from a file stream.
2
3 Copyright (C) 2009-2017 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "cfghooks.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "gimple-streamer.h"
35 #include "toplev.h"
36 #include "gimple-iterator.h"
37 #include "tree-cfg.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "tree-ssa.h"
41 #include "except.h"
42 #include "cgraph.h"
43 #include "cfgloop.h"
44 #include "debug.h"
45
46
47 struct freeing_string_slot_hasher : string_slot_hasher
48 {
49 static inline void remove (value_type *);
50 };
51
52 inline void
53 freeing_string_slot_hasher::remove (value_type *v)
54 {
55 free (v);
56 }
57
58 /* The table to hold the file names. */
59 static hash_table<freeing_string_slot_hasher> *file_name_hash_table;
60
61
62 /* Check that tag ACTUAL has one of the given values.  NTAGS is the
63 number of valid tag values to check. */
64
65 void
66 lto_tag_check_set (enum LTO_tags actual, int ntags, ...)
67 {
68 va_list ap;
69 int i;
70
71 va_start (ap, ntags);
72 for (i = 0; i < ntags; i++)
73 if ((unsigned) actual == va_arg (ap, unsigned))
74 {
75 va_end (ap);
76 return;
77 }
78
79 va_end (ap);
80 internal_error ("bytecode stream: unexpected tag %s", lto_tag_name (actual));
81 }
82
83
84 /* Read LENGTH bytes from input block IB into ADDR. */
85
86 void
87 lto_input_data_block (struct lto_input_block *ib, void *addr, size_t length)
88 {
89 size_t i;
90 unsigned char *const buffer = (unsigned char *) addr;
91
92 for (i = 0; i < length; i++)
93 buffer[i] = streamer_read_uchar (ib);
94 }
95
96
97 /* Look up STRING in file_name_hash_table.  If found, return the existing
98 canonical copy; otherwise insert STRING as the canonical version and return it. */
99
100 static const char *
101 canon_file_name (const char *string)
102 {
103 string_slot **slot;
104 struct string_slot s_slot;
105 size_t len = strlen (string);
106
107 s_slot.s = string;
108 s_slot.len = len;
109
110 slot = file_name_hash_table->find_slot (&s_slot, INSERT);
111 if (*slot == NULL)
112 {
113 char *saved_string;
114 struct string_slot *new_slot;
115
116 saved_string = (char *) xmalloc (len + 1);
117 new_slot = XCNEW (struct string_slot);
118 memcpy (saved_string, string, len + 1);
119 new_slot->s = saved_string;
120 new_slot->len = len;
121 *slot = new_slot;
122 return saved_string;
123 }
124 else
125 {
126 struct string_slot *old_slot = *slot;
127 return old_slot->s;
128 }
129 }
130
131 /* Pointer to currently alive instance of lto_location_cache. */
132
133 lto_location_cache *lto_location_cache::current_cache;
134
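/* The location cache defers linemap updates for locations streamed with
   trees that tree merging may still discard: input_location records the
   pending (file, line, column) tuples here, apply_location_cache resolves
   them into the line table and patches the referring trees, and
   accept_location_cache / revert_location_cache keep or drop the entries
   recorded since the last accepted point, depending on whether the newly
   read trees prevailed. */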
135 /* Sort locations in source order.  Start with the file from the last application of the cache. */
136
137 int
138 lto_location_cache::cmp_loc (const void *pa, const void *pb)
139 {
140 const cached_location *a = ((const cached_location *)pa);
141 const cached_location *b = ((const cached_location *)pb);
142 const char *current_file = current_cache->current_file;
143 int current_line = current_cache->current_line;
144
145 if (a->file == current_file && b->file != current_file)
146 return -1;
147 if (a->file != current_file && b->file == current_file)
148 return 1;
149 if (a->file == current_file && b->file == current_file)
150 {
151 if (a->line == current_line && b->line != current_line)
152 return -1;
153 if (a->line != current_line && b->line == current_line)
154 return 1;
155 }
156 if (a->file != b->file)
157 return strcmp (a->file, b->file);
158 if (a->sysp != b->sysp)
159 return a->sysp ? 1 : -1;
160 if (a->line != b->line)
161 return a->line - b->line;
162 return a->col - b->col;
163 }
164
165 /* Apply all changes in location cache. Add locations into linemap and patch
166 trees. */
167
168 bool
169 lto_location_cache::apply_location_cache ()
170 {
171 static const char *prev_file;
172 if (!loc_cache.length ())
173 return false;
174 if (loc_cache.length () > 1)
175 loc_cache.qsort (cmp_loc);
176
177 for (unsigned int i = 0; i < loc_cache.length (); i++)
178 {
179 struct cached_location loc = loc_cache[i];
180
181 if (current_file != loc.file)
182 linemap_add (line_table, prev_file ? LC_RENAME : LC_ENTER,
183 loc.sysp, loc.file, loc.line);
184 else if (current_line != loc.line)
185 {
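/* Moving to a new line within the same file: scan ahead for the largest
   column used on that line so linemap_line_start gets an adequate
   max-column hint. */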
186 int max = loc.col;
187
188 for (unsigned int j = i + 1; j < loc_cache.length (); j++)
189 if (loc.file != loc_cache[j].file
190 || loc.line != loc_cache[j].line)
191 break;
192 else if (max < loc_cache[j].col)
193 max = loc_cache[j].col;
194 linemap_line_start (line_table, loc.line, max + 1);
195 }
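/* input_location left RESERVED_LOCATION_COUNT (BUILTINS_LOCATION + 1) in
   the slot as a sentinel; check that nothing overwrote it before we patch
   in the real location. */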
196 gcc_assert (*loc.loc == BUILTINS_LOCATION + 1);
197 if (current_file == loc.file && current_line == loc.line
198 && current_col == loc.col)
199 *loc.loc = current_loc;
200 else
201 current_loc = *loc.loc = linemap_position_for_column (line_table,
202 loc.col);
203 current_line = loc.line;
204 prev_file = current_file = loc.file;
205 current_col = loc.col;
206 }
207 loc_cache.truncate (0);
208 accepted_length = 0;
209 return true;
210 }
211
212 /* Tree merging did not succeed; mark all changes in the cache as accepted. */
213
214 void
215 lto_location_cache::accept_location_cache ()
216 {
217 gcc_assert (current_cache == this);
218 accepted_length = loc_cache.length ();
219 }
220
221 /* Tree merging did succeed; throw away recent changes. */
222
223 void
224 lto_location_cache::revert_location_cache ()
225 {
226 loc_cache.truncate (accepted_length);
227 }
228
229 /* Read a location bitpack from input block IB and either update *LOC directly
230 or add it to the location cache.
231 It is necessary to call apply_location_cache to get *LOC updated. */
232
233 void
234 lto_location_cache::input_location (location_t *loc, struct bitpack_d *bp,
235 struct data_in *data_in)
236 {
237 static const char *stream_file;
238 static int stream_line;
239 static int stream_col;
240 static bool stream_sysp;
241 bool file_change, line_change, column_change;
242
243 gcc_assert (current_cache == this);
244
245 *loc = bp_unpack_int_in_range (bp, "location", 0, RESERVED_LOCATION_COUNT);
246
247 if (*loc < RESERVED_LOCATION_COUNT)
248 return;
249
250 /* Keep value RESERVED_LOCATION_COUNT in *loc as linemap lookups will
251 ICE on it. */
252
253 file_change = bp_unpack_value (bp, 1);
254 line_change = bp_unpack_value (bp, 1);
255 column_change = bp_unpack_value (bp, 1);
256
257 if (file_change)
258 {
259 stream_file = canon_file_name (bp_unpack_string (data_in, bp));
260 stream_sysp = bp_unpack_value (bp, 1);
261 }
262
263 if (line_change)
264 stream_line = bp_unpack_var_len_unsigned (bp);
265
266 if (column_change)
267 stream_col = bp_unpack_var_len_unsigned (bp);
268
269 /* This optimization saves location cache operations during gimple
270 streaming. */
271
272 if (current_file == stream_file && current_line == stream_line
273 && current_col == stream_col && current_sysp == stream_sysp)
274 {
275 *loc = current_loc;
276 return;
277 }
278
279 struct cached_location entry
280 = {stream_file, loc, stream_line, stream_col, stream_sysp};
281 loc_cache.safe_push (entry);
282 }
283
284 /* Read a location bitpack from input block IB and either update *LOC directly
285 or add it to the location cache.
286 It is necessary to call apply_location_cache to get *LOC updated. */
287
288 void
289 lto_input_location (location_t *loc, struct bitpack_d *bp,
290 struct data_in *data_in)
291 {
292 data_in->location_cache.input_location (loc, bp, data_in);
293 }
294
295 /* Read location and return it instead of going through location caching.
296 This should be used only when the resulting location is not going to be
297 discarded. */
298
299 location_t
300 stream_input_location_now (struct bitpack_d *bp, struct data_in *data_in)
301 {
302 location_t loc;
303 stream_input_location (&loc, bp, data_in);
304 data_in->location_cache.apply_location_cache ();
305 return loc;
306 }
307
308 /* Read a reference to a tree node from DATA_IN using input block IB.
309 TAG is the expected node that should be found in IB, if TAG belongs
310 to one of the indexable trees, expect to read a reference index to
311 be looked up in one of the symbol tables, otherwise read the physical
312 representation of the tree using stream_read_tree. FN is the
313 function scope for the read tree. */
314
315 tree
316 lto_input_tree_ref (struct lto_input_block *ib, struct data_in *data_in,
317 struct function *fn, enum LTO_tags tag)
318 {
319 unsigned HOST_WIDE_INT ix_u;
320 tree result = NULL_TREE;
321
322 lto_tag_check_range (tag, LTO_field_decl_ref, LTO_namelist_decl_ref);
323
324 switch (tag)
325 {
326 case LTO_type_ref:
327 ix_u = streamer_read_uhwi (ib);
328 result = lto_file_decl_data_get_type (data_in->file_data, ix_u);
329 break;
330
331 case LTO_ssa_name_ref:
332 ix_u = streamer_read_uhwi (ib);
333 result = (*SSANAMES (fn))[ix_u];
334 break;
335
336 case LTO_field_decl_ref:
337 ix_u = streamer_read_uhwi (ib);
338 result = lto_file_decl_data_get_field_decl (data_in->file_data, ix_u);
339 break;
340
341 case LTO_function_decl_ref:
342 ix_u = streamer_read_uhwi (ib);
343 result = lto_file_decl_data_get_fn_decl (data_in->file_data, ix_u);
344 break;
345
346 case LTO_type_decl_ref:
347 ix_u = streamer_read_uhwi (ib);
348 result = lto_file_decl_data_get_type_decl (data_in->file_data, ix_u);
349 break;
350
351 case LTO_namespace_decl_ref:
352 ix_u = streamer_read_uhwi (ib);
353 result = lto_file_decl_data_get_namespace_decl (data_in->file_data, ix_u);
354 break;
355
356 case LTO_global_decl_ref:
357 case LTO_result_decl_ref:
358 case LTO_const_decl_ref:
359 case LTO_imported_decl_ref:
360 case LTO_label_decl_ref:
361 case LTO_translation_unit_decl_ref:
362 case LTO_namelist_decl_ref:
363 ix_u = streamer_read_uhwi (ib);
364 result = lto_file_decl_data_get_var_decl (data_in->file_data, ix_u);
365 break;
366
367 default:
368 gcc_unreachable ();
369 }
370
371 gcc_assert (result);
372
373 return result;
374 }
375
376
377 /* Read and return a double-linked list of catch handlers from input
378 block IB, using descriptors in DATA_IN. */
379
380 static struct eh_catch_d *
381 lto_input_eh_catch_list (struct lto_input_block *ib, struct data_in *data_in,
382 eh_catch *last_p)
383 {
384 eh_catch first;
385 enum LTO_tags tag;
386
387 *last_p = first = NULL;
388 tag = streamer_read_record_start (ib);
389 while (tag)
390 {
391 tree list;
392 eh_catch n;
393
394 lto_tag_check_range (tag, LTO_eh_catch, LTO_eh_catch);
395
396 /* Read the catch node. */
397 n = ggc_cleared_alloc<eh_catch_d> ();
398 n->type_list = stream_read_tree (ib, data_in);
399 n->filter_list = stream_read_tree (ib, data_in);
400 n->label = stream_read_tree (ib, data_in);
401
402 /* Register all the types in N->FILTER_LIST. */
403 for (list = n->filter_list; list; list = TREE_CHAIN (list))
404 add_type_for_runtime (TREE_VALUE (list));
405
406 /* Chain N to the end of the list. */
407 if (*last_p)
408 (*last_p)->next_catch = n;
409 n->prev_catch = *last_p;
410 *last_p = n;
411
412 /* Set the head of the list the first time through the loop. */
413 if (first == NULL)
414 first = n;
415
416 tag = streamer_read_record_start (ib);
417 }
418
419 return first;
420 }
421
422
423 /* Read and return EH region IX from input block IB, using descriptors
424 in DATA_IN. */
425
426 static eh_region
427 input_eh_region (struct lto_input_block *ib, struct data_in *data_in, int ix)
428 {
429 enum LTO_tags tag;
430 eh_region r;
431
432 /* Read the region header. */
433 tag = streamer_read_record_start (ib);
434 if (tag == LTO_null)
435 return NULL;
436
437 r = ggc_cleared_alloc<eh_region_d> ();
438 r->index = streamer_read_hwi (ib);
439
440 gcc_assert (r->index == ix);
441
442 /* Read all the region pointers as region numbers. We'll fix up
443 the pointers once the whole array has been read. */
444 r->outer = (eh_region) (intptr_t) streamer_read_hwi (ib);
445 r->inner = (eh_region) (intptr_t) streamer_read_hwi (ib);
446 r->next_peer = (eh_region) (intptr_t) streamer_read_hwi (ib);
447
448 switch (tag)
449 {
450 case LTO_ert_cleanup:
451 r->type = ERT_CLEANUP;
452 break;
453
454 case LTO_ert_try:
455 {
456 struct eh_catch_d *last_catch;
457 r->type = ERT_TRY;
458 r->u.eh_try.first_catch = lto_input_eh_catch_list (ib, data_in,
459 &last_catch);
460 r->u.eh_try.last_catch = last_catch;
461 break;
462 }
463
464 case LTO_ert_allowed_exceptions:
465 {
466 tree l;
467
468 r->type = ERT_ALLOWED_EXCEPTIONS;
469 r->u.allowed.type_list = stream_read_tree (ib, data_in);
470 r->u.allowed.label = stream_read_tree (ib, data_in);
471 r->u.allowed.filter = streamer_read_uhwi (ib);
472
473 for (l = r->u.allowed.type_list; l ; l = TREE_CHAIN (l))
474 add_type_for_runtime (TREE_VALUE (l));
475 }
476 break;
477
478 case LTO_ert_must_not_throw:
479 {
480 r->type = ERT_MUST_NOT_THROW;
481 r->u.must_not_throw.failure_decl = stream_read_tree (ib, data_in);
482 bitpack_d bp = streamer_read_bitpack (ib);
483 r->u.must_not_throw.failure_loc
484 = stream_input_location_now (&bp, data_in);
485 }
486 break;
487
488 default:
489 gcc_unreachable ();
490 }
491
492 r->landing_pads = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
493
494 return r;
495 }
496
497
498 /* Read and return EH landing pad IX from input block IB, using descriptors
499 in DATA_IN. */
500
501 static eh_landing_pad
502 input_eh_lp (struct lto_input_block *ib, struct data_in *data_in, int ix)
503 {
504 enum LTO_tags tag;
505 eh_landing_pad lp;
506
507 /* Read the landing pad header. */
508 tag = streamer_read_record_start (ib);
509 if (tag == LTO_null)
510 return NULL;
511
512 lto_tag_check_range (tag, LTO_eh_landing_pad, LTO_eh_landing_pad);
513
514 lp = ggc_cleared_alloc<eh_landing_pad_d> ();
515 lp->index = streamer_read_hwi (ib);
516 gcc_assert (lp->index == ix);
517 lp->next_lp = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
518 lp->region = (eh_region) (intptr_t) streamer_read_hwi (ib);
519 lp->post_landing_pad = stream_read_tree (ib, data_in);
520
521 return lp;
522 }
523
524
525 /* After reading the EH regions, pointers to peer and child regions
526 are region numbers. This converts all these region numbers into
527 real pointers into the rematerialized regions for FN. ROOT_REGION
528 is the region number for the root EH region in FN. */
529
530 static void
531 fixup_eh_region_pointers (struct function *fn, HOST_WIDE_INT root_region)
532 {
533 unsigned i;
534 vec<eh_region, va_gc> *eh_array = fn->eh->region_array;
535 vec<eh_landing_pad, va_gc> *lp_array = fn->eh->lp_array;
536 eh_region r;
537 eh_landing_pad lp;
538
539 gcc_assert (eh_array && lp_array);
540
541 gcc_assert (root_region >= 0);
542 fn->eh->region_tree = (*eh_array)[root_region];
543
544 #define FIXUP_EH_REGION(r) (r) = (*eh_array)[(HOST_WIDE_INT) (intptr_t) (r)]
545 #define FIXUP_EH_LP(p) (p) = (*lp_array)[(HOST_WIDE_INT) (intptr_t) (p)]
546
547 /* Convert all the index numbers stored in pointer fields into
548 pointers to the corresponding slots in the EH region array. */
549 FOR_EACH_VEC_ELT (*eh_array, i, r)
550 {
551 /* The array may contain NULL regions. */
552 if (r == NULL)
553 continue;
554
555 gcc_assert (i == (unsigned) r->index);
556 FIXUP_EH_REGION (r->outer);
557 FIXUP_EH_REGION (r->inner);
558 FIXUP_EH_REGION (r->next_peer);
559 FIXUP_EH_LP (r->landing_pads);
560 }
561
562 /* Convert all the index numbers stored in pointer fields into
563 pointers to the corresponding slots in the EH landing pad array. */
564 FOR_EACH_VEC_ELT (*lp_array, i, lp)
565 {
566 /* The array may contain NULL landing pads. */
567 if (lp == NULL)
568 continue;
569
570 gcc_assert (i == (unsigned) lp->index);
571 FIXUP_EH_LP (lp->next_lp);
572 FIXUP_EH_REGION (lp->region);
573 }
574
575 #undef FIXUP_EH_REGION
576 #undef FIXUP_EH_LP
577 }
578
579
580 /* Initialize EH support. */
581
582 void
583 lto_init_eh (void)
584 {
585 static bool eh_initialized_p = false;
586
587 if (eh_initialized_p)
588 return;
589
590 /* Contrary to most other FEs, we only initialize EH support when at
591 least one of the files in the set contains exception regions in
592 it. Since this happens much later than the call to init_eh in
593 lang_dependent_init, we have to set flag_exceptions and call
594 init_eh again to initialize the EH tables. */
595 flag_exceptions = 1;
596 init_eh ();
597
598 eh_initialized_p = true;
599 }
600
601
602 /* Read the exception table for FN from IB using the data descriptors
603 in DATA_IN. */
604
605 static void
606 input_eh_regions (struct lto_input_block *ib, struct data_in *data_in,
607 struct function *fn)
608 {
609 HOST_WIDE_INT i, root_region, len;
610 enum LTO_tags tag;
611
612 tag = streamer_read_record_start (ib);
613 if (tag == LTO_null)
614 return;
615
616 lto_tag_check_range (tag, LTO_eh_table, LTO_eh_table);
617
618 /* If the file contains EH regions, then it was compiled with
619 -fexceptions. In that case, initialize the backend EH
620 machinery. */
621 lto_init_eh ();
622
623 gcc_assert (fn->eh);
624
625 root_region = streamer_read_hwi (ib);
626 gcc_assert (root_region == (int) root_region);
627
628 /* Read the EH region array. */
629 len = streamer_read_hwi (ib);
630 gcc_assert (len == (int) len);
631 if (len > 0)
632 {
633 vec_safe_grow_cleared (fn->eh->region_array, len);
634 for (i = 0; i < len; i++)
635 {
636 eh_region r = input_eh_region (ib, data_in, i);
637 (*fn->eh->region_array)[i] = r;
638 }
639 }
640
641 /* Read the landing pads. */
642 len = streamer_read_hwi (ib);
643 gcc_assert (len == (int) len);
644 if (len > 0)
645 {
646 vec_safe_grow_cleared (fn->eh->lp_array, len);
647 for (i = 0; i < len; i++)
648 {
649 eh_landing_pad lp = input_eh_lp (ib, data_in, i);
650 (*fn->eh->lp_array)[i] = lp;
651 }
652 }
653
654 /* Read the runtime type data. */
655 len = streamer_read_hwi (ib);
656 gcc_assert (len == (int) len);
657 if (len > 0)
658 {
659 vec_safe_grow_cleared (fn->eh->ttype_data, len);
660 for (i = 0; i < len; i++)
661 {
662 tree ttype = stream_read_tree (ib, data_in);
663 (*fn->eh->ttype_data)[i] = ttype;
664 }
665 }
666
667 /* Read the table of action chains. */
668 len = streamer_read_hwi (ib);
669 gcc_assert (len == (int) len);
670 if (len > 0)
671 {
672 if (targetm.arm_eabi_unwinder)
673 {
674 vec_safe_grow_cleared (fn->eh->ehspec_data.arm_eabi, len);
675 for (i = 0; i < len; i++)
676 {
677 tree t = stream_read_tree (ib, data_in);
678 (*fn->eh->ehspec_data.arm_eabi)[i] = t;
679 }
680 }
681 else
682 {
683 vec_safe_grow_cleared (fn->eh->ehspec_data.other, len);
684 for (i = 0; i < len; i++)
685 {
686 uchar c = streamer_read_uchar (ib);
687 (*fn->eh->ehspec_data.other)[i] = c;
688 }
689 }
690 }
691
692 /* Reconstruct the EH region tree by fixing up the peer/children
693 pointers. */
694 fixup_eh_region_pointers (fn, root_region);
695
696 tag = streamer_read_record_start (ib);
697 lto_tag_check_range (tag, LTO_null, LTO_null);
698 }
699
700
701 /* Make a new basic block with index INDEX in function FN. */
702
703 static basic_block
704 make_new_block (struct function *fn, unsigned int index)
705 {
706 basic_block bb = alloc_block ();
707 bb->index = index;
708 SET_BASIC_BLOCK_FOR_FN (fn, index, bb);
709 n_basic_blocks_for_fn (fn)++;
710 return bb;
711 }
712
713
714 /* Read the CFG for function FN from input block IB. */
715
716 static void
717 input_cfg (struct lto_input_block *ib, struct data_in *data_in,
718 struct function *fn,
719 int count_materialization_scale)
720 {
721 unsigned int bb_count;
722 basic_block p_bb;
723 unsigned int i;
724 int index;
725
726 init_empty_tree_cfg_for_function (fn);
727 init_ssa_operands (fn);
728
729 profile_status_for_fn (fn) = streamer_read_enum (ib, profile_status_d,
730 PROFILE_LAST);
731
732 bb_count = streamer_read_uhwi (ib);
733
734 last_basic_block_for_fn (fn) = bb_count;
735 if (bb_count > basic_block_info_for_fn (fn)->length ())
736 vec_safe_grow_cleared (basic_block_info_for_fn (fn), bb_count);
737
738 if (bb_count > label_to_block_map_for_fn (fn)->length ())
739 vec_safe_grow_cleared (label_to_block_map_for_fn (fn), bb_count);
740
741 index = streamer_read_hwi (ib);
742 while (index != -1)
743 {
744 basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
745 unsigned int edge_count;
746
747 if (bb == NULL)
748 bb = make_new_block (fn, index);
749
750 edge_count = streamer_read_uhwi (ib);
751
752 /* Connect up the CFG. */
753 for (i = 0; i < edge_count; i++)
754 {
755 unsigned int dest_index;
756 unsigned int edge_flags;
757 basic_block dest;
758 profile_probability probability;
759 profile_count count;
760 edge e;
761
762 dest_index = streamer_read_uhwi (ib);
763 probability = profile_probability::stream_in (ib);
764 count = profile_count::stream_in (ib).apply_scale
765 (count_materialization_scale, REG_BR_PROB_BASE);
766 edge_flags = streamer_read_uhwi (ib);
767
768 dest = BASIC_BLOCK_FOR_FN (fn, dest_index);
769
770 if (dest == NULL)
771 dest = make_new_block (fn, dest_index);
772
773 e = make_edge (bb, dest, edge_flags);
774 e->probability = probability;
775 e->count = count;
776 }
777
778 index = streamer_read_hwi (ib);
779 }
780
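/* Read the layout order of the basic blocks: a -1 terminated list of
   block indices in next_bb order, used to rebuild the prev_bb/next_bb
   chain starting from the entry block. */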
781 p_bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
782 index = streamer_read_hwi (ib);
783 while (index != -1)
784 {
785 basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
786 bb->prev_bb = p_bb;
787 p_bb->next_bb = bb;
788 p_bb = bb;
789 index = streamer_read_hwi (ib);
790 }
791
792 /* ??? The cfgloop interface is tied to cfun. */
793 gcc_assert (cfun == fn);
794
795 /* Input the loop tree. */
796 unsigned n_loops = streamer_read_uhwi (ib);
797 if (n_loops == 0)
798 return;
799
800 struct loops *loops = ggc_cleared_alloc<struct loops> ();
801 init_loops_structure (fn, loops, n_loops);
802 set_loops_for_fn (fn, loops);
803
804 /* Input each loop and associate it with its loop header so
805 flow_loops_find can rebuild the loop tree. */
806 for (unsigned i = 1; i < n_loops; ++i)
807 {
808 int header_index = streamer_read_hwi (ib);
809 if (header_index == -1)
810 {
811 loops->larray->quick_push (NULL);
812 continue;
813 }
814
815 struct loop *loop = alloc_loop ();
816 loop->header = BASIC_BLOCK_FOR_FN (fn, header_index);
817 loop->header->loop_father = loop;
818
819 /* Read everything copy_loop_info copies. */
820 loop->estimate_state = streamer_read_enum (ib, loop_estimation, EST_LAST);
821 loop->any_upper_bound = streamer_read_hwi (ib);
822 if (loop->any_upper_bound)
823 loop->nb_iterations_upper_bound = streamer_read_widest_int (ib);
824 loop->any_likely_upper_bound = streamer_read_hwi (ib);
825 if (loop->any_likely_upper_bound)
826 loop->nb_iterations_likely_upper_bound = streamer_read_widest_int (ib);
827 loop->any_estimate = streamer_read_hwi (ib);
828 if (loop->any_estimate)
829 loop->nb_iterations_estimate = streamer_read_widest_int (ib);
830
831 /* Read OMP SIMD related info. */
832 loop->safelen = streamer_read_hwi (ib);
833 loop->dont_vectorize = streamer_read_hwi (ib);
834 loop->force_vectorize = streamer_read_hwi (ib);
835 loop->simduid = stream_read_tree (ib, data_in);
836
837 place_new_loop (fn, loop);
838
839 /* flow_loops_find doesn't like loops not in the tree, hook them
840 all as siblings of the tree root temporarily. */
841 flow_loop_tree_node_add (loops->tree_root, loop);
842 }
843
844 /* Rebuild the loop tree. */
845 flow_loops_find (loops);
846 }
847
848
849 /* Read the SSA names array for function FN from DATA_IN using input
850 block IB. */
851
852 static void
853 input_ssa_names (struct lto_input_block *ib, struct data_in *data_in,
854 struct function *fn)
855 {
856 unsigned int i, size;
857
858 size = streamer_read_uhwi (ib);
859 init_ssanames (fn, size);
860
861 i = streamer_read_uhwi (ib);
862 while (i)
863 {
864 tree ssa_name, name;
865 bool is_default_def;
866
867 /* Skip over the elements that had been freed. */
868 while (SSANAMES (fn)->length () < i)
869 SSANAMES (fn)->quick_push (NULL_TREE);
870
871 is_default_def = (streamer_read_uchar (ib) != 0);
872 name = stream_read_tree (ib, data_in);
873 ssa_name = make_ssa_name_fn (fn, name, NULL);
874
875 if (is_default_def)
876 {
877 set_ssa_default_def (cfun, SSA_NAME_VAR (ssa_name), ssa_name);
878 SSA_NAME_DEF_STMT (ssa_name) = gimple_build_nop ();
879 }
880
881 i = streamer_read_uhwi (ib);
882 }
883 }
884
885
886 /* Go through all of NODE's edges and fix up their call_stmt pointers
887 so that they point at statements in STMTS. */
888
889 static void
890 fixup_call_stmt_edges_1 (struct cgraph_node *node, gimple **stmts,
891 struct function *fn)
892 {
893 #define STMT_UID_NOT_IN_RANGE(uid) \
894 (gimple_stmt_max_uid (fn) < uid || uid == 0)
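/* lto_stmt_uid values are 1-based; zero means no statement was recorded
   for the edge or reference, so a valid uid indexes STMTS at uid - 1. */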
895
896 struct cgraph_edge *cedge;
897 struct ipa_ref *ref = NULL;
898 unsigned int i;
899
900 for (cedge = node->callees; cedge; cedge = cedge->next_callee)
901 {
902 if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
903 fatal_error (input_location,
904 "Cgraph edge statement index out of range");
905 cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
906 if (!cedge->call_stmt)
907 fatal_error (input_location,
908 "Cgraph edge statement index not found");
909 }
910 for (cedge = node->indirect_calls; cedge; cedge = cedge->next_callee)
911 {
912 if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
913 fatal_error (input_location,
914 "Cgraph edge statement index out of range");
915 cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
916 if (!cedge->call_stmt)
917 fatal_error (input_location, "Cgraph edge statement index not found");
918 }
919 for (i = 0; node->iterate_reference (i, ref); i++)
920 if (ref->lto_stmt_uid)
921 {
922 if (STMT_UID_NOT_IN_RANGE (ref->lto_stmt_uid))
923 fatal_error (input_location,
924 "Reference statement index out of range");
925 ref->stmt = stmts[ref->lto_stmt_uid - 1];
926 if (!ref->stmt)
927 fatal_error (input_location, "Reference statement index not found");
928 }
929 }
930
931
932 /* Fix up call_stmt pointers in NODE and all of its clones. */
933
934 static void
935 fixup_call_stmt_edges (struct cgraph_node *orig, gimple **stmts)
936 {
937 struct cgraph_node *node;
938 struct function *fn;
939
940 while (orig->clone_of)
941 orig = orig->clone_of;
942 fn = DECL_STRUCT_FUNCTION (orig->decl);
943
944 if (!orig->thunk.thunk_p)
945 fixup_call_stmt_edges_1 (orig, stmts, fn);
946 if (orig->clones)
947 for (node = orig->clones; node != orig;)
948 {
949 if (!node->thunk.thunk_p)
950 fixup_call_stmt_edges_1 (node, stmts, fn);
951 if (node->clones)
952 node = node->clones;
953 else if (node->next_sibling_clone)
954 node = node->next_sibling_clone;
955 else
956 {
957 while (node != orig && !node->next_sibling_clone)
958 node = node->clone_of;
959 if (node != orig)
960 node = node->next_sibling_clone;
961 }
962 }
963 }
964
965
966 /* Input the base body of struct function FN from DATA_IN
967 using input block IB. */
968
969 static void
970 input_struct_function_base (struct function *fn, struct data_in *data_in,
971 struct lto_input_block *ib)
972 {
973 struct bitpack_d bp;
974 int len;
975
976 /* Read the static chain and non-local goto save area. */
977 fn->static_chain_decl = stream_read_tree (ib, data_in);
978 fn->nonlocal_goto_save_area = stream_read_tree (ib, data_in);
979
980 /* Read all the local symbols. */
981 len = streamer_read_hwi (ib);
982 if (len > 0)
983 {
984 int i;
985 vec_safe_grow_cleared (fn->local_decls, len);
986 for (i = 0; i < len; i++)
987 {
988 tree t = stream_read_tree (ib, data_in);
989 (*fn->local_decls)[i] = t;
990 }
991 }
992
993 /* Input the current IL state of the function. */
994 fn->curr_properties = streamer_read_uhwi (ib);
995
996 /* Read all the attributes for FN. */
997 bp = streamer_read_bitpack (ib);
998 fn->is_thunk = bp_unpack_value (&bp, 1);
999 fn->has_local_explicit_reg_vars = bp_unpack_value (&bp, 1);
1000 fn->returns_pcc_struct = bp_unpack_value (&bp, 1);
1001 fn->returns_struct = bp_unpack_value (&bp, 1);
1002 fn->can_throw_non_call_exceptions = bp_unpack_value (&bp, 1);
1003 fn->can_delete_dead_exceptions = bp_unpack_value (&bp, 1);
1004 fn->always_inline_functions_inlined = bp_unpack_value (&bp, 1);
1005 fn->after_inlining = bp_unpack_value (&bp, 1);
1006 fn->stdarg = bp_unpack_value (&bp, 1);
1007 fn->has_nonlocal_label = bp_unpack_value (&bp, 1);
1008 fn->has_forced_label_in_static = bp_unpack_value (&bp, 1);
1009 fn->calls_alloca = bp_unpack_value (&bp, 1);
1010 fn->calls_setjmp = bp_unpack_value (&bp, 1);
1011 fn->has_force_vectorize_loops = bp_unpack_value (&bp, 1);
1012 fn->has_simduid_loops = bp_unpack_value (&bp, 1);
1013 fn->va_list_fpr_size = bp_unpack_value (&bp, 8);
1014 fn->va_list_gpr_size = bp_unpack_value (&bp, 8);
1015 fn->last_clique = bp_unpack_value (&bp, sizeof (short) * 8);
1016
1017 /* Input the function start and end loci. */
1018 fn->function_start_locus = stream_input_location_now (&bp, data_in);
1019 fn->function_end_locus = stream_input_location_now (&bp, data_in);
1020 }
1021
1022
1023 /* Read the body of function FN_DECL from DATA_IN using input block IB. */
1024
1025 static void
1026 input_function (tree fn_decl, struct data_in *data_in,
1027 struct lto_input_block *ib, struct lto_input_block *ib_cfg)
1028 {
1029 struct function *fn;
1030 enum LTO_tags tag;
1031 gimple **stmts;
1032 basic_block bb;
1033 struct cgraph_node *node;
1034
1035 tag = streamer_read_record_start (ib);
1036 lto_tag_check (tag, LTO_function);
1037
1038 /* Read the result decl and the parameter decls. */
1039 DECL_RESULT (fn_decl) = stream_read_tree (ib, data_in);
1040 DECL_ARGUMENTS (fn_decl) = streamer_read_chain (ib, data_in);
1041
1042 /* Read debug args if available. */
1043 unsigned n_debugargs = streamer_read_uhwi (ib);
1044 if (n_debugargs)
1045 {
1046 vec<tree, va_gc> **debugargs = decl_debug_args_insert (fn_decl);
1047 vec_safe_grow (*debugargs, n_debugargs);
1048 for (unsigned i = 0; i < n_debugargs; ++i)
1049 (**debugargs)[i] = stream_read_tree (ib, data_in);
1050 }
1051
1052 /* Read the tree of lexical scopes for the function. */
1053 DECL_INITIAL (fn_decl) = stream_read_tree (ib, data_in);
1054 unsigned block_leaf_count = streamer_read_uhwi (ib);
1055 while (block_leaf_count--)
1056 stream_read_tree (ib, data_in);
1057
1058 if (!streamer_read_uhwi (ib))
1059 return;
1060
1061 push_struct_function (fn_decl);
1062 fn = DECL_STRUCT_FUNCTION (fn_decl);
1063 init_tree_ssa (fn);
1064 /* We input IL in SSA form. */
1065 cfun->gimple_df->in_ssa_p = true;
1066
1067 gimple_register_cfg_hooks ();
1068
1069 node = cgraph_node::get (fn_decl);
1070 if (!node)
1071 node = cgraph_node::create (fn_decl);
1072 input_struct_function_base (fn, data_in, ib);
1073 input_cfg (ib_cfg, data_in, fn, node->count_materialization_scale);
1074
1075 /* Read all the SSA names. */
1076 input_ssa_names (ib, data_in, fn);
1077
1078 /* Read the exception handling regions in the function. */
1079 input_eh_regions (ib, data_in, fn);
1080
1081 gcc_assert (DECL_INITIAL (fn_decl));
1082 DECL_SAVED_TREE (fn_decl) = NULL_TREE;
1083
1084 /* Read all the basic blocks. */
1085 tag = streamer_read_record_start (ib);
1086 while (tag)
1087 {
1088 input_bb (ib, tag, data_in, fn,
1089 node->count_materialization_scale);
1090 tag = streamer_read_record_start (ib);
1091 }
1092
1093 /* Fix up the call statements that are mentioned in the callgraph
1094 edges. */
1095 set_gimple_stmt_max_uid (cfun, 0);
1096 FOR_ALL_BB_FN (bb, cfun)
1097 {
1098 gimple_stmt_iterator gsi;
1099 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1100 {
1101 gimple *stmt = gsi_stmt (gsi);
1102 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1103 }
1104 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1105 {
1106 gimple *stmt = gsi_stmt (gsi);
1107 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1108 }
1109 }
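/* Build a uid -> statement map over the uids just assigned; the fixup
   routines below use it to point callgraph edges and references (which
   carry the uids recorded when the body was streamed out) back at the
   rematerialized statements. */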
1110 stmts = (gimple **) xcalloc (gimple_stmt_max_uid (fn), sizeof (gimple *));
1111 FOR_ALL_BB_FN (bb, cfun)
1112 {
1113 gimple_stmt_iterator bsi = gsi_start_phis (bb);
1114 while (!gsi_end_p (bsi))
1115 {
1116 gimple *stmt = gsi_stmt (bsi);
1117 gsi_next (&bsi);
1118 stmts[gimple_uid (stmt)] = stmt;
1119 }
1120 bsi = gsi_start_bb (bb);
1121 while (!gsi_end_p (bsi))
1122 {
1123 gimple *stmt = gsi_stmt (bsi);
1124 bool remove = false;
1125 /* If we're recompiling LTO objects with debug stmts but
1126 we're not supposed to have debug stmts, remove them now.
1127 We can't remove them earlier because this would cause uid
1128 mismatches in fixups, but we can do it at this point, as
1129 long as debug stmts don't require fixups.
1130 Similarly, remove all IFN_*SAN_* internal calls. */
1131 if (!flag_wpa)
1132 {
1133 if (!MAY_HAVE_DEBUG_STMTS && is_gimple_debug (stmt))
1134 remove = true;
1135 if (is_gimple_call (stmt)
1136 && gimple_call_internal_p (stmt))
1137 {
1138 switch (gimple_call_internal_fn (stmt))
1139 {
1140 case IFN_UBSAN_NULL:
1141 if ((flag_sanitize
1142 & (SANITIZE_NULL | SANITIZE_ALIGNMENT)) == 0)
1143 remove = true;
1144 break;
1145 case IFN_UBSAN_BOUNDS:
1146 if ((flag_sanitize & SANITIZE_BOUNDS) == 0)
1147 remove = true;
1148 break;
1149 case IFN_UBSAN_VPTR:
1150 if ((flag_sanitize & SANITIZE_VPTR) == 0)
1151 remove = true;
1152 break;
1153 case IFN_UBSAN_OBJECT_SIZE:
1154 if ((flag_sanitize & SANITIZE_OBJECT_SIZE) == 0)
1155 remove = true;
1156 break;
1157 case IFN_UBSAN_PTR:
1158 if ((flag_sanitize & SANITIZE_POINTER_OVERFLOW) == 0)
1159 remove = true;
1160 break;
1161 case IFN_ASAN_MARK:
1162 if ((flag_sanitize & SANITIZE_ADDRESS) == 0)
1163 remove = true;
1164 break;
1165 case IFN_TSAN_FUNC_EXIT:
1166 if ((flag_sanitize & SANITIZE_THREAD) == 0)
1167 remove = true;
1168 break;
1169 default:
1170 break;
1171 }
1172 gcc_assert (!remove || gimple_call_lhs (stmt) == NULL_TREE);
1173 }
1174 }
1175 if (remove)
1176 {
1177 gimple_stmt_iterator gsi = bsi;
1178 gsi_next (&bsi);
1179 unlink_stmt_vdef (stmt);
1180 release_defs (stmt);
1181 gsi_remove (&gsi, true);
1182 }
1183 else
1184 {
1185 gsi_next (&bsi);
1186 stmts[gimple_uid (stmt)] = stmt;
1187 }
1188 }
1189 }
1190
1191 /* Set the gimple body to the statement sequence in the entry
1192 basic block. FIXME lto, this is fairly hacky. The existence
1193 of a gimple body is used by the cgraph routines, but we should
1194 really use the presence of the CFG. */
1195 {
1196 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
1197 gimple_set_body (fn_decl, bb_seq (ei_edge (ei)->dest));
1198 }
1199
1200 fixup_call_stmt_edges (node, stmts);
1201 execute_all_ipa_stmt_fixups (node, stmts);
1202
1203 update_ssa (TODO_update_ssa_only_virtuals);
1204 free_dominance_info (CDI_DOMINATORS);
1205 free_dominance_info (CDI_POST_DOMINATORS);
1206 free (stmts);
1207 pop_cfun ();
1208 }
1209
1210 /* Read the initializer of variable VAR from DATA_IN using input block IB. */
1211
1212 static void
1213 input_constructor (tree var, struct data_in *data_in,
1214 struct lto_input_block *ib)
1215 {
1216 DECL_INITIAL (var) = stream_read_tree (ib, data_in);
1217 }
1218
1219
1220 /* Read the body from DATA for function or variable NODE and fill it in.
1221 FILE_DATA holds the global decls and types. SECTION_TYPE is either
1222 LTO_section_function_body or LTO_section_static_initializer. If
1223 section type is LTO_section_function_body, FN must be the decl for
1224 that function. */
1225
1226 static void
1227 lto_read_body_or_constructor (struct lto_file_decl_data *file_data, struct symtab_node *node,
1228 const char *data, enum lto_section_type section_type)
1229 {
1230 const struct lto_function_header *header;
1231 struct data_in *data_in;
1232 int cfg_offset;
1233 int main_offset;
1234 int string_offset;
1235 tree fn_decl = node->decl;
1236
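/* A function body section is laid out as [lto_function_header][CFG]
   [main stream][string table]; constructor sections have no CFG part,
   so there the main stream starts right after the header. */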
1237 header = (const struct lto_function_header *) data;
1238 if (TREE_CODE (node->decl) == FUNCTION_DECL)
1239 {
1240 cfg_offset = sizeof (struct lto_function_header);
1241 main_offset = cfg_offset + header->cfg_size;
1242 string_offset = main_offset + header->main_size;
1243 }
1244 else
1245 {
1246 main_offset = sizeof (struct lto_function_header);
1247 string_offset = main_offset + header->main_size;
1248 }
1249
1250 data_in = lto_data_in_create (file_data, data + string_offset,
1251 header->string_size, vNULL);
1252
1253 if (section_type == LTO_section_function_body)
1254 {
1255 struct lto_in_decl_state *decl_state;
1256 unsigned from;
1257
1258 gcc_checking_assert (node);
1259
1260 /* Use the function's decl state. */
1261 decl_state = lto_get_function_in_decl_state (file_data, fn_decl);
1262 gcc_assert (decl_state);
1263 file_data->current_decl_state = decl_state;
1264
1265
1266 /* Set up the struct function. */
1267 from = data_in->reader_cache->nodes.length ();
1268 lto_input_block ib_main (data + main_offset, header->main_size,
1269 file_data->mode_table);
1270 if (TREE_CODE (node->decl) == FUNCTION_DECL)
1271 {
1272 lto_input_block ib_cfg (data + cfg_offset, header->cfg_size,
1273 file_data->mode_table);
1274 input_function (fn_decl, data_in, &ib_main, &ib_cfg);
1275 }
1276 else
1277 input_constructor (fn_decl, data_in, &ib_main);
1278 data_in->location_cache.apply_location_cache ();
1279 /* And fixup types we streamed locally. */
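/* Types streamed as part of this body (rather than in the global blocks)
   never went through type merging, so give them a canonical type (their
   main variant) where appropriate and splice non-main variants into their
   main variant's variant list. */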
1280 {
1281 struct streamer_tree_cache_d *cache = data_in->reader_cache;
1282 unsigned len = cache->nodes.length ();
1283 unsigned i;
1284 for (i = len; i-- > from;)
1285 {
1286 tree t = streamer_tree_cache_get_tree (cache, i);
1287 if (t == NULL_TREE)
1288 continue;
1289
1290 if (TYPE_P (t))
1291 {
1292 gcc_assert (TYPE_CANONICAL (t) == NULL_TREE);
1293 if (type_with_alias_set_p (t)
1294 && canonical_type_used_p (t))
1295 TYPE_CANONICAL (t) = TYPE_MAIN_VARIANT (t);
1296 if (TYPE_MAIN_VARIANT (t) != t)
1297 {
1298 gcc_assert (TYPE_NEXT_VARIANT (t) == NULL_TREE);
1299 TYPE_NEXT_VARIANT (t)
1300 = TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t));
1301 TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t)) = t;
1302 }
1303 }
1304 }
1305 }
1306
1307 /* Restore the decl state. */
1308 file_data->current_decl_state = file_data->global_decl_state;
1309 }
1310
1311 lto_data_in_delete (data_in);
1312 }
1313
1314
1315 /* Read the body of NODE using DATA. FILE_DATA holds the global
1316 decls and types. */
1317
1318 void
1319 lto_input_function_body (struct lto_file_decl_data *file_data,
1320 struct cgraph_node *node, const char *data)
1321 {
1322 lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
1323 }
1324
1325 /* Read the body of NODE using DATA. FILE_DATA holds the global
1326 decls and types. */
1327
1328 void
1329 lto_input_variable_constructor (struct lto_file_decl_data *file_data,
1330 struct varpool_node *node, const char *data)
1331 {
1332 lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
1333 }
1334
1335
1336 /* Queue of accumulated decl -> DIE mappings. Like locations, these are
1337 only applied to prevailing tree nodes during tree merging. */
1338 vec<dref_entry> dref_queue;
1339
1340 /* Read the physical representation of a tree node EXPR from
1341 input block IB using the per-file context in DATA_IN. */
1342
1343 static void
1344 lto_read_tree_1 (struct lto_input_block *ib, struct data_in *data_in, tree expr)
1345 {
1346 /* Read all the bitfield values in EXPR. Note that for LTO, we
1347 only write language-independent bitfields, so no more unpacking is
1348 needed. */
1349 streamer_read_tree_bitfields (ib, data_in, expr);
1350
1351 /* Read all the pointer fields in EXPR. */
1352 streamer_read_tree_body (ib, data_in, expr);
1353
1354 /* Read any LTO-specific data not read by the tree streamer. */
1355 if (DECL_P (expr)
1356 && TREE_CODE (expr) != FUNCTION_DECL
1357 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
1358 DECL_INITIAL (expr) = stream_read_tree (ib, data_in);
1359
1360 /* Stream references to early generated DIEs. Keep in sync with the
1361 trees handled in dwarf2out_register_external_die. */
1362 if ((DECL_P (expr)
1363 && TREE_CODE (expr) != FIELD_DECL
1364 && TREE_CODE (expr) != DEBUG_EXPR_DECL
1365 && TREE_CODE (expr) != TYPE_DECL)
1366 || TREE_CODE (expr) == BLOCK)
1367 {
1368 const char *str = streamer_read_string (data_in, ib);
1369 if (str)
1370 {
1371 unsigned HOST_WIDE_INT off = streamer_read_uhwi (ib);
1372 dref_entry e = { expr, str, off };
1373 dref_queue.safe_push (e);
1374 }
1375 }
1376 }
1377
1378 /* Read the physical representation of a tree node with tag TAG from
1379 input block IB using the per-file context in DATA_IN. */
1380
1381 static tree
1382 lto_read_tree (struct lto_input_block *ib, struct data_in *data_in,
1383 enum LTO_tags tag, hashval_t hash)
1384 {
1385 /* Instantiate a new tree node. */
1386 tree result = streamer_alloc_tree (ib, data_in, tag);
1387
1388 /* Enter RESULT in the reader cache. This will make RESULT
1389 available so that circular references in the rest of the tree
1390 structure can be resolved in subsequent calls to stream_read_tree. */
1391 streamer_tree_cache_append (data_in->reader_cache, result, hash);
1392
1393 lto_read_tree_1 (ib, data_in, result);
1394
1395 /* end_marker = */ streamer_read_uchar (ib);
1396
1397 return result;
1398 }
1399
1400
1401 /* Populate the reader cache with trees materialized from the SCC
1402 following in the IB, DATA_IN stream. */
1403
1404 hashval_t
1405 lto_input_scc (struct lto_input_block *ib, struct data_in *data_in,
1406 unsigned *len, unsigned *entry_len)
1407 {
1408 /* A blob of unnamed tree nodes, fill the cache from it and
1409 recurse. */
1410 unsigned size = streamer_read_uhwi (ib);
1411 hashval_t scc_hash = streamer_read_uhwi (ib);
1412 unsigned scc_entry_len = 1;
1413
1414 if (size == 1)
1415 {
1416 enum LTO_tags tag = streamer_read_record_start (ib);
1417 lto_input_tree_1 (ib, data_in, tag, scc_hash);
1418 }
1419 else
1420 {
1421 unsigned int first = data_in->reader_cache->nodes.length ();
1422 tree result;
1423
1424 scc_entry_len = streamer_read_uhwi (ib);
1425
1426 /* Materialize SIZE trees by reading their headers. */
1427 for (unsigned i = 0; i < size; ++i)
1428 {
1429 enum LTO_tags tag = streamer_read_record_start (ib);
1430 if (tag == LTO_null
1431 || (tag >= LTO_field_decl_ref && tag <= LTO_global_decl_ref)
1432 || tag == LTO_tree_pickle_reference
1433 || tag == LTO_integer_cst
1434 || tag == LTO_tree_scc)
1435 gcc_unreachable ();
1436
1437 result = streamer_alloc_tree (ib, data_in, tag);
1438 streamer_tree_cache_append (data_in->reader_cache, result, 0);
1439 }
1440
1441 /* Read the tree bitpacks and references. */
1442 for (unsigned i = 0; i < size; ++i)
1443 {
1444 result = streamer_tree_cache_get_tree (data_in->reader_cache,
1445 first + i);
1446 lto_read_tree_1 (ib, data_in, result);
1447 /* end_marker = */ streamer_read_uchar (ib);
1448 }
1449 }
1450
1451 *len = size;
1452 *entry_len = scc_entry_len;
1453 return scc_hash;
1454 }
1455
1456
1457 /* Read a tree from input block IB using the per-file context in
1458 DATA_IN. This context is used, for example, to resolve references
1459 to previously read nodes. */
1460
1461 tree
1462 lto_input_tree_1 (struct lto_input_block *ib, struct data_in *data_in,
1463 enum LTO_tags tag, hashval_t hash)
1464 {
1465 tree result;
1466
1467 gcc_assert ((unsigned) tag < (unsigned) LTO_NUM_TAGS);
1468
1469 if (tag == LTO_null)
1470 result = NULL_TREE;
1471 else if (tag >= LTO_field_decl_ref && tag <= LTO_namelist_decl_ref)
1472 {
1473 /* If TAG is a reference to an indexable tree, the next value
1474 in IB is the index into the table where we expect to find
1475 that tree. */
1476 result = lto_input_tree_ref (ib, data_in, cfun, tag);
1477 }
1478 else if (tag == LTO_tree_pickle_reference)
1479 {
1480 /* If TAG is a reference to a previously read tree, look it up in
1481 the reader cache. */
1482 result = streamer_get_pickled_tree (ib, data_in);
1483 }
1484 else if (tag == LTO_integer_cst)
1485 {
1486 /* For shared integer constants in singletons we can use the
1487 existing tree integer constant merging code. */
1488 tree type = stream_read_tree (ib, data_in);
1489 unsigned HOST_WIDE_INT len = streamer_read_uhwi (ib);
1490 unsigned HOST_WIDE_INT i;
1491 HOST_WIDE_INT a[WIDE_INT_MAX_ELTS];
1492
1493 for (i = 0; i < len; i++)
1494 a[i] = streamer_read_hwi (ib);
1495 gcc_assert (TYPE_PRECISION (type) <= MAX_BITSIZE_MODE_ANY_INT);
1496 result = wide_int_to_tree (type, wide_int::from_array
1497 (a, len, TYPE_PRECISION (type)));
1498 streamer_tree_cache_append (data_in->reader_cache, result, hash);
1499 }
1500 else if (tag == LTO_tree_scc)
1501 gcc_unreachable ();
1502 else
1503 {
1504 /* Otherwise, materialize a new node from IB. */
1505 result = lto_read_tree (ib, data_in, tag, hash);
1506 }
1507
1508 return result;
1509 }
1510
1511 tree
1512 lto_input_tree (struct lto_input_block *ib, struct data_in *data_in)
1513 {
1514 enum LTO_tags tag;
1515
1516 /* Input and skip SCCs. */
1517 while ((tag = streamer_read_record_start (ib)) == LTO_tree_scc)
1518 {
1519 unsigned len, entry_len;
1520 lto_input_scc (ib, data_in, &len, &entry_len);
1521
1522 /* Register DECLs with the debuginfo machinery. */
1523 while (!dref_queue.is_empty ())
1524 {
1525 dref_entry e = dref_queue.pop ();
1526 debug_hooks->register_external_die (e.decl, e.sym, e.off);
1527 }
1528 }
1529 return lto_input_tree_1 (ib, data_in, tag, 0);
1530 }
1531
1532
1533 /* Input toplevel asms. */
1534
1535 void
1536 lto_input_toplevel_asms (struct lto_file_decl_data *file_data, int order_base)
1537 {
1538 size_t len;
1539 const char *data = lto_get_section_data (file_data, LTO_section_asm,
1540 NULL, &len);
1541 const struct lto_simple_header_with_strings *header
1542 = (const struct lto_simple_header_with_strings *) data;
1543 int string_offset;
1544 struct data_in *data_in;
1545 tree str;
1546
1547 if (! data)
1548 return;
1549
1550 string_offset = sizeof (*header) + header->main_size;
1551
1552 lto_input_block ib (data + sizeof (*header), header->main_size,
1553 file_data->mode_table);
1554
1555 data_in = lto_data_in_create (file_data, data + string_offset,
1556 header->string_size, vNULL);
1557
1558 while ((str = streamer_read_string_cst (data_in, &ib)))
1559 {
1560 asm_node *node = symtab->finalize_toplevel_asm (str);
1561 node->order = streamer_read_hwi (&ib) + order_base;
1562 if (node->order >= symtab->order)
1563 symtab->order = node->order + 1;
1564 }
1565
1566 lto_data_in_delete (data_in);
1567
1568 lto_free_section_data (file_data, LTO_section_asm, NULL, data, len);
1569 }
1570
1571
1572 /* Input mode table. */
1573
1574 void
1575 lto_input_mode_table (struct lto_file_decl_data *file_data)
1576 {
1577 size_t len;
1578 const char *data = lto_get_section_data (file_data, LTO_section_mode_table,
1579 NULL, &len);
1580 if (! data)
1581 {
1582 internal_error ("cannot read LTO mode table from %s",
1583 file_data->file_name);
1584 return;
1585 }
1586
1587 unsigned char *table = ggc_cleared_vec_alloc<unsigned char> (1 << 8);
1588 file_data->mode_table = table;
1589 const struct lto_simple_header_with_strings *header
1590 = (const struct lto_simple_header_with_strings *) data;
1591 int string_offset;
1592 struct data_in *data_in;
1593 string_offset = sizeof (*header) + header->main_size;
1594
1595 lto_input_block ib (data + sizeof (*header), header->main_size, NULL);
1596 data_in = lto_data_in_create (file_data, data + string_offset,
1597 header->string_size, vNULL);
1598 bitpack_d bp = streamer_read_bitpack (&ib);
1599
1600 table[VOIDmode] = VOIDmode;
1601 table[BLKmode] = BLKmode;
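/* Each remaining streamed mode is matched against this compiler's modes
   by class, size, precision, inner mode, number of units and, for float
   modes, the real format name; the resulting table translates the
   writer's mode numbering (possibly that of a different compiler, as in
   offloading) into local machine_mode values. */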
1602 unsigned int m;
1603 while ((m = bp_unpack_value (&bp, 8)) != VOIDmode)
1604 {
1605 enum mode_class mclass
1606 = bp_unpack_enum (&bp, mode_class, MAX_MODE_CLASS);
1607 unsigned int size = bp_unpack_value (&bp, 8);
1608 unsigned int prec = bp_unpack_value (&bp, 16);
1609 machine_mode inner = (machine_mode) bp_unpack_value (&bp, 8);
1610 unsigned int nunits = bp_unpack_value (&bp, 8);
1611 unsigned int ibit = 0, fbit = 0;
1612 unsigned int real_fmt_len = 0;
1613 const char *real_fmt_name = NULL;
1614 switch (mclass)
1615 {
1616 case MODE_FRACT:
1617 case MODE_UFRACT:
1618 case MODE_ACCUM:
1619 case MODE_UACCUM:
1620 ibit = bp_unpack_value (&bp, 8);
1621 fbit = bp_unpack_value (&bp, 8);
1622 break;
1623 case MODE_FLOAT:
1624 case MODE_DECIMAL_FLOAT:
1625 real_fmt_name = bp_unpack_indexed_string (data_in, &bp,
1626 &real_fmt_len);
1627 break;
1628 default:
1629 break;
1630 }
1631 /* First search just from GET_CLASS_NARROWEST_MODE to wider modes;
1632 if none matches, fall back to scanning all modes. */
1633 int pass;
1634 for (pass = 0; pass < 2; pass++)
1635 for (machine_mode mr = pass ? VOIDmode
1636 : GET_CLASS_NARROWEST_MODE (mclass);
1637 pass ? mr < MAX_MACHINE_MODE : mr != VOIDmode;
1638 pass ? mr = (machine_mode) (mr + 1)
1639 : mr = GET_MODE_WIDER_MODE (mr).else_void ())
1640 if (GET_MODE_CLASS (mr) != mclass
1641 || GET_MODE_SIZE (mr) != size
1642 || GET_MODE_PRECISION (mr) != prec
1643 || (inner == m
1644 ? GET_MODE_INNER (mr) != mr
1645 : GET_MODE_INNER (mr) != table[(int) inner])
1646 || GET_MODE_IBIT (mr) != ibit
1647 || GET_MODE_FBIT (mr) != fbit
1648 || GET_MODE_NUNITS (mr) != nunits)
1649 continue;
1650 else if ((mclass == MODE_FLOAT || mclass == MODE_DECIMAL_FLOAT)
1651 && strcmp (REAL_MODE_FORMAT (mr)->name, real_fmt_name) != 0)
1652 continue;
1653 else
1654 {
1655 table[m] = mr;
1656 pass = 2;
1657 break;
1658 }
1659 unsigned int mname_len;
1660 const char *mname = bp_unpack_indexed_string (data_in, &bp, &mname_len);
1661 if (pass == 2)
1662 {
1663 switch (mclass)
1664 {
1665 case MODE_VECTOR_INT:
1666 case MODE_VECTOR_FLOAT:
1667 case MODE_VECTOR_FRACT:
1668 case MODE_VECTOR_UFRACT:
1669 case MODE_VECTOR_ACCUM:
1670 case MODE_VECTOR_UACCUM:
1671 /* For unsupported vector modes just use BLKmode,
1672 if the scalar mode is supported. */
1673 if (table[(int) inner] != VOIDmode)
1674 {
1675 table[m] = BLKmode;
1676 break;
1677 }
1678 /* FALLTHRU */
1679 default:
1680 fatal_error (UNKNOWN_LOCATION, "unsupported mode %s\n", mname);
1681 break;
1682 }
1683 }
1684 }
1685 lto_data_in_delete (data_in);
1686
1687 lto_free_section_data (file_data, LTO_section_mode_table, NULL, data, len);
1688 }
1689
1690
1691 /* Initialization for the LTO reader. */
1692
1693 void
1694 lto_reader_init (void)
1695 {
1696 lto_streamer_init ();
1697 file_name_hash_table
1698 = new hash_table<freeing_string_slot_hasher> (37);
1699 }
1700
1701
1702 /* Create a new data_in object for FILE_DATA. STRINGS is the string
1703 table to use with LEN strings. RESOLUTIONS is the vector of linker
1704 resolutions (NULL if not using a linker plugin). */
1705
1706 struct data_in *
1707 lto_data_in_create (struct lto_file_decl_data *file_data, const char *strings,
1708 unsigned len,
1709 vec<ld_plugin_symbol_resolution_t> resolutions)
1710 {
1711 struct data_in *data_in = new (struct data_in);
1712 data_in->file_data = file_data;
1713 data_in->strings = strings;
1714 data_in->strings_len = len;
1715 data_in->globals_resolution = resolutions;
1716 data_in->reader_cache = streamer_tree_cache_create (false, false, true);
1717 return data_in;
1718 }
1719
1720
1721 /* Remove DATA_IN. */
1722
1723 void
1724 lto_data_in_delete (struct data_in *data_in)
1725 {
1726 data_in->globals_resolution.release ();
1727 streamer_tree_cache_delete (data_in->reader_cache);
1728 delete data_in;
1729 }