]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/lto-streamer-in.c
lto-streamer-in.c (input_function): Add node parameter.
[thirdparty/gcc.git] / gcc / lto-streamer-in.c
1 /* Read the GIMPLE representation from a file stream.
2
3 Copyright (C) 2009-2019 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "cfghooks.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "gimple-streamer.h"
35 #include "toplev.h"
36 #include "gimple-iterator.h"
37 #include "tree-cfg.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "tree-ssa.h"
41 #include "except.h"
42 #include "cgraph.h"
43 #include "cfgloop.h"
44 #include "debug.h"
45 #include "alloc-pool.h"
46
47 /* Allocator used to hold string slot entries for line map streaming. */
48 static struct object_allocator<struct string_slot> *string_slot_allocator;
49
50 /* The table to hold the file names. */
51 static hash_table<string_slot_hasher> *file_name_hash_table;
52
53 /* This obstack holds file names used in locators. Line map datastructures
54 points here and thus it needs to be kept allocated as long as linemaps
55 exists. */
56 static struct obstack file_name_obstack;
57
58
59 /* Check that tag ACTUAL has one of the given values. NUM_TAGS is the
60 number of valid tag values to check. */
61
62 void
63 lto_tag_check_set (enum LTO_tags actual, int ntags, ...)
64 {
65 va_list ap;
66 int i;
67
68 va_start (ap, ntags);
69 for (i = 0; i < ntags; i++)
70 if ((unsigned) actual == va_arg (ap, unsigned))
71 {
72 va_end (ap);
73 return;
74 }
75
76 va_end (ap);
77 internal_error ("bytecode stream: unexpected tag %s", lto_tag_name (actual));
78 }
79
80
81 /* Read LENGTH bytes from STREAM to ADDR. */
82
83 void
84 lto_input_data_block (class lto_input_block *ib, void *addr, size_t length)
85 {
86 size_t i;
87 unsigned char *const buffer = (unsigned char *) addr;
88
89 for (i = 0; i < length; i++)
90 buffer[i] = streamer_read_uchar (ib);
91 }
92
93
94 /* Lookup STRING in file_name_hash_table. If found, return the existing
95 string, otherwise insert STRING as the canonical version. */
96
97 static const char *
98 canon_file_name (const char *string)
99 {
100 string_slot **slot;
101 struct string_slot s_slot;
102 size_t len = strlen (string);
103
104 s_slot.s = string;
105 s_slot.len = len;
106
107 slot = file_name_hash_table->find_slot (&s_slot, INSERT);
108 if (*slot == NULL)
109 {
110 char *saved_string;
111 struct string_slot *new_slot;
112
113 saved_string = XOBNEWVEC (&file_name_obstack, char, len + 1);
114 new_slot = string_slot_allocator->allocate ();
115 memcpy (saved_string, string, len + 1);
116 new_slot->s = saved_string;
117 new_slot->len = len;
118 *slot = new_slot;
119 return saved_string;
120 }
121 else
122 {
123 struct string_slot *old_slot = *slot;
124 return old_slot->s;
125 }
126 }
127
/* Pointer to the currently alive instance of lto_location_cache.  At most
   one cache may be active at a time; input_location asserts this.  */

lto_location_cache *lto_location_cache::current_cache;
131
/* qsort comparator: sort cached locations in source order, except that
   entries matching the file (and, within it, the line) most recently
   handed to the line table sort first, so the current line map can be
   continued from the last application.  PA and PB point to
   cached_location entries.  */

int
lto_location_cache::cmp_loc (const void *pa, const void *pb)
{
  const cached_location *a = ((const cached_location *)pa);
  const cached_location *b = ((const cached_location *)pb);
  /* Snapshot of the last file/line entered into the line table.  */
  const char *current_file = current_cache->current_file;
  int current_line = current_cache->current_line;

  /* Entries for the current file sort before all other files.  */
  if (a->file == current_file && b->file != current_file)
    return -1;
  if (a->file != current_file && b->file == current_file)
    return 1;
  if (a->file == current_file && b->file == current_file)
    {
      /* Within the current file, entries for the current line sort
	 first.  */
      if (a->line == current_line && b->line != current_line)
	return -1;
      if (a->line != current_line && b->line == current_line)
	return 1;
    }
  /* Otherwise order by file name (pointer comparison suffices for
     equality because names are interned by canon_file_name), then
     system-header flag, line and column.  */
  if (a->file != b->file)
    return strcmp (a->file, b->file);
  if (a->sysp != b->sysp)
    return a->sysp ? 1 : -1;
  if (a->line != b->line)
    return a->line - b->line;
  return a->col - b->col;
}
161
/* Apply all changes in location cache.  Add locations into linemap and patch
   trees.  Return true if any location was applied, false if the cache
   was empty.  */

bool
lto_location_cache::apply_location_cache ()
{
  /* Last file entered into the line table across all calls; decides
     between LC_ENTER (very first file ever) and LC_RENAME.  */
  static const char *prev_file;
  if (!loc_cache.length ())
    return false;
  /* Sorting groups identical files and lines together so each line map
     entry is created only once (see cmp_loc).  */
  if (loc_cache.length () > 1)
    loc_cache.qsort (cmp_loc);

  for (unsigned int i = 0; i < loc_cache.length (); i++)
    {
      struct cached_location loc = loc_cache[i];

      if (current_file != loc.file)
	linemap_add (line_table, prev_file ? LC_RENAME : LC_ENTER,
		     loc.sysp, loc.file, loc.line);
      else if (current_line != loc.line)
	{
	  int max = loc.col;

	  /* Look ahead for the largest column on this line so the new
	     line map entry can represent every cached column.  */
	  for (unsigned int j = i + 1; j < loc_cache.length (); j++)
	    if (loc.file != loc_cache[j].file
		|| loc.line != loc_cache[j].line)
	      break;
	    else if (max < loc_cache[j].col)
	      max = loc_cache[j].col;
	  linemap_line_start (line_table, loc.line, max + 1);
	}
      /* The slot must still hold the placeholder left by input_location
	 (RESERVED_LOCATION_COUNT, i.e. BUILTINS_LOCATION + 1); anything
	 else means it was clobbered in the meantime.  */
      gcc_assert (*loc.loc == BUILTINS_LOCATION + 1);
      if (current_file == loc.file && current_line == loc.line
	  && current_col == loc.col)
	*loc.loc = current_loc;
      else
	current_loc = *loc.loc = linemap_position_for_column (line_table,
							      loc.col);
      current_line = loc.line;
      prev_file = current_file = loc.file;
      current_col = loc.col;
    }
  loc_cache.truncate (0);
  accepted_length = 0;
  return true;
}
208
/* Tree merging did not succeed; mark all changes in the cache as accepted
   so that a later revert_location_cache will not discard them.  */

void
lto_location_cache::accept_location_cache ()
{
  gcc_assert (current_cache == this);
  accepted_length = loc_cache.length ();
}
217
/* Tree merging did succeed; throw away the entries cached since the last
   accept_location_cache call.  */

void
lto_location_cache::revert_location_cache ()
{
  loc_cache.truncate (accepted_length);
}
225
/* Read a location bitpack from bitpack BP (fed from DATA_IN) and either
   update *LOC directly or add it to the location cache.
   It is necessary to call apply_location_cache to get *LOC updated.  */

void
lto_location_cache::input_location (location_t *loc, struct bitpack_d *bp,
				    class data_in *data_in)
{
  /* Decoder state shared across calls: the writer only streams the
     file/line/column components that changed since the previous
     location record.  */
  static const char *stream_file;
  static int stream_line;
  static int stream_col;
  static bool stream_sysp;
  bool file_change, line_change, column_change;

  gcc_assert (current_cache == this);

  *loc = bp_unpack_int_in_range (bp, "location", 0, RESERVED_LOCATION_COUNT);

  /* Values below RESERVED_LOCATION_COUNT are streamed verbatim and need
     no line map entry.  */
  if (*loc < RESERVED_LOCATION_COUNT)
    return;

  /* Keep value RESERVED_LOCATION_COUNT in *loc as linemap lookups will
     ICE on it.  */

  file_change = bp_unpack_value (bp, 1);
  line_change = bp_unpack_value (bp, 1);
  column_change = bp_unpack_value (bp, 1);

  if (file_change)
    {
      /* Intern the name so later pointer comparisons suffice.  */
      stream_file = canon_file_name (bp_unpack_string (data_in, bp));
      stream_sysp = bp_unpack_value (bp, 1);
    }

  if (line_change)
    stream_line = bp_unpack_var_len_unsigned (bp);

  if (column_change)
    stream_col = bp_unpack_var_len_unsigned (bp);

  /* This optimization saves location cache operations during gimple
     streaming.  */

  if (current_file == stream_file && current_line == stream_line
      && current_col == stream_col && current_sysp == stream_sysp)
    {
      *loc = current_loc;
      return;
    }

  struct cached_location entry
    = {stream_file, loc, stream_line, stream_col, stream_sysp};
  loc_cache.safe_push (entry);
}
280
/* Read a location bitpack from BP via DATA_IN's location cache; thin
   wrapper around lto_location_cache::input_location.  *LOC is only
   final once apply_location_cache has run.  */

void
lto_input_location (location_t *loc, struct bitpack_d *bp,
		    class data_in *data_in)
{
  data_in->location_cache.input_location (loc, bp, data_in);
}
291
/* Read location and return it instead of going through location caching.
   This should be used only when the resulting location is not going to be
   discarded.  */

location_t
stream_input_location_now (struct bitpack_d *bp, class data_in *data_in)
{
  location_t loc;
  stream_input_location (&loc, bp, data_in);
  /* Flush the cache so LOC is patched before being returned.  */
  data_in->location_cache.apply_location_cache ();
  return loc;
}
304
/* Read a reference to a tree node from DATA_IN using input block IB.
   TAG is the expected node that should be found in IB, if TAG belongs
   to one of the indexable trees, expect to read a reference index to
   be looked up in one of the symbol tables, otherwise read the physical
   representation of the tree using stream_read_tree.  FN is the
   function scope for the read tree (used only for SSA name
   references).  */

tree
lto_input_tree_ref (class lto_input_block *ib, class data_in *data_in,
		    struct function *fn, enum LTO_tags tag)
{
  unsigned HOST_WIDE_INT ix_u;
  tree result = NULL_TREE;

  lto_tag_check_range (tag, LTO_field_decl_ref, LTO_namelist_decl_ref);

  /* Each case reads an index and resolves it in the table that matches
     the tag.  */
  switch (tag)
    {
    case LTO_type_ref:
      ix_u = streamer_read_uhwi (ib);
      result = lto_file_decl_data_get_type (data_in->file_data, ix_u);
      break;

    case LTO_ssa_name_ref:
      /* SSA names are function-local; index into FN's SSA name array.  */
      ix_u = streamer_read_uhwi (ib);
      result = (*SSANAMES (fn))[ix_u];
      break;

    case LTO_field_decl_ref:
      ix_u = streamer_read_uhwi (ib);
      result = lto_file_decl_data_get_field_decl (data_in->file_data, ix_u);
      break;

    case LTO_function_decl_ref:
      ix_u = streamer_read_uhwi (ib);
      result = lto_file_decl_data_get_fn_decl (data_in->file_data, ix_u);
      break;

    case LTO_type_decl_ref:
      ix_u = streamer_read_uhwi (ib);
      result = lto_file_decl_data_get_type_decl (data_in->file_data, ix_u);
      break;

    case LTO_namespace_decl_ref:
      ix_u = streamer_read_uhwi (ib);
      result = lto_file_decl_data_get_namespace_decl (data_in->file_data, ix_u);
      break;

    /* All remaining decl kinds share the var-decl table.  */
    case LTO_global_decl_ref:
    case LTO_result_decl_ref:
    case LTO_const_decl_ref:
    case LTO_imported_decl_ref:
    case LTO_label_decl_ref:
    case LTO_translation_unit_decl_ref:
    case LTO_namelist_decl_ref:
      ix_u = streamer_read_uhwi (ib);
      result = lto_file_decl_data_get_var_decl (data_in->file_data, ix_u);
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (result);

  return result;
}
372
373
/* Read and return a double-linked list of catch handlers from input
   block IB, using descriptors in DATA_IN.  On return *LAST_P points to
   the last handler read, or NULL if the list is empty.  The list is
   terminated by an LTO_null tag in the stream.  */

static struct eh_catch_d *
lto_input_eh_catch_list (class lto_input_block *ib, class data_in *data_in,
			 eh_catch *last_p)
{
  eh_catch first;
  enum LTO_tags tag;

  *last_p = first = NULL;
  tag = streamer_read_record_start (ib);
  while (tag)
    {
      tree list;
      eh_catch n;

      lto_tag_check_range (tag, LTO_eh_catch, LTO_eh_catch);

      /* Read the catch node.  */
      n = ggc_cleared_alloc<eh_catch_d> ();
      n->type_list = stream_read_tree (ib, data_in);
      n->filter_list = stream_read_tree (ib, data_in);
      n->label = stream_read_tree (ib, data_in);

      /* Register all the types in N->FILTER_LIST.  */
      for (list = n->filter_list; list; list = TREE_CHAIN (list))
	add_type_for_runtime (TREE_VALUE (list));

      /* Chain N to the end of the list.  */
      if (*last_p)
	(*last_p)->next_catch = n;
      n->prev_catch = *last_p;
      *last_p = n;

      /* Set the head of the list the first time through the loop.  */
      if (first == NULL)
	first = n;

      tag = streamer_read_record_start (ib);
    }

  return first;
}
418
419
/* Read and return EH region IX from input block IB, using descriptors
   in DATA_IN.  Return NULL if the stream holds no region at this slot.
   The region's outer/inner/next_peer/landing_pads fields are read as
   raw indices and later converted to pointers by
   fixup_eh_region_pointers.  */

static eh_region
input_eh_region (class lto_input_block *ib, class data_in *data_in, int ix)
{
  enum LTO_tags tag;
  eh_region r;

  /* Read the region header.  */
  tag = streamer_read_record_start (ib);
  if (tag == LTO_null)
    return NULL;

  r = ggc_cleared_alloc<eh_region_d> ();
  r->index = streamer_read_hwi (ib);

  /* The streamed index must match the slot we are filling.  */
  gcc_assert (r->index == ix);

  /* Read all the region pointers as region numbers.  We'll fix up
     the pointers once the whole array has been read.  */
  r->outer = (eh_region) (intptr_t) streamer_read_hwi (ib);
  r->inner = (eh_region) (intptr_t) streamer_read_hwi (ib);
  r->next_peer = (eh_region) (intptr_t) streamer_read_hwi (ib);

  /* The tag doubles as the region type; read the type-specific
     payload.  */
  switch (tag)
    {
    case LTO_ert_cleanup:
      r->type = ERT_CLEANUP;
      break;

    case LTO_ert_try:
      {
	struct eh_catch_d *last_catch;
	r->type = ERT_TRY;
	r->u.eh_try.first_catch = lto_input_eh_catch_list (ib, data_in,
							   &last_catch);
	r->u.eh_try.last_catch = last_catch;
	break;
      }

    case LTO_ert_allowed_exceptions:
      {
	tree l;

	r->type = ERT_ALLOWED_EXCEPTIONS;
	r->u.allowed.type_list = stream_read_tree (ib, data_in);
	r->u.allowed.label = stream_read_tree (ib, data_in);
	r->u.allowed.filter = streamer_read_uhwi (ib);

	/* Register each allowed type with the runtime.  */
	for (l = r->u.allowed.type_list; l ; l = TREE_CHAIN (l))
	  add_type_for_runtime (TREE_VALUE (l));
      }
      break;

    case LTO_ert_must_not_throw:
      {
	r->type = ERT_MUST_NOT_THROW;
	r->u.must_not_throw.failure_decl = stream_read_tree (ib, data_in);
	bitpack_d bp = streamer_read_bitpack (ib);
	r->u.must_not_throw.failure_loc
	  = stream_input_location_now (&bp, data_in);
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Landing pad chain, also streamed as an index.  */
  r->landing_pads = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);

  return r;
}
493
494
495 /* Read and return EH landing pad IX from input block IB, using descriptors
496 in DATA_IN. */
497
498 static eh_landing_pad
499 input_eh_lp (class lto_input_block *ib, class data_in *data_in, int ix)
500 {
501 enum LTO_tags tag;
502 eh_landing_pad lp;
503
504 /* Read the landing pad header. */
505 tag = streamer_read_record_start (ib);
506 if (tag == LTO_null)
507 return NULL;
508
509 lto_tag_check_range (tag, LTO_eh_landing_pad, LTO_eh_landing_pad);
510
511 lp = ggc_cleared_alloc<eh_landing_pad_d> ();
512 lp->index = streamer_read_hwi (ib);
513 gcc_assert (lp->index == ix);
514 lp->next_lp = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
515 lp->region = (eh_region) (intptr_t) streamer_read_hwi (ib);
516 lp->post_landing_pad = stream_read_tree (ib, data_in);
517
518 return lp;
519 }
520
521
/* After reading the EH regions, pointers to peer and children regions
   are region numbers.  This converts all these region numbers into
   real pointers into the rematerialized regions for FN.  ROOT_REGION
   is the region number for the root EH region in FN.  */

static void
fixup_eh_region_pointers (struct function *fn, HOST_WIDE_INT root_region)
{
  unsigned i;
  vec<eh_region, va_gc> *eh_array = fn->eh->region_array;
  vec<eh_landing_pad, va_gc> *lp_array = fn->eh->lp_array;
  eh_region r;
  eh_landing_pad lp;

  gcc_assert (eh_array && lp_array);

  gcc_assert (root_region >= 0);
  fn->eh->region_tree = (*eh_array)[root_region];

/* Replace an index smuggled through a pointer field with the pointer to
   the corresponding array slot.  A NULL pointer encodes index 0, whose
   slot is expected to be NULL as well.  */
#define FIXUP_EH_REGION(r) (r) = (*eh_array)[(HOST_WIDE_INT) (intptr_t) (r)]
#define FIXUP_EH_LP(p) (p) = (*lp_array)[(HOST_WIDE_INT) (intptr_t) (p)]

  /* Convert all the index numbers stored in pointer fields into
     pointers to the corresponding slots in the EH region array.  */
  FOR_EACH_VEC_ELT (*eh_array, i, r)
    {
      /* The array may contain NULL regions.  */
      if (r == NULL)
	continue;

      gcc_assert (i == (unsigned) r->index);
      FIXUP_EH_REGION (r->outer);
      FIXUP_EH_REGION (r->inner);
      FIXUP_EH_REGION (r->next_peer);
      FIXUP_EH_LP (r->landing_pads);
    }

  /* Convert all the index numbers stored in pointer fields into
     pointers to the corresponding slots in the EH landing pad array.  */
  FOR_EACH_VEC_ELT (*lp_array, i, lp)
    {
      /* The array may contain NULL landing pads.  */
      if (lp == NULL)
	continue;

      gcc_assert (i == (unsigned) lp->index);
      FIXUP_EH_LP (lp->next_lp);
      FIXUP_EH_REGION (lp->region);
    }

#undef FIXUP_EH_REGION
#undef FIXUP_EH_LP
}
575
576
577 /* Initialize EH support. */
578
579 void
580 lto_init_eh (void)
581 {
582 static bool eh_initialized_p = false;
583
584 if (eh_initialized_p)
585 return;
586
587 /* Contrary to most other FEs, we only initialize EH support when at
588 least one of the files in the set contains exception regions in
589 it. Since this happens much later than the call to init_eh in
590 lang_dependent_init, we have to set flag_exceptions and call
591 init_eh again to initialize the EH tables. */
592 flag_exceptions = 1;
593 init_eh ();
594
595 eh_initialized_p = true;
596 }
597
598
/* Read the exception table for FN from IB using the data descriptors
   in DATA_IN.  The table consists of the root region index followed by
   four length-prefixed arrays: regions, landing pads, runtime types and
   action chains; it ends with an LTO_null record.  Does nothing if the
   stream has no EH table.  */

static void
input_eh_regions (class lto_input_block *ib, class data_in *data_in,
		  struct function *fn)
{
  HOST_WIDE_INT i, root_region, len;
  enum LTO_tags tag;

  tag = streamer_read_record_start (ib);
  if (tag == LTO_null)
    return;

  lto_tag_check_range (tag, LTO_eh_table, LTO_eh_table);

  gcc_assert (fn->eh);

  root_region = streamer_read_hwi (ib);
  /* Guard against values that do not survive narrowing to int.  */
  gcc_assert (root_region == (int) root_region);

  /* Read the EH region array.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      vec_safe_grow_cleared (fn->eh->region_array, len);
      for (i = 0; i < len; i++)
	{
	  eh_region r = input_eh_region (ib, data_in, i);
	  (*fn->eh->region_array)[i] = r;
	}
    }

  /* Read the landing pads.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      vec_safe_grow_cleared (fn->eh->lp_array, len);
      for (i = 0; i < len; i++)
	{
	  eh_landing_pad lp = input_eh_lp (ib, data_in, i);
	  (*fn->eh->lp_array)[i] = lp;
	}
    }

  /* Read the runtime type data.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      vec_safe_grow_cleared (fn->eh->ttype_data, len);
      for (i = 0; i < len; i++)
	{
	  tree ttype = stream_read_tree (ib, data_in);
	  (*fn->eh->ttype_data)[i] = ttype;
	}
    }

  /* Read the table of action chains.  The representation differs for
     the ARM EABI unwinder (trees) vs. everything else (bytes).  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      if (targetm.arm_eabi_unwinder)
	{
	  vec_safe_grow_cleared (fn->eh->ehspec_data.arm_eabi, len);
	  for (i = 0; i < len; i++)
	    {
	      tree t = stream_read_tree (ib, data_in);
	      (*fn->eh->ehspec_data.arm_eabi)[i] = t;
	    }
	}
      else
	{
	  vec_safe_grow_cleared (fn->eh->ehspec_data.other, len);
	  for (i = 0; i < len; i++)
	    {
	      uchar c = streamer_read_uchar (ib);
	      (*fn->eh->ehspec_data.other)[i] = c;
	    }
	}
    }

  /* Reconstruct the EH region tree by fixing up the peer/children
     pointers.  */
  fixup_eh_region_pointers (fn, root_region);

  /* The table must be terminated by an LTO_null record.  */
  tag = streamer_read_record_start (ib);
  lto_tag_check_range (tag, LTO_null, LTO_null);
}
691
692
693 /* Make a new basic block with index INDEX in function FN. */
694
695 static basic_block
696 make_new_block (struct function *fn, unsigned int index)
697 {
698 basic_block bb = alloc_block ();
699 bb->index = index;
700 SET_BASIC_BLOCK_FOR_FN (fn, index, bb);
701 n_basic_blocks_for_fn (fn)++;
702 return bb;
703 }
704
705
/* Read the CFG for function FN from input block IB, using descriptors
   in DATA_IN.  Rebuilds the basic blocks, the edges with their
   probabilities, the block ordering chain and the loop tree.  */

static void
input_cfg (class lto_input_block *ib, class data_in *data_in,
	   struct function *fn)
{
  unsigned int bb_count;
  basic_block p_bb;
  unsigned int i;
  int index;

  init_empty_tree_cfg_for_function (fn);
  init_ssa_operands (fn);

  profile_status_for_fn (fn) = streamer_read_enum (ib, profile_status_d,
						   PROFILE_LAST);

  bb_count = streamer_read_uhwi (ib);

  last_basic_block_for_fn (fn) = bb_count;
  if (bb_count > basic_block_info_for_fn (fn)->length ())
    vec_safe_grow_cleared (basic_block_info_for_fn (fn), bb_count);

  if (bb_count > label_to_block_map_for_fn (fn)->length ())
    vec_safe_grow_cleared (label_to_block_map_for_fn (fn), bb_count);

  /* First pass: read each block and its outgoing edges; the list is
     terminated by index -1.  */
  index = streamer_read_hwi (ib);
  while (index != -1)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
      unsigned int edge_count;

      if (bb == NULL)
	bb = make_new_block (fn, index);

      edge_count = streamer_read_uhwi (ib);

      /* Connect up the CFG.  */
      for (i = 0; i < edge_count; i++)
	{
	  unsigned int dest_index;
	  unsigned int edge_flags;
	  basic_block dest;
	  profile_probability probability;
	  edge e;

	  dest_index = streamer_read_uhwi (ib);
	  probability = profile_probability::stream_in (ib);
	  edge_flags = streamer_read_uhwi (ib);

	  dest = BASIC_BLOCK_FOR_FN (fn, dest_index);

	  /* The destination block may not have been materialized
	     yet.  */
	  if (dest == NULL)
	    dest = make_new_block (fn, dest_index);

	  e = make_edge (bb, dest, edge_flags);
	  e->probability = probability;
	}

      index = streamer_read_hwi (ib);
    }

  /* Second pass: rebuild the next_bb/prev_bb chain from the streamed
     block order, again terminated by -1.  */
  p_bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
  index = streamer_read_hwi (ib);
  while (index != -1)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
      bb->prev_bb = p_bb;
      p_bb->next_bb = bb;
      p_bb = bb;
      index = streamer_read_hwi (ib);
    }

  /* ??? The cfgloop interface is tied to cfun.  */
  gcc_assert (cfun == fn);

  /* Input the loop tree.  */
  unsigned n_loops = streamer_read_uhwi (ib);
  if (n_loops == 0)
    return;

  struct loops *loops = ggc_cleared_alloc<struct loops> ();
  init_loops_structure (fn, loops, n_loops);
  set_loops_for_fn (fn, loops);

  /* Input each loop and associate it with its loop header so
     flow_loops_find can rebuild the loop tree.  */
  for (unsigned i = 1; i < n_loops; ++i)
    {
      int header_index = streamer_read_hwi (ib);
      /* A header index of -1 marks an empty slot; push NULL so loop
	 numbering stays aligned with the stream.  */
      if (header_index == -1)
	{
	  loops->larray->quick_push (NULL);
	  continue;
	}

      class loop *loop = alloc_loop ();
      loop->header = BASIC_BLOCK_FOR_FN (fn, header_index);
      loop->header->loop_father = loop;

      /* Read everything copy_loop_info copies.  */
      loop->estimate_state = streamer_read_enum (ib, loop_estimation, EST_LAST);
      loop->any_upper_bound = streamer_read_hwi (ib);
      if (loop->any_upper_bound)
	loop->nb_iterations_upper_bound = streamer_read_widest_int (ib);
      loop->any_likely_upper_bound = streamer_read_hwi (ib);
      if (loop->any_likely_upper_bound)
	loop->nb_iterations_likely_upper_bound = streamer_read_widest_int (ib);
      loop->any_estimate = streamer_read_hwi (ib);
      if (loop->any_estimate)
	loop->nb_iterations_estimate = streamer_read_widest_int (ib);

      /* Read OMP SIMD related info.  */
      loop->safelen = streamer_read_hwi (ib);
      loop->unroll = streamer_read_hwi (ib);
      loop->owned_clique = streamer_read_hwi (ib);
      loop->dont_vectorize = streamer_read_hwi (ib);
      loop->force_vectorize = streamer_read_hwi (ib);
      loop->simduid = stream_read_tree (ib, data_in);

      place_new_loop (fn, loop);

      /* flow_loops_find doesn't like loops not in the tree, hook them
	 all as siblings of the tree root temporarily.  */
      flow_loop_tree_node_add (loops->tree_root, loop);
    }

  /* Rebuild the loop tree.  */
  flow_loops_find (loops);
}
836
837
838 /* Read the SSA names array for function FN from DATA_IN using input
839 block IB. */
840
841 static void
842 input_ssa_names (class lto_input_block *ib, class data_in *data_in,
843 struct function *fn)
844 {
845 unsigned int i, size;
846
847 size = streamer_read_uhwi (ib);
848 init_ssanames (fn, size);
849
850 i = streamer_read_uhwi (ib);
851 while (i)
852 {
853 tree ssa_name, name;
854 bool is_default_def;
855
856 /* Skip over the elements that had been freed. */
857 while (SSANAMES (fn)->length () < i)
858 SSANAMES (fn)->quick_push (NULL_TREE);
859
860 is_default_def = (streamer_read_uchar (ib) != 0);
861 name = stream_read_tree (ib, data_in);
862 ssa_name = make_ssa_name_fn (fn, name, NULL);
863
864 if (is_default_def)
865 {
866 set_ssa_default_def (cfun, SSA_NAME_VAR (ssa_name), ssa_name);
867 SSA_NAME_DEF_STMT (ssa_name) = gimple_build_nop ();
868 }
869
870 i = streamer_read_uhwi (ib);
871 }
872 }
873
874
875 /* Go through all NODE edges and fixup call_stmt pointers
876 so they point to STMTS. */
877
878 static void
879 fixup_call_stmt_edges_1 (struct cgraph_node *node, gimple **stmts,
880 struct function *fn)
881 {
882 #define STMT_UID_NOT_IN_RANGE(uid) \
883 (gimple_stmt_max_uid (fn) < uid || uid == 0)
884
885 struct cgraph_edge *cedge;
886 struct ipa_ref *ref = NULL;
887 unsigned int i;
888
889 for (cedge = node->callees; cedge; cedge = cedge->next_callee)
890 {
891 if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
892 fatal_error (input_location,
893 "Cgraph edge statement index out of range");
894 cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
895 if (!cedge->call_stmt)
896 fatal_error (input_location,
897 "Cgraph edge statement index not found");
898 }
899 for (cedge = node->indirect_calls; cedge; cedge = cedge->next_callee)
900 {
901 if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
902 fatal_error (input_location,
903 "Cgraph edge statement index out of range");
904 cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
905 if (!cedge->call_stmt)
906 fatal_error (input_location, "Cgraph edge statement index not found");
907 }
908 for (i = 0; node->iterate_reference (i, ref); i++)
909 if (ref->lto_stmt_uid)
910 {
911 if (STMT_UID_NOT_IN_RANGE (ref->lto_stmt_uid))
912 fatal_error (input_location,
913 "Reference statement index out of range");
914 ref->stmt = stmts[ref->lto_stmt_uid - 1];
915 if (!ref->stmt)
916 fatal_error (input_location, "Reference statement index not found");
917 }
918 }
919
920
/* Fixup call_stmt pointers in ORIG and all its clones, using the
   statement array STMTS (indexed by LTO statement uid - 1).  All clones
   share the body of the clone-tree root, so its struct function is used
   for every node.  */

static void
fixup_call_stmt_edges (struct cgraph_node *orig, gimple **stmts)
{
  struct cgraph_node *node;
  struct function *fn;

  /* Walk up to the root of the clone tree; its decl owns the body.  */
  while (orig->clone_of)
    orig = orig->clone_of;
  fn = DECL_STRUCT_FUNCTION (orig->decl);

  /* Thunks have no gimple body of their own to fix up.  */
  if (!orig->thunk.thunk_p)
    fixup_call_stmt_edges_1 (orig, stmts, fn);
  if (orig->clones)
    /* Depth-first walk over the clone tree rooted at ORIG, without
       recursion: descend into clones first, then siblings, then climb
       back up via clone_of.  */
    for (node = orig->clones; node != orig;)
      {
	if (!node->thunk.thunk_p)
	  fixup_call_stmt_edges_1 (node, stmts, fn);
	if (node->clones)
	  node = node->clones;
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    while (node != orig && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != orig)
	      node = node->next_sibling_clone;
	  }
      }
}
953
954
/* Input the base body of struct function FN from DATA_IN
   using input block IB.  The read order must exactly mirror the
   corresponding output routine; each field below is read in the order
   it was streamed.  */

static void
input_struct_function_base (struct function *fn, class data_in *data_in,
			    class lto_input_block *ib)
{
  struct bitpack_d bp;
  int len;

  /* Read the static chain and non-local goto save area.  */
  fn->static_chain_decl = stream_read_tree (ib, data_in);
  fn->nonlocal_goto_save_area = stream_read_tree (ib, data_in);

  /* Read all the local symbols.  */
  len = streamer_read_hwi (ib);
  if (len > 0)
    {
      int i;
      vec_safe_grow_cleared (fn->local_decls, len);
      for (i = 0; i < len; i++)
	{
	  tree t = stream_read_tree (ib, data_in);
	  (*fn->local_decls)[i] = t;
	}
    }

  /* Input the current IL state of the function.  */
  fn->curr_properties = streamer_read_uhwi (ib);

  /* Read all the attributes for FN as a single bitpack; the unpack
     order must match the pack order on the writer side.  */
  bp = streamer_read_bitpack (ib);
  fn->is_thunk = bp_unpack_value (&bp, 1);
  fn->has_local_explicit_reg_vars = bp_unpack_value (&bp, 1);
  fn->returns_pcc_struct = bp_unpack_value (&bp, 1);
  fn->returns_struct = bp_unpack_value (&bp, 1);
  fn->can_throw_non_call_exceptions = bp_unpack_value (&bp, 1);
  fn->can_delete_dead_exceptions = bp_unpack_value (&bp, 1);
  fn->always_inline_functions_inlined = bp_unpack_value (&bp, 1);
  fn->after_inlining = bp_unpack_value (&bp, 1);
  fn->stdarg = bp_unpack_value (&bp, 1);
  fn->has_nonlocal_label = bp_unpack_value (&bp, 1);
  fn->has_forced_label_in_static = bp_unpack_value (&bp, 1);
  fn->calls_alloca = bp_unpack_value (&bp, 1);
  fn->calls_setjmp = bp_unpack_value (&bp, 1);
  fn->calls_eh_return = bp_unpack_value (&bp, 1);
  fn->has_force_vectorize_loops = bp_unpack_value (&bp, 1);
  fn->has_simduid_loops = bp_unpack_value (&bp, 1);
  fn->va_list_fpr_size = bp_unpack_value (&bp, 8);
  fn->va_list_gpr_size = bp_unpack_value (&bp, 8);
  fn->last_clique = bp_unpack_value (&bp, sizeof (short) * 8);

  /* Input the function start and end loci.  */
  fn->function_start_locus = stream_input_location_now (&bp, data_in);
  fn->function_end_locus = stream_input_location_now (&bp, data_in);

  /* Restore the instance discriminators if present.  A leading 1-bit
     flag says whether a discriminator follows.  */
  int instance_number = bp_unpack_value (&bp, 1);
  if (instance_number)
    {
      instance_number = bp_unpack_value (&bp, sizeof (int) * CHAR_BIT);
      maybe_create_decl_to_instance_map ()->put (fn->decl, instance_number);
    }
}
1019
1020
/* Read the body of function FN_DECL from DATA_IN using input block IB.
   IB_CFG holds the separately streamed CFG section.  NODE is the cgraph
   node the body belongs to; its count materialization scale is applied
   when reading basic block profile counts.  */

static void
input_function (tree fn_decl, class data_in *data_in,
		class lto_input_block *ib, class lto_input_block *ib_cfg,
		cgraph_node *node)
{
  struct function *fn;
  enum LTO_tags tag;
  gimple **stmts;	/* uid -> stmt map handed to the edge fixup code.  */
  basic_block bb;

  tag = streamer_read_record_start (ib);
  lto_tag_check (tag, LTO_function);

  /* Read decls for parameters and args.  */
  DECL_RESULT (fn_decl) = stream_read_tree (ib, data_in);
  DECL_ARGUMENTS (fn_decl) = streamer_read_chain (ib, data_in);

  /* Read debug args if available.  */
  unsigned n_debugargs = streamer_read_uhwi (ib);
  if (n_debugargs)
    {
      vec<tree, va_gc> **debugargs = decl_debug_args_insert (fn_decl);
      vec_safe_grow (*debugargs, n_debugargs);
      for (unsigned i = 0; i < n_debugargs; ++i)
	(**debugargs)[i] = stream_read_tree (ib, data_in);
    }

  /* Read the tree of lexical scopes for the function.  */
  DECL_INITIAL (fn_decl) = stream_read_tree (ib, data_in);
  /* The writer also streamed a set of leaf BLOCKs; read (and thereby
     register in the reader cache) each one, the return values are not
     needed here.  */
  unsigned block_leaf_count = streamer_read_uhwi (ib);
  while (block_leaf_count--)
    stream_read_tree (ib, data_in);

  /* A zero flag means no body follows (declaration-only stream).  */
  if (!streamer_read_uhwi (ib))
    return;

  push_struct_function (fn_decl);
  fn = DECL_STRUCT_FUNCTION (fn_decl);
  init_tree_ssa (fn);
  /* We input IL in SSA form.  */
  cfun->gimple_df->in_ssa_p = true;

  gimple_register_cfg_hooks ();

  input_struct_function_base (fn, data_in, ib);
  input_cfg (ib_cfg, data_in, fn);

  /* Read all the SSA names.  */
  input_ssa_names (ib, data_in, fn);

  /* Read the exception handling regions in the function.  */
  input_eh_regions (ib, data_in, fn);

  gcc_assert (DECL_INITIAL (fn_decl));
  DECL_SAVED_TREE (fn_decl) = NULL_TREE;

  /* Read all the basic blocks.  A zero tag terminates the list.  */
  tag = streamer_read_record_start (ib);
  while (tag)
    {
      input_bb (ib, tag, data_in, fn,
		node->count_materialization_scale);
      tag = streamer_read_record_start (ib);
    }

  /* Fix up the call statements that are mentioned in the callgraph
     edges.  First assign a fresh, dense uid to every PHI and statement
     so the stmts[] array below can be indexed by uid.  */
  set_gimple_stmt_max_uid (cfun, 0);
  FOR_ALL_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
    }
  /* Build the uid -> stmt map, removing or neutralizing statements that
     must not survive this read (see comments below).  */
  stmts = (gimple **) xcalloc (gimple_stmt_max_uid (fn), sizeof (gimple *));
  FOR_ALL_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator bsi = gsi_start_phis (bb);
      while (!gsi_end_p (bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  gsi_next (&bsi);
	  stmts[gimple_uid (stmt)] = stmt;
	}
      bsi = gsi_start_bb (bb);
      while (!gsi_end_p (bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  bool remove = false;
	  /* If we're recompiling LTO objects with debug stmts but
	     we're not supposed to have debug stmts, remove them now.
	     We can't remove them earlier because this would cause uid
	     mismatches in fixups, but we can do it at this point, as
	     long as debug stmts don't require fixups.
	     Similarly remove all IFN_*SAN_* internal calls */
	  if (!flag_wpa)
	    {
	      if (is_gimple_debug (stmt)
		  && (gimple_debug_nonbind_marker_p (stmt)
		      ? !MAY_HAVE_DEBUG_MARKER_STMTS
		      : !MAY_HAVE_DEBUG_BIND_STMTS))
		remove = true;
	      /* In case the linemap overflows locations can be dropped
		 to zero.  Thus do not keep nonsensical inline entry markers
		 we'd later ICE on.  */
	      tree block;
	      if (gimple_debug_inline_entry_p (stmt)
		  && (block = gimple_block (stmt))
		  && !inlined_function_outer_scope_p (block))
		remove = true;
	      if (is_gimple_call (stmt)
		  && gimple_call_internal_p (stmt))
		{
		  /* Sanitizer calls whose corresponding sanitizer is
		     disabled in this compilation are turned into no-ops
		     rather than removed, so uids stay stable.  */
		  bool replace = false;
		  switch (gimple_call_internal_fn (stmt))
		    {
		    case IFN_UBSAN_NULL:
		      if ((flag_sanitize
			   & (SANITIZE_NULL | SANITIZE_ALIGNMENT)) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_BOUNDS:
		      if ((flag_sanitize & SANITIZE_BOUNDS) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_VPTR:
		      if ((flag_sanitize & SANITIZE_VPTR) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_OBJECT_SIZE:
		      if ((flag_sanitize & SANITIZE_OBJECT_SIZE) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_PTR:
		      if ((flag_sanitize & SANITIZE_POINTER_OVERFLOW) == 0)
			replace = true;
		      break;
		    case IFN_ASAN_MARK:
		      if ((flag_sanitize & SANITIZE_ADDRESS) == 0)
			replace = true;
		      break;
		    case IFN_TSAN_FUNC_EXIT:
		      if ((flag_sanitize & SANITIZE_THREAD) == 0)
			replace = true;
		      break;
		    default:
		      break;
		    }
		  if (replace)
		    {
		      gimple_call_set_internal_fn (as_a <gcall *> (stmt),
						   IFN_NOP);
		      update_stmt (stmt);
		    }
		}
	    }
	  if (remove)
	    {
	      gimple_stmt_iterator gsi = bsi;
	      gsi_next (&bsi);
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      gsi_remove (&gsi, true);
	    }
	  else
	    {
	      gsi_next (&bsi);
	      stmts[gimple_uid (stmt)] = stmt;

	      /* Remember that the input function has begin stmt
		 markers, so that we know to expect them when emitting
		 debug info.  */
	      if (!cfun->debug_nonbind_markers
		  && gimple_debug_nonbind_marker_p (stmt))
		cfun->debug_nonbind_markers = true;
	    }
	}
    }

  /* Set the gimple body to the statement sequence in the entry
     basic block.  FIXME lto, this is fairly hacky.  The existence
     of a gimple body is used by the cgraph routines, but we should
     really use the presence of the CFG.  */
  {
    edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
    gimple_set_body (fn_decl, bb_seq (ei_edge (ei)->dest));
  }

  update_max_bb_count ();
  fixup_call_stmt_edges (node, stmts);
  execute_all_ipa_stmt_fixups (node, stmts);

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  free (stmts);
  pop_cfun ();
}
1228
/* Read the initializer of variable VAR from DATA_IN using input block IB.
   This is the static-initializer counterpart of input_function.  */

static void
input_constructor (tree var, class data_in *data_in,
		   class lto_input_block *ib)
{
  DECL_INITIAL (var) = stream_read_tree (ib, data_in);
}
1237
1238
/* Read the body from DATA for function NODE and fill it in.
   FILE_DATA are the global decls and types.  SECTION_TYPE is either
   LTO_section_function_body or LTO_section_static_initializer.  If
   section type is LTO_section_function_body, FN must be the decl for
   that function.  */

static void
lto_read_body_or_constructor (struct lto_file_decl_data *file_data, struct symtab_node *node,
			      const char *data, enum lto_section_type section_type)
{
  const struct lto_function_header *header;
  class data_in *data_in;
  int cfg_offset;
  int main_offset;
  int string_offset;
  tree fn_decl = node->decl;

  /* The section layout is: header, [CFG (functions only)], main stream,
     string table.  Compute the offsets of each part.  */
  header = (const struct lto_function_header *) data;
  if (TREE_CODE (node->decl) == FUNCTION_DECL)
    {
      cfg_offset = sizeof (struct lto_function_header);
      main_offset = cfg_offset + header->cfg_size;
      string_offset = main_offset + header->main_size;
    }
  else
    {
      /* NOTE: cfg_offset is deliberately left unset here; it is only
	 read on the FUNCTION_DECL path below.  */
      main_offset = sizeof (struct lto_function_header);
      string_offset = main_offset + header->main_size;
    }

  data_in = lto_data_in_create (file_data, data + string_offset,
				header->string_size, vNULL);

  if (section_type == LTO_section_function_body)
    {
      struct lto_in_decl_state *decl_state;
      unsigned from;

      gcc_checking_assert (node);

      /* Use the function's decl state.  */
      decl_state = lto_get_function_in_decl_state (file_data, fn_decl);
      gcc_assert (decl_state);
      file_data->current_decl_state = decl_state;


      /* Set up the struct function.  Remember the current length of the
	 reader cache so that trees streamed locally (appended past FROM)
	 can be fixed up afterwards.  */
      from = data_in->reader_cache->nodes.length ();
      lto_input_block ib_main (data + main_offset, header->main_size,
			       file_data->mode_table);
      if (TREE_CODE (node->decl) == FUNCTION_DECL)
	{
	  lto_input_block ib_cfg (data + cfg_offset, header->cfg_size,
				  file_data->mode_table);
	  input_function (fn_decl, data_in, &ib_main, &ib_cfg,
			  dyn_cast <cgraph_node *>(node));
	}
      else
	input_constructor (fn_decl, data_in, &ib_main);
      data_in->location_cache.apply_location_cache ();
      /* And fixup types we streamed locally.  */
      {
	struct streamer_tree_cache_d *cache = data_in->reader_cache;
	unsigned len = cache->nodes.length ();
	unsigned i;
	/* Walk only the entries appended during this read, i.e.
	   [FROM, LEN).  */
	for (i = len; i-- > from;)
	  {
	    tree t = streamer_tree_cache_get_tree (cache, i);
	    if (t == NULL_TREE)
	      continue;

	    if (TYPE_P (t))
	      {
		gcc_assert (TYPE_CANONICAL (t) == NULL_TREE);
		if (type_with_alias_set_p (t)
		    && canonical_type_used_p (t))
		  TYPE_CANONICAL (t) = TYPE_MAIN_VARIANT (t);
		/* Link variants into their main variant's variant list.  */
		if (TYPE_MAIN_VARIANT (t) != t)
		  {
		    gcc_assert (TYPE_NEXT_VARIANT (t) == NULL_TREE);
		    TYPE_NEXT_VARIANT (t)
		      = TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t));
		    TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t)) = t;
		  }
	      }
	  }
      }

      /* Restore decl state */
      file_data->current_decl_state = file_data->global_decl_state;
    }

  lto_data_in_delete (data_in);
}
1333
1334
1335 /* Read the body of NODE using DATA. FILE_DATA holds the global
1336 decls and types. */
1337
1338 void
1339 lto_input_function_body (struct lto_file_decl_data *file_data,
1340 struct cgraph_node *node, const char *data)
1341 {
1342 lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
1343 }
1344
1345 /* Read the body of NODE using DATA. FILE_DATA holds the global
1346 decls and types. */
1347
1348 void
1349 lto_input_variable_constructor (struct lto_file_decl_data *file_data,
1350 struct varpool_node *node, const char *data)
1351 {
1352 lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
1353 }
1354
1355
/* Queue of accumulated decl -> DIE mappings.  Like locations, these are
   only applied to prevailing tree nodes during tree merging.  */
1358 vec<dref_entry> dref_queue;
1359
/* Read the physical representation of a tree node EXPR from
   input block IB using the per-file context in DATA_IN.
   The read order here must stay in sync with the corresponding
   writer; each step consumes a fixed part of the stream.  */

static void
lto_read_tree_1 (class lto_input_block *ib, class data_in *data_in, tree expr)
{
  /* Read all the bitfield values in EXPR.  Note that for LTO, we
     only write language-independent bitfields, so no more unpacking is
     needed.  */
  streamer_read_tree_bitfields (ib, data_in, expr);

  /* Read all the pointer fields in EXPR.  */
  streamer_read_tree_body (ib, data_in, expr);

  /* Read any LTO-specific data not read by the tree streamer.
     FUNCTION_DECL and TRANSLATION_UNIT_DECL initializers are streamed
     separately, hence the exclusion.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    DECL_INITIAL (expr) = stream_read_tree (ib, data_in);

  /* Stream references to early generated DIEs.  Keep in sync with the
     trees handled in dwarf2out_register_external_die.  */
  if ((DECL_P (expr)
       && TREE_CODE (expr) != FIELD_DECL
       && TREE_CODE (expr) != DEBUG_EXPR_DECL
       && TREE_CODE (expr) != TYPE_DECL)
      || TREE_CODE (expr) == BLOCK)
    {
      const char *str = streamer_read_string (data_in, ib);
      if (str)
	{
	  /* Queue the decl -> DIE mapping; it is applied later, only
	     for prevailing nodes (see dref_queue's comment).  */
	  unsigned HOST_WIDE_INT off = streamer_read_uhwi (ib);
	  dref_entry e = { expr, str, off };
	  dref_queue.safe_push (e);
	}
    }
}
1397
1398 /* Read the physical representation of a tree node with tag TAG from
1399 input block IB using the per-file context in DATA_IN. */
1400
1401 static tree
1402 lto_read_tree (class lto_input_block *ib, class data_in *data_in,
1403 enum LTO_tags tag, hashval_t hash)
1404 {
1405 /* Instantiate a new tree node. */
1406 tree result = streamer_alloc_tree (ib, data_in, tag);
1407
1408 /* Enter RESULT in the reader cache. This will make RESULT
1409 available so that circular references in the rest of the tree
1410 structure can be resolved in subsequent calls to stream_read_tree. */
1411 streamer_tree_cache_append (data_in->reader_cache, result, hash);
1412
1413 lto_read_tree_1 (ib, data_in, result);
1414
1415 /* end_marker = */ streamer_read_uchar (ib);
1416
1417 return result;
1418 }
1419
1420
/* Populate the reader cache with trees materialized from the SCC
   following in the IB, DATA_IN stream.
   Returns the SCC's hash; *LEN receives the number of trees in the
   SCC and *ENTRY_LEN the length of its entry.  */

hashval_t
lto_input_scc (class lto_input_block *ib, class data_in *data_in,
	       unsigned *len, unsigned *entry_len)
{
  /* A blob of unnamed tree nodes, fill the cache from it and
     recurse.  */
  unsigned size = streamer_read_uhwi (ib);
  hashval_t scc_hash = streamer_read_uhwi (ib);
  unsigned scc_entry_len = 1;

  if (size == 1)
    {
      /* Singleton SCC: read the one tree directly.  */
      enum LTO_tags tag = streamer_read_record_start (ib);
      lto_input_tree_1 (ib, data_in, tag, scc_hash);
    }
  else
    {
      unsigned int first = data_in->reader_cache->nodes.length ();
      tree result;

      scc_entry_len = streamer_read_uhwi (ib);

      /* Materialize size trees by reading their headers.  All nodes
	 must be allocated (and cached) before any body is read, so
	 that intra-SCC back-references resolve.  */
      for (unsigned i = 0; i < size; ++i)
	{
	  enum LTO_tags tag = streamer_read_record_start (ib);
	  /* References and nested SCCs cannot appear as SCC members.  */
	  if (tag == LTO_null
	      || (tag >= LTO_field_decl_ref && tag <= LTO_global_decl_ref)
	      || tag == LTO_tree_pickle_reference
	      || tag == LTO_integer_cst
	      || tag == LTO_tree_scc)
	    gcc_unreachable ();

	  result = streamer_alloc_tree (ib, data_in, tag);
	  streamer_tree_cache_append (data_in->reader_cache, result, 0);
	}

      /* Read the tree bitpacks and references.  */
      for (unsigned i = 0; i < size; ++i)
	{
	  result = streamer_tree_cache_get_tree (data_in->reader_cache,
						 first + i);
	  lto_read_tree_1 (ib, data_in, result);
	  /* end_marker = */ streamer_read_uchar (ib);
	}
    }

  *len = size;
  *entry_len = scc_entry_len;
  return scc_hash;
}
1475
1476
/* Read a tree from input block IB using the per-file context in
   DATA_IN.  This context is used, for example, to resolve references
   to previously read nodes.  TAG selects how the tree is encoded;
   HASH is the cache hash to record for newly materialized nodes.  */

tree
lto_input_tree_1 (class lto_input_block *ib, class data_in *data_in,
		  enum LTO_tags tag, hashval_t hash)
{
  tree result;

  gcc_assert ((unsigned) tag < (unsigned) LTO_NUM_TAGS);

  if (tag == LTO_null)
    result = NULL_TREE;
  else if (tag >= LTO_field_decl_ref && tag <= LTO_namelist_decl_ref)
    {
      /* If TAG is a reference to an indexable tree, the next value
	 in IB is the index into the table where we expect to find
	 that tree.  */
      result = lto_input_tree_ref (ib, data_in, cfun, tag);
    }
  else if (tag == LTO_tree_pickle_reference)
    {
      /* If TAG is a reference to a previously read tree, look it up in
	 the reader cache.  */
      result = streamer_get_pickled_tree (ib, data_in);
    }
  else if (tag == LTO_integer_cst)
    {
      /* For shared integer constants in singletons we can use the
	 existing tree integer constant merging code.  */
      tree type = stream_read_tree (ib, data_in);
      unsigned HOST_WIDE_INT len = streamer_read_uhwi (ib);
      unsigned HOST_WIDE_INT i;
      /* NOTE(review): LEN is trusted from the stream; this assumes the
	 writer never emits more than WIDE_INT_MAX_ELTS elements — the
	 precision assert below only fires after the array is filled.  */
      HOST_WIDE_INT a[WIDE_INT_MAX_ELTS];

      for (i = 0; i < len; i++)
	a[i] = streamer_read_hwi (ib);
      gcc_assert (TYPE_PRECISION (type) <= MAX_BITSIZE_MODE_ANY_INT);
      result = wide_int_to_tree (type, wide_int::from_array
				 (a, len, TYPE_PRECISION (type)));
      streamer_tree_cache_append (data_in->reader_cache, result, hash);
    }
  else if (tag == LTO_tree_scc)
    /* SCCs are handled by lto_input_scc, never here.  */
    gcc_unreachable ();
  else
    {
      /* Otherwise, materialize a new node from IB.  */
      result = lto_read_tree (ib, data_in, tag, hash);
    }

  return result;
}
1530
/* Read (and skip past) any SCC blobs at the current position of IB,
   then read and return the next single tree using the per-file context
   in DATA_IN.  Decl -> DIE mappings queued while reading each SCC are
   flushed to the debug machinery before the next record is read.  */

tree
lto_input_tree (class lto_input_block *ib, class data_in *data_in)
{
  enum LTO_tags tag;

  /* Input and skip SCCs.  */
  while ((tag = streamer_read_record_start (ib)) == LTO_tree_scc)
    {
      unsigned len, entry_len;
      lto_input_scc (ib, data_in, &len, &entry_len);

      /* Register DECLs with the debuginfo machinery.  */
      while (!dref_queue.is_empty ())
	{
	  dref_entry e = dref_queue.pop ();
	  debug_hooks->register_external_die (e.decl, e.sym, e.off);
	}
    }
  return lto_input_tree_1 (ib, data_in, tag, 0);
}
1551
1552
/* Input toplevel asms from FILE_DATA's asm section, if present.
   ORDER_BASE is added to each streamed symtab order so that orders
   from different object files do not collide.  */

void
lto_input_toplevel_asms (struct lto_file_decl_data *file_data, int order_base)
{
  size_t len;
  const char *data
    = lto_get_summary_section_data (file_data, LTO_section_asm, &len);
  const struct lto_simple_header_with_strings *header
    = (const struct lto_simple_header_with_strings *) data;
  int string_offset;
  class data_in *data_in;
  tree str;

  /* The section is optional; nothing to do when absent.  */
  if (! data)
    return;

  /* The string table follows the header and the main stream.  */
  string_offset = sizeof (*header) + header->main_size;

  lto_input_block ib (data + sizeof (*header), header->main_size,
		      file_data->mode_table);

  data_in = lto_data_in_create (file_data, data + string_offset,
				header->string_size, vNULL);

  /* Each record is an asm string followed by its symtab order.  */
  while ((str = streamer_read_string_cst (data_in, &ib)))
    {
      asm_node *node = symtab->finalize_toplevel_asm (str);
      node->order = streamer_read_hwi (&ib) + order_base;
      /* Keep the global order counter ahead of everything read in.  */
      if (node->order >= symtab->order)
	symtab->order = node->order + 1;
    }

  lto_data_in_delete (data_in);

  lto_free_section_data (file_data, LTO_section_asm, NULL, data, len);
}
1590
1591
/* Input mode table: build FILE_DATA->mode_table, which maps each
   machine mode number used by the writing compiler to the closest
   matching mode of this host, keyed on class, size, precision, inner
   mode, ibit/fbit and (for float modes) the real format name.  */

void
lto_input_mode_table (struct lto_file_decl_data *file_data)
{
  size_t len;
  const char *data
    = lto_get_summary_section_data (file_data, LTO_section_mode_table, &len);
  if (! data)
    {
      internal_error ("cannot read LTO mode table from %s",
		      file_data->file_name);
      return;
    }

  /* One table slot per possible 8-bit mode number, zero-initialized
     (i.e. VOIDmode) for modes never remapped.  */
  unsigned char *table = ggc_cleared_vec_alloc<unsigned char> (1 << 8);
  file_data->mode_table = table;
  const struct lto_simple_header_with_strings *header
    = (const struct lto_simple_header_with_strings *) data;
  int string_offset;
  class data_in *data_in;
  string_offset = sizeof (*header) + header->main_size;

  lto_input_block ib (data + sizeof (*header), header->main_size, NULL);
  data_in = lto_data_in_create (file_data, data + string_offset,
				header->string_size, vNULL);
  bitpack_d bp = streamer_read_bitpack (&ib);

  table[VOIDmode] = VOIDmode;
  table[BLKmode] = BLKmode;
  unsigned int m;
  /* Each record starts with the writer's mode number; VOIDmode (0)
     terminates the stream.  */
  while ((m = bp_unpack_value (&bp, 8)) != VOIDmode)
    {
      enum mode_class mclass
	= bp_unpack_enum (&bp, mode_class, MAX_MODE_CLASS);
      poly_uint16 size = bp_unpack_poly_value (&bp, 16);
      poly_uint16 prec = bp_unpack_poly_value (&bp, 16);
      machine_mode inner = (machine_mode) bp_unpack_value (&bp, 8);
      poly_uint16 nunits = bp_unpack_poly_value (&bp, 16);
      unsigned int ibit = 0, fbit = 0;
      unsigned int real_fmt_len = 0;
      const char *real_fmt_name = NULL;
      /* Class-specific extra data streamed by the writer.  */
      switch (mclass)
	{
	case MODE_FRACT:
	case MODE_UFRACT:
	case MODE_ACCUM:
	case MODE_UACCUM:
	  ibit = bp_unpack_value (&bp, 8);
	  fbit = bp_unpack_value (&bp, 8);
	  break;
	case MODE_FLOAT:
	case MODE_DECIMAL_FLOAT:
	  real_fmt_name = bp_unpack_indexed_string (data_in, &bp,
						    &real_fmt_len);
	  break;
	default:
	  break;
	}
      /* First search just the GET_CLASS_NARROWEST_MODE to wider modes,
	 if not found, fallback to all modes.  */
      int pass;
      for (pass = 0; pass < 2; pass++)
	for (machine_mode mr = pass ? VOIDmode
			      : GET_CLASS_NARROWEST_MODE (mclass);
	     pass ? mr < MAX_MACHINE_MODE : mr != VOIDmode;
	     pass ? mr = (machine_mode) (mr + 1)
		  : mr = GET_MODE_WIDER_MODE (mr).else_void ())
	  if (GET_MODE_CLASS (mr) != mclass
	      || maybe_ne (GET_MODE_SIZE (mr), size)
	      || maybe_ne (GET_MODE_PRECISION (mr), prec)
	      || (inner == m
		  ? GET_MODE_INNER (mr) != mr
		  : GET_MODE_INNER (mr) != table[(int) inner])
	      || GET_MODE_IBIT (mr) != ibit
	      || GET_MODE_FBIT (mr) != fbit
	      || maybe_ne (GET_MODE_NUNITS (mr), nunits))
	    continue;
	  else if ((mclass == MODE_FLOAT || mclass == MODE_DECIMAL_FLOAT)
		   && strcmp (REAL_MODE_FORMAT (mr)->name, real_fmt_name) != 0)
	    continue;
	  else
	    {
	      /* Found a match; PASS = 2 plus the break makes the outer
		 loop's increment leave PASS == 3.  */
	      table[m] = mr;
	      pass = 2;
	      break;
	    }
      unsigned int mname_len;
      const char *mname = bp_unpack_indexed_string (data_in, &bp, &mname_len);
      /* PASS == 2 here means the search exhausted both passes without
	 a match (success exits the loop with PASS == 3).  */
      if (pass == 2)
	{
	  switch (mclass)
	    {
	    case MODE_VECTOR_BOOL:
	    case MODE_VECTOR_INT:
	    case MODE_VECTOR_FLOAT:
	    case MODE_VECTOR_FRACT:
	    case MODE_VECTOR_UFRACT:
	    case MODE_VECTOR_ACCUM:
	    case MODE_VECTOR_UACCUM:
	      /* For unsupported vector modes just use BLKmode,
		 if the scalar mode is supported.  */
	      if (table[(int) inner] != VOIDmode)
		{
		  table[m] = BLKmode;
		  break;
		}
	      /* FALLTHRU */
	    default:
	      fatal_error (UNKNOWN_LOCATION, "unsupported mode %qs", mname);
	      break;
	    }
	}
    }
  lto_data_in_delete (data_in);

  lto_free_section_data (file_data, LTO_section_mode_table, NULL, data, len);
}
1710
1711
1712 /* Initialization for the LTO reader. */
1713
1714 void
1715 lto_reader_init (void)
1716 {
1717 lto_streamer_init ();
1718 file_name_hash_table
1719 = new hash_table<string_slot_hasher> (37);
1720 string_slot_allocator = new object_allocator <struct string_slot>
1721 ("line map file name hash");
1722 gcc_obstack_init (&file_name_obstack);
1723 }
1724
1725 /* Free hash table used to stream in location file names. */
1726
1727 void
1728 lto_free_file_name_hash (void)
1729 {
1730 delete file_name_hash_table;
1731 file_name_hash_table = NULL;
1732 delete string_slot_allocator;
1733 string_slot_allocator = NULL;
1734 /* file_name_obstack must stay allocated since it is referred to by
1735 line map table. */
1736 }
1737
1738
1739 /* Create a new data_in object for FILE_DATA. STRINGS is the string
1740 table to use with LEN strings. RESOLUTIONS is the vector of linker
1741 resolutions (NULL if not using a linker plugin). */
1742
1743 class data_in *
1744 lto_data_in_create (struct lto_file_decl_data *file_data, const char *strings,
1745 unsigned len,
1746 vec<ld_plugin_symbol_resolution_t> resolutions)
1747 {
1748 class data_in *data_in = new (class data_in);
1749 data_in->file_data = file_data;
1750 data_in->strings = strings;
1751 data_in->strings_len = len;
1752 data_in->globals_resolution = resolutions;
1753 data_in->reader_cache = streamer_tree_cache_create (false, false, true);
1754 return data_in;
1755 }
1756
1757
1758 /* Remove DATA_IN. */
1759
1760 void
1761 lto_data_in_delete (class data_in *data_in)
1762 {
1763 data_in->globals_resolution.release ();
1764 streamer_tree_cache_delete (data_in->reader_cache);
1765 delete data_in;
1766 }