]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/lto-streamer-in.c
[Ada] Improved support for aspect alignment in CCG
[thirdparty/gcc.git] / gcc / lto-streamer-in.c
1 /* Read the GIMPLE representation from a file stream.
2
3 Copyright (C) 2009-2020 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "cfghooks.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "gimple-streamer.h"
35 #include "toplev.h"
36 #include "gimple-iterator.h"
37 #include "tree-cfg.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "tree-ssa.h"
41 #include "except.h"
42 #include "cgraph.h"
43 #include "cfgloop.h"
44 #include "debug.h"
45 #include "alloc-pool.h"
46
/* Allocator used to hold string slot entries for line map streaming.  */
static struct object_allocator<struct string_slot> *string_slot_allocator;

/* The table to hold the file names.  */
static hash_table<string_slot_hasher> *file_name_hash_table;

/* This obstack holds file names used in locators.  Line map datastructures
   point here and thus it needs to be kept allocated as long as linemaps
   exist.  */
static struct obstack file_name_obstack;
57
58
59 /* Check that tag ACTUAL has one of the given values. NUM_TAGS is the
60 number of valid tag values to check. */
61
62 void
63 lto_tag_check_set (enum LTO_tags actual, int ntags, ...)
64 {
65 va_list ap;
66 int i;
67
68 va_start (ap, ntags);
69 for (i = 0; i < ntags; i++)
70 if ((unsigned) actual == va_arg (ap, unsigned))
71 {
72 va_end (ap);
73 return;
74 }
75
76 va_end (ap);
77 internal_error ("bytecode stream: unexpected tag %s", lto_tag_name (actual));
78 }
79
80
81 /* Read LENGTH bytes from STREAM to ADDR. */
82
83 void
84 lto_input_data_block (class lto_input_block *ib, void *addr, size_t length)
85 {
86 size_t i;
87 unsigned char *const buffer = (unsigned char *) addr;
88
89 for (i = 0; i < length; i++)
90 buffer[i] = streamer_read_uchar (ib);
91 }
92
93
94 /* Lookup STRING in file_name_hash_table. If found, return the existing
95 string, otherwise insert STRING as the canonical version. */
96
97 static const char *
98 canon_file_name (const char *string)
99 {
100 string_slot **slot;
101 struct string_slot s_slot;
102 size_t len = strlen (string);
103
104 s_slot.s = string;
105 s_slot.len = len;
106
107 slot = file_name_hash_table->find_slot (&s_slot, INSERT);
108 if (*slot == NULL)
109 {
110 char *saved_string;
111 struct string_slot *new_slot;
112
113 saved_string = XOBNEWVEC (&file_name_obstack, char, len + 1);
114 new_slot = string_slot_allocator->allocate ();
115 memcpy (saved_string, string, len + 1);
116 new_slot->s = saved_string;
117 new_slot->len = len;
118 *slot = new_slot;
119 return saved_string;
120 }
121 else
122 {
123 struct string_slot *old_slot = *slot;
124 return old_slot->s;
125 }
126 }
127
/* Pointer to the currently alive instance of lto_location_cache; the
   static qsort comparator cmp_loc reaches instance state through it.  */

lto_location_cache *lto_location_cache::current_cache;
131
/* Sort locations in source order.  Start with file from last application.  */

int
lto_location_cache::cmp_loc (const void *pa, const void *pb)
{
  const cached_location *a = ((const cached_location *)pa);
  const cached_location *b = ((const cached_location *)pb);
  /* File and line most recently entered into the line table; entries
     continuing them sort first so apply_location_cache can keep
     extending the current line map.  */
  const char *current_file = current_cache->current_file;
  int current_line = current_cache->current_line;

  /* Entries in the current file win over all other files.  */
  if (a->file == current_file && b->file != current_file)
    return -1;
  if (a->file != current_file && b->file == current_file)
    return 1;
  if (a->file == current_file && b->file == current_file)
    {
      /* Within the current file, entries on the current line win.  */
      if (a->line == current_line && b->line != current_line)
	return -1;
      if (a->line != current_line && b->line == current_line)
	return 1;
    }
  /* Otherwise order by file name, system-header flag, line and column.
     File names are canonicalized by canon_file_name, so the pointer
     comparisons above are sufficient for equality; strcmp is only
     needed to order distinct names.  */
  if (a->file != b->file)
    return strcmp (a->file, b->file);
  if (a->sysp != b->sysp)
    return a->sysp ? 1 : -1;
  if (a->line != b->line)
    return a->line - b->line;
  return a->col - b->col;
}
161
/* Apply all changes in location cache.  Add locations into linemap and patch
   trees.  Returns false if the cache was empty.  */

bool
lto_location_cache::apply_location_cache ()
{
  /* File passed to the last linemap_add call; persists across invocations
     so that LC_RENAME (rather than LC_ENTER) is used once any file has
     been entered.  */
  static const char *prev_file;
  if (!loc_cache.length ())
    return false;
  /* Sorting (see cmp_loc) groups entries by file/line/column so the line
     table can be extended with a minimal number of map entries.  */
  if (loc_cache.length () > 1)
    loc_cache.qsort (cmp_loc);

  for (unsigned int i = 0; i < loc_cache.length (); i++)
    {
      struct cached_location loc = loc_cache[i];

      if (current_file != loc.file)
	linemap_add (line_table, prev_file ? LC_RENAME : LC_ENTER,
		     loc.sysp, loc.file, loc.line);
      else if (current_line != loc.line)
	{
	  int max = loc.col;

	  /* Look ahead over the remaining entries for the same file and
	     line to find the widest column, so linemap_line_start can
	     reserve enough column range at once.  */
	  for (unsigned int j = i + 1; j < loc_cache.length (); j++)
	    if (loc.file != loc_cache[j].file
		|| loc.line != loc_cache[j].line)
	      break;
	    else if (max < loc_cache[j].col)
	      max = loc_cache[j].col;
	  linemap_line_start (line_table, loc.line, max + 1);
	}
      /* Cached entries were tagged with this sentinel value by
	 input_location (RESERVED_LOCATION_COUNT == BUILTINS_LOCATION + 1).  */
      gcc_assert (*loc.loc == BUILTINS_LOCATION + 1);
      if (current_file == loc.file && current_line == loc.line
	  && current_col == loc.col)
	*loc.loc = current_loc;
      else
	current_loc = *loc.loc = linemap_position_for_column (line_table,
							      loc.col);
      current_line = loc.line;
      prev_file = current_file = loc.file;
      current_col = loc.col;
    }
  loc_cache.truncate (0);
  accepted_length = 0;
  return true;
}
208
/* Tree merging did not succeed; mark all changes in the cache as accepted.  */

void
lto_location_cache::accept_location_cache ()
{
  gcc_assert (current_cache == this);
  accepted_length = loc_cache.length ();
}
217
/* Tree merging did succeed; throw away recent changes (everything cached
   after the last accept_location_cache call).  */

void
lto_location_cache::revert_location_cache ()
{
  loc_cache.truncate (accepted_length);
}
225
/* Read a location bitpack from input block IB and either update *LOC directly
   or add it to the location cache.
   It is necessary to call apply_location_cache to get *LOC updated.  */

void
lto_location_cache::input_location (location_t *loc, struct bitpack_d *bp,
				    class data_in *data_in)
{
  /* Delta-decoding state: the writer only streams the fields that changed
     relative to the previously streamed location, so remember the last
     values seen.  */
  static const char *stream_file;
  static int stream_line;
  static int stream_col;
  static bool stream_sysp;
  bool file_change, line_change, column_change;

  gcc_assert (current_cache == this);

  *loc = bp_unpack_int_in_range (bp, "location", 0, RESERVED_LOCATION_COUNT);

  /* Reserved locations (UNKNOWN_LOCATION etc.) are streamed directly.  */
  if (*loc < RESERVED_LOCATION_COUNT)
    return;

  /* Keep value RESERVED_LOCATION_COUNT in *loc as linemap lookups will
     ICE on it.  */

  file_change = bp_unpack_value (bp, 1);
  line_change = bp_unpack_value (bp, 1);
  column_change = bp_unpack_value (bp, 1);

  if (file_change)
    {
      /* Canonicalize so file names can later be compared by pointer.  */
      stream_file = canon_file_name (bp_unpack_string (data_in, bp));
      stream_sysp = bp_unpack_value (bp, 1);
    }

  if (line_change)
    stream_line = bp_unpack_var_len_unsigned (bp);

  if (column_change)
    stream_col = bp_unpack_var_len_unsigned (bp);

  /* This optimization saves location cache operations during gimple
     streaming.  */

  if (current_file == stream_file && current_line == stream_line
      && current_col == stream_col && current_sysp == stream_sysp)
    {
      *loc = current_loc;
      return;
    }

  /* Defer the linemap update; *LOC keeps the sentinel value checked by
     apply_location_cache until the cache is applied.  */
  struct cached_location entry
    = {stream_file, loc, stream_line, stream_col, stream_sysp};
  loc_cache.safe_push (entry);
}
280
/* Read a location bitpack from input block IB and either update *LOC directly
   or add it to the location cache of DATA_IN.
   It is necessary to call apply_location_cache to get *LOC updated.  */

void
lto_input_location (location_t *loc, struct bitpack_d *bp,
		    class data_in *data_in)
{
  data_in->location_cache.input_location (loc, bp, data_in);
}
291
/* Read location and return it instead of going through location caching.
   This should be used only when the resulting location is not going to be
   discarded.  */

location_t
stream_input_location_now (struct bitpack_d *bp, class data_in *data_in)
{
  location_t loc;
  stream_input_location (&loc, bp, data_in);
  /* Flush the whole cache so LOC (and any earlier pending locations) are
     resolved to real line-table positions before returning.  */
  data_in->location_cache.apply_location_cache ();
  return loc;
}
304
305 /* Read a reference to a tree node from DATA_IN using input block IB.
306 TAG is the expected node that should be found in IB, if TAG belongs
307 to one of the indexable trees, expect to read a reference index to
308 be looked up in one of the symbol tables, otherwise read the pysical
309 representation of the tree using stream_read_tree. FN is the
310 function scope for the read tree. */
311
312 tree
313 lto_input_tree_ref (class lto_input_block *ib, class data_in *data_in,
314 struct function *fn, enum LTO_tags tag)
315 {
316 unsigned HOST_WIDE_INT ix_u;
317 tree result = NULL_TREE;
318
319 if (tag == LTO_ssa_name_ref)
320 {
321 ix_u = streamer_read_uhwi (ib);
322 result = (*SSANAMES (fn))[ix_u];
323 }
324 else
325 {
326 gcc_checking_assert (tag == LTO_global_stream_ref);
327 ix_u = streamer_read_uhwi (ib);
328 result = (*data_in->file_data->current_decl_state
329 ->streams[LTO_DECL_STREAM])[ix_u];
330 }
331
332 gcc_assert (result);
333
334 return result;
335 }
336
337 /* Read VAR_DECL reference to DATA from IB. */
338
339 tree
340 lto_input_var_decl_ref (lto_input_block *ib, lto_file_decl_data *file_data)
341 {
342 unsigned int ix_u = streamer_read_uhwi (ib);
343 tree result = (*file_data->current_decl_state
344 ->streams[LTO_DECL_STREAM])[ix_u];
345 gcc_assert (TREE_CODE (result) == VAR_DECL);
346 return result;
347 }
348
/* Read FUNCTION_DECL reference to DATA from IB.  (The previous comment
   said VAR_DECL — a copy-paste from lto_input_var_decl_ref; this reader
   asserts FUNCTION_DECL.)  */

tree
lto_input_fn_decl_ref (lto_input_block *ib, lto_file_decl_data *file_data)
{
  unsigned int ix_u = streamer_read_uhwi (ib);
  tree result = (*file_data->current_decl_state
		 ->streams[LTO_DECL_STREAM])[ix_u];
  gcc_assert (TREE_CODE (result) == FUNCTION_DECL);
  return result;
}
360
361
/* Read and return a double-linked list of catch handlers from input
   block IB, using descriptors in DATA_IN.  *LAST_P is set to the tail
   of the list.  The list is terminated by an LTO_null record.  */

static struct eh_catch_d *
lto_input_eh_catch_list (class lto_input_block *ib, class data_in *data_in,
			 eh_catch *last_p)
{
  eh_catch first;
  enum LTO_tags tag;

  *last_p = first = NULL;
  tag = streamer_read_record_start (ib);
  while (tag)
    {
      tree list;
      eh_catch n;

      lto_tag_check_range (tag, LTO_eh_catch, LTO_eh_catch);

      /* Read the catch node.  */
      n = ggc_cleared_alloc<eh_catch_d> ();
      n->type_list = stream_read_tree (ib, data_in);
      n->filter_list = stream_read_tree (ib, data_in);
      n->label = stream_read_tree (ib, data_in);

      /* Register all the types in N->FILTER_LIST.  */
      for (list = n->filter_list; list; list = TREE_CHAIN (list))
	add_type_for_runtime (TREE_VALUE (list));

      /* Chain N to the end of the list.  */
      if (*last_p)
	(*last_p)->next_catch = n;
      n->prev_catch = *last_p;
      *last_p = n;

      /* Set the head of the list the first time through the loop.  */
      if (first == NULL)
	first = n;

      tag = streamer_read_record_start (ib);
    }

  return first;
}
406
407
/* Read and return EH region IX from input block IB, using descriptors
   in DATA_IN.  Returns NULL when the stream holds an LTO_null record.  */

static eh_region
input_eh_region (class lto_input_block *ib, class data_in *data_in, int ix)
{
  enum LTO_tags tag;
  eh_region r;

  /* Read the region header.  */
  tag = streamer_read_record_start (ib);
  if (tag == LTO_null)
    return NULL;

  r = ggc_cleared_alloc<eh_region_d> ();
  r->index = streamer_read_hwi (ib);

  /* Regions are streamed in array order, so the recorded index must
     match the slot being filled.  */
  gcc_assert (r->index == ix);

  /* Read all the region pointers as region numbers.  We'll fix up
     the pointers once the whole array has been read.  */
  r->outer = (eh_region) (intptr_t) streamer_read_hwi (ib);
  r->inner = (eh_region) (intptr_t) streamer_read_hwi (ib);
  r->next_peer = (eh_region) (intptr_t) streamer_read_hwi (ib);

  /* The record tag encodes the region type; read the type-specific
     payload.  */
  switch (tag)
    {
    case LTO_ert_cleanup:
      r->type = ERT_CLEANUP;
      break;

    case LTO_ert_try:
      {
	struct eh_catch_d *last_catch;
	r->type = ERT_TRY;
	r->u.eh_try.first_catch = lto_input_eh_catch_list (ib, data_in,
							   &last_catch);
	r->u.eh_try.last_catch = last_catch;
	break;
      }

    case LTO_ert_allowed_exceptions:
      {
	tree l;

	r->type = ERT_ALLOWED_EXCEPTIONS;
	r->u.allowed.type_list = stream_read_tree (ib, data_in);
	r->u.allowed.label = stream_read_tree (ib, data_in);
	r->u.allowed.filter = streamer_read_uhwi (ib);

	/* Register the allowed exception types with the runtime.  */
	for (l = r->u.allowed.type_list; l ; l = TREE_CHAIN (l))
	  add_type_for_runtime (TREE_VALUE (l));
      }
      break;

    case LTO_ert_must_not_throw:
      {
	r->type = ERT_MUST_NOT_THROW;
	r->u.must_not_throw.failure_decl = stream_read_tree (ib, data_in);
	bitpack_d bp = streamer_read_bitpack (ib);
	r->u.must_not_throw.failure_loc
	  = stream_input_location_now (&bp, data_in);
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Like the region pointers above, landing pads are streamed as indices
     and patched to real pointers by fixup_eh_region_pointers.  */
  r->landing_pads = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);

  return r;
}
481
482
/* Read and return EH landing pad IX from input block IB, using descriptors
   in DATA_IN.  Returns NULL when the stream holds an LTO_null record.  */

static eh_landing_pad
input_eh_lp (class lto_input_block *ib, class data_in *data_in, int ix)
{
  enum LTO_tags tag;
  eh_landing_pad lp;

  /* Read the landing pad header.  */
  tag = streamer_read_record_start (ib);
  if (tag == LTO_null)
    return NULL;

  lto_tag_check_range (tag, LTO_eh_landing_pad, LTO_eh_landing_pad);

  lp = ggc_cleared_alloc<eh_landing_pad_d> ();
  lp->index = streamer_read_hwi (ib);
  /* Landing pads are streamed in array order.  */
  gcc_assert (lp->index == ix);
  /* next_lp and region are streamed as indices; they are converted to
     real pointers later by fixup_eh_region_pointers.  */
  lp->next_lp = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
  lp->region = (eh_region) (intptr_t) streamer_read_hwi (ib);
  lp->post_landing_pad = stream_read_tree (ib, data_in);

  return lp;
}
508
509
/* After reading the EH regions, pointers to peer and children regions
   are region numbers.  This converts all these region numbers into
   real pointers into the rematerialized regions for FN.  ROOT_REGION
   is the region number for the root EH region in FN.  */

static void
fixup_eh_region_pointers (struct function *fn, HOST_WIDE_INT root_region)
{
  unsigned i;
  vec<eh_region, va_gc> *eh_array = fn->eh->region_array;
  vec<eh_landing_pad, va_gc> *lp_array = fn->eh->lp_array;
  eh_region r;
  eh_landing_pad lp;

  gcc_assert (eh_array && lp_array);

  gcc_assert (root_region >= 0);
  fn->eh->region_tree = (*eh_array)[root_region];

  /* The input routines stored indices in the pointer fields (cast through
     intptr_t); these macros translate an index back into the object at
     that array slot.  NOTE(review): a stored index of 0 (a streamed NULL
     pointer) presumably maps to a NULL slot 0 — confirm against the
     writer.  */
#define FIXUP_EH_REGION(r) (r) = (*eh_array)[(HOST_WIDE_INT) (intptr_t) (r)]
#define FIXUP_EH_LP(p) (p) = (*lp_array)[(HOST_WIDE_INT) (intptr_t) (p)]

  /* Convert all the index numbers stored in pointer fields into
     pointers to the corresponding slots in the EH region array.  */
  FOR_EACH_VEC_ELT (*eh_array, i, r)
    {
      /* The array may contain NULL regions.  */
      if (r == NULL)
	continue;

      gcc_assert (i == (unsigned) r->index);
      FIXUP_EH_REGION (r->outer);
      FIXUP_EH_REGION (r->inner);
      FIXUP_EH_REGION (r->next_peer);
      FIXUP_EH_LP (r->landing_pads);
    }

  /* Convert all the index numbers stored in pointer fields into
     pointers to the corresponding slots in the EH landing pad array.  */
  FOR_EACH_VEC_ELT (*lp_array, i, lp)
    {
      /* The array may contain NULL landing pads.  */
      if (lp == NULL)
	continue;

      gcc_assert (i == (unsigned) lp->index);
      FIXUP_EH_LP (lp->next_lp);
      FIXUP_EH_REGION (lp->region);
    }

#undef FIXUP_EH_REGION
#undef FIXUP_EH_LP
}
563
564
565 /* Initialize EH support. */
566
567 void
568 lto_init_eh (void)
569 {
570 static bool eh_initialized_p = false;
571
572 if (eh_initialized_p)
573 return;
574
575 /* Contrary to most other FEs, we only initialize EH support when at
576 least one of the files in the set contains exception regions in
577 it. Since this happens much later than the call to init_eh in
578 lang_dependent_init, we have to set flag_exceptions and call
579 init_eh again to initialize the EH tables. */
580 flag_exceptions = 1;
581 init_eh ();
582
583 eh_initialized_p = true;
584 }
585
586
/* Read the exception table for FN from IB using the data descriptors
   in DATA_IN.  */

static void
input_eh_regions (class lto_input_block *ib, class data_in *data_in,
		  struct function *fn)
{
  HOST_WIDE_INT i, root_region, len;
  enum LTO_tags tag;

  tag = streamer_read_record_start (ib);
  if (tag == LTO_null)
    return;

  lto_tag_check_range (tag, LTO_eh_table, LTO_eh_table);

  gcc_assert (fn->eh);

  root_region = streamer_read_hwi (ib);
  /* Check the streamed value fits in an int.  */
  gcc_assert (root_region == (int) root_region);

  /* Read the EH region array.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      vec_safe_grow_cleared (fn->eh->region_array, len);
      for (i = 0; i < len; i++)
	{
	  eh_region r = input_eh_region (ib, data_in, i);
	  (*fn->eh->region_array)[i] = r;
	}
    }

  /* Read the landing pads.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      vec_safe_grow_cleared (fn->eh->lp_array, len);
      for (i = 0; i < len; i++)
	{
	  eh_landing_pad lp = input_eh_lp (ib, data_in, i);
	  (*fn->eh->lp_array)[i] = lp;
	}
    }

  /* Read the runtime type data.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      vec_safe_grow_cleared (fn->eh->ttype_data, len);
      for (i = 0; i < len; i++)
	{
	  tree ttype = stream_read_tree (ib, data_in);
	  (*fn->eh->ttype_data)[i] = ttype;
	}
    }

  /* Read the table of action chains.  The representation depends on the
     unwinder: trees for the ARM EABI unwinder, raw bytes otherwise.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      if (targetm.arm_eabi_unwinder)
	{
	  vec_safe_grow_cleared (fn->eh->ehspec_data.arm_eabi, len);
	  for (i = 0; i < len; i++)
	    {
	      tree t = stream_read_tree (ib, data_in);
	      (*fn->eh->ehspec_data.arm_eabi)[i] = t;
	    }
	}
      else
	{
	  vec_safe_grow_cleared (fn->eh->ehspec_data.other, len);
	  for (i = 0; i < len; i++)
	    {
	      uchar c = streamer_read_uchar (ib);
	      (*fn->eh->ehspec_data.other)[i] = c;
	    }
	}
    }

  /* Reconstruct the EH region tree by fixing up the peer/children
     pointers.  */
  fixup_eh_region_pointers (fn, root_region);

  /* The EH table record is terminated by LTO_null.  */
  tag = streamer_read_record_start (ib);
  lto_tag_check_range (tag, LTO_null, LTO_null);
}
679
680
681 /* Make a new basic block with index INDEX in function FN. */
682
683 static basic_block
684 make_new_block (struct function *fn, unsigned int index)
685 {
686 basic_block bb = alloc_block ();
687 bb->index = index;
688 SET_BASIC_BLOCK_FOR_FN (fn, index, bb);
689 n_basic_blocks_for_fn (fn)++;
690 return bb;
691 }
692
693
/* Read the CFG for function FN from input block IB.  */

static void
input_cfg (class lto_input_block *ib, class data_in *data_in,
	   struct function *fn)
{
  unsigned int bb_count;
  basic_block p_bb;
  unsigned int i;
  int index;

  init_empty_tree_cfg_for_function (fn);
  init_ssa_operands (fn);

  profile_status_for_fn (fn) = streamer_read_enum (ib, profile_status_d,
						   PROFILE_LAST);

  bb_count = streamer_read_uhwi (ib);

  last_basic_block_for_fn (fn) = bb_count;
  if (bb_count > basic_block_info_for_fn (fn)->length ())
    vec_safe_grow_cleared (basic_block_info_for_fn (fn), bb_count);

  if (bb_count > label_to_block_map_for_fn (fn)->length ())
    vec_safe_grow_cleared (label_to_block_map_for_fn (fn), bb_count);

  /* Read the blocks together with their successor edges; the list of
     blocks is terminated by index -1.  */
  index = streamer_read_hwi (ib);
  while (index != -1)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
      unsigned int edge_count;

      if (bb == NULL)
	bb = make_new_block (fn, index);

      edge_count = streamer_read_uhwi (ib);

      /* Connect up the CFG.  */
      for (i = 0; i < edge_count; i++)
	{
	  unsigned int dest_index;
	  unsigned int edge_flags;
	  basic_block dest;
	  profile_probability probability;
	  edge e;

	  dest_index = streamer_read_uhwi (ib);
	  probability = profile_probability::stream_in (ib);
	  edge_flags = streamer_read_uhwi (ib);

	  dest = BASIC_BLOCK_FOR_FN (fn, dest_index);

	  /* A destination block may not have been read yet; create it
	     on first reference.  */
	  if (dest == NULL)
	    dest = make_new_block (fn, dest_index);

	  e = make_edge (bb, dest, edge_flags);
	  e->probability = probability;
	}

      index = streamer_read_hwi (ib);
    }

  /* Rebuild the doubly-linked chain of blocks in layout order, again
     terminated by index -1.  */
  p_bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
  index = streamer_read_hwi (ib);
  while (index != -1)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
      bb->prev_bb = p_bb;
      p_bb->next_bb = bb;
      p_bb = bb;
      index = streamer_read_hwi (ib);
    }

  /* ??? The cfgloop interface is tied to cfun.  */
  gcc_assert (cfun == fn);

  /* Input the loop tree.  */
  unsigned n_loops = streamer_read_uhwi (ib);
  if (n_loops == 0)
    return;

  struct loops *loops = ggc_cleared_alloc<struct loops> ();
  init_loops_structure (fn, loops, n_loops);
  set_loops_for_fn (fn, loops);

  /* Input each loop and associate it with its loop header so
     flow_loops_find can rebuild the loop tree.  */
  for (unsigned i = 1; i < n_loops; ++i)
    {
      int header_index = streamer_read_hwi (ib);
      /* Header index -1 marks a removed loop; keep the slot so loop
	 numbers remain stable.  */
      if (header_index == -1)
	{
	  loops->larray->quick_push (NULL);
	  continue;
	}

      class loop *loop = alloc_loop ();
      loop->header = BASIC_BLOCK_FOR_FN (fn, header_index);
      loop->header->loop_father = loop;

      /* Read everything copy_loop_info copies.  */
      loop->estimate_state = streamer_read_enum (ib, loop_estimation, EST_LAST);
      loop->any_upper_bound = streamer_read_hwi (ib);
      if (loop->any_upper_bound)
	loop->nb_iterations_upper_bound = streamer_read_widest_int (ib);
      loop->any_likely_upper_bound = streamer_read_hwi (ib);
      if (loop->any_likely_upper_bound)
	loop->nb_iterations_likely_upper_bound = streamer_read_widest_int (ib);
      loop->any_estimate = streamer_read_hwi (ib);
      if (loop->any_estimate)
	loop->nb_iterations_estimate = streamer_read_widest_int (ib);

      /* Read OMP SIMD related info.  */
      loop->safelen = streamer_read_hwi (ib);
      loop->unroll = streamer_read_hwi (ib);
      loop->owned_clique = streamer_read_hwi (ib);
      loop->dont_vectorize = streamer_read_hwi (ib);
      loop->force_vectorize = streamer_read_hwi (ib);
      loop->finite_p = streamer_read_hwi (ib);
      loop->simduid = stream_read_tree (ib, data_in);

      place_new_loop (fn, loop);

      /* flow_loops_find doesn't like loops not in the tree, hook them
	 all as siblings of the tree root temporarily.  */
      flow_loop_tree_node_add (loops->tree_root, loop);
    }

  /* Rebuild the loop tree.  */
  flow_loops_find (loops);
}
825
826
/* Read the SSA names array for function FN from DATA_IN using input
   block IB.  */

static void
input_ssa_names (class lto_input_block *ib, class data_in *data_in,
		 struct function *fn)
{
  unsigned int i, size;

  size = streamer_read_uhwi (ib);
  init_ssanames (fn, size);

  /* Names are streamed as (index, default-def flag, var) records,
     terminated by index 0.  */
  i = streamer_read_uhwi (ib);
  while (i)
    {
      tree ssa_name, name;
      bool is_default_def;

      /* Skip over the elements that had been freed.  */
      while (SSANAMES (fn)->length () < i)
	SSANAMES (fn)->quick_push (NULL_TREE);

      is_default_def = (streamer_read_uchar (ib) != 0);
      name = stream_read_tree (ib, data_in);
      ssa_name = make_ssa_name_fn (fn, name, NULL);

      if (is_default_def)
	{
	  /* NOTE(review): uses cfun rather than FN here; presumably they
	     are the same function at this point — confirm against the
	     caller.  */
	  set_ssa_default_def (cfun, SSA_NAME_VAR (ssa_name), ssa_name);
	  SSA_NAME_DEF_STMT (ssa_name) = gimple_build_nop ();
	}

      i = streamer_read_uhwi (ib);
    }
}
862
863
/* Go through all NODE edges and fixup call_stmt pointers
   so they point to STMTS.  STMTS is indexed by statement uid minus one;
   a uid of 0 means no associated statement.  */

static void
fixup_call_stmt_edges_1 (struct cgraph_node *node, gimple **stmts,
			 struct function *fn)
{
#define STMT_UID_NOT_IN_RANGE(uid) \
  (gimple_stmt_max_uid (fn) < uid || uid == 0)

  struct cgraph_edge *cedge;
  struct ipa_ref *ref = NULL;
  unsigned int i;

  /* Direct call edges.  */
  for (cedge = node->callees; cedge; cedge = cedge->next_callee)
    {
      if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
	fatal_error (input_location,
		     "Cgraph edge statement index out of range");
      cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
      cedge->lto_stmt_uid = 0;
      if (!cedge->call_stmt)
	fatal_error (input_location,
		     "Cgraph edge statement index not found");
    }
  /* Indirect call edges.  */
  for (cedge = node->indirect_calls; cedge; cedge = cedge->next_callee)
    {
      if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
	fatal_error (input_location,
		     "Cgraph edge statement index out of range");
      cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
      cedge->lto_stmt_uid = 0;
      if (!cedge->call_stmt)
	fatal_error (input_location, "Cgraph edge statement index not found");
    }
  /* IPA references that carry a statement uid.  */
  for (i = 0; node->iterate_reference (i, ref); i++)
    if (ref->lto_stmt_uid)
      {
	if (STMT_UID_NOT_IN_RANGE (ref->lto_stmt_uid))
	  fatal_error (input_location,
		       "Reference statement index out of range");
	ref->stmt = stmts[ref->lto_stmt_uid - 1];
	ref->lto_stmt_uid = 0;
	if (!ref->stmt)
	  fatal_error (input_location, "Reference statement index not found");
      }
}
911
912
/* Fixup call_stmt pointers in NODE and all clones.  */

static void
fixup_call_stmt_edges (struct cgraph_node *orig, gimple **stmts)
{
  struct cgraph_node *node;
  struct function *fn;

  /* Walk up to the root of the clone tree; its decl owns the actual
     gimple body that STMTS was built from.  */
  while (orig->clone_of)
    orig = orig->clone_of;
  fn = DECL_STRUCT_FUNCTION (orig->decl);

  /* Thunks carry no statements of their own.  */
  if (!orig->thunk.thunk_p)
    fixup_call_stmt_edges_1 (orig, stmts, fn);
  if (orig->clones)
    /* Non-recursive depth-first walk of the clone tree: descend into
       clones first, then move to siblings, backing up via clone_of
       until we return to ORIG.  */
    for (node = orig->clones; node != orig;)
      {
	if (!node->thunk.thunk_p)
	  fixup_call_stmt_edges_1 (node, stmts, fn);
	if (node->clones)
	  node = node->clones;
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    while (node != orig && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != orig)
	      node = node->next_sibling_clone;
	  }
      }
}
945
946
/* Input the base body of struct function FN from DATA_IN
   using input block IB.  */

static void
input_struct_function_base (struct function *fn, class data_in *data_in,
			    class lto_input_block *ib)
{
  struct bitpack_d bp;
  int len;

  /* Read the static chain and non-local goto save area.  */
  fn->static_chain_decl = stream_read_tree (ib, data_in);
  fn->nonlocal_goto_save_area = stream_read_tree (ib, data_in);

  /* Read all the local symbols.  */
  len = streamer_read_hwi (ib);
  if (len > 0)
    {
      int i;
      vec_safe_grow_cleared (fn->local_decls, len);
      for (i = 0; i < len; i++)
	{
	  tree t = stream_read_tree (ib, data_in);
	  (*fn->local_decls)[i] = t;
	}
    }

  /* Input the current IL state of the function.  */
  fn->curr_properties = streamer_read_uhwi (ib);

  /* Read all the attributes for FN.  Single-bit flags except where a
     wider field is given; the order must match the writer exactly.  */
  bp = streamer_read_bitpack (ib);
  fn->is_thunk = bp_unpack_value (&bp, 1);
  fn->has_local_explicit_reg_vars = bp_unpack_value (&bp, 1);
  fn->returns_pcc_struct = bp_unpack_value (&bp, 1);
  fn->returns_struct = bp_unpack_value (&bp, 1);
  fn->can_throw_non_call_exceptions = bp_unpack_value (&bp, 1);
  fn->can_delete_dead_exceptions = bp_unpack_value (&bp, 1);
  fn->always_inline_functions_inlined = bp_unpack_value (&bp, 1);
  fn->after_inlining = bp_unpack_value (&bp, 1);
  fn->stdarg = bp_unpack_value (&bp, 1);
  fn->has_nonlocal_label = bp_unpack_value (&bp, 1);
  fn->has_forced_label_in_static = bp_unpack_value (&bp, 1);
  fn->calls_alloca = bp_unpack_value (&bp, 1);
  fn->calls_setjmp = bp_unpack_value (&bp, 1);
  fn->calls_eh_return = bp_unpack_value (&bp, 1);
  fn->has_force_vectorize_loops = bp_unpack_value (&bp, 1);
  fn->has_simduid_loops = bp_unpack_value (&bp, 1);
  fn->va_list_fpr_size = bp_unpack_value (&bp, 8);
  fn->va_list_gpr_size = bp_unpack_value (&bp, 8);
  fn->last_clique = bp_unpack_value (&bp, sizeof (short) * 8);

  /* Input the function start and end loci.  */
  fn->function_start_locus = stream_input_location_now (&bp, data_in);
  fn->function_end_locus = stream_input_location_now (&bp, data_in);

  /* Restore the instance discriminators if present.  */
  int instance_number = bp_unpack_value (&bp, 1);
  if (instance_number)
    {
      instance_number = bp_unpack_value (&bp, sizeof (int) * CHAR_BIT);
      maybe_create_decl_to_instance_map ()->put (fn->decl, instance_number);
    }
}
1011
1012 /* Read a chain of tree nodes from input block IB. DATA_IN contains
1013 tables and descriptors for the file being read. */
1014
1015 static tree
1016 streamer_read_chain (class lto_input_block *ib, class data_in *data_in)
1017 {
1018 tree first, prev, curr;
1019
1020 /* The chain is written as NULL terminated list of trees. */
1021 first = prev = NULL_TREE;
1022 do
1023 {
1024 curr = stream_read_tree (ib, data_in);
1025 if (prev)
1026 TREE_CHAIN (prev) = curr;
1027 else
1028 first = curr;
1029
1030 prev = curr;
1031 }
1032 while (curr);
1033
1034 return first;
1035 }
1036
/* Read the body of function FN_DECL from DATA_IN using input block IB.
   IB_CFG holds the separately-streamed CFG for the function and NODE is
   the callgraph node whose call-statement references get fixed up after
   the statements are materialized.  */

static void
input_function (tree fn_decl, class data_in *data_in,
	       class lto_input_block *ib, class lto_input_block *ib_cfg,
	       cgraph_node *node)
{
  struct function *fn;
  enum LTO_tags tag;
  gimple **stmts;
  basic_block bb;

  tag = streamer_read_record_start (ib);
  lto_tag_check (tag, LTO_function);

  /* Read decls for parameters and args.  */
  DECL_RESULT (fn_decl) = stream_read_tree (ib, data_in);
  DECL_ARGUMENTS (fn_decl) = streamer_read_chain (ib, data_in);

  /* Read debug args if available.  */
  unsigned n_debugargs = streamer_read_uhwi (ib);
  if (n_debugargs)
    {
      vec<tree, va_gc> **debugargs = decl_debug_args_insert (fn_decl);
      vec_safe_grow (*debugargs, n_debugargs);
      for (unsigned i = 0; i < n_debugargs; ++i)
	(**debugargs)[i] = stream_read_tree (ib, data_in);
    }

  /* Read the tree of lexical scopes for the function.  */
  DECL_INITIAL (fn_decl) = stream_read_tree (ib, data_in);
  /* The writer also streamed the leaf BLOCKs of the scope tree; read
     them here to force their materialization (the values themselves
     are not needed beyond entering them in the reader cache).  */
  unsigned block_leaf_count = streamer_read_uhwi (ib);
  while (block_leaf_count--)
    stream_read_tree (ib, data_in);

  /* A zero flag means no body follows (declaration only); stop here.  */
  if (!streamer_read_uhwi (ib))
    return;

  push_struct_function (fn_decl);
  fn = DECL_STRUCT_FUNCTION (fn_decl);
  init_tree_ssa (fn);
  /* We input IL in SSA form.  */
  cfun->gimple_df->in_ssa_p = true;

  gimple_register_cfg_hooks ();

  input_struct_function_base (fn, data_in, ib);
  input_cfg (ib_cfg, data_in, fn);

  /* Read all the SSA names.  */
  input_ssa_names (ib, data_in, fn);

  /* Read the exception handling regions in the function.  */
  input_eh_regions (ib, data_in, fn);

  gcc_assert (DECL_INITIAL (fn_decl));
  DECL_SAVED_TREE (fn_decl) = NULL_TREE;

  /* Read all the basic blocks.  */
  tag = streamer_read_record_start (ib);
  while (tag)
    {
      input_bb (ib, tag, data_in, fn,
		node->count_materialization_scale);
      tag = streamer_read_record_start (ib);
    }

  /* Fix up the call statements that are mentioned in the callgraph
     edges.  */
  /* First assign fresh uids in the same block/statement order the
     writer used, so that the uids recorded in callgraph edges match.  */
  set_gimple_stmt_max_uid (cfun, 0);
  FOR_ALL_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
    }
  /* STMTS maps uid -> statement; callgraph edge fixups below index
     into it.  Allocated zeroed so removed uids stay NULL.  */
  stmts = (gimple **) xcalloc (gimple_stmt_max_uid (fn), sizeof (gimple *));
  FOR_ALL_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator bsi = gsi_start_phis (bb);
      while (!gsi_end_p (bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  gsi_next (&bsi);
	  stmts[gimple_uid (stmt)] = stmt;
	}
      bsi = gsi_start_bb (bb);
      while (!gsi_end_p (bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  bool remove = false;
	  /* If we're recompiling LTO objects with debug stmts but
	     we're not supposed to have debug stmts, remove them now.
	     We can't remove them earlier because this would cause uid
	     mismatches in fixups, but we can do it at this point, as
	     long as debug stmts don't require fixups.
	     Similarly remove all IFN_*SAN_* internal calls   */
	  if (!flag_wpa)
	    {
	      if (is_gimple_debug (stmt)
		  && (gimple_debug_nonbind_marker_p (stmt)
		      ? !MAY_HAVE_DEBUG_MARKER_STMTS
		      : !MAY_HAVE_DEBUG_BIND_STMTS))
		remove = true;
	      /* In case the linemap overflows locations can be dropped
		 to zero.  Thus do not keep nonsensical inline entry markers
		 we'd later ICE on.  */
	      tree block;
	      if (gimple_debug_inline_entry_p (stmt)
		  && (((block = gimple_block (stmt))
		       && !inlined_function_outer_scope_p (block))
		      || !debug_inline_points))
		remove = true;
	      /* Sanitizer internal calls are neutered (turned into
		 IFN_NOP rather than removed, to keep uids stable) when
		 the corresponding sanitizer is disabled in this
		 compilation.  */
	      if (is_gimple_call (stmt)
		  && gimple_call_internal_p (stmt))
		{
		  bool replace = false;
		  switch (gimple_call_internal_fn (stmt))
		    {
		    case IFN_UBSAN_NULL:
		      if ((flag_sanitize
			   & (SANITIZE_NULL | SANITIZE_ALIGNMENT)) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_BOUNDS:
		      if ((flag_sanitize & SANITIZE_BOUNDS) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_VPTR:
		      if ((flag_sanitize & SANITIZE_VPTR) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_OBJECT_SIZE:
		      if ((flag_sanitize & SANITIZE_OBJECT_SIZE) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_PTR:
		      if ((flag_sanitize & SANITIZE_POINTER_OVERFLOW) == 0)
			replace = true;
		      break;
		    case IFN_ASAN_MARK:
		      if ((flag_sanitize & SANITIZE_ADDRESS) == 0)
			replace = true;
		      break;
		    case IFN_TSAN_FUNC_EXIT:
		      if ((flag_sanitize & SANITIZE_THREAD) == 0)
			replace = true;
		      break;
		    default:
		      break;
		    }
		  if (replace)
		    {
		      gimple_call_set_internal_fn (as_a <gcall *> (stmt),
						   IFN_NOP);
		      update_stmt (stmt);
		    }
		}
	    }
	  if (remove)
	    {
	      /* Advance BSI past STMT before removing it so the
		 iterator stays valid.  */
	      gimple_stmt_iterator gsi = bsi;
	      gsi_next (&bsi);
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      gsi_remove (&gsi, true);
	    }
	  else
	    {
	      gsi_next (&bsi);
	      stmts[gimple_uid (stmt)] = stmt;

	      /* Remember that the input function has begin stmt
		 markers, so that we know to expect them when emitting
		 debug info.  */
	      if (!cfun->debug_nonbind_markers
		  && gimple_debug_nonbind_marker_p (stmt))
		cfun->debug_nonbind_markers = true;
	    }
	}
    }

  /* Set the gimple body to the statement sequence in the entry
     basic block.  FIXME lto, this is fairly hacky.  The existence
     of a gimple body is used by the cgraph routines, but we should
     really use the presence of the CFG.  */
  {
    edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
    gimple_set_body (fn_decl, bb_seq (ei_edge (ei)->dest));
  }

  update_max_bb_count ();
  fixup_call_stmt_edges (node, stmts);
  execute_all_ipa_stmt_fixups (node, stmts);

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  free (stmts);
  pop_cfun ();
}
1245
/* Read the static initializer for variable VAR from DATA_IN using
   input block IB.  Unlike a function body, an initializer is a single
   tree that is stored into DECL_INITIAL.  */

static void
input_constructor (tree var, class data_in *data_in,
		   class lto_input_block *ib)
{
  DECL_INITIAL (var) = stream_read_tree (ib, data_in);
}
1254
1255
/* Read the body from DATA for function NODE and fill it in.
   FILE_DATA are the global decls and types.  SECTION_TYPE is either
   LTO_section_function_body or LTO_section_static_initializer.  If
   section type is LTO_section_function_body, FN must be the decl for
   that function.  */

static void
lto_read_body_or_constructor (struct lto_file_decl_data *file_data, struct symtab_node *node,
			      const char *data, enum lto_section_type section_type)
{
  const struct lto_function_header *header;
  class data_in *data_in;
  int cfg_offset;
  int main_offset;
  int string_offset;
  tree fn_decl = node->decl;

  /* The section layout is [header][cfg][main][strings] for functions
     and [header][main][strings] for variables; compute the offsets of
     each sub-stream.  CFG_OFFSET is only set, and only used, in the
     FUNCTION_DECL case.  */
  header = (const struct lto_function_header *) data;
  if (TREE_CODE (node->decl) == FUNCTION_DECL)
    {
      cfg_offset = sizeof (struct lto_function_header);
      main_offset = cfg_offset + header->cfg_size;
      string_offset = main_offset + header->main_size;
    }
  else
    {
      main_offset = sizeof (struct lto_function_header);
      string_offset = main_offset + header->main_size;
    }

  data_in = lto_data_in_create (file_data, data + string_offset,
				header->string_size, vNULL);

  if (section_type == LTO_section_function_body)
    {
      struct lto_in_decl_state *decl_state;
      unsigned from;

      gcc_checking_assert (node);

      /* Use the function's decl state.  */
      decl_state = lto_get_function_in_decl_state (file_data, fn_decl);
      gcc_assert (decl_state);
      file_data->current_decl_state = decl_state;


      /* Set up the struct function.  FROM records the reader-cache
	 position so the fixup loop below only visits trees streamed
	 as part of this body.  */
      from = data_in->reader_cache->nodes.length ();
      lto_input_block ib_main (data + main_offset, header->main_size,
			       file_data->mode_table);
      if (TREE_CODE (node->decl) == FUNCTION_DECL)
	{
	  lto_input_block ib_cfg (data + cfg_offset, header->cfg_size,
				  file_data->mode_table);
	  input_function (fn_decl, data_in, &ib_main, &ib_cfg,
			  dyn_cast <cgraph_node *>(node));
	}
      else
	input_constructor (fn_decl, data_in, &ib_main);
      data_in->location_cache.apply_location_cache ();
      /* And fixup types we streamed locally.  */
      {
	struct streamer_tree_cache_d *cache = data_in->reader_cache;
	unsigned len = cache->nodes.length ();
	unsigned i;
	/* Walk only the nodes materialized for this body, i.e. cache
	   indices FROM .. LEN-1, newest first.  */
	for (i = len; i-- > from;)
	  {
	    tree t = streamer_tree_cache_get_tree (cache, i);
	    if (t == NULL_TREE)
	      continue;

	    if (TYPE_P (t))
	      {
		/* Locally streamed types arrive without canonical
		   types; compute them here, and thread each variant
		   onto its main variant's variant list.  */
		gcc_assert (TYPE_CANONICAL (t) == NULL_TREE);
		if (type_with_alias_set_p (t)
		    && canonical_type_used_p (t))
		  TYPE_CANONICAL (t) = TYPE_MAIN_VARIANT (t);
		if (TYPE_MAIN_VARIANT (t) != t)
		  {
		    gcc_assert (TYPE_NEXT_VARIANT (t) == NULL_TREE);
		    TYPE_NEXT_VARIANT (t)
		      = TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t));
		    TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t)) = t;
		  }
	      }
	  }
      }

      /* Restore decl state */
      file_data->current_decl_state = file_data->global_decl_state;
    }

  lto_data_in_delete (data_in);
}
1350
1351
1352 /* Read the body of NODE using DATA. FILE_DATA holds the global
1353 decls and types. */
1354
1355 void
1356 lto_input_function_body (struct lto_file_decl_data *file_data,
1357 struct cgraph_node *node, const char *data)
1358 {
1359 lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
1360 }
1361
1362 /* Read the body of NODE using DATA. FILE_DATA holds the global
1363 decls and types. */
1364
1365 void
1366 lto_input_variable_constructor (struct lto_file_decl_data *file_data,
1367 struct varpool_node *node, const char *data)
1368 {
1369 lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
1370 }
1371
1372
1373 /* Queue of acummulated decl -> DIE mappings. Similar to locations those
1374 are only applied to prevailing tree nodes during tree merging. */
1375 vec<dref_entry> dref_queue;
1376
/* Read the physical representation of a tree node EXPR from
   input block IB using the per-file context in DATA_IN.  EXPR must
   already be allocated and entered in the reader cache; this fills in
   its bitfields, pointer fields and LTO-specific trailing data.  */

static void
lto_read_tree_1 (class lto_input_block *ib, class data_in *data_in, tree expr)
{
  /* Read all the bitfield values in EXPR.  Note that for LTO, we
     only write language-independent bitfields, so no more unpacking is
     needed.  */
  streamer_read_tree_bitfields (ib, data_in, expr);

  /* Read all the pointer fields in EXPR.  */
  streamer_read_tree_body (ib, data_in, expr);

  /* Read any LTO-specific data not read by the tree streamer.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    DECL_INITIAL (expr) = stream_read_tree (ib, data_in);

  /* Stream references to early generated DIEs.  Keep in sync with the
     trees handled in dwarf2out_register_external_die.  */
  if ((DECL_P (expr)
       && TREE_CODE (expr) != FIELD_DECL
       && TREE_CODE (expr) != DEBUG_EXPR_DECL
       && TREE_CODE (expr) != TYPE_DECL)
      || TREE_CODE (expr) == BLOCK)
    {
      /* A non-NULL string names the symbol of the early DIE; queue the
	 mapping for later registration with the debug machinery (it is
	 only applied to prevailing nodes during tree merging).  */
      const char *str = streamer_read_string (data_in, ib);
      if (str)
	{
	  unsigned HOST_WIDE_INT off = streamer_read_uhwi (ib);
	  dref_entry e = { expr, str, off };
	  dref_queue.safe_push (e);
	}
    }
}
1414
1415 /* Read the physical representation of a tree node with tag TAG from
1416 input block IB using the per-file context in DATA_IN. */
1417
1418 static tree
1419 lto_read_tree (class lto_input_block *ib, class data_in *data_in,
1420 enum LTO_tags tag, hashval_t hash)
1421 {
1422 /* Instantiate a new tree node. */
1423 tree result = streamer_alloc_tree (ib, data_in, tag);
1424
1425 /* Enter RESULT in the reader cache. This will make RESULT
1426 available so that circular references in the rest of the tree
1427 structure can be resolved in subsequent calls to stream_read_tree. */
1428 streamer_tree_cache_append (data_in->reader_cache, result, hash);
1429
1430 lto_read_tree_1 (ib, data_in, result);
1431
1432 return result;
1433 }
1434
1435
/* Populate the reader cache with trees materialized from the SCC
   following in the IB, DATA_IN stream.
   If SHARED_SCC is true we input LTO_tree_scc.
   On return *LEN is the number of trees in the SCC and *ENTRY_LEN the
   SCC entry length; the returned value is the SCC hash (0 when not a
   shared SCC).  */

hashval_t
lto_input_scc (class lto_input_block *ib, class data_in *data_in,
	       unsigned *len, unsigned *entry_len, bool shared_scc)
{
  unsigned size = streamer_read_uhwi (ib);
  hashval_t scc_hash = 0;
  unsigned scc_entry_len = 1;

  if (shared_scc)
    {
      /* For shared SCCs SIZE is streamed doubled, with the low bit
	 flagging that an explicit entry length follows.  */
      if (size & 1)
	scc_entry_len = streamer_read_uhwi (ib);
      size /= 2;
      scc_hash = streamer_read_uhwi (ib);
    }

  if (size == 1)
    {
      /* A singleton is streamed as an ordinary tagged tree.  */
      enum LTO_tags tag = streamer_read_record_start (ib);
      lto_input_tree_1 (ib, data_in, tag, scc_hash);
    }
  else
    {
      unsigned int first = data_in->reader_cache->nodes.length ();
      tree result;

      /* Materialize size trees by reading their headers.  */
      for (unsigned i = 0; i < size; ++i)
	{
	  enum LTO_tags tag = streamer_read_record_start (ib);
	  /* References and nested SCCs cannot appear as SCC members;
	     only headers of concrete nodes are valid here.  */
	  if (tag == LTO_null
	      || tag == LTO_global_stream_ref
	      || tag == LTO_tree_pickle_reference
	      || tag == LTO_integer_cst
	      || tag == LTO_tree_scc
	      || tag == LTO_trees)
	    gcc_unreachable ();

	  result = streamer_alloc_tree (ib, data_in, tag);
	  streamer_tree_cache_append (data_in->reader_cache, result, 0);
	}

      /* Read the tree bitpacks and references.  */
      for (unsigned i = 0; i < size; ++i)
	{
	  result = streamer_tree_cache_get_tree (data_in->reader_cache,
						 first + i);
	  lto_read_tree_1 (ib, data_in, result);
	}
    }

  *len = size;
  *entry_len = scc_entry_len;
  return scc_hash;
}
1495
/* Read reference to tree from IB and DATA_IN.
   This is used for streaming tree bodies where we know that
   the tree is already in cache or is indexable and
   must be matched with stream_write_tree_ref.  */

tree
stream_read_tree_ref (lto_input_block *ib, data_in *data_in)
{
  /* The index encodes three cases: 0 is NULL_TREE; values below
     LTO_NUM_TAGS are reference tags into indexable streams; anything
     else is a reader-cache index biased by LTO_NUM_TAGS.  */
  unsigned ix = streamer_read_uhwi (ib);
  tree ret;
  if (!ix)
    return NULL_TREE;
  else if (ix < LTO_NUM_TAGS)
    ret = lto_input_tree_ref (ib, data_in, cfun, (LTO_tags)ix);
  else
    ret = streamer_tree_cache_get_tree (data_in->reader_cache,
					ix - LTO_NUM_TAGS);
  /* With streamer debugging the writer also recorded the tree code;
     verify we resolved the reference to a node of the same code.  */
  if (ret && streamer_debugging)
    {
      enum tree_code c = (enum tree_code)streamer_read_uhwi (ib);
      gcc_assert (c == TREE_CODE (ret));
    }
  return ret;
}
1520
/* Read a tree from input block IB using the per-file context in
   DATA_IN.  This context is used, for example, to resolve references
   to previously read nodes.  TAG is the record tag already read from
   the stream; HASH is the streamer hash under which newly materialized
   nodes are cached.  */

tree
lto_input_tree_1 (class lto_input_block *ib, class data_in *data_in,
		  enum LTO_tags tag, hashval_t hash)
{
  tree result;

  gcc_assert ((unsigned) tag < (unsigned) LTO_NUM_TAGS);

  if (tag == LTO_null)
    result = NULL_TREE;
  else if (tag == LTO_global_stream_ref || tag == LTO_ssa_name_ref)
    {
      /* If TAG is a reference to an indexable tree, the next value
	 in IB is the index into the table where we expect to find
	 that tree.  */
      result = lto_input_tree_ref (ib, data_in, cfun, tag);
    }
  else if (tag == LTO_tree_pickle_reference)
    {
      /* If TAG is a reference to a previously read tree, look it up in
	 the reader cache.  */
      result = streamer_get_pickled_tree (ib, data_in);
    }
  else if (tag == LTO_integer_cst)
    {
      /* For shared integer constants in singletons we can use the
	 existing tree integer constant merging code.  */
      tree type = stream_read_tree_ref (ib, data_in);
      unsigned HOST_WIDE_INT len = streamer_read_uhwi (ib);
      unsigned HOST_WIDE_INT i;
      HOST_WIDE_INT a[WIDE_INT_MAX_ELTS];

      /* Reassemble the wide-int value from its streamed elements and
	 let wide_int_to_tree perform the canonical-constant sharing.  */
      for (i = 0; i < len; i++)
	a[i] = streamer_read_hwi (ib);
      gcc_assert (TYPE_PRECISION (type) <= MAX_BITSIZE_MODE_ANY_INT);
      result = wide_int_to_tree (type, wide_int::from_array
				 (a, len, TYPE_PRECISION (type)));
      streamer_tree_cache_append (data_in->reader_cache, result, hash);
    }
  else if (tag == LTO_tree_scc || tag == LTO_trees)
    /* SCC containers are handled by lto_input_scc / lto_input_tree,
       never by this function.  */
    gcc_unreachable ();
  else
    {
      /* Otherwise, materialize a new node from IB.  */
      result = lto_read_tree (ib, data_in, tag, hash);
    }

  return result;
}
1574
/* Read a tree from input block IB using the per-file context in
   DATA_IN, first materializing any LTO_trees SCC containers that
   precede the actual reference and draining the DIE-reference queue
   they may have produced.  */

tree
lto_input_tree (class lto_input_block *ib, class data_in *data_in)
{
  enum LTO_tags tag;

  /* Input pickled trees needed to stream in the reference.  */
  while ((tag = streamer_read_record_start (ib)) == LTO_trees)
    {
      unsigned len, entry_len;
      lto_input_scc (ib, data_in, &len, &entry_len, false);

      /* Register DECLs with the debuginfo machinery.  */
      while (!dref_queue.is_empty ())
	{
	  dref_entry e = dref_queue.pop ();
	  debug_hooks->register_external_die (e.decl, e.sym, e.off);
	}
    }
  tree t = lto_input_tree_1 (ib, data_in, tag, 0);

  /* The single tree just read can queue at most one DIE reference of
     its own; register it too.  */
  if (!dref_queue.is_empty ())
    {
      dref_entry e = dref_queue.pop ();
      debug_hooks->register_external_die (e.decl, e.sym, e.off);
      gcc_checking_assert (dref_queue.is_empty ());
    }
  return t;
}
1603
1604
/* Input toplevel asms from FILE_DATA's asm section, if present, and
   finalize them with the symbol table.  ORDER_BASE is added to each
   streamed order so asms from different files do not collide.  */

void
lto_input_toplevel_asms (struct lto_file_decl_data *file_data, int order_base)
{
  size_t len;
  const char *data
    = lto_get_summary_section_data (file_data, LTO_section_asm, &len);
  /* Note: HEADER is just a cast of DATA; it is not dereferenced before
     the NULL check below.  */
  const struct lto_simple_header_with_strings *header
    = (const struct lto_simple_header_with_strings *) data;
  int string_offset;
  class data_in *data_in;
  tree str;

  if (! data)
    return;

  string_offset = sizeof (*header) + header->main_size;

  lto_input_block ib (data + sizeof (*header), header->main_size,
		      file_data->mode_table);

  data_in = lto_data_in_create (file_data, data + string_offset,
				header->string_size, vNULL);

  /* The section is a sequence of (string, order) pairs terminated by
     a NULL string.  */
  while ((str = streamer_read_string_cst (data_in, &ib)))
    {
      asm_node *node = symtab->finalize_toplevel_asm (str);
      node->order = streamer_read_hwi (&ib) + order_base;
      /* Keep the symbol table's order counter beyond every asm.  */
      if (node->order >= symtab->order)
	symtab->order = node->order + 1;
    }

  lto_data_in_delete (data_in);

  lto_free_section_data (file_data, LTO_section_asm, NULL, data, len);
}
1642
1643
/* Input mode table.  Builds FILE_DATA->mode_table, a 256-entry map
   from the writer's machine_mode numbering to this host's modes, by
   matching each streamed mode description against the locally
   available modes.  Used for offloading-target compilations where the
   two compilers' mode enumerations differ.  */

void
lto_input_mode_table (struct lto_file_decl_data *file_data)
{
  size_t len;
  const char *data
    = lto_get_summary_section_data (file_data, LTO_section_mode_table, &len);
  if (! data)
    {
      internal_error ("cannot read LTO mode table from %s",
		      file_data->file_name);
      return;
    }

  unsigned char *table = ggc_cleared_vec_alloc<unsigned char> (1 << 8);
  file_data->mode_table = table;
  const struct lto_simple_header_with_strings *header
    = (const struct lto_simple_header_with_strings *) data;
  int string_offset;
  class data_in *data_in;
  string_offset = sizeof (*header) + header->main_size;

  lto_input_block ib (data + sizeof (*header), header->main_size, NULL);
  data_in = lto_data_in_create (file_data, data + string_offset,
				header->string_size, vNULL);
  bitpack_d bp = streamer_read_bitpack (&ib);

  /* VOIDmode and BLKmode always map to themselves.  */
  table[VOIDmode] = VOIDmode;
  table[BLKmode] = BLKmode;
  unsigned int m;
  /* Each record describes one writer-side mode M; a VOIDmode value
     terminates the list.  */
  while ((m = bp_unpack_value (&bp, 8)) != VOIDmode)
    {
      enum mode_class mclass
	= bp_unpack_enum (&bp, mode_class, MAX_MODE_CLASS);
      poly_uint16 size = bp_unpack_poly_value (&bp, 16);
      poly_uint16 prec = bp_unpack_poly_value (&bp, 16);
      machine_mode inner = (machine_mode) bp_unpack_value (&bp, 8);
      poly_uint16 nunits = bp_unpack_poly_value (&bp, 16);
      unsigned int ibit = 0, fbit = 0;
      unsigned int real_fmt_len = 0;
      const char *real_fmt_name = NULL;
      /* Fixed-point modes carry IBIT/FBIT; float modes carry the name
	 of their real format.  */
      switch (mclass)
	{
	case MODE_FRACT:
	case MODE_UFRACT:
	case MODE_ACCUM:
	case MODE_UACCUM:
	  ibit = bp_unpack_value (&bp, 8);
	  fbit = bp_unpack_value (&bp, 8);
	  break;
	case MODE_FLOAT:
	case MODE_DECIMAL_FLOAT:
	  real_fmt_name = bp_unpack_indexed_string (data_in, &bp,
						    &real_fmt_len);
	  break;
	default:
	  break;
	}
      /* First search just the GET_CLASS_NARROWEST_MODE to wider modes,
	 if not found, fallback to all modes.  */
      int pass;
      for (pass = 0; pass < 2; pass++)
	for (machine_mode mr = pass ? VOIDmode
				    : GET_CLASS_NARROWEST_MODE (mclass);
	     pass ? mr < MAX_MACHINE_MODE : mr != VOIDmode;
	     pass ? mr = (machine_mode) (mr + 1)
		  : mr = GET_MODE_WIDER_MODE (mr).else_void ())
	  if (GET_MODE_CLASS (mr) != mclass
	      || maybe_ne (GET_MODE_SIZE (mr), size)
	      || maybe_ne (GET_MODE_PRECISION (mr), prec)
	      /* INNER == M means the streamed mode is its own inner
		 mode (scalar); otherwise compare against the already
		 translated inner mode.  */
	      || (inner == m
		  ? GET_MODE_INNER (mr) != mr
		  : GET_MODE_INNER (mr) != table[(int) inner])
	      || GET_MODE_IBIT (mr) != ibit
	      || GET_MODE_FBIT (mr) != fbit
	      || maybe_ne (GET_MODE_NUNITS (mr), nunits))
	    continue;
	  else if ((mclass == MODE_FLOAT || mclass == MODE_DECIMAL_FLOAT)
		   && strcmp (REAL_MODE_FORMAT (mr)->name, real_fmt_name) != 0)
	    continue;
	  else
	    {
	      /* Match found: record it and force-exit both loops
		 (PASS becomes 3 after the outer increment).  */
	      table[m] = mr;
	      pass = 2;
	      break;
	    }
      unsigned int mname_len;
      const char *mname = bp_unpack_indexed_string (data_in, &bp, &mname_len);
      /* PASS == 2 here means the search loops terminated without a
	 match; on success the break above leaves PASS == 3.  */
      if (pass == 2)
	{
	  switch (mclass)
	    {
	    case MODE_VECTOR_BOOL:
	    case MODE_VECTOR_INT:
	    case MODE_VECTOR_FLOAT:
	    case MODE_VECTOR_FRACT:
	    case MODE_VECTOR_UFRACT:
	    case MODE_VECTOR_ACCUM:
	    case MODE_VECTOR_UACCUM:
	      /* For unsupported vector modes just use BLKmode,
		 if the scalar mode is supported.  */
	      if (table[(int) inner] != VOIDmode)
		{
		  table[m] = BLKmode;
		  break;
		}
	      /* FALLTHRU */
	    default:
	      /* This is only used for offloading-target compilations and
		 is a user-facing error.  Give a better error message for
		 the common modes; see also mode-classes.def.  */
	      if (mclass == MODE_FLOAT)
		fatal_error (UNKNOWN_LOCATION,
			     "%s - %u-bit-precision floating-point numbers "
			     "unsupported (mode %qs)", TARGET_MACHINE,
			     prec.to_constant (), mname);
	      else if (mclass == MODE_DECIMAL_FLOAT)
		fatal_error (UNKNOWN_LOCATION,
			     "%s - %u-bit-precision decimal floating-point "
			     "numbers unsupported (mode %qs)", TARGET_MACHINE,
			     prec.to_constant (), mname);
	      else if (mclass == MODE_COMPLEX_FLOAT)
		fatal_error (UNKNOWN_LOCATION,
			     "%s - %u-bit-precision complex floating-point "
			     "numbers unsupported (mode %qs)", TARGET_MACHINE,
			     prec.to_constant (), mname);
	      else if (mclass == MODE_INT)
		fatal_error (UNKNOWN_LOCATION,
			     "%s - %u-bit integer numbers unsupported (mode "
			     "%qs)", TARGET_MACHINE, prec.to_constant (), mname);
	      else
		fatal_error (UNKNOWN_LOCATION, "%s - unsupported mode %qs",
			     TARGET_MACHINE, mname);
	      break;
	    }
	}
    }
  lto_data_in_delete (data_in);

  lto_free_section_data (file_data, LTO_section_mode_table, NULL, data, len);
}
1786
1787
1788 /* Initialization for the LTO reader. */
1789
1790 void
1791 lto_reader_init (void)
1792 {
1793 lto_streamer_init ();
1794 file_name_hash_table
1795 = new hash_table<string_slot_hasher> (37);
1796 string_slot_allocator = new object_allocator <struct string_slot>
1797 ("line map file name hash");
1798 gcc_obstack_init (&file_name_obstack);
1799 }
1800
1801 /* Free hash table used to stream in location file names. */
1802
1803 void
1804 lto_free_file_name_hash (void)
1805 {
1806 delete file_name_hash_table;
1807 file_name_hash_table = NULL;
1808 delete string_slot_allocator;
1809 string_slot_allocator = NULL;
1810 /* file_name_obstack must stay allocated since it is referred to by
1811 line map table. */
1812 }
1813
1814
1815 /* Create a new data_in object for FILE_DATA. STRINGS is the string
1816 table to use with LEN strings. RESOLUTIONS is the vector of linker
1817 resolutions (NULL if not using a linker plugin). */
1818
1819 class data_in *
1820 lto_data_in_create (struct lto_file_decl_data *file_data, const char *strings,
1821 unsigned len,
1822 vec<ld_plugin_symbol_resolution_t> resolutions)
1823 {
1824 class data_in *data_in = new (class data_in);
1825 data_in->file_data = file_data;
1826 data_in->strings = strings;
1827 data_in->strings_len = len;
1828 data_in->globals_resolution = resolutions;
1829 data_in->reader_cache = streamer_tree_cache_create (false, false, true);
1830 return data_in;
1831 }
1832
1833
/* Remove DATA_IN: release the linker-resolution vector and reader
   cache it owns, then free the object itself.  */

void
lto_data_in_delete (class data_in *data_in)
{
  data_in->globals_resolution.release ();
  streamer_tree_cache_delete (data_in->reader_cache);
  delete data_in;
}