/* (Extraction artifact: this text came from a gitweb page header for
   thirdparty/gcc.git, blob gcc/lto-streamer-in.c, commit
   "Allow automatics in equivalences".)  */
1 /* Read the GIMPLE representation from a file stream.
2
3 Copyright (C) 2009-2019 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "cfghooks.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "gimple-streamer.h"
35 #include "toplev.h"
36 #include "gimple-iterator.h"
37 #include "tree-cfg.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "tree-ssa.h"
41 #include "except.h"
42 #include "cgraph.h"
43 #include "cfgloop.h"
44 #include "debug.h"
45
46
/* A string_slot hasher whose entries are owned by the hash table:
   when an entry is removed (or the table destroyed), the slot is
   released with free ().  */
struct freeing_string_slot_hasher : string_slot_hasher
{
  static inline void remove (value_type *);
};
51
/* Entry destructor: release the slot structure V itself.  Note that
   V->s is deliberately not freed here — canon_file_name hands the
   canonical string out to callers, so it must stay live.  */
inline void
freeing_string_slot_hasher::remove (value_type *v)
{
  free (v);
}
57
/* The table to hold the file names.  Slots are owned by the table
   (see freeing_string_slot_hasher); the canonical strings themselves
   are created and handed out by canon_file_name.  */
static hash_table<freeing_string_slot_hasher> *file_name_hash_table;
60
61
62 /* Check that tag ACTUAL has one of the given values. NUM_TAGS is the
63 number of valid tag values to check. */
64
65 void
66 lto_tag_check_set (enum LTO_tags actual, int ntags, ...)
67 {
68 va_list ap;
69 int i;
70
71 va_start (ap, ntags);
72 for (i = 0; i < ntags; i++)
73 if ((unsigned) actual == va_arg (ap, unsigned))
74 {
75 va_end (ap);
76 return;
77 }
78
79 va_end (ap);
80 internal_error ("bytecode stream: unexpected tag %s", lto_tag_name (actual));
81 }
82
83
84 /* Read LENGTH bytes from STREAM to ADDR. */
85
86 void
87 lto_input_data_block (class lto_input_block *ib, void *addr, size_t length)
88 {
89 size_t i;
90 unsigned char *const buffer = (unsigned char *) addr;
91
92 for (i = 0; i < length; i++)
93 buffer[i] = streamer_read_uchar (ib);
94 }
95
96
97 /* Lookup STRING in file_name_hash_table. If found, return the existing
98 string, otherwise insert STRING as the canonical version. */
99
100 static const char *
101 canon_file_name (const char *string)
102 {
103 string_slot **slot;
104 struct string_slot s_slot;
105 size_t len = strlen (string);
106
107 s_slot.s = string;
108 s_slot.len = len;
109
110 slot = file_name_hash_table->find_slot (&s_slot, INSERT);
111 if (*slot == NULL)
112 {
113 char *saved_string;
114 struct string_slot *new_slot;
115
116 saved_string = (char *) xmalloc (len + 1);
117 new_slot = XCNEW (struct string_slot);
118 memcpy (saved_string, string, len + 1);
119 new_slot->s = saved_string;
120 new_slot->len = len;
121 *slot = new_slot;
122 return saved_string;
123 }
124 else
125 {
126 struct string_slot *old_slot = *slot;
127 return old_slot->s;
128 }
129 }
130
/* Pointer to currently alive instance of lto_location_cache.  Only one
   cache is active at a time (the methods assert current_cache == this);
   cmp_loc reads it because qsort callbacks carry no context pointer.  */
lto_location_cache *lto_location_cache::current_cache;
134
135 /* Sort locations in source order. Start with file from last application. */
136
int
lto_location_cache::cmp_loc (const void *pa, const void *pb)
{
  const cached_location *a = ((const cached_location *)pa);
  const cached_location *b = ((const cached_location *)pb);
  /* Fetch the last-applied file/line through the singleton; a qsort
     comparator cannot take extra state.  */
  const char *current_file = current_cache->current_file;
  int current_line = current_cache->current_line;

  /* Entries for the file that was applied last sort first, so
     apply_location_cache can continue in that file without an extra
     linemap switch.  */
  if (a->file == current_file && b->file != current_file)
    return -1;
  if (a->file != current_file && b->file == current_file)
    return 1;
  if (a->file == current_file && b->file == current_file)
    {
      /* Within the current file, prefer the current line likewise.  */
      if (a->line == current_line && b->line != current_line)
	return -1;
      if (a->line != current_line && b->line == current_line)
	return 1;
    }
  /* Otherwise order by file name, system-header flag, line, column.  */
  if (a->file != b->file)
    return strcmp (a->file, b->file);
  if (a->sysp != b->sysp)
    return a->sysp ? 1 : -1;
  if (a->line != b->line)
    return a->line - b->line;
  return a->col - b->col;
}
164
165 /* Apply all changes in location cache. Add locations into linemap and patch
166 trees. */
167
bool
lto_location_cache::apply_location_cache ()
{
  /* PREV_FILE survives across calls: linemap_add must be told whether
     this is the very first file entered (LC_ENTER) or a switch from a
     previously entered one (LC_RENAME).  */
  static const char *prev_file;
  if (!loc_cache.length ())
    return false;
  /* Sort so that entries sharing a file/line are adjacent and the
     currently applied file comes first; see cmp_loc.  */
  if (loc_cache.length () > 1)
    loc_cache.qsort (cmp_loc);

  for (unsigned int i = 0; i < loc_cache.length (); i++)
    {
      struct cached_location loc = loc_cache[i];

      if (current_file != loc.file)
	linemap_add (line_table, prev_file ? LC_RENAME : LC_ENTER,
		     loc.sysp, loc.file, loc.line);
      else if (current_line != loc.line)
	{
	  int max = loc.col;

	  /* Look ahead for the widest column on this same file/line so
	     linemap_line_start can reserve enough column bits.  */
	  for (unsigned int j = i + 1; j < loc_cache.length (); j++)
	    if (loc.file != loc_cache[j].file
		|| loc.line != loc_cache[j].line)
	      break;
	    else if (max < loc_cache[j].col)
	      max = loc_cache[j].col;
	  linemap_line_start (line_table, loc.line, max + 1);
	}
      /* Pending slots hold the marker value BUILTINS_LOCATION + 1;
	 anything else means the slot was clobbered or double-patched.  */
      gcc_assert (*loc.loc == BUILTINS_LOCATION + 1);
      if (current_file == loc.file && current_line == loc.line
	  && current_col == loc.col)
	*loc.loc = current_loc;
      else
	current_loc = *loc.loc = linemap_position_for_column (line_table,
							      loc.col);
      current_line = loc.line;
      prev_file = current_file = loc.file;
      current_col = loc.col;
    }
  /* All pending entries are applied; nothing is left to revert.  */
  loc_cache.truncate (0);
  accepted_length = 0;
  return true;
}
211
/* Tree merging did not succeed; mark all changes in the cache as accepted.  */
213
void
lto_location_cache::accept_location_cache ()
{
  gcc_assert (current_cache == this);
  /* Everything cached so far is kept; revert_location_cache will no
     longer be able to throw these entries away.  */
  accepted_length = loc_cache.length ();
}
220
/* Tree merging did succeed; throw away recent changes.  */
222
void
lto_location_cache::revert_location_cache ()
{
  /* Drop every entry added since the last accept_location_cache.  */
  loc_cache.truncate (accepted_length);
}
228
/* Read a location bitpack from input block IB and either update *LOC directly
   or add it to the location cache.
   It is necessary to call apply_location_cache to get *LOC updated.  */
232
void
lto_location_cache::input_location (location_t *loc, struct bitpack_d *bp,
				    class data_in *data_in)
{
  /* Decoder state persists across calls: the writer emits each of
     file/line/column only when it changed since the previous record.  */
  static const char *stream_file;
  static int stream_line;
  static int stream_col;
  static bool stream_sysp;
  bool file_change, line_change, column_change;

  gcc_assert (current_cache == this);

  *loc = bp_unpack_int_in_range (bp, "location", 0, RESERVED_LOCATION_COUNT);

  /* Reserved locations are streamed verbatim and need no caching.  */
  if (*loc < RESERVED_LOCATION_COUNT)
    return;

  /* Keep value RESERVED_LOCATION_COUNT in *loc as linemap lookups will
     ICE on it.  */

  file_change = bp_unpack_value (bp, 1);
  line_change = bp_unpack_value (bp, 1);
  column_change = bp_unpack_value (bp, 1);

  if (file_change)
    {
      /* Canonicalize so later file-identity tests can compare
	 pointers (see cmp_loc and apply_location_cache).  */
      stream_file = canon_file_name (bp_unpack_string (data_in, bp));
      stream_sysp = bp_unpack_value (bp, 1);
    }

  if (line_change)
    stream_line = bp_unpack_var_len_unsigned (bp);

  if (column_change)
    stream_col = bp_unpack_var_len_unsigned (bp);

  /* This optimization saves location cache operations during gimple
     streaming.  */

  if (current_file == stream_file && current_line == stream_line
      && current_col == stream_col && current_sysp == stream_sysp)
    {
      *loc = current_loc;
      return;
    }

  /* Otherwise queue the request; *LOC is patched for real only when
     apply_location_cache runs.  */
  struct cached_location entry
    = {stream_file, loc, stream_line, stream_col, stream_sysp};
  loc_cache.safe_push (entry);
}
283
/* Read a location bitpack from input block IB and either update *LOC directly
   or add it to the location cache.
   It is necessary to call apply_location_cache to get *LOC updated.  */
287
void
lto_input_location (location_t *loc, struct bitpack_d *bp,
		    class data_in *data_in)
{
  /* Delegate to the per-stream location cache owned by DATA_IN.  */
  data_in->location_cache.input_location (loc, bp, data_in);
}
294
295 /* Read location and return it instead of going through location caching.
296 This should be used only when the resulting location is not going to be
297 discarded. */
298
location_t
stream_input_location_now (struct bitpack_d *bp, class data_in *data_in)
{
  location_t loc;
  stream_input_location (&loc, bp, data_in);
  /* Force the cache to patch LOC immediately; note this also applies
     any other locations still pending in the cache.  */
  data_in->location_cache.apply_location_cache ();
  return loc;
}
307
/* Read a reference to a tree node from DATA_IN using input block IB.
   TAG is the expected node that should be found in IB, if TAG belongs
   to one of the indexable trees, expect to read a reference index to
   be looked up in one of the symbol tables, otherwise read the physical
   representation of the tree using stream_read_tree.  FN is the
   function scope for the read tree.  */
314
315 tree
316 lto_input_tree_ref (class lto_input_block *ib, class data_in *data_in,
317 struct function *fn, enum LTO_tags tag)
318 {
319 unsigned HOST_WIDE_INT ix_u;
320 tree result = NULL_TREE;
321
322 lto_tag_check_range (tag, LTO_field_decl_ref, LTO_namelist_decl_ref);
323
324 switch (tag)
325 {
326 case LTO_type_ref:
327 ix_u = streamer_read_uhwi (ib);
328 result = lto_file_decl_data_get_type (data_in->file_data, ix_u);
329 break;
330
331 case LTO_ssa_name_ref:
332 ix_u = streamer_read_uhwi (ib);
333 result = (*SSANAMES (fn))[ix_u];
334 break;
335
336 case LTO_field_decl_ref:
337 ix_u = streamer_read_uhwi (ib);
338 result = lto_file_decl_data_get_field_decl (data_in->file_data, ix_u);
339 break;
340
341 case LTO_function_decl_ref:
342 ix_u = streamer_read_uhwi (ib);
343 result = lto_file_decl_data_get_fn_decl (data_in->file_data, ix_u);
344 break;
345
346 case LTO_type_decl_ref:
347 ix_u = streamer_read_uhwi (ib);
348 result = lto_file_decl_data_get_type_decl (data_in->file_data, ix_u);
349 break;
350
351 case LTO_namespace_decl_ref:
352 ix_u = streamer_read_uhwi (ib);
353 result = lto_file_decl_data_get_namespace_decl (data_in->file_data, ix_u);
354 break;
355
356 case LTO_global_decl_ref:
357 case LTO_result_decl_ref:
358 case LTO_const_decl_ref:
359 case LTO_imported_decl_ref:
360 case LTO_label_decl_ref:
361 case LTO_translation_unit_decl_ref:
362 case LTO_namelist_decl_ref:
363 ix_u = streamer_read_uhwi (ib);
364 result = lto_file_decl_data_get_var_decl (data_in->file_data, ix_u);
365 break;
366
367 default:
368 gcc_unreachable ();
369 }
370
371 gcc_assert (result);
372
373 return result;
374 }
375
376
377 /* Read and return a double-linked list of catch handlers from input
378 block IB, using descriptors in DATA_IN. */
379
380 static struct eh_catch_d *
381 lto_input_eh_catch_list (class lto_input_block *ib, class data_in *data_in,
382 eh_catch *last_p)
383 {
384 eh_catch first;
385 enum LTO_tags tag;
386
387 *last_p = first = NULL;
388 tag = streamer_read_record_start (ib);
389 while (tag)
390 {
391 tree list;
392 eh_catch n;
393
394 lto_tag_check_range (tag, LTO_eh_catch, LTO_eh_catch);
395
396 /* Read the catch node. */
397 n = ggc_cleared_alloc<eh_catch_d> ();
398 n->type_list = stream_read_tree (ib, data_in);
399 n->filter_list = stream_read_tree (ib, data_in);
400 n->label = stream_read_tree (ib, data_in);
401
402 /* Register all the types in N->FILTER_LIST. */
403 for (list = n->filter_list; list; list = TREE_CHAIN (list))
404 add_type_for_runtime (TREE_VALUE (list));
405
406 /* Chain N to the end of the list. */
407 if (*last_p)
408 (*last_p)->next_catch = n;
409 n->prev_catch = *last_p;
410 *last_p = n;
411
412 /* Set the head of the list the first time through the loop. */
413 if (first == NULL)
414 first = n;
415
416 tag = streamer_read_record_start (ib);
417 }
418
419 return first;
420 }
421
422
423 /* Read and return EH region IX from input block IB, using descriptors
424 in DATA_IN. */
425
static eh_region
input_eh_region (class lto_input_block *ib, class data_in *data_in, int ix)
{
  enum LTO_tags tag;
  eh_region r;

  /* Read the region header.  LTO_null means slot IX holds no region.  */
  tag = streamer_read_record_start (ib);
  if (tag == LTO_null)
    return NULL;

  r = ggc_cleared_alloc<eh_region_d> ();
  r->index = streamer_read_hwi (ib);

  /* The index is streamed redundantly and must match the slot the
     caller is filling.  */
  gcc_assert (r->index == ix);

  /* Read all the region pointers as region numbers.  We'll fix up
     the pointers once the whole array has been read.  */
  r->outer = (eh_region) (intptr_t) streamer_read_hwi (ib);
  r->inner = (eh_region) (intptr_t) streamer_read_hwi (ib);
  r->next_peer = (eh_region) (intptr_t) streamer_read_hwi (ib);

  /* The tag encodes the region type; each type has its own payload.  */
  switch (tag)
    {
    case LTO_ert_cleanup:
      r->type = ERT_CLEANUP;
      break;

    case LTO_ert_try:
      {
	struct eh_catch_d *last_catch;
	r->type = ERT_TRY;
	r->u.eh_try.first_catch = lto_input_eh_catch_list (ib, data_in,
							   &last_catch);
	r->u.eh_try.last_catch = last_catch;
	break;
      }

    case LTO_ert_allowed_exceptions:
      {
	tree l;

	r->type = ERT_ALLOWED_EXCEPTIONS;
	r->u.allowed.type_list = stream_read_tree (ib, data_in);
	r->u.allowed.label = stream_read_tree (ib, data_in);
	r->u.allowed.filter = streamer_read_uhwi (ib);

	/* Register each allowed type for runtime matching.  */
	for (l = r->u.allowed.type_list; l ; l = TREE_CHAIN (l))
	  add_type_for_runtime (TREE_VALUE (l));
      }
      break;

    case LTO_ert_must_not_throw:
      {
	r->type = ERT_MUST_NOT_THROW;
	r->u.must_not_throw.failure_decl = stream_read_tree (ib, data_in);
	bitpack_d bp = streamer_read_bitpack (ib);
	r->u.must_not_throw.failure_loc
	  = stream_input_location_now (&bp, data_in);
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Landing pads are likewise streamed as indices for later fixup.  */
  r->landing_pads = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);

  return r;
}
496
497
498 /* Read and return EH landing pad IX from input block IB, using descriptors
499 in DATA_IN. */
500
501 static eh_landing_pad
502 input_eh_lp (class lto_input_block *ib, class data_in *data_in, int ix)
503 {
504 enum LTO_tags tag;
505 eh_landing_pad lp;
506
507 /* Read the landing pad header. */
508 tag = streamer_read_record_start (ib);
509 if (tag == LTO_null)
510 return NULL;
511
512 lto_tag_check_range (tag, LTO_eh_landing_pad, LTO_eh_landing_pad);
513
514 lp = ggc_cleared_alloc<eh_landing_pad_d> ();
515 lp->index = streamer_read_hwi (ib);
516 gcc_assert (lp->index == ix);
517 lp->next_lp = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
518 lp->region = (eh_region) (intptr_t) streamer_read_hwi (ib);
519 lp->post_landing_pad = stream_read_tree (ib, data_in);
520
521 return lp;
522 }
523
524
525 /* After reading the EH regions, pointers to peer and children regions
526 are region numbers. This converts all these region numbers into
527 real pointers into the rematerialized regions for FN. ROOT_REGION
528 is the region number for the root EH region in FN. */
529
static void
fixup_eh_region_pointers (struct function *fn, HOST_WIDE_INT root_region)
{
  unsigned i;
  vec<eh_region, va_gc> *eh_array = fn->eh->region_array;
  vec<eh_landing_pad, va_gc> *lp_array = fn->eh->lp_array;
  eh_region r;
  eh_landing_pad lp;

  gcc_assert (eh_array && lp_array);

  gcc_assert (root_region >= 0);
  fn->eh->region_tree = (*eh_array)[root_region];

/* Each pointer field currently holds an array index smuggled through
   the pointer representation (see input_eh_region and input_eh_lp);
   translate it back into the real slot address.  */
#define FIXUP_EH_REGION(r) (r) = (*eh_array)[(HOST_WIDE_INT) (intptr_t) (r)]
#define FIXUP_EH_LP(p) (p) = (*lp_array)[(HOST_WIDE_INT) (intptr_t) (p)]

  /* Convert all the index numbers stored in pointer fields into
     pointers to the corresponding slots in the EH region array.  */
  FOR_EACH_VEC_ELT (*eh_array, i, r)
    {
      /* The array may contain NULL regions.  */
      if (r == NULL)
	continue;

      gcc_assert (i == (unsigned) r->index);
      FIXUP_EH_REGION (r->outer);
      FIXUP_EH_REGION (r->inner);
      FIXUP_EH_REGION (r->next_peer);
      FIXUP_EH_LP (r->landing_pads);
    }

  /* Convert all the index numbers stored in pointer fields into
     pointers to the corresponding slots in the EH landing pad array.  */
  FOR_EACH_VEC_ELT (*lp_array, i, lp)
    {
      /* The array may contain NULL landing pads.  */
      if (lp == NULL)
	continue;

      gcc_assert (i == (unsigned) lp->index);
      FIXUP_EH_LP (lp->next_lp);
      FIXUP_EH_REGION (lp->region);
    }

#undef FIXUP_EH_REGION
#undef FIXUP_EH_LP
}
578
579
580 /* Initialize EH support. */
581
582 void
583 lto_init_eh (void)
584 {
585 static bool eh_initialized_p = false;
586
587 if (eh_initialized_p)
588 return;
589
590 /* Contrary to most other FEs, we only initialize EH support when at
591 least one of the files in the set contains exception regions in
592 it. Since this happens much later than the call to init_eh in
593 lang_dependent_init, we have to set flag_exceptions and call
594 init_eh again to initialize the EH tables. */
595 flag_exceptions = 1;
596 init_eh ();
597
598 eh_initialized_p = true;
599 }
600
601
602 /* Read the exception table for FN from IB using the data descriptors
603 in DATA_IN. */
604
static void
input_eh_regions (class lto_input_block *ib, class data_in *data_in,
		  struct function *fn)
{
  HOST_WIDE_INT i, root_region, len;
  enum LTO_tags tag;

  /* LTO_null means the function streamed no EH table at all.  */
  tag = streamer_read_record_start (ib);
  if (tag == LTO_null)
    return;

  lto_tag_check_range (tag, LTO_eh_table, LTO_eh_table);

  /* If the file contains EH regions, then it was compiled with
     -fexceptions.  In that case, initialize the backend EH
     machinery.  */
  lto_init_eh ();

  gcc_assert (fn->eh);

  root_region = streamer_read_hwi (ib);
  /* Guard against values that would be truncated when used as int.  */
  gcc_assert (root_region == (int) root_region);

  /* Read the EH region array.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      vec_safe_grow_cleared (fn->eh->region_array, len);
      for (i = 0; i < len; i++)
	{
	  eh_region r = input_eh_region (ib, data_in, i);
	  (*fn->eh->region_array)[i] = r;
	}
    }

  /* Read the landing pads.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      vec_safe_grow_cleared (fn->eh->lp_array, len);
      for (i = 0; i < len; i++)
	{
	  eh_landing_pad lp = input_eh_lp (ib, data_in, i);
	  (*fn->eh->lp_array)[i] = lp;
	}
    }

  /* Read the runtime type data.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      vec_safe_grow_cleared (fn->eh->ttype_data, len);
      for (i = 0; i < len; i++)
	{
	  tree ttype = stream_read_tree (ib, data_in);
	  (*fn->eh->ttype_data)[i] = ttype;
	}
    }

  /* Read the table of action chains.  The representation differs for
     the ARM EABI unwinder (trees) versus everything else (bytes).  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      if (targetm.arm_eabi_unwinder)
	{
	  vec_safe_grow_cleared (fn->eh->ehspec_data.arm_eabi, len);
	  for (i = 0; i < len; i++)
	    {
	      tree t = stream_read_tree (ib, data_in);
	      (*fn->eh->ehspec_data.arm_eabi)[i] = t;
	    }
	}
      else
	{
	  vec_safe_grow_cleared (fn->eh->ehspec_data.other, len);
	  for (i = 0; i < len; i++)
	    {
	      uchar c = streamer_read_uchar (ib);
	      (*fn->eh->ehspec_data.other)[i] = c;
	    }
	}
    }

  /* Reconstruct the EH region tree by fixing up the peer/children
     pointers.  */
  fixup_eh_region_pointers (fn, root_region);

  /* The EH table is terminated by an LTO_null record.  */
  tag = streamer_read_record_start (ib);
  lto_tag_check_range (tag, LTO_null, LTO_null);
}
699
700
701 /* Make a new basic block with index INDEX in function FN. */
702
703 static basic_block
704 make_new_block (struct function *fn, unsigned int index)
705 {
706 basic_block bb = alloc_block ();
707 bb->index = index;
708 SET_BASIC_BLOCK_FOR_FN (fn, index, bb);
709 n_basic_blocks_for_fn (fn)++;
710 return bb;
711 }
712
713
714 /* Read the CFG for function FN from input block IB. */
715
static void
input_cfg (class lto_input_block *ib, class data_in *data_in,
	   struct function *fn)
{
  unsigned int bb_count;
  basic_block p_bb;
  unsigned int i;
  int index;

  init_empty_tree_cfg_for_function (fn);
  init_ssa_operands (fn);

  profile_status_for_fn (fn) = streamer_read_enum (ib, profile_status_d,
						   PROFILE_LAST);

  bb_count = streamer_read_uhwi (ib);

  last_basic_block_for_fn (fn) = bb_count;
  if (bb_count > basic_block_info_for_fn (fn)->length ())
    vec_safe_grow_cleared (basic_block_info_for_fn (fn), bb_count);

  if (bb_count > label_to_block_map_for_fn (fn)->length ())
    vec_safe_grow_cleared (label_to_block_map_for_fn (fn), bb_count);

  /* First pass: blocks with their outgoing edges, as a -1-terminated
     sequence of block indices.  Destination blocks may be created
     before their own record is seen.  */
  index = streamer_read_hwi (ib);
  while (index != -1)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
      unsigned int edge_count;

      if (bb == NULL)
	bb = make_new_block (fn, index);

      edge_count = streamer_read_uhwi (ib);

      /* Connect up the CFG.  */
      for (i = 0; i < edge_count; i++)
	{
	  unsigned int dest_index;
	  unsigned int edge_flags;
	  basic_block dest;
	  profile_probability probability;
	  edge e;

	  dest_index = streamer_read_uhwi (ib);
	  probability = profile_probability::stream_in (ib);
	  edge_flags = streamer_read_uhwi (ib);

	  dest = BASIC_BLOCK_FOR_FN (fn, dest_index);

	  if (dest == NULL)
	    dest = make_new_block (fn, dest_index);

	  e = make_edge (bb, dest, edge_flags);
	  e->probability = probability;
	}

      index = streamer_read_hwi (ib);
    }

  /* Second pass: rebuild the linear prev_bb/next_bb chain, again as a
     -1-terminated sequence of block indices starting after the entry
     block.  */
  p_bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
  index = streamer_read_hwi (ib);
  while (index != -1)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
      bb->prev_bb = p_bb;
      p_bb->next_bb = bb;
      p_bb = bb;
      index = streamer_read_hwi (ib);
    }

  /* ??? The cfgloop interface is tied to cfun.  */
  gcc_assert (cfun == fn);

  /* Input the loop tree.  */
  unsigned n_loops = streamer_read_uhwi (ib);
  if (n_loops == 0)
    return;

  struct loops *loops = ggc_cleared_alloc<struct loops> ();
  init_loops_structure (fn, loops, n_loops);
  set_loops_for_fn (fn, loops);

  /* Input each loop and associate it with its loop header so
     flow_loops_find can rebuild the loop tree.  */
  for (unsigned i = 1; i < n_loops; ++i)
    {
      /* Header index -1 marks a deleted/absent loop slot.  */
      int header_index = streamer_read_hwi (ib);
      if (header_index == -1)
	{
	  loops->larray->quick_push (NULL);
	  continue;
	}

      class loop *loop = alloc_loop ();
      loop->header = BASIC_BLOCK_FOR_FN (fn, header_index);
      loop->header->loop_father = loop;

      /* Read everything copy_loop_info copies.  */
      loop->estimate_state = streamer_read_enum (ib, loop_estimation, EST_LAST);
      loop->any_upper_bound = streamer_read_hwi (ib);
      if (loop->any_upper_bound)
	loop->nb_iterations_upper_bound = streamer_read_widest_int (ib);
      loop->any_likely_upper_bound = streamer_read_hwi (ib);
      if (loop->any_likely_upper_bound)
	loop->nb_iterations_likely_upper_bound = streamer_read_widest_int (ib);
      loop->any_estimate = streamer_read_hwi (ib);
      if (loop->any_estimate)
	loop->nb_iterations_estimate = streamer_read_widest_int (ib);

      /* Read OMP SIMD related info.  */
      loop->safelen = streamer_read_hwi (ib);
      loop->unroll = streamer_read_hwi (ib);
      loop->owned_clique = streamer_read_hwi (ib);
      loop->dont_vectorize = streamer_read_hwi (ib);
      loop->force_vectorize = streamer_read_hwi (ib);
      loop->simduid = stream_read_tree (ib, data_in);

      place_new_loop (fn, loop);

      /* flow_loops_find doesn't like loops not in the tree, hook them
	 all as siblings of the tree root temporarily.  */
      flow_loop_tree_node_add (loops->tree_root, loop);
    }

  /* Rebuild the loop tree.  */
  flow_loops_find (loops);
}
844
845
846 /* Read the SSA names array for function FN from DATA_IN using input
847 block IB. */
848
849 static void
850 input_ssa_names (class lto_input_block *ib, class data_in *data_in,
851 struct function *fn)
852 {
853 unsigned int i, size;
854
855 size = streamer_read_uhwi (ib);
856 init_ssanames (fn, size);
857
858 i = streamer_read_uhwi (ib);
859 while (i)
860 {
861 tree ssa_name, name;
862 bool is_default_def;
863
864 /* Skip over the elements that had been freed. */
865 while (SSANAMES (fn)->length () < i)
866 SSANAMES (fn)->quick_push (NULL_TREE);
867
868 is_default_def = (streamer_read_uchar (ib) != 0);
869 name = stream_read_tree (ib, data_in);
870 ssa_name = make_ssa_name_fn (fn, name, NULL);
871
872 if (is_default_def)
873 {
874 set_ssa_default_def (cfun, SSA_NAME_VAR (ssa_name), ssa_name);
875 SSA_NAME_DEF_STMT (ssa_name) = gimple_build_nop ();
876 }
877
878 i = streamer_read_uhwi (ib);
879 }
880 }
881
882
883 /* Go through all NODE edges and fixup call_stmt pointers
884 so they point to STMTS. */
885
886 static void
887 fixup_call_stmt_edges_1 (struct cgraph_node *node, gimple **stmts,
888 struct function *fn)
889 {
890 #define STMT_UID_NOT_IN_RANGE(uid) \
891 (gimple_stmt_max_uid (fn) < uid || uid == 0)
892
893 struct cgraph_edge *cedge;
894 struct ipa_ref *ref = NULL;
895 unsigned int i;
896
897 for (cedge = node->callees; cedge; cedge = cedge->next_callee)
898 {
899 if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
900 fatal_error (input_location,
901 "Cgraph edge statement index out of range");
902 cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
903 if (!cedge->call_stmt)
904 fatal_error (input_location,
905 "Cgraph edge statement index not found");
906 }
907 for (cedge = node->indirect_calls; cedge; cedge = cedge->next_callee)
908 {
909 if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
910 fatal_error (input_location,
911 "Cgraph edge statement index out of range");
912 cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
913 if (!cedge->call_stmt)
914 fatal_error (input_location, "Cgraph edge statement index not found");
915 }
916 for (i = 0; node->iterate_reference (i, ref); i++)
917 if (ref->lto_stmt_uid)
918 {
919 if (STMT_UID_NOT_IN_RANGE (ref->lto_stmt_uid))
920 fatal_error (input_location,
921 "Reference statement index out of range");
922 ref->stmt = stmts[ref->lto_stmt_uid - 1];
923 if (!ref->stmt)
924 fatal_error (input_location, "Reference statement index not found");
925 }
926 }
927
928
929 /* Fixup call_stmt pointers in NODE and all clones. */
930
static void
fixup_call_stmt_edges (struct cgraph_node *orig, gimple **stmts)
{
  struct cgraph_node *node;
  struct function *fn;

  /* Statement UIDs refer to the body of the root of the clone tree;
     climb up to it first.  */
  while (orig->clone_of)
    orig = orig->clone_of;
  fn = DECL_STRUCT_FUNCTION (orig->decl);

  /* Thunks are skipped — presumably they carry no streamed gimple body
     of their own (NOTE(review): confirm against the streamer-out side).  */
  if (!orig->thunk.thunk_p)
    fixup_call_stmt_edges_1 (orig, stmts, fn);
  if (orig->clones)
    /* Non-recursive depth-first walk over the whole clone tree.  */
    for (node = orig->clones; node != orig;)
      {
	if (!node->thunk.thunk_p)
	  fixup_call_stmt_edges_1 (node, stmts, fn);
	if (node->clones)
	  /* Descend into this node's clones first.  */
	  node = node->clones;
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    /* Climb back up until a pending sibling is found or we
	       return to the root.  */
	    while (node != orig && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != orig)
	      node = node->next_sibling_clone;
	  }
      }
}
961
962
963 /* Input the base body of struct function FN from DATA_IN
964 using input block IB. */
965
static void
input_struct_function_base (struct function *fn, class data_in *data_in,
			    class lto_input_block *ib)
{
  struct bitpack_d bp;
  int len;

  /* Read the static chain and non-local goto save area.  */
  fn->static_chain_decl = stream_read_tree (ib, data_in);
  fn->nonlocal_goto_save_area = stream_read_tree (ib, data_in);

  /* Read all the local symbols.  */
  len = streamer_read_hwi (ib);
  if (len > 0)
    {
      int i;
      vec_safe_grow_cleared (fn->local_decls, len);
      for (i = 0; i < len; i++)
	{
	  tree t = stream_read_tree (ib, data_in);
	  (*fn->local_decls)[i] = t;
	}
    }

  /* Input the current IL state of the function.  */
  fn->curr_properties = streamer_read_uhwi (ib);

  /* Read all the attributes for FN.  The field order below must mirror
     the corresponding writer exactly; do not reorder.  */
  bp = streamer_read_bitpack (ib);
  fn->is_thunk = bp_unpack_value (&bp, 1);
  fn->has_local_explicit_reg_vars = bp_unpack_value (&bp, 1);
  fn->returns_pcc_struct = bp_unpack_value (&bp, 1);
  fn->returns_struct = bp_unpack_value (&bp, 1);
  fn->can_throw_non_call_exceptions = bp_unpack_value (&bp, 1);
  fn->can_delete_dead_exceptions = bp_unpack_value (&bp, 1);
  fn->always_inline_functions_inlined = bp_unpack_value (&bp, 1);
  fn->after_inlining = bp_unpack_value (&bp, 1);
  fn->stdarg = bp_unpack_value (&bp, 1);
  fn->has_nonlocal_label = bp_unpack_value (&bp, 1);
  fn->has_forced_label_in_static = bp_unpack_value (&bp, 1);
  fn->calls_alloca = bp_unpack_value (&bp, 1);
  fn->calls_setjmp = bp_unpack_value (&bp, 1);
  fn->calls_eh_return = bp_unpack_value (&bp, 1);
  fn->has_force_vectorize_loops = bp_unpack_value (&bp, 1);
  fn->has_simduid_loops = bp_unpack_value (&bp, 1);
  fn->va_list_fpr_size = bp_unpack_value (&bp, 8);
  fn->va_list_gpr_size = bp_unpack_value (&bp, 8);
  fn->last_clique = bp_unpack_value (&bp, sizeof (short) * 8);

  /* Input the function start and end loci.  */
  fn->function_start_locus = stream_input_location_now (&bp, data_in);
  fn->function_end_locus = stream_input_location_now (&bp, data_in);

  /* Restore the instance discriminators if present.  A single presence
     bit is followed by the full integer value.  */
  int instance_number = bp_unpack_value (&bp, 1);
  if (instance_number)
    {
      instance_number = bp_unpack_value (&bp, sizeof (int) * CHAR_BIT);
      maybe_create_decl_to_instance_map ()->put (fn->decl, instance_number);
    }
}
1027
1028
/* Read the body of function FN_DECL from DATA_IN using input block IB.
   IB_CFG holds the serialized control-flow graph.  The result decl,
   arguments, debug args and lexical scope tree are read unconditionally;
   the CFG, SSA names, EH regions and basic blocks are read only when a
   nonzero marker in the stream says a body is present.  */

static void
input_function (tree fn_decl, class data_in *data_in,
		class lto_input_block *ib, class lto_input_block *ib_cfg)
{
  struct function *fn;
  enum LTO_tags tag;
  gimple **stmts;
  basic_block bb;
  struct cgraph_node *node;

  tag = streamer_read_record_start (ib);
  lto_tag_check (tag, LTO_function);

  /* Read decls for parameters and args.  */
  DECL_RESULT (fn_decl) = stream_read_tree (ib, data_in);
  DECL_ARGUMENTS (fn_decl) = streamer_read_chain (ib, data_in);

  /* Read debug args if available.  */
  unsigned n_debugargs = streamer_read_uhwi (ib);
  if (n_debugargs)
    {
      vec<tree, va_gc> **debugargs = decl_debug_args_insert (fn_decl);
      vec_safe_grow (*debugargs, n_debugargs);
      for (unsigned i = 0; i < n_debugargs; ++i)
	(**debugargs)[i] = stream_read_tree (ib, data_in);
    }

  /* Read the tree of lexical scopes for the function.  */
  DECL_INITIAL (fn_decl) = stream_read_tree (ib, data_in);
  unsigned block_leaf_count = streamer_read_uhwi (ib);
  /* Reading the leaf BLOCKs registers them; the return values are not
     needed here.  */
  while (block_leaf_count--)
    stream_read_tree (ib, data_in);

  /* A zero marker means no body was streamed for this function.  */
  if (!streamer_read_uhwi (ib))
    return;

  push_struct_function (fn_decl);
  fn = DECL_STRUCT_FUNCTION (fn_decl);
  init_tree_ssa (fn);
  /* We input IL in SSA form.  */
  cfun->gimple_df->in_ssa_p = true;

  gimple_register_cfg_hooks ();

  node = cgraph_node::get (fn_decl);
  if (!node)
    node = cgraph_node::create (fn_decl);
  input_struct_function_base (fn, data_in, ib);
  input_cfg (ib_cfg, data_in, fn);

  /* Read all the SSA names.  */
  input_ssa_names (ib, data_in, fn);

  /* Read the exception handling regions in the function.  */
  input_eh_regions (ib, data_in, fn);

  gcc_assert (DECL_INITIAL (fn_decl));
  DECL_SAVED_TREE (fn_decl) = NULL_TREE;

  /* Read all the basic blocks.  A zero tag terminates the list.  */
  tag = streamer_read_record_start (ib);
  while (tag)
    {
      input_bb (ib, tag, data_in, fn,
		node->count_materialization_scale);
      tag = streamer_read_record_start (ib);
    }

  /* Fix up the call statements that are mentioned in the callgraph
     edges.  First assign fresh uids to every PHI and statement in
     CFG order, matching the numbering used when the edges were
     streamed out.  */
  set_gimple_stmt_max_uid (cfun, 0);
  FOR_ALL_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
    }
  /* Build a uid -> stmt map for fixup_call_stmt_edges and the IPA
     stmt fixups below.  */
  stmts = (gimple **) xcalloc (gimple_stmt_max_uid (fn), sizeof (gimple *));
  FOR_ALL_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator bsi = gsi_start_phis (bb);
      while (!gsi_end_p (bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  gsi_next (&bsi);
	  stmts[gimple_uid (stmt)] = stmt;
	}
      bsi = gsi_start_bb (bb);
      while (!gsi_end_p (bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  bool remove = false;
	  /* If we're recompiling LTO objects with debug stmts but
	     we're not supposed to have debug stmts, remove them now.
	     We can't remove them earlier because this would cause uid
	     mismatches in fixups, but we can do it at this point, as
	     long as debug stmts don't require fixups.
	     Similarly remove all IFN_*SAN_* internal calls.  */
	  if (!flag_wpa)
	    {
	      if (is_gimple_debug (stmt)
		  && (gimple_debug_nonbind_marker_p (stmt)
		      ? !MAY_HAVE_DEBUG_MARKER_STMTS
		      : !MAY_HAVE_DEBUG_BIND_STMTS))
		remove = true;
	      /* In case the linemap overflows locations can be dropped
		 to zero.  Thus do not keep nonsensical inline entry markers
		 we'd later ICE on.  */
	      tree block;
	      if (gimple_debug_inline_entry_p (stmt)
		  && (block = gimple_block (stmt))
		  && !inlined_function_outer_scope_p (block))
		remove = true;
	      if (is_gimple_call (stmt)
		  && gimple_call_internal_p (stmt))
		{
		  /* Sanitizer internal calls whose sanitizer is disabled
		     are turned into IFN_NOP rather than removed, so the
		     uids assigned above stay valid.  */
		  bool replace = false;
		  switch (gimple_call_internal_fn (stmt))
		    {
		    case IFN_UBSAN_NULL:
		      if ((flag_sanitize
			   & (SANITIZE_NULL | SANITIZE_ALIGNMENT)) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_BOUNDS:
		      if ((flag_sanitize & SANITIZE_BOUNDS) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_VPTR:
		      if ((flag_sanitize & SANITIZE_VPTR) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_OBJECT_SIZE:
		      if ((flag_sanitize & SANITIZE_OBJECT_SIZE) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_PTR:
		      if ((flag_sanitize & SANITIZE_POINTER_OVERFLOW) == 0)
			replace = true;
		      break;
		    case IFN_ASAN_MARK:
		      if ((flag_sanitize & SANITIZE_ADDRESS) == 0)
			replace = true;
		      break;
		    case IFN_TSAN_FUNC_EXIT:
		      if ((flag_sanitize & SANITIZE_THREAD) == 0)
			replace = true;
		      break;
		    default:
		      break;
		    }
		  if (replace)
		    {
		      gimple_call_set_internal_fn (as_a <gcall *> (stmt),
						   IFN_NOP);
		      update_stmt (stmt);
		    }
		}
	    }
	  if (remove)
	    {
	      /* Advance BSI before removal so the iterator stays valid.  */
	      gimple_stmt_iterator gsi = bsi;
	      gsi_next (&bsi);
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      gsi_remove (&gsi, true);
	    }
	  else
	    {
	      gsi_next (&bsi);
	      stmts[gimple_uid (stmt)] = stmt;

	      /* Remember that the input function has begin stmt
		 markers, so that we know to expect them when emitting
		 debug info.  */
	      if (!cfun->debug_nonbind_markers
		  && gimple_debug_nonbind_marker_p (stmt))
		cfun->debug_nonbind_markers = true;
	    }
	}
    }

  /* Set the gimple body to the statement sequence in the entry
     basic block.  FIXME lto, this is fairly hacky.  The existence
     of a gimple body is used by the cgraph routines, but we should
     really use the presence of the CFG.  */
  {
    edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
    gimple_set_body (fn_decl, bb_seq (ei_edge (ei)->dest));
  }

  update_max_bb_count ();
  fixup_call_stmt_edges (node, stmts);
  execute_all_ipa_stmt_fixups (node, stmts);

  update_ssa (TODO_update_ssa_only_virtuals);
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  free (stmts);
  pop_cfun ();
}
1240
/* Read the initializer of variable VAR from DATA_IN using input block IB.
   (The previous comment was a copy-paste of the function-body one; this
   routine handles the static-initializer case.)  */

static void
input_constructor (tree var, class data_in *data_in,
		   class lto_input_block *ib)
{
  DECL_INITIAL (var) = stream_read_tree (ib, data_in);
}
1249
1250
/* Read the body from DATA for function NODE and fill it in.
   FILE_DATA are the global decls and types.  SECTION_TYPE is either
   LTO_section_function_body or LTO_section_static_initializer.  If
   section type is LTO_section_function_body, FN must be the decl for
   that function.  */

static void
lto_read_body_or_constructor (struct lto_file_decl_data *file_data, struct symtab_node *node,
			      const char *data, enum lto_section_type section_type)
{
  const struct lto_function_header *header;
  class data_in *data_in;
  int cfg_offset;		/* Only meaningful for FUNCTION_DECLs.  */
  int main_offset;
  int string_offset;
  tree fn_decl = node->decl;

  /* Section layout: header, [CFG block -- functions only], main
     stream, string table.  */
  header = (const struct lto_function_header *) data;
  if (TREE_CODE (node->decl) == FUNCTION_DECL)
    {
      cfg_offset = sizeof (struct lto_function_header);
      main_offset = cfg_offset + header->cfg_size;
      string_offset = main_offset + header->main_size;
    }
  else
    {
      /* Variable initializers carry no CFG block.  */
      main_offset = sizeof (struct lto_function_header);
      string_offset = main_offset + header->main_size;
    }

  data_in = lto_data_in_create (file_data, data + string_offset,
				header->string_size, vNULL);

  if (section_type == LTO_section_function_body)
    {
      struct lto_in_decl_state *decl_state;
      unsigned from;

      gcc_checking_assert (node);

      /* Use the function's decl state.  */
      decl_state = lto_get_function_in_decl_state (file_data, fn_decl);
      gcc_assert (decl_state);
      file_data->current_decl_state = decl_state;


      /* Set up the struct function.  FROM marks where this body's
	 locally streamed trees start in the reader cache, for the
	 type fixup loop below.  */
      from = data_in->reader_cache->nodes.length ();
      lto_input_block ib_main (data + main_offset, header->main_size,
			       file_data->mode_table);
      if (TREE_CODE (node->decl) == FUNCTION_DECL)
	{
	  lto_input_block ib_cfg (data + cfg_offset, header->cfg_size,
				  file_data->mode_table);
	  input_function (fn_decl, data_in, &ib_main, &ib_cfg);
	}
      else
	input_constructor (fn_decl, data_in, &ib_main);
      data_in->location_cache.apply_location_cache ();
      /* And fixup types we streamed locally.  */
      {
	struct streamer_tree_cache_d *cache = data_in->reader_cache;
	unsigned len = cache->nodes.length ();
	unsigned i;
	for (i = len; i-- > from;)
	  {
	    tree t = streamer_tree_cache_get_tree (cache, i);
	    if (t == NULL_TREE)
	      continue;

	    if (TYPE_P (t))
	      {
		gcc_assert (TYPE_CANONICAL (t) == NULL_TREE);
		if (type_with_alias_set_p (t)
		    && canonical_type_used_p (t))
		  TYPE_CANONICAL (t) = TYPE_MAIN_VARIANT (t);
		if (TYPE_MAIN_VARIANT (t) != t)
		  {
		    /* Link locally streamed variants into their main
		       variant's variant chain.  */
		    gcc_assert (TYPE_NEXT_VARIANT (t) == NULL_TREE);
		    TYPE_NEXT_VARIANT (t)
		      = TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t));
		    TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t)) = t;
		  }
	      }
	  }
      }

      /* Restore decl state.  */
      file_data->current_decl_state = file_data->global_decl_state;
    }

  lto_data_in_delete (data_in);
}
1344
1345
/* Read the body of NODE using DATA.  FILE_DATA holds the global
   decls and types.  Thin wrapper around lto_read_body_or_constructor
   for function bodies.  */

void
lto_input_function_body (struct lto_file_decl_data *file_data,
			 struct cgraph_node *node, const char *data)
{
  lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
}
1355
/* Read the body of NODE using DATA.  FILE_DATA holds the global
   decls and types.  Note this also passes LTO_section_function_body:
   the reader distinguishes variables from functions by the TREE_CODE
   of NODE->decl, not by the section type.  */

void
lto_input_variable_constructor (struct lto_file_decl_data *file_data,
				struct varpool_node *node, const char *data)
{
  lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
}
1365
1366
/* Queue of accumulated decl -> DIE mappings.  Similarly to locations,
   these are only applied to prevailing tree nodes during tree merging.  */
vec<dref_entry> dref_queue;
1370
/* Read the physical representation of a tree node EXPR from
   input block IB using the per-file context in DATA_IN.  EXPR must
   already be allocated; this fills in its bitfields, pointer fields
   and LTO-specific extras in stream order.  */

static void
lto_read_tree_1 (class lto_input_block *ib, class data_in *data_in, tree expr)
{
  /* Read all the bitfield values in EXPR.  Note that for LTO, we
     only write language-independent bitfields, so no more unpacking is
     needed.  */
  streamer_read_tree_bitfields (ib, data_in, expr);

  /* Read all the pointer fields in EXPR.  */
  streamer_read_tree_body (ib, data_in, expr);

  /* Read any LTO-specific data not read by the tree streamer.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    DECL_INITIAL (expr) = stream_read_tree (ib, data_in);

  /* Stream references to early generated DIEs.  Keep in sync with the
     trees handled in dwarf2out_register_external_die.  */
  if ((DECL_P (expr)
       && TREE_CODE (expr) != FIELD_DECL
       && TREE_CODE (expr) != DEBUG_EXPR_DECL
       && TREE_CODE (expr) != TYPE_DECL)
      || TREE_CODE (expr) == BLOCK)
    {
      const char *str = streamer_read_string (data_in, ib);
      if (str)
	{
	  unsigned HOST_WIDE_INT off = streamer_read_uhwi (ib);
	  dref_entry e = { expr, str, off };
	  /* Queued only; dref_queue is drained by the callers.  */
	  dref_queue.safe_push (e);
	}
    }
}
1408
/* Read the physical representation of a tree node with tag TAG from
   input block IB using the per-file context in DATA_IN.  HASH is the
   node's streaming hash, recorded alongside it in the reader cache.  */

static tree
lto_read_tree (class lto_input_block *ib, class data_in *data_in,
	       enum LTO_tags tag, hashval_t hash)
{
  /* Instantiate a new tree node.  */
  tree result = streamer_alloc_tree (ib, data_in, tag);

  /* Enter RESULT in the reader cache.  This will make RESULT
     available so that circular references in the rest of the tree
     structure can be resolved in subsequent calls to stream_read_tree.  */
  streamer_tree_cache_append (data_in->reader_cache, result, hash);

  lto_read_tree_1 (ib, data_in, result);

  /* Consume the trailing end marker; its value is unused.  */
  /* end_marker = */ streamer_read_uchar (ib);

  return result;
}
1430
1431
/* Populate the reader cache with trees materialized from the SCC
   following in the IB, DATA_IN stream.  Store the number of nodes in
   *LEN and the SCC entry length in *ENTRY_LEN; return the SCC's
   streamed hash value.  */

hashval_t
lto_input_scc (class lto_input_block *ib, class data_in *data_in,
	       unsigned *len, unsigned *entry_len)
{
  /* A blob of unnamed tree nodes, fill the cache from it and
     recurse.  */
  unsigned size = streamer_read_uhwi (ib);
  hashval_t scc_hash = streamer_read_uhwi (ib);
  unsigned scc_entry_len = 1;

  if (size == 1)
    {
      /* Singleton SCC: read the one node directly, caching it under
	 the SCC hash.  */
      enum LTO_tags tag = streamer_read_record_start (ib);
      lto_input_tree_1 (ib, data_in, tag, scc_hash);
    }
  else
    {
      unsigned int first = data_in->reader_cache->nodes.length ();
      tree result;

      scc_entry_len = streamer_read_uhwi (ib);

      /* Materialize size trees by reading their headers.  Allocating
	 them all first lets intra-SCC references resolve via the
	 cache when the bodies are read below.  */
      for (unsigned i = 0; i < size; ++i)
	{
	  enum LTO_tags tag = streamer_read_record_start (ib);
	  /* References, constants and nested SCCs never appear inside
	     an SCC blob.  */
	  if (tag == LTO_null
	      || (tag >= LTO_field_decl_ref && tag <= LTO_global_decl_ref)
	      || tag == LTO_tree_pickle_reference
	      || tag == LTO_integer_cst
	      || tag == LTO_tree_scc)
	    gcc_unreachable ();

	  result = streamer_alloc_tree (ib, data_in, tag);
	  streamer_tree_cache_append (data_in->reader_cache, result, 0);
	}

      /* Read the tree bitpacks and references.  */
      for (unsigned i = 0; i < size; ++i)
	{
	  result = streamer_tree_cache_get_tree (data_in->reader_cache,
						 first + i);
	  lto_read_tree_1 (ib, data_in, result);
	  /* Consume the per-node end marker.  */
	  /* end_marker = */ streamer_read_uchar (ib);
	}
    }

  *len = size;
  *entry_len = scc_entry_len;
  return scc_hash;
}
1486
1487
/* Read a tree from input block IB using the per-file context in
   DATA_IN.  This context is used, for example, to resolve references
   to previously read nodes.  TAG is the record tag already consumed
   from the stream; HASH is the node's streaming hash for the reader
   cache.  */

tree
lto_input_tree_1 (class lto_input_block *ib, class data_in *data_in,
		  enum LTO_tags tag, hashval_t hash)
{
  tree result;

  gcc_assert ((unsigned) tag < (unsigned) LTO_NUM_TAGS);

  if (tag == LTO_null)
    result = NULL_TREE;
  else if (tag >= LTO_field_decl_ref && tag <= LTO_namelist_decl_ref)
    {
      /* If TAG is a reference to an indexable tree, the next value
	 in IB is the index into the table where we expect to find
	 that tree.  */
      result = lto_input_tree_ref (ib, data_in, cfun, tag);
    }
  else if (tag == LTO_tree_pickle_reference)
    {
      /* If TAG is a reference to a previously read tree, look it up in
	 the reader cache.  */
      result = streamer_get_pickled_tree (ib, data_in);
    }
  else if (tag == LTO_integer_cst)
    {
      /* For shared integer constants in singletons we can use the
	 existing tree integer constant merging code.  */
      tree type = stream_read_tree (ib, data_in);
      unsigned HOST_WIDE_INT len = streamer_read_uhwi (ib);
      unsigned HOST_WIDE_INT i;
      HOST_WIDE_INT a[WIDE_INT_MAX_ELTS];

      for (i = 0; i < len; i++)
	a[i] = streamer_read_hwi (ib);
      gcc_assert (TYPE_PRECISION (type) <= MAX_BITSIZE_MODE_ANY_INT);
      result = wide_int_to_tree (type, wide_int::from_array
				 (a, len, TYPE_PRECISION (type)));
      streamer_tree_cache_append (data_in->reader_cache, result, hash);
    }
  else if (tag == LTO_tree_scc)
    /* SCCs must be consumed by lto_input_scc before reaching here.  */
    gcc_unreachable ();
  else
    {
      /* Otherwise, materialize a new node from IB.  */
      result = lto_read_tree (ib, data_in, tag, hash);
    }

  return result;
}
1541
/* Read a tree from input block IB using the per-file context in
   DATA_IN, first consuming and registering any SCC blobs that precede
   it in the stream.  */

tree
lto_input_tree (class lto_input_block *ib, class data_in *data_in)
{
  enum LTO_tags tag;

  /* Input and skip SCCs.  */
  while ((tag = streamer_read_record_start (ib)) == LTO_tree_scc)
    {
      unsigned len, entry_len;
      lto_input_scc (ib, data_in, &len, &entry_len);

      /* Register DECLs with the debuginfo machinery.  */
      while (!dref_queue.is_empty ())
	{
	  dref_entry e = dref_queue.pop ();
	  debug_hooks->register_external_die (e.decl, e.sym, e.off);
	}
    }
  /* TAG now names the actual tree record; hash 0 as it is not part
     of an SCC.  */
  return lto_input_tree_1 (ib, data_in, tag, 0);
}
1562
1563
/* Input toplevel asms.  Reads the LTO_section_asm section of FILE_DATA,
   if present, and finalizes each asm string into the symbol table with
   its streamed order offset by ORDER_BASE.  */

void
lto_input_toplevel_asms (struct lto_file_decl_data *file_data, int order_base)
{
  size_t len;
  const char *data = lto_get_section_data (file_data, LTO_section_asm,
					   NULL, &len);
  const struct lto_simple_header_with_strings *header
    = (const struct lto_simple_header_with_strings *) data;
  int string_offset;
  class data_in *data_in;
  tree str;

  /* The asm section is optional.  */
  if (! data)
    return;

  string_offset = sizeof (*header) + header->main_size;

  lto_input_block ib (data + sizeof (*header), header->main_size,
		      file_data->mode_table);

  data_in = lto_data_in_create (file_data, data + string_offset,
				header->string_size, vNULL);

  /* A NULL string terminates the list.  */
  while ((str = streamer_read_string_cst (data_in, &ib)))
    {
      asm_node *node = symtab->finalize_toplevel_asm (str);
      node->order = streamer_read_hwi (&ib) + order_base;
      /* Keep the global order counter ahead of every assigned order.  */
      if (node->order >= symtab->order)
	symtab->order = node->order + 1;
    }

  lto_data_in_delete (data_in);

  lto_free_section_data (file_data, LTO_section_asm, NULL, data, len);
}
1601
1602
/* Input mode table.  Reads the mode table section of FILE_DATA and
   builds FILE_DATA->mode_table, mapping each 8-bit mode number used in
   the file's streams to an equivalent machine_mode of this host (or to
   BLKmode for unsupported vector modes with a supported scalar mode).  */

void
lto_input_mode_table (struct lto_file_decl_data *file_data)
{
  size_t len;
  const char *data = lto_get_section_data (file_data, LTO_section_mode_table,
					   NULL, &len);
  if (! data)
    {
      internal_error ("cannot read LTO mode table from %s",
		      file_data->file_name);
      return;
    }

  /* One zero-initialized slot per possible 8-bit mode number.  */
  unsigned char *table = ggc_cleared_vec_alloc<unsigned char> (1 << 8);
  file_data->mode_table = table;
  const struct lto_simple_header_with_strings *header
    = (const struct lto_simple_header_with_strings *) data;
  int string_offset;
  class data_in *data_in;
  string_offset = sizeof (*header) + header->main_size;

  lto_input_block ib (data + sizeof (*header), header->main_size, NULL);
  data_in = lto_data_in_create (file_data, data + string_offset,
				header->string_size, vNULL);
  bitpack_d bp = streamer_read_bitpack (&ib);

  /* VOIDmode and BLKmode always map to themselves.  */
  table[VOIDmode] = VOIDmode;
  table[BLKmode] = BLKmode;
  unsigned int m;
  /* A VOIDmode entry terminates the streamed table.  */
  while ((m = bp_unpack_value (&bp, 8)) != VOIDmode)
    {
      enum mode_class mclass
	= bp_unpack_enum (&bp, mode_class, MAX_MODE_CLASS);
      poly_uint16 size = bp_unpack_poly_value (&bp, 16);
      poly_uint16 prec = bp_unpack_poly_value (&bp, 16);
      machine_mode inner = (machine_mode) bp_unpack_value (&bp, 8);
      poly_uint16 nunits = bp_unpack_poly_value (&bp, 16);
      unsigned int ibit = 0, fbit = 0;
      unsigned int real_fmt_len = 0;
      const char *real_fmt_name = NULL;
      switch (mclass)
	{
	case MODE_FRACT:
	case MODE_UFRACT:
	case MODE_ACCUM:
	case MODE_UACCUM:
	  /* Fixed-point modes also stream their integral/fractional
	     bit counts.  */
	  ibit = bp_unpack_value (&bp, 8);
	  fbit = bp_unpack_value (&bp, 8);
	  break;
	case MODE_FLOAT:
	case MODE_DECIMAL_FLOAT:
	  /* Float modes also stream their real format name.  */
	  real_fmt_name = bp_unpack_indexed_string (data_in, &bp,
						    &real_fmt_len);
	  break;
	default:
	  break;
	}
      /* First search just the GET_CLASS_NARROWEST_MODE to wider modes,
	 if not found, fallback to all modes.  */
      int pass;
      for (pass = 0; pass < 2; pass++)
	for (machine_mode mr = pass ? VOIDmode
				    : GET_CLASS_NARROWEST_MODE (mclass);
	     pass ? mr < MAX_MACHINE_MODE : mr != VOIDmode;
	     pass ? mr = (machine_mode) (mr + 1)
		  : mr = GET_MODE_WIDER_MODE (mr).else_void ())
	  if (GET_MODE_CLASS (mr) != mclass
	      || maybe_ne (GET_MODE_SIZE (mr), size)
	      || maybe_ne (GET_MODE_PRECISION (mr), prec)
	      || (inner == m
		  ? GET_MODE_INNER (mr) != mr
		  : GET_MODE_INNER (mr) != table[(int) inner])
	      || GET_MODE_IBIT (mr) != ibit
	      || GET_MODE_FBIT (mr) != fbit
	      || maybe_ne (GET_MODE_NUNITS (mr), nunits))
	    continue;
	  else if ((mclass == MODE_FLOAT || mclass == MODE_DECIMAL_FLOAT)
		   && strcmp (REAL_MODE_FORMAT (mr)->name, real_fmt_name) != 0)
	    continue;
	  else
	    {
	      /* Match found.  Setting PASS to 2 stops the outer loop;
		 after the loop's final increment PASS is 3, which
		 distinguishes success from exhaustion (PASS == 2).  */
	      table[m] = mr;
	      pass = 2;
	      break;
	    }
      unsigned int mname_len;
      const char *mname = bp_unpack_indexed_string (data_in, &bp, &mname_len);
      if (pass == 2)
	{
	  /* No host mode matched.  */
	  switch (mclass)
	    {
	    case MODE_VECTOR_BOOL:
	    case MODE_VECTOR_INT:
	    case MODE_VECTOR_FLOAT:
	    case MODE_VECTOR_FRACT:
	    case MODE_VECTOR_UFRACT:
	    case MODE_VECTOR_ACCUM:
	    case MODE_VECTOR_UACCUM:
	      /* For unsupported vector modes just use BLKmode,
		 if the scalar mode is supported.  */
	      if (table[(int) inner] != VOIDmode)
		{
		  table[m] = BLKmode;
		  break;
		}
	      /* FALLTHRU */
	    default:
	      fatal_error (UNKNOWN_LOCATION, "unsupported mode %qs", mname);
	      break;
	    }
	}
    }
  lto_data_in_delete (data_in);

  lto_free_section_data (file_data, LTO_section_mode_table, NULL, data, len);
}
1721
1722
/* Initialization for the LTO reader.  Sets up the streamer and the
   table holding input file names.  */

void
lto_reader_init (void)
{
  lto_streamer_init ();
  /* 37 is merely the initial size hint for the hash table.  */
  file_name_hash_table
    = new hash_table<freeing_string_slot_hasher> (37);
}
1732
1733
1734 /* Create a new data_in object for FILE_DATA. STRINGS is the string
1735 table to use with LEN strings. RESOLUTIONS is the vector of linker
1736 resolutions (NULL if not using a linker plugin). */
1737
1738 class data_in *
1739 lto_data_in_create (struct lto_file_decl_data *file_data, const char *strings,
1740 unsigned len,
1741 vec<ld_plugin_symbol_resolution_t> resolutions)
1742 {
1743 class data_in *data_in = new (class data_in);
1744 data_in->file_data = file_data;
1745 data_in->strings = strings;
1746 data_in->strings_len = len;
1747 data_in->globals_resolution = resolutions;
1748 data_in->reader_cache = streamer_tree_cache_create (false, false, true);
1749 return data_in;
1750 }
1751
1752
/* Remove DATA_IN.  Releases the resolution vector and the reader
   cache before deleting the object itself.  */

void
lto_data_in_delete (class data_in *data_in)
{
  data_in->globals_resolution.release ();
  streamer_tree_cache_delete (data_in->reader_cache);
  delete data_in;
}