]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/lto-streamer-in.c
i386: Improve vector mode and TFmode ABS and NEG patterns
[thirdparty/gcc.git] / gcc / lto-streamer-in.c
1 /* Read the GIMPLE representation from a file stream.
2
3 Copyright (C) 2009-2020 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "cfghooks.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "gimple-streamer.h"
35 #include "toplev.h"
36 #include "gimple-iterator.h"
37 #include "tree-cfg.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "tree-ssa.h"
41 #include "except.h"
42 #include "cgraph.h"
43 #include "cfgloop.h"
44 #include "debug.h"
45 #include "alloc-pool.h"
46
47 /* Allocator used to hold string slot entries for line map streaming. */
48 static struct object_allocator<struct string_slot> *string_slot_allocator;
49
50 /* The table to hold the file names. */
51 static hash_table<string_slot_hasher> *file_name_hash_table;
52
/* This obstack holds the file names used in locators.  Line map data
   structures point into it, so it must stay allocated for as long as
   the line maps exist.  */
56 static struct obstack file_name_obstack;
57
58
59 /* Check that tag ACTUAL has one of the given values. NUM_TAGS is the
60 number of valid tag values to check. */
61
62 void
63 lto_tag_check_set (enum LTO_tags actual, int ntags, ...)
64 {
65 va_list ap;
66 int i;
67
68 va_start (ap, ntags);
69 for (i = 0; i < ntags; i++)
70 if ((unsigned) actual == va_arg (ap, unsigned))
71 {
72 va_end (ap);
73 return;
74 }
75
76 va_end (ap);
77 internal_error ("bytecode stream: unexpected tag %s", lto_tag_name (actual));
78 }
79
80
81 /* Read LENGTH bytes from STREAM to ADDR. */
82
83 void
84 lto_input_data_block (class lto_input_block *ib, void *addr, size_t length)
85 {
86 size_t i;
87 unsigned char *const buffer = (unsigned char *) addr;
88
89 for (i = 0; i < length; i++)
90 buffer[i] = streamer_read_uchar (ib);
91 }
92
93
94 /* Lookup STRING in file_name_hash_table. If found, return the existing
95 string, otherwise insert STRING as the canonical version. */
96
97 static const char *
98 canon_file_name (const char *string)
99 {
100 string_slot **slot;
101 struct string_slot s_slot;
102 size_t len = strlen (string);
103
104 s_slot.s = string;
105 s_slot.len = len;
106
107 slot = file_name_hash_table->find_slot (&s_slot, INSERT);
108 if (*slot == NULL)
109 {
110 char *saved_string;
111 struct string_slot *new_slot;
112
113 saved_string = XOBNEWVEC (&file_name_obstack, char, len + 1);
114 new_slot = string_slot_allocator->allocate ();
115 memcpy (saved_string, string, len + 1);
116 new_slot->s = saved_string;
117 new_slot->len = len;
118 *slot = new_slot;
119 return saved_string;
120 }
121 else
122 {
123 struct string_slot *old_slot = *slot;
124 return old_slot->s;
125 }
126 }
127
/* Pointer to the currently alive instance of lto_location_cache.  Only one
   cache is active at a time; input_location and cmp_loc assert/consult it.  */

lto_location_cache *lto_location_cache::current_cache;
131
/* qsort comparator: sort cached locations into source order, but place
   entries matching the location most recently entered into the line table
   first, so apply_location_cache can continue the current file/line run.  */

int
lto_location_cache::cmp_loc (const void *pa, const void *pb)
{
  const cached_location *a = ((const cached_location *)pa);
  const cached_location *b = ((const cached_location *)pb);
  /* Snapshot of the active cache's last-applied file and line.  */
  const char *current_file = current_cache->current_file;
  int current_line = current_cache->current_line;

  /* Entries in the current file sort before all other files.  */
  if (a->file == current_file && b->file != current_file)
    return -1;
  if (a->file != current_file && b->file == current_file)
    return 1;
  /* Within the current file, the current line sorts first.  */
  if (a->file == current_file && b->file == current_file)
    {
      if (a->line == current_line && b->line != current_line)
	return -1;
      if (a->line != current_line && b->line == current_line)
	return 1;
    }
  /* Otherwise order by file name, system-header flag, line, then column.  */
  if (a->file != b->file)
    return strcmp (a->file, b->file);
  if (a->sysp != b->sysp)
    return a->sysp ? 1 : -1;
  if (a->line != b->line)
    return a->line - b->line;
  return a->col - b->col;
}
161
/* Apply all changes in the location cache: enter the queued locations into
   the line map and patch the trees that point at them.  Returns true when
   any location was applied, false when the cache was empty.  */

bool
lto_location_cache::apply_location_cache ()
{
  /* File last entered into the line table; static so that across calls we
     know whether to LC_ENTER (very first file) or LC_RENAME.  */
  static const char *prev_file;
  if (!loc_cache.length ())
    return false;
  /* Sorting groups entries by file and line so each linemap_add /
     linemap_line_start below covers a maximal run of entries.  */
  if (loc_cache.length () > 1)
    loc_cache.qsort (cmp_loc);

  for (unsigned int i = 0; i < loc_cache.length (); i++)
    {
      struct cached_location loc = loc_cache[i];

      if (current_file != loc.file)
	linemap_add (line_table, prev_file ? LC_RENAME : LC_ENTER,
		     loc.sysp, loc.file, loc.line);
      else if (current_line != loc.line)
	{
	  int max = loc.col;

	  /* Scan ahead for the widest column used on this line so the
	     line is started with a sufficient column range.  */
	  for (unsigned int j = i + 1; j < loc_cache.length (); j++)
	    if (loc.file != loc_cache[j].file
		|| loc.line != loc_cache[j].line)
	      break;
	    else if (max < loc_cache[j].col)
	      max = loc_cache[j].col;
	  linemap_line_start (line_table, loc.line, max + 1);
	}
      /* Queued slots still hold the sentinel left by input_location
	 (presumably RESERVED_LOCATION_COUNT == BUILTINS_LOCATION + 1 —
	 see the comment there).  */
      gcc_assert (*loc.loc == BUILTINS_LOCATION + 1);
      if (current_file == loc.file && current_line == loc.line
	  && current_col == loc.col)
	/* Same spot as the previous entry: reuse its location_t.  */
	*loc.loc = current_loc;
      else
	current_loc = *loc.loc = linemap_position_for_column (line_table,
							      loc.col);
      current_line = loc.line;
      prev_file = current_file = loc.file;
      current_col = loc.col;
    }
  loc_cache.truncate (0);
  accepted_length = 0;
  return true;
}
208
/* Tree merging did not succeed; mark all changes in the cache as accepted
   so a later revert_location_cache will not discard them.  */

void
lto_location_cache::accept_location_cache ()
{
  gcc_assert (current_cache == this);
  accepted_length = loc_cache.length ();
}
217
/* Tree merging did succeed; throw away the entries queued since the last
   accept_location_cache (they refer to trees that were discarded).  */

void
lto_location_cache::revert_location_cache ()
{
  loc_cache.truncate (accepted_length);
}
225
/* Read a location bitpack from BP and either update *LOC directly
   or add it to the location cache.
   It is necessary to call apply_location_cache to get *LOC updated.  */

void
lto_location_cache::input_location (location_t *loc, struct bitpack_d *bp,
				    class data_in *data_in)
{
  /* Decoder state: the most recently streamed file/line/column.  The
     writer encodes only the components that changed, so this state must
     persist across calls.  */
  static const char *stream_file;
  static int stream_line;
  static int stream_col;
  static bool stream_sysp;
  bool file_change, line_change, column_change;

  gcc_assert (current_cache == this);

  *loc = bp_unpack_int_in_range (bp, "location", 0, RESERVED_LOCATION_COUNT);

  /* Reserved locations (UNKNOWN_LOCATION etc.) are streamed verbatim.  */
  if (*loc < RESERVED_LOCATION_COUNT)
    return;

  /* Keep value RESERVED_LOCATION_COUNT in *loc as linemap lookups will
     ICE on it.  */

  file_change = bp_unpack_value (bp, 1);
  line_change = bp_unpack_value (bp, 1);
  column_change = bp_unpack_value (bp, 1);

  if (file_change)
    {
      stream_file = canon_file_name (bp_unpack_string (data_in, bp));
      stream_sysp = bp_unpack_value (bp, 1);
    }

  if (line_change)
    stream_line = bp_unpack_var_len_unsigned (bp);

  if (column_change)
    stream_col = bp_unpack_var_len_unsigned (bp);

  /* This optimization saves location cache operations during gimple
     streaming.  */

  if (current_file == stream_file && current_line == stream_line
      && current_col == stream_col && current_sysp == stream_sysp)
    {
      *loc = current_loc;
      return;
    }

  /* Otherwise queue the location; apply_location_cache patches *LOC.  */
  struct cached_location entry
    = {stream_file, loc, stream_line, stream_col, stream_sysp};
  loc_cache.safe_push (entry);
}
280
/* Read a location bitpack from BP and either update *LOC directly
   or add it to DATA_IN's location cache.
   It is necessary to call apply_location_cache to get *LOC updated.  */

void
lto_input_location (location_t *loc, struct bitpack_d *bp,
		    class data_in *data_in)
{
  /* Delegate to the cache owned by DATA_IN.  */
  data_in->location_cache.input_location (loc, bp, data_in);
}
291
292 /* Read location and return it instead of going through location caching.
293 This should be used only when the resulting location is not going to be
294 discarded. */
295
296 location_t
297 stream_input_location_now (struct bitpack_d *bp, class data_in *data_in)
298 {
299 location_t loc;
300 stream_input_location (&loc, bp, data_in);
301 data_in->location_cache.apply_location_cache ();
302 return loc;
303 }
304
/* Read a reference to a tree node from DATA_IN using input block IB.
   TAG is the expected node that should be found in IB; if TAG belongs
   to one of the indexable trees, expect to read a reference index to
   be looked up in one of the symbol tables, otherwise read the physical
   representation of the tree using stream_read_tree.  FN is the
   function scope for the read tree (used for SSA name references).  */

tree
lto_input_tree_ref (class lto_input_block *ib, class data_in *data_in,
		    struct function *fn, enum LTO_tags tag)
{
  unsigned HOST_WIDE_INT ix_u;
  tree result = NULL_TREE;

  lto_tag_check_range (tag, LTO_field_decl_ref, LTO_namelist_decl_ref);

  /* Every case reads one index and resolves it against the table that
     corresponds to TAG.  */
  switch (tag)
    {
    case LTO_type_ref:
      ix_u = streamer_read_uhwi (ib);
      result = lto_file_decl_data_get_type (data_in->file_data, ix_u);
      break;

    case LTO_ssa_name_ref:
      /* SSA names are per-function, hence resolved through FN.  */
      ix_u = streamer_read_uhwi (ib);
      result = (*SSANAMES (fn))[ix_u];
      break;

    case LTO_field_decl_ref:
      ix_u = streamer_read_uhwi (ib);
      result = lto_file_decl_data_get_field_decl (data_in->file_data, ix_u);
      break;

    case LTO_function_decl_ref:
      ix_u = streamer_read_uhwi (ib);
      result = lto_file_decl_data_get_fn_decl (data_in->file_data, ix_u);
      break;

    case LTO_type_decl_ref:
      ix_u = streamer_read_uhwi (ib);
      result = lto_file_decl_data_get_type_decl (data_in->file_data, ix_u);
      break;

    case LTO_namespace_decl_ref:
      ix_u = streamer_read_uhwi (ib);
      result = lto_file_decl_data_get_namespace_decl (data_in->file_data, ix_u);
      break;

    /* All remaining indexable decl kinds share the variable-decl table.  */
    case LTO_global_decl_ref:
    case LTO_result_decl_ref:
    case LTO_const_decl_ref:
    case LTO_imported_decl_ref:
    case LTO_label_decl_ref:
    case LTO_translation_unit_decl_ref:
    case LTO_namelist_decl_ref:
      ix_u = streamer_read_uhwi (ib);
      result = lto_file_decl_data_get_var_decl (data_in->file_data, ix_u);
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (result);

  return result;
}
372
373
/* Read and return a double-linked list of catch handlers from input
   block IB, using descriptors in DATA_IN.  On return *LAST_P points to
   the tail of the list; the head is the return value.  */

static struct eh_catch_d *
lto_input_eh_catch_list (class lto_input_block *ib, class data_in *data_in,
			 eh_catch *last_p)
{
  eh_catch first;
  enum LTO_tags tag;

  *last_p = first = NULL;
  tag = streamer_read_record_start (ib);
  /* A zero (LTO_null) tag terminates the list.  */
  while (tag)
    {
      tree list;
      eh_catch n;

      lto_tag_check_range (tag, LTO_eh_catch, LTO_eh_catch);

      /* Read the catch node.  */
      n = ggc_cleared_alloc<eh_catch_d> ();
      n->type_list = stream_read_tree (ib, data_in);
      n->filter_list = stream_read_tree (ib, data_in);
      n->label = stream_read_tree (ib, data_in);

      /* Register all the types in N->FILTER_LIST.  */
      for (list = n->filter_list; list; list = TREE_CHAIN (list))
	add_type_for_runtime (TREE_VALUE (list));

      /* Chain N to the end of the list.  */
      if (*last_p)
	(*last_p)->next_catch = n;
      n->prev_catch = *last_p;
      *last_p = n;

      /* Set the head of the list the first time through the loop.  */
      if (first == NULL)
	first = n;

      tag = streamer_read_record_start (ib);
    }

  return first;
}
418
419
/* Read and return EH region IX from input block IB, using descriptors
   in DATA_IN.  Returns NULL when the stream contains no region for this
   slot.  */

static eh_region
input_eh_region (class lto_input_block *ib, class data_in *data_in, int ix)
{
  enum LTO_tags tag;
  eh_region r;

  /* Read the region header.  */
  tag = streamer_read_record_start (ib);
  if (tag == LTO_null)
    return NULL;

  r = ggc_cleared_alloc<eh_region_d> ();
  r->index = streamer_read_hwi (ib);

  /* The streamed index must match the caller's array slot.  */
  gcc_assert (r->index == ix);

  /* Read all the region pointers as region numbers.  We'll fix up
     the pointers once the whole array has been read.  */
  r->outer = (eh_region) (intptr_t) streamer_read_hwi (ib);
  r->inner = (eh_region) (intptr_t) streamer_read_hwi (ib);
  r->next_peer = (eh_region) (intptr_t) streamer_read_hwi (ib);

  /* The tag encodes the region kind; read the kind-specific payload.  */
  switch (tag)
    {
    case LTO_ert_cleanup:
      r->type = ERT_CLEANUP;
      break;

    case LTO_ert_try:
      {
	struct eh_catch_d *last_catch;
	r->type = ERT_TRY;
	r->u.eh_try.first_catch = lto_input_eh_catch_list (ib, data_in,
							   &last_catch);
	r->u.eh_try.last_catch = last_catch;
	break;
      }

    case LTO_ert_allowed_exceptions:
      {
	tree l;

	r->type = ERT_ALLOWED_EXCEPTIONS;
	r->u.allowed.type_list = stream_read_tree (ib, data_in);
	r->u.allowed.label = stream_read_tree (ib, data_in);
	r->u.allowed.filter = streamer_read_uhwi (ib);

	/* Register each allowed type for the runtime.  */
	for (l = r->u.allowed.type_list; l ; l = TREE_CHAIN (l))
	  add_type_for_runtime (TREE_VALUE (l));
      }
      break;

    case LTO_ert_must_not_throw:
      {
	r->type = ERT_MUST_NOT_THROW;
	r->u.must_not_throw.failure_decl = stream_read_tree (ib, data_in);
	bitpack_d bp = streamer_read_bitpack (ib);
	r->u.must_not_throw.failure_loc
	  = stream_input_location_now (&bp, data_in);
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Like the region links above, this is an index until fixup.  */
  r->landing_pads = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);

  return r;
}
493
494
495 /* Read and return EH landing pad IX from input block IB, using descriptors
496 in DATA_IN. */
497
498 static eh_landing_pad
499 input_eh_lp (class lto_input_block *ib, class data_in *data_in, int ix)
500 {
501 enum LTO_tags tag;
502 eh_landing_pad lp;
503
504 /* Read the landing pad header. */
505 tag = streamer_read_record_start (ib);
506 if (tag == LTO_null)
507 return NULL;
508
509 lto_tag_check_range (tag, LTO_eh_landing_pad, LTO_eh_landing_pad);
510
511 lp = ggc_cleared_alloc<eh_landing_pad_d> ();
512 lp->index = streamer_read_hwi (ib);
513 gcc_assert (lp->index == ix);
514 lp->next_lp = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
515 lp->region = (eh_region) (intptr_t) streamer_read_hwi (ib);
516 lp->post_landing_pad = stream_read_tree (ib, data_in);
517
518 return lp;
519 }
520
521
/* After reading the EH regions, pointers to peer and children regions
   are region numbers.  This converts all these region numbers into
   real pointers into the rematerialized regions for FN.  ROOT_REGION
   is the region number for the root EH region in FN.  */

static void
fixup_eh_region_pointers (struct function *fn, HOST_WIDE_INT root_region)
{
  unsigned i;
  vec<eh_region, va_gc> *eh_array = fn->eh->region_array;
  vec<eh_landing_pad, va_gc> *lp_array = fn->eh->lp_array;
  eh_region r;
  eh_landing_pad lp;

  gcc_assert (eh_array && lp_array);

  gcc_assert (root_region >= 0);
  fn->eh->region_tree = (*eh_array)[root_region];

/* Reinterpret a pointer-field as the index it was streamed as and
   replace it with the corresponding array element.  */
#define FIXUP_EH_REGION(r) (r) = (*eh_array)[(HOST_WIDE_INT) (intptr_t) (r)]
#define FIXUP_EH_LP(p) (p) = (*lp_array)[(HOST_WIDE_INT) (intptr_t) (p)]

  /* Convert all the index numbers stored in pointer fields into
     pointers to the corresponding slots in the EH region array.  */
  FOR_EACH_VEC_ELT (*eh_array, i, r)
    {
      /* The array may contain NULL regions.  */
      if (r == NULL)
	continue;

      gcc_assert (i == (unsigned) r->index);
      FIXUP_EH_REGION (r->outer);
      FIXUP_EH_REGION (r->inner);
      FIXUP_EH_REGION (r->next_peer);
      FIXUP_EH_LP (r->landing_pads);
    }

  /* Convert all the index numbers stored in pointer fields into
     pointers to the corresponding slots in the EH landing pad array.  */
  FOR_EACH_VEC_ELT (*lp_array, i, lp)
    {
      /* The array may contain NULL landing pads.  */
      if (lp == NULL)
	continue;

      gcc_assert (i == (unsigned) lp->index);
      FIXUP_EH_LP (lp->next_lp);
      FIXUP_EH_REGION (lp->region);
    }

#undef FIXUP_EH_REGION
#undef FIXUP_EH_LP
}
575
576
577 /* Initialize EH support. */
578
579 void
580 lto_init_eh (void)
581 {
582 static bool eh_initialized_p = false;
583
584 if (eh_initialized_p)
585 return;
586
587 /* Contrary to most other FEs, we only initialize EH support when at
588 least one of the files in the set contains exception regions in
589 it. Since this happens much later than the call to init_eh in
590 lang_dependent_init, we have to set flag_exceptions and call
591 init_eh again to initialize the EH tables. */
592 flag_exceptions = 1;
593 init_eh ();
594
595 eh_initialized_p = true;
596 }
597
598
/* Read the exception table for FN from IB using the data descriptors
   in DATA_IN.  The record layout must mirror the writer: root region,
   region array, landing pads, runtime types, then action chains.  */

static void
input_eh_regions (class lto_input_block *ib, class data_in *data_in,
		  struct function *fn)
{
  HOST_WIDE_INT i, root_region, len;
  enum LTO_tags tag;

  tag = streamer_read_record_start (ib);
  /* Functions without EH regions stream no table at all.  */
  if (tag == LTO_null)
    return;

  lto_tag_check_range (tag, LTO_eh_table, LTO_eh_table);

  gcc_assert (fn->eh);

  root_region = streamer_read_hwi (ib);
  /* Guard against the HOST_WIDE_INT value not fitting in an int.  */
  gcc_assert (root_region == (int) root_region);

  /* Read the EH region array.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      vec_safe_grow_cleared (fn->eh->region_array, len);
      for (i = 0; i < len; i++)
	{
	  eh_region r = input_eh_region (ib, data_in, i);
	  (*fn->eh->region_array)[i] = r;
	}
    }

  /* Read the landing pads.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      vec_safe_grow_cleared (fn->eh->lp_array, len);
      for (i = 0; i < len; i++)
	{
	  eh_landing_pad lp = input_eh_lp (ib, data_in, i);
	  (*fn->eh->lp_array)[i] = lp;
	}
    }

  /* Read the runtime type data.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      vec_safe_grow_cleared (fn->eh->ttype_data, len);
      for (i = 0; i < len; i++)
	{
	  tree ttype = stream_read_tree (ib, data_in);
	  (*fn->eh->ttype_data)[i] = ttype;
	}
    }

  /* Read the table of action chains.  The representation differs for
     the ARM EABI unwinder (trees) vs. the generic one (bytes).  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      if (targetm.arm_eabi_unwinder)
	{
	  vec_safe_grow_cleared (fn->eh->ehspec_data.arm_eabi, len);
	  for (i = 0; i < len; i++)
	    {
	      tree t = stream_read_tree (ib, data_in);
	      (*fn->eh->ehspec_data.arm_eabi)[i] = t;
	    }
	}
      else
	{
	  vec_safe_grow_cleared (fn->eh->ehspec_data.other, len);
	  for (i = 0; i < len; i++)
	    {
	      uchar c = streamer_read_uchar (ib);
	      (*fn->eh->ehspec_data.other)[i] = c;
	    }
	}
    }

  /* Reconstruct the EH region tree by fixing up the peer/children
     pointers.  */
  fixup_eh_region_pointers (fn, root_region);

  /* The table is terminated by an LTO_null record.  */
  tag = streamer_read_record_start (ib);
  lto_tag_check_range (tag, LTO_null, LTO_null);
}
691
692
693 /* Make a new basic block with index INDEX in function FN. */
694
695 static basic_block
696 make_new_block (struct function *fn, unsigned int index)
697 {
698 basic_block bb = alloc_block ();
699 bb->index = index;
700 SET_BASIC_BLOCK_FOR_FN (fn, index, bb);
701 n_basic_blocks_for_fn (fn)++;
702 return bb;
703 }
704
705
/* Read the CFG for function FN from input block IB: profile status,
   blocks and edges, block ordering, and finally the loop tree.  */

static void
input_cfg (class lto_input_block *ib, class data_in *data_in,
	   struct function *fn)
{
  unsigned int bb_count;
  basic_block p_bb;
  unsigned int i;
  int index;

  init_empty_tree_cfg_for_function (fn);
  init_ssa_operands (fn);

  profile_status_for_fn (fn) = streamer_read_enum (ib, profile_status_d,
						   PROFILE_LAST);

  bb_count = streamer_read_uhwi (ib);

  last_basic_block_for_fn (fn) = bb_count;
  if (bb_count > basic_block_info_for_fn (fn)->length ())
    vec_safe_grow_cleared (basic_block_info_for_fn (fn), bb_count);

  if (bb_count > label_to_block_map_for_fn (fn)->length ())
    vec_safe_grow_cleared (label_to_block_map_for_fn (fn), bb_count);

  /* First pass: blocks with their outgoing edges; -1 terminates.  */
  index = streamer_read_hwi (ib);
  while (index != -1)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
      unsigned int edge_count;

      /* A block may already exist if it was created as an edge
	 destination earlier.  */
      if (bb == NULL)
	bb = make_new_block (fn, index);

      edge_count = streamer_read_uhwi (ib);

      /* Connect up the CFG.  */
      for (i = 0; i < edge_count; i++)
	{
	  unsigned int dest_index;
	  unsigned int edge_flags;
	  basic_block dest;
	  profile_probability probability;
	  edge e;

	  dest_index = streamer_read_uhwi (ib);
	  probability = profile_probability::stream_in (ib);
	  edge_flags = streamer_read_uhwi (ib);

	  dest = BASIC_BLOCK_FOR_FN (fn, dest_index);

	  if (dest == NULL)
	    dest = make_new_block (fn, dest_index);

	  e = make_edge (bb, dest, edge_flags);
	  e->probability = probability;
	}

      index = streamer_read_hwi (ib);
    }

  /* Second pass: rebuild the prev_bb/next_bb chain in streamed order,
     starting from the entry block; -1 terminates.  */
  p_bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
  index = streamer_read_hwi (ib);
  while (index != -1)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
      bb->prev_bb = p_bb;
      p_bb->next_bb = bb;
      p_bb = bb;
      index = streamer_read_hwi (ib);
    }

  /* ??? The cfgloop interface is tied to cfun.  */
  gcc_assert (cfun == fn);

  /* Input the loop tree.  */
  unsigned n_loops = streamer_read_uhwi (ib);
  if (n_loops == 0)
    return;

  struct loops *loops = ggc_cleared_alloc<struct loops> ();
  init_loops_structure (fn, loops, n_loops);
  set_loops_for_fn (fn, loops);

  /* Input each loop and associate it with its loop header so
     flow_loops_find can rebuild the loop tree.  */
  for (unsigned i = 1; i < n_loops; ++i)
    {
      int header_index = streamer_read_hwi (ib);
      /* -1 marks a removed loop; keep the array slot to preserve
	 loop numbering.  */
      if (header_index == -1)
	{
	  loops->larray->quick_push (NULL);
	  continue;
	}

      class loop *loop = alloc_loop ();
      loop->header = BASIC_BLOCK_FOR_FN (fn, header_index);
      loop->header->loop_father = loop;

      /* Read everything copy_loop_info copies.  */
      loop->estimate_state = streamer_read_enum (ib, loop_estimation, EST_LAST);
      loop->any_upper_bound = streamer_read_hwi (ib);
      if (loop->any_upper_bound)
	loop->nb_iterations_upper_bound = streamer_read_widest_int (ib);
      loop->any_likely_upper_bound = streamer_read_hwi (ib);
      if (loop->any_likely_upper_bound)
	loop->nb_iterations_likely_upper_bound = streamer_read_widest_int (ib);
      loop->any_estimate = streamer_read_hwi (ib);
      if (loop->any_estimate)
	loop->nb_iterations_estimate = streamer_read_widest_int (ib);

      /* Read OMP SIMD related info.  */
      loop->safelen = streamer_read_hwi (ib);
      loop->unroll = streamer_read_hwi (ib);
      loop->owned_clique = streamer_read_hwi (ib);
      loop->dont_vectorize = streamer_read_hwi (ib);
      loop->force_vectorize = streamer_read_hwi (ib);
      loop->finite_p = streamer_read_hwi (ib);
      loop->simduid = stream_read_tree (ib, data_in);

      place_new_loop (fn, loop);

      /* flow_loops_find doesn't like loops not in the tree, hook them
	 all as siblings of the tree root temporarily.  */
      flow_loop_tree_node_add (loops->tree_root, loop);
    }

  /* Rebuild the loop tree.  */
  flow_loops_find (loops);
}
837
838
/* Read the SSA names array for function FN from DATA_IN using input
   block IB.  The stream contains (1-based index, default-def flag,
   underlying var) triples; index 0 terminates.  */

static void
input_ssa_names (class lto_input_block *ib, class data_in *data_in,
		 struct function *fn)
{
  unsigned int i, size;

  size = streamer_read_uhwi (ib);
  init_ssanames (fn, size);

  i = streamer_read_uhwi (ib);
  while (i)
    {
      tree ssa_name, name;
      bool is_default_def;

      /* Skip over the elements that had been freed.  */
      while (SSANAMES (fn)->length () < i)
	SSANAMES (fn)->quick_push (NULL_TREE);

      is_default_def = (streamer_read_uchar (ib) != 0);
      name = stream_read_tree (ib, data_in);
      ssa_name = make_ssa_name_fn (fn, name, NULL);

      if (is_default_def)
	{
	  set_ssa_default_def (cfun, SSA_NAME_VAR (ssa_name), ssa_name);
	  /* Default definitions have no real defining statement.  */
	  SSA_NAME_DEF_STMT (ssa_name) = gimple_build_nop ();
	}

      i = streamer_read_uhwi (ib);
    }
}
874
875
/* Go through all NODE edges and fixup call_stmt pointers
   so they point to STMTS, an array indexed by (lto_stmt_uid - 1).
   FN provides the valid uid range.  */

static void
fixup_call_stmt_edges_1 (struct cgraph_node *node, gimple **stmts,
			 struct function *fn)
{
/* True when UID cannot index STMTS: uids are 1-based and bounded by
   FN's max statement uid.  */
#define STMT_UID_NOT_IN_RANGE(uid) \
  (gimple_stmt_max_uid (fn) < uid || uid == 0)

  struct cgraph_edge *cedge;
  struct ipa_ref *ref = NULL;
  unsigned int i;

  /* Direct call edges.  */
  for (cedge = node->callees; cedge; cedge = cedge->next_callee)
    {
      if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
	fatal_error (input_location,
		     "Cgraph edge statement index out of range");
      cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
      cedge->lto_stmt_uid = 0;
      if (!cedge->call_stmt)
	fatal_error (input_location,
		     "Cgraph edge statement index not found");
    }
  /* Indirect call edges.  */
  for (cedge = node->indirect_calls; cedge; cedge = cedge->next_callee)
    {
      if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
	fatal_error (input_location,
		     "Cgraph edge statement index out of range");
      cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
      cedge->lto_stmt_uid = 0;
      if (!cedge->call_stmt)
	fatal_error (input_location, "Cgraph edge statement index not found");
    }
  /* IPA references attached to statements (uid 0 means none).  */
  for (i = 0; node->iterate_reference (i, ref); i++)
    if (ref->lto_stmt_uid)
      {
	if (STMT_UID_NOT_IN_RANGE (ref->lto_stmt_uid))
	  fatal_error (input_location,
		       "Reference statement index out of range");
	ref->stmt = stmts[ref->lto_stmt_uid - 1];
	ref->lto_stmt_uid = 0;
	if (!ref->stmt)
	  fatal_error (input_location, "Reference statement index not found");
      }
}
923
924
/* Fixup call_stmt pointers in NODE and all clones.  Walks up to the
   ultimate clone origin (whose struct function owns the statements),
   then traverses the entire clone tree depth-first.  */

static void
fixup_call_stmt_edges (struct cgraph_node *orig, gimple **stmts)
{
  struct cgraph_node *node;
  struct function *fn;

  /* The statement uids are relative to the clone origin's body.  */
  while (orig->clone_of)
    orig = orig->clone_of;
  fn = DECL_STRUCT_FUNCTION (orig->decl);

  /* Thunks have no gimple body to fix up.  */
  if (!orig->thunk.thunk_p)
    fixup_call_stmt_edges_1 (orig, stmts, fn);
  if (orig->clones)
    for (node = orig->clones; node != orig;)
      {
	if (!node->thunk.thunk_p)
	  fixup_call_stmt_edges_1 (node, stmts, fn);
	/* Depth-first walk of the clone tree: descend, else go to the
	   next sibling, else climb until a sibling exists.  */
	if (node->clones)
	  node = node->clones;
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    while (node != orig && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != orig)
	      node = node->next_sibling_clone;
	  }
      }
}
957
958
/* Input the base body of struct function FN from DATA_IN
   using input block IB.  The field order here must exactly mirror the
   writer; in particular the bitpack below is position-sensitive.  */

static void
input_struct_function_base (struct function *fn, class data_in *data_in,
			    class lto_input_block *ib)
{
  struct bitpack_d bp;
  int len;

  /* Read the static chain and non-local goto save area.  */
  fn->static_chain_decl = stream_read_tree (ib, data_in);
  fn->nonlocal_goto_save_area = stream_read_tree (ib, data_in);

  /* Read all the local symbols.  */
  len = streamer_read_hwi (ib);
  if (len > 0)
    {
      int i;
      vec_safe_grow_cleared (fn->local_decls, len);
      for (i = 0; i < len; i++)
	{
	  tree t = stream_read_tree (ib, data_in);
	  (*fn->local_decls)[i] = t;
	}
    }

  /* Input the current IL state of the function.  */
  fn->curr_properties = streamer_read_uhwi (ib);

  /* Read all the attributes for FN.  One bit each unless noted.  */
  bp = streamer_read_bitpack (ib);
  fn->is_thunk = bp_unpack_value (&bp, 1);
  fn->has_local_explicit_reg_vars = bp_unpack_value (&bp, 1);
  fn->returns_pcc_struct = bp_unpack_value (&bp, 1);
  fn->returns_struct = bp_unpack_value (&bp, 1);
  fn->can_throw_non_call_exceptions = bp_unpack_value (&bp, 1);
  fn->can_delete_dead_exceptions = bp_unpack_value (&bp, 1);
  fn->always_inline_functions_inlined = bp_unpack_value (&bp, 1);
  fn->after_inlining = bp_unpack_value (&bp, 1);
  fn->stdarg = bp_unpack_value (&bp, 1);
  fn->has_nonlocal_label = bp_unpack_value (&bp, 1);
  fn->has_forced_label_in_static = bp_unpack_value (&bp, 1);
  fn->calls_alloca = bp_unpack_value (&bp, 1);
  fn->calls_setjmp = bp_unpack_value (&bp, 1);
  fn->calls_eh_return = bp_unpack_value (&bp, 1);
  fn->has_force_vectorize_loops = bp_unpack_value (&bp, 1);
  fn->has_simduid_loops = bp_unpack_value (&bp, 1);
  fn->va_list_fpr_size = bp_unpack_value (&bp, 8);
  fn->va_list_gpr_size = bp_unpack_value (&bp, 8);
  fn->last_clique = bp_unpack_value (&bp, sizeof (short) * 8);

  /* Input the function start and end loci.  */
  fn->function_start_locus = stream_input_location_now (&bp, data_in);
  fn->function_end_locus = stream_input_location_now (&bp, data_in);

  /* Restore the instance discriminators if present.  */
  int instance_number = bp_unpack_value (&bp, 1);
  if (instance_number)
    {
      instance_number = bp_unpack_value (&bp, sizeof (int) * CHAR_BIT);
      maybe_create_decl_to_instance_map ()->put (fn->decl, instance_number);
    }
}
1023
1024
/* Read the body of function FN_DECL from DATA_IN using input block IB.
   IB_CFG holds the separately streamed control-flow graph.  NODE is the
   cgraph node for FN_DECL and supplies the profile-count materialization
   scale used when reading basic blocks.  */

static void
input_function (tree fn_decl, class data_in *data_in,
		class lto_input_block *ib, class lto_input_block *ib_cfg,
		cgraph_node *node)
{
  struct function *fn;
  enum LTO_tags tag;
  gimple **stmts;
  basic_block bb;

  tag = streamer_read_record_start (ib);
  lto_tag_check (tag, LTO_function);

  /* Read decls for parameters and args.  */
  DECL_RESULT (fn_decl) = stream_read_tree (ib, data_in);
  DECL_ARGUMENTS (fn_decl) = streamer_read_chain (ib, data_in);

  /* Read debug args if available.  */
  unsigned n_debugargs = streamer_read_uhwi (ib);
  if (n_debugargs)
    {
      vec<tree, va_gc> **debugargs = decl_debug_args_insert (fn_decl);
      vec_safe_grow (*debugargs, n_debugargs);
      for (unsigned i = 0; i < n_debugargs; ++i)
	(**debugargs)[i] = stream_read_tree (ib, data_in);
    }

  /* Read the tree of lexical scopes for the function.  */
  DECL_INITIAL (fn_decl) = stream_read_tree (ib, data_in);
  /* The streamed leaf BLOCKs are read purely for their side effect of
     being entered into the reader cache; the values themselves are not
     needed here.  */
  unsigned block_leaf_count = streamer_read_uhwi (ib);
  while (block_leaf_count--)
    stream_read_tree (ib, data_in);

  /* A zero flag means no function body follows; nothing more to do.  */
  if (!streamer_read_uhwi (ib))
    return;

  push_struct_function (fn_decl);
  fn = DECL_STRUCT_FUNCTION (fn_decl);
  init_tree_ssa (fn);
  /* We input IL in SSA form.  */
  cfun->gimple_df->in_ssa_p = true;

  gimple_register_cfg_hooks ();

  input_struct_function_base (fn, data_in, ib);
  input_cfg (ib_cfg, data_in, fn);

  /* Read all the SSA names.  */
  input_ssa_names (ib, data_in, fn);

  /* Read the exception handling regions in the function.  */
  input_eh_regions (ib, data_in, fn);

  gcc_assert (DECL_INITIAL (fn_decl));
  DECL_SAVED_TREE (fn_decl) = NULL_TREE;

  /* Read all the basic blocks; a zero tag terminates the list.  */
  tag = streamer_read_record_start (ib);
  while (tag)
    {
      input_bb (ib, tag, data_in, fn,
		node->count_materialization_scale);
      tag = streamer_read_record_start (ib);
    }

  /* Fix up the call statements that are mentioned in the callgraph
     edges.  First assign a dense UID to every PHI and statement ...  */
  set_gimple_stmt_max_uid (cfun, 0);
  FOR_ALL_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
    }
  /* ... then build a UID -> statement map for the fixup routines.  */
  stmts = (gimple **) xcalloc (gimple_stmt_max_uid (fn), sizeof (gimple *));
  FOR_ALL_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator bsi = gsi_start_phis (bb);
      while (!gsi_end_p (bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  gsi_next (&bsi);
	  stmts[gimple_uid (stmt)] = stmt;
	}
      bsi = gsi_start_bb (bb);
      while (!gsi_end_p (bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  bool remove = false;
	  /* If we're recompiling LTO objects with debug stmts but
	     we're not supposed to have debug stmts, remove them now.
	     We can't remove them earlier because this would cause uid
	     mismatches in fixups, but we can do it at this point, as
	     long as debug stmts don't require fixups.
	     Similarly remove all IFN_*SAN_* internal calls */
	  if (!flag_wpa)
	    {
	      if (is_gimple_debug (stmt)
		  && (gimple_debug_nonbind_marker_p (stmt)
		      ? !MAY_HAVE_DEBUG_MARKER_STMTS
		      : !MAY_HAVE_DEBUG_BIND_STMTS))
		remove = true;
	      /* In case the linemap overflows locations can be dropped
		 to zero.  Thus do not keep nonsensical inline entry markers
		 we'd later ICE on.  */
	      tree block;
	      if (gimple_debug_inline_entry_p (stmt)
		  && (((block = gimple_block (stmt))
		       && !inlined_function_outer_scope_p (block))
		      || !debug_inline_points))
		remove = true;
	      /* Unwanted sanitizer calls are turned into IFN_NOP rather
		 than removed, so statement UIDs stay stable for the
		 callgraph-edge fixups below.  */
	      if (is_gimple_call (stmt)
		  && gimple_call_internal_p (stmt))
		{
		  bool replace = false;
		  switch (gimple_call_internal_fn (stmt))
		    {
		    case IFN_UBSAN_NULL:
		      if ((flag_sanitize
			   & (SANITIZE_NULL | SANITIZE_ALIGNMENT)) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_BOUNDS:
		      if ((flag_sanitize & SANITIZE_BOUNDS) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_VPTR:
		      if ((flag_sanitize & SANITIZE_VPTR) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_OBJECT_SIZE:
		      if ((flag_sanitize & SANITIZE_OBJECT_SIZE) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_PTR:
		      if ((flag_sanitize & SANITIZE_POINTER_OVERFLOW) == 0)
			replace = true;
		      break;
		    case IFN_ASAN_MARK:
		      if ((flag_sanitize & SANITIZE_ADDRESS) == 0)
			replace = true;
		      break;
		    case IFN_TSAN_FUNC_EXIT:
		      if ((flag_sanitize & SANITIZE_THREAD) == 0)
			replace = true;
		      break;
		    default:
		      break;
		    }
		  if (replace)
		    {
		      gimple_call_set_internal_fn (as_a <gcall *> (stmt),
						   IFN_NOP);
		      update_stmt (stmt);
		    }
		}
	    }
	  if (remove)
	    {
	      /* Advance BSI past STMT before removing it so the walk
		 stays valid.  */
	      gimple_stmt_iterator gsi = bsi;
	      gsi_next (&bsi);
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      gsi_remove (&gsi, true);
	    }
	  else
	    {
	      gsi_next (&bsi);
	      stmts[gimple_uid (stmt)] = stmt;

	      /* Remember that the input function has begin stmt
		 markers, so that we know to expect them when emitting
		 debug info.  */
	      if (!cfun->debug_nonbind_markers
		  && gimple_debug_nonbind_marker_p (stmt))
		cfun->debug_nonbind_markers = true;
	    }
	}
    }

  /* Set the gimple body to the statement sequence in the entry
     basic block.  FIXME lto, this is fairly hacky.  The existence
     of a gimple body is used by the cgraph routines, but we should
     really use the presence of the CFG.  */
  {
    edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
    gimple_set_body (fn_decl, bb_seq (ei_edge (ei)->dest));
  }

  update_max_bb_count ();
  fixup_call_stmt_edges (node, stmts);
  execute_all_ipa_stmt_fixups (node, stmts);

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  free (stmts);
  pop_cfun ();
}
1233
/* Read the initializer of variable VAR from DATA_IN using input block IB
   and store it as DECL_INITIAL (VAR).  (Counterpart of input_function for
   LTO_section_static_initializer sections.)  */

static void
input_constructor (tree var, class data_in *data_in,
		   class lto_input_block *ib)
{
  DECL_INITIAL (var) = stream_read_tree (ib, data_in);
}
1242
1243
/* Read the body from DATA for function NODE and fill it in.
   FILE_DATA are the global decls and types.  SECTION_TYPE is either
   LTO_section_function_body or LTO_section_static_initializer.  If
   section type is LTO_section_function_body, FN must be the decl for
   that function.  */

static void
lto_read_body_or_constructor (struct lto_file_decl_data *file_data, struct symtab_node *node,
			      const char *data, enum lto_section_type section_type)
{
  const struct lto_function_header *header;
  class data_in *data_in;
  int cfg_offset;
  int main_offset;
  int string_offset;
  tree fn_decl = node->decl;

  /* Compute the section-relative offsets.  Function bodies have a CFG
     stream before the main stream; constructors do not, in which case
     CFG_OFFSET is deliberately left unset (and unused).  */
  header = (const struct lto_function_header *) data;
  if (TREE_CODE (node->decl) == FUNCTION_DECL)
    {
      cfg_offset = sizeof (struct lto_function_header);
      main_offset = cfg_offset + header->cfg_size;
      string_offset = main_offset + header->main_size;
    }
  else
    {
      main_offset = sizeof (struct lto_function_header);
      string_offset = main_offset + header->main_size;
    }

  data_in = lto_data_in_create (file_data, data + string_offset,
				header->string_size, vNULL);

  if (section_type == LTO_section_function_body)
    {
      struct lto_in_decl_state *decl_state;
      unsigned from;

      gcc_checking_assert (node);

      /* Use the function's decl state.  */
      decl_state = lto_get_function_in_decl_state (file_data, fn_decl);
      gcc_assert (decl_state);
      file_data->current_decl_state = decl_state;


      /* Set up the struct function.  Remember the current reader-cache
	 length so the fixup loop below only visits nodes streamed as
	 part of this body.  */
      from = data_in->reader_cache->nodes.length ();
      lto_input_block ib_main (data + main_offset, header->main_size,
			       file_data->mode_table);
      if (TREE_CODE (node->decl) == FUNCTION_DECL)
	{
	  lto_input_block ib_cfg (data + cfg_offset, header->cfg_size,
				  file_data->mode_table);
	  input_function (fn_decl, data_in, &ib_main, &ib_cfg,
			  dyn_cast <cgraph_node *>(node));
	}
      else
	input_constructor (fn_decl, data_in, &ib_main);
      data_in->location_cache.apply_location_cache ();
      /* And fixup types we streamed locally.  */
      {
	struct streamer_tree_cache_d *cache = data_in->reader_cache;
	unsigned len = cache->nodes.length ();
	unsigned i;
	for (i = len; i-- > from;)
	  {
	    tree t = streamer_tree_cache_get_tree (cache, i);
	    if (t == NULL_TREE)
	      continue;

	    if (TYPE_P (t))
	      {
		gcc_assert (TYPE_CANONICAL (t) == NULL_TREE);
		if (type_with_alias_set_p (t)
		    && canonical_type_used_p (t))
		  TYPE_CANONICAL (t) = TYPE_MAIN_VARIANT (t);
		/* Chain non-main variants into their main variant's
		   variant list.  */
		if (TYPE_MAIN_VARIANT (t) != t)
		  {
		    gcc_assert (TYPE_NEXT_VARIANT (t) == NULL_TREE);
		    TYPE_NEXT_VARIANT (t)
		      = TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t));
		    TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t)) = t;
		  }
	      }
	  }
      }

      /* Restore decl state */
      file_data->current_decl_state = file_data->global_decl_state;
    }

  lto_data_in_delete (data_in);
}
1338
1339
1340 /* Read the body of NODE using DATA. FILE_DATA holds the global
1341 decls and types. */
1342
1343 void
1344 lto_input_function_body (struct lto_file_decl_data *file_data,
1345 struct cgraph_node *node, const char *data)
1346 {
1347 lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
1348 }
1349
1350 /* Read the body of NODE using DATA. FILE_DATA holds the global
1351 decls and types. */
1352
1353 void
1354 lto_input_variable_constructor (struct lto_file_decl_data *file_data,
1355 struct varpool_node *node, const char *data)
1356 {
1357 lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
1358 }
1359
1360
/* Queue of accumulated decl -> DIE mappings.  Similar to locations those
   are only applied to prevailing tree nodes during tree merging.  */
vec<dref_entry> dref_queue;
1364
/* Read the physical representation of a tree node EXPR from
   input block IB using the per-file context in DATA_IN.  EXPR must
   already be allocated (see lto_read_tree); this fills in its
   bitfields, pointer fields and LTO-specific extras, in the exact
   order the writer emitted them.  */

static void
lto_read_tree_1 (class lto_input_block *ib, class data_in *data_in, tree expr)
{
  /* Read all the bitfield values in EXPR.  Note that for LTO, we
     only write language-independent bitfields, so no more unpacking is
     needed.  */
  streamer_read_tree_bitfields (ib, data_in, expr);

  /* Read all the pointer fields in EXPR.  */
  streamer_read_tree_body (ib, data_in, expr);

  /* Read any LTO-specific data not read by the tree streamer.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    DECL_INITIAL (expr) = stream_read_tree (ib, data_in);

  /* Stream references to early generated DIEs.  Keep in sync with the
     trees handled in dwarf2out_register_external_die.  A non-NULL
     symbol string is followed by the DIE offset; the pair is queued on
     dref_queue for later registration (see lto_input_tree).  */
  if ((DECL_P (expr)
       && TREE_CODE (expr) != FIELD_DECL
       && TREE_CODE (expr) != DEBUG_EXPR_DECL
       && TREE_CODE (expr) != TYPE_DECL)
      || TREE_CODE (expr) == BLOCK)
    {
      const char *str = streamer_read_string (data_in, ib);
      if (str)
	{
	  unsigned HOST_WIDE_INT off = streamer_read_uhwi (ib);
	  dref_entry e = { expr, str, off };
	  dref_queue.safe_push (e);
	}
    }
}
1402
1403 /* Read the physical representation of a tree node with tag TAG from
1404 input block IB using the per-file context in DATA_IN. */
1405
1406 static tree
1407 lto_read_tree (class lto_input_block *ib, class data_in *data_in,
1408 enum LTO_tags tag, hashval_t hash)
1409 {
1410 /* Instantiate a new tree node. */
1411 tree result = streamer_alloc_tree (ib, data_in, tag);
1412
1413 /* Enter RESULT in the reader cache. This will make RESULT
1414 available so that circular references in the rest of the tree
1415 structure can be resolved in subsequent calls to stream_read_tree. */
1416 streamer_tree_cache_append (data_in->reader_cache, result, hash);
1417
1418 lto_read_tree_1 (ib, data_in, result);
1419
1420 /* end_marker = */ streamer_read_uchar (ib);
1421
1422 return result;
1423 }
1424
1425
/* Populate the reader cache with trees materialized from the SCC
   following in the IB, DATA_IN stream.  Returns the SCC's streamed
   hash; the number of nodes and the SCC entry length are stored in
   *LEN and *ENTRY_LEN respectively.  */

hashval_t
lto_input_scc (class lto_input_block *ib, class data_in *data_in,
	       unsigned *len, unsigned *entry_len)
{
  /* A blob of unnamed tree nodes, fill the cache from it and
     recurse.  */
  unsigned size = streamer_read_uhwi (ib);
  hashval_t scc_hash = streamer_read_uhwi (ib);
  unsigned scc_entry_len = 1;

  if (size == 1)
    {
      /* Singleton SCCs go through the regular single-tree path.  */
      enum LTO_tags tag = streamer_read_record_start (ib);
      lto_input_tree_1 (ib, data_in, tag, scc_hash);
    }
  else
    {
      unsigned int first = data_in->reader_cache->nodes.length ();
      tree result;

      scc_entry_len = streamer_read_uhwi (ib);

      /* Materialize size trees by reading their headers.  Allocating
	 all nodes before reading any body lets intra-SCC references
	 resolve via the cache.  */
      for (unsigned i = 0; i < size; ++i)
	{
	  enum LTO_tags tag = streamer_read_record_start (ib);
	  /* References, shared constants and nested SCCs can never be
	     members of a multi-node SCC blob.  */
	  if (tag == LTO_null
	      || (tag >= LTO_field_decl_ref && tag <= LTO_global_decl_ref)
	      || tag == LTO_tree_pickle_reference
	      || tag == LTO_integer_cst
	      || tag == LTO_tree_scc)
	    gcc_unreachable ();

	  result = streamer_alloc_tree (ib, data_in, tag);
	  streamer_tree_cache_append (data_in->reader_cache, result, 0);
	}

      /* Read the tree bitpacks and references.  */
      for (unsigned i = 0; i < size; ++i)
	{
	  result = streamer_tree_cache_get_tree (data_in->reader_cache,
						 first + i);
	  lto_read_tree_1 (ib, data_in, result);
	  /* end_marker = */ streamer_read_uchar (ib);
	}
    }

  *len = size;
  *entry_len = scc_entry_len;
  return scc_hash;
}
1480
1481
/* Read a tree from input block IB using the per-file context in
   DATA_IN.  This context is used, for example, to resolve references
   to previously read nodes.  TAG is the record tag already consumed
   from IB; HASH is the streamed hash used when entering new nodes in
   the reader cache.  */

tree
lto_input_tree_1 (class lto_input_block *ib, class data_in *data_in,
		  enum LTO_tags tag, hashval_t hash)
{
  tree result;

  gcc_assert ((unsigned) tag < (unsigned) LTO_NUM_TAGS);

  if (tag == LTO_null)
    result = NULL_TREE;
  else if (tag >= LTO_field_decl_ref && tag <= LTO_namelist_decl_ref)
    {
      /* If TAG is a reference to an indexable tree, the next value
	 in IB is the index into the table where we expect to find
	 that tree.  */
      result = lto_input_tree_ref (ib, data_in, cfun, tag);
    }
  else if (tag == LTO_tree_pickle_reference)
    {
      /* If TAG is a reference to a previously read tree, look it up in
	 the reader cache.  */
      result = streamer_get_pickled_tree (ib, data_in);
    }
  else if (tag == LTO_integer_cst)
    {
      /* For shared integer constants in singletons we can use the
	 existing tree integer constant merging code.  The constant is
	 streamed as its type followed by LEN HOST_WIDE_INT elements.  */
      tree type = stream_read_tree (ib, data_in);
      unsigned HOST_WIDE_INT len = streamer_read_uhwi (ib);
      unsigned HOST_WIDE_INT i;
      HOST_WIDE_INT a[WIDE_INT_MAX_ELTS];

      for (i = 0; i < len; i++)
	a[i] = streamer_read_hwi (ib);
      gcc_assert (TYPE_PRECISION (type) <= MAX_BITSIZE_MODE_ANY_INT);
      result = wide_int_to_tree (type, wide_int::from_array
				 (a, len, TYPE_PRECISION (type)));
      streamer_tree_cache_append (data_in->reader_cache, result, hash);
    }
  else if (tag == LTO_tree_scc)
    /* SCCs are handled by lto_input_scc / lto_input_tree, never here.  */
    gcc_unreachable ();
  else
    {
      /* Otherwise, materialize a new node from IB.  */
      result = lto_read_tree (ib, data_in, tag, hash);
    }

  return result;
}
1535
1536 tree
1537 lto_input_tree (class lto_input_block *ib, class data_in *data_in)
1538 {
1539 enum LTO_tags tag;
1540
1541 /* Input and skip SCCs. */
1542 while ((tag = streamer_read_record_start (ib)) == LTO_tree_scc)
1543 {
1544 unsigned len, entry_len;
1545 lto_input_scc (ib, data_in, &len, &entry_len);
1546
1547 /* Register DECLs with the debuginfo machinery. */
1548 while (!dref_queue.is_empty ())
1549 {
1550 dref_entry e = dref_queue.pop ();
1551 debug_hooks->register_external_die (e.decl, e.sym, e.off);
1552 }
1553 }
1554 return lto_input_tree_1 (ib, data_in, tag, 0);
1555 }
1556
1557
/* Input toplevel asms.  Reads the LTO_section_asm summary section of
   FILE_DATA (if present) and finalizes each streamed asm string as a
   toplevel asm node, offsetting streamed orders by ORDER_BASE.  */

void
lto_input_toplevel_asms (struct lto_file_decl_data *file_data, int order_base)
{
  size_t len;
  const char *data
    = lto_get_summary_section_data (file_data, LTO_section_asm, &len);
  const struct lto_simple_header_with_strings *header
    = (const struct lto_simple_header_with_strings *) data;
  int string_offset;
  class data_in *data_in;
  tree str;

  /* The section is optional; nothing to do when absent.  */
  if (! data)
    return;

  string_offset = sizeof (*header) + header->main_size;

  lto_input_block ib (data + sizeof (*header), header->main_size,
		      file_data->mode_table);

  data_in = lto_data_in_create (file_data, data + string_offset,
				header->string_size, vNULL);

  /* Each record is a STRING_CST followed by its symtab order; a NULL
     string terminates the list.  */
  while ((str = streamer_read_string_cst (data_in, &ib)))
    {
      asm_node *node = symtab->finalize_toplevel_asm (str);
      node->order = streamer_read_hwi (&ib) + order_base;
      /* Keep the global order counter ahead of every streamed order.  */
      if (node->order >= symtab->order)
	symtab->order = node->order + 1;
    }

  lto_data_in_delete (data_in);

  lto_free_section_data (file_data, LTO_section_asm, NULL, data, len);
}
1595
1596
/* Input mode table.  Reads FILE_DATA's mode table section and builds
   the 256-entry map from the writer's machine_mode values to this
   host's machine_mode values, stored in FILE_DATA->mode_table.  Each
   streamed mode is matched against local modes by class, size,
   precision, inner mode, ibit/fbit or real format, and nunits.  */

void
lto_input_mode_table (struct lto_file_decl_data *file_data)
{
  size_t len;
  const char *data
    = lto_get_summary_section_data (file_data, LTO_section_mode_table, &len);
  if (! data)
    {
      internal_error ("cannot read LTO mode table from %s",
		      file_data->file_name);
      return;
    }

  /* One table slot per possible 8-bit streamed mode value; cleared so
     unstreamed modes map to VOIDmode.  */
  unsigned char *table = ggc_cleared_vec_alloc<unsigned char> (1 << 8);
  file_data->mode_table = table;
  const struct lto_simple_header_with_strings *header
    = (const struct lto_simple_header_with_strings *) data;
  int string_offset;
  class data_in *data_in;
  string_offset = sizeof (*header) + header->main_size;

  lto_input_block ib (data + sizeof (*header), header->main_size, NULL);
  data_in = lto_data_in_create (file_data, data + string_offset,
				header->string_size, vNULL);
  bitpack_d bp = streamer_read_bitpack (&ib);

  table[VOIDmode] = VOIDmode;
  table[BLKmode] = BLKmode;
  unsigned int m;
  /* A streamed VOIDmode value terminates the list of mode records.  */
  while ((m = bp_unpack_value (&bp, 8)) != VOIDmode)
    {
      enum mode_class mclass
	= bp_unpack_enum (&bp, mode_class, MAX_MODE_CLASS);
      poly_uint16 size = bp_unpack_poly_value (&bp, 16);
      poly_uint16 prec = bp_unpack_poly_value (&bp, 16);
      machine_mode inner = (machine_mode) bp_unpack_value (&bp, 8);
      poly_uint16 nunits = bp_unpack_poly_value (&bp, 16);
      unsigned int ibit = 0, fbit = 0;
      unsigned int real_fmt_len = 0;
      const char *real_fmt_name = NULL;
      /* Class-specific extra data: fixed-point modes carry ibit/fbit,
	 float modes carry the real format name.  */
      switch (mclass)
	{
	case MODE_FRACT:
	case MODE_UFRACT:
	case MODE_ACCUM:
	case MODE_UACCUM:
	  ibit = bp_unpack_value (&bp, 8);
	  fbit = bp_unpack_value (&bp, 8);
	  break;
	case MODE_FLOAT:
	case MODE_DECIMAL_FLOAT:
	  real_fmt_name = bp_unpack_indexed_string (data_in, &bp,
						    &real_fmt_len);
	  break;
	default:
	  break;
	}
      /* First search just the GET_CLASS_NARROWEST_MODE to wider modes,
	 if not found, fallback to all modes.  Note: on success PASS is
	 set to 2 before the inner break, so the outer loop's increment
	 leaves it at 3; PASS == 2 after the loops therefore means the
	 mode was NOT found.  */
      int pass;
      for (pass = 0; pass < 2; pass++)
	for (machine_mode mr = pass ? VOIDmode
				    : GET_CLASS_NARROWEST_MODE (mclass);
	     pass ? mr < MAX_MACHINE_MODE : mr != VOIDmode;
	     pass ? mr = (machine_mode) (mr + 1)
		  : mr = GET_MODE_WIDER_MODE (mr).else_void ())
	  if (GET_MODE_CLASS (mr) != mclass
	      || maybe_ne (GET_MODE_SIZE (mr), size)
	      || maybe_ne (GET_MODE_PRECISION (mr), prec)
	      || (inner == m
		  ? GET_MODE_INNER (mr) != mr
		  : GET_MODE_INNER (mr) != table[(int) inner])
	      || GET_MODE_IBIT (mr) != ibit
	      || GET_MODE_FBIT (mr) != fbit
	      || maybe_ne (GET_MODE_NUNITS (mr), nunits))
	    continue;
	  else if ((mclass == MODE_FLOAT || mclass == MODE_DECIMAL_FLOAT)
		   && strcmp (REAL_MODE_FORMAT (mr)->name, real_fmt_name) != 0)
	    continue;
	  else
	    {
	      table[m] = mr;
	      pass = 2;
	      break;
	    }
      unsigned int mname_len;
      const char *mname = bp_unpack_indexed_string (data_in, &bp, &mname_len);
      if (pass == 2)
	{
	  /* No matching local mode.  */
	  switch (mclass)
	    {
	    case MODE_VECTOR_BOOL:
	    case MODE_VECTOR_INT:
	    case MODE_VECTOR_FLOAT:
	    case MODE_VECTOR_FRACT:
	    case MODE_VECTOR_UFRACT:
	    case MODE_VECTOR_ACCUM:
	    case MODE_VECTOR_UACCUM:
	      /* For unsupported vector modes just use BLKmode,
		 if the scalar mode is supported.  */
	      if (table[(int) inner] != VOIDmode)
		{
		  table[m] = BLKmode;
		  break;
		}
	      /* FALLTHRU */
	    default:
	      /* This is only used for offloading-target compilations and
		 is a user-facing error.  Give a better error message for
		 the common modes; see also mode-classes.def.  */
	      if (mclass == MODE_FLOAT)
		fatal_error (UNKNOWN_LOCATION,
			     "%s - %u-bit-precision floating-point numbers "
			     "unsupported (mode %qs)", TARGET_MACHINE,
			     prec.to_constant (), mname);
	      else if (mclass == MODE_DECIMAL_FLOAT)
		fatal_error (UNKNOWN_LOCATION,
			     "%s - %u-bit-precision decimal floating-point "
			     "numbers unsupported (mode %qs)", TARGET_MACHINE,
			     prec.to_constant (), mname);
	      else if (mclass == MODE_COMPLEX_FLOAT)
		fatal_error (UNKNOWN_LOCATION,
			     "%s - %u-bit-precision complex floating-point "
			     "numbers unsupported (mode %qs)", TARGET_MACHINE,
			     prec.to_constant (), mname);
	      else if (mclass == MODE_INT)
		fatal_error (UNKNOWN_LOCATION,
			     "%s - %u-bit integer numbers unsupported (mode "
			     "%qs)", TARGET_MACHINE, prec.to_constant (), mname);
	      else
		fatal_error (UNKNOWN_LOCATION, "%s - unsupported mode %qs",
			     TARGET_MACHINE, mname);
	      break;
	    }
	}
    }
  lto_data_in_delete (data_in);

  lto_free_section_data (file_data, LTO_section_mode_table, NULL, data, len);
}
1739
1740
1741 /* Initialization for the LTO reader. */
1742
1743 void
1744 lto_reader_init (void)
1745 {
1746 lto_streamer_init ();
1747 file_name_hash_table
1748 = new hash_table<string_slot_hasher> (37);
1749 string_slot_allocator = new object_allocator <struct string_slot>
1750 ("line map file name hash");
1751 gcc_obstack_init (&file_name_obstack);
1752 }
1753
1754 /* Free hash table used to stream in location file names. */
1755
1756 void
1757 lto_free_file_name_hash (void)
1758 {
1759 delete file_name_hash_table;
1760 file_name_hash_table = NULL;
1761 delete string_slot_allocator;
1762 string_slot_allocator = NULL;
1763 /* file_name_obstack must stay allocated since it is referred to by
1764 line map table. */
1765 }
1766
1767
1768 /* Create a new data_in object for FILE_DATA. STRINGS is the string
1769 table to use with LEN strings. RESOLUTIONS is the vector of linker
1770 resolutions (NULL if not using a linker plugin). */
1771
1772 class data_in *
1773 lto_data_in_create (struct lto_file_decl_data *file_data, const char *strings,
1774 unsigned len,
1775 vec<ld_plugin_symbol_resolution_t> resolutions)
1776 {
1777 class data_in *data_in = new (class data_in);
1778 data_in->file_data = file_data;
1779 data_in->strings = strings;
1780 data_in->strings_len = len;
1781 data_in->globals_resolution = resolutions;
1782 data_in->reader_cache = streamer_tree_cache_create (false, false, true);
1783 return data_in;
1784 }
1785
1786
1787 /* Remove DATA_IN. */
1788
1789 void
1790 lto_data_in_delete (class data_in *data_in)
1791 {
1792 data_in->globals_resolution.release ();
1793 streamer_tree_cache_delete (data_in->reader_cache);
1794 delete data_in;
1795 }