1 /* Read the GIMPLE representation from a file stream.
2
3 Copyright (C) 2009-2016 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "cfghooks.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "gimple-streamer.h"
35 #include "toplev.h"
36 #include "gimple-iterator.h"
37 #include "tree-cfg.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "tree-ssa.h"
41 #include "except.h"
42 #include "cgraph.h"
43 #include "cfgloop.h"
44
45
46 struct freeing_string_slot_hasher : string_slot_hasher
47 {
48 static inline void remove (value_type *);
49 };
50
51 inline void
52 freeing_string_slot_hasher::remove (value_type *v)
53 {
54 free (v);
55 }
56
57 /* The table to hold the file names. */
58 static hash_table<freeing_string_slot_hasher> *file_name_hash_table;
59
60
61 /* Check that tag ACTUAL has one of the given values. NTAGS is the
62 number of valid tag values to check. */
63
64 void
65 lto_tag_check_set (enum LTO_tags actual, int ntags, ...)
66 {
67 va_list ap;
68 int i;
69
70 va_start (ap, ntags);
71 for (i = 0; i < ntags; i++)
72 if ((unsigned) actual == va_arg (ap, unsigned))
73 {
74 va_end (ap);
75 return;
76 }
77
78 va_end (ap);
79 internal_error ("bytecode stream: unexpected tag %s", lto_tag_name (actual));
80 }
81
82
83 /* Read LENGTH bytes from input block IB to ADDR. */
84
85 void
86 lto_input_data_block (struct lto_input_block *ib, void *addr, size_t length)
87 {
88 size_t i;
89 unsigned char *const buffer = (unsigned char *const) addr;
90
91 for (i = 0; i < length; i++)
92 buffer[i] = streamer_read_uchar (ib);
93 }
94
95
96 /* Look up STRING in file_name_hash_table. If found, return the existing string;
97 otherwise intern STRING as the canonical version, so file names can be compared by pointer. */
98
99 static const char *
100 canon_file_name (const char *string)
101 {
102 string_slot **slot;
103 struct string_slot s_slot;
104 size_t len = strlen (string);
105
106 s_slot.s = string;
107 s_slot.len = len;
108
109 slot = file_name_hash_table->find_slot (&s_slot, INSERT);
110 if (*slot == NULL)
111 {
112 char *saved_string;
113 struct string_slot *new_slot;
114
115 saved_string = (char *) xmalloc (len + 1);
116 new_slot = XCNEW (struct string_slot);
117 memcpy (saved_string, string, len + 1);
118 new_slot->s = saved_string;
119 new_slot->len = len;
120 *slot = new_slot;
121 return saved_string;
122 }
123 else
124 {
125 struct string_slot *old_slot = *slot;
126 return old_slot->s;
127 }
128 }
129
130 /* Pointer to currently alive instance of lto_location_cache. */
131
132 lto_location_cache *lto_location_cache::current_cache;
133
134 /* Sort locations in source order, putting entries for the file and line of the last application first. */
135
136 int
137 lto_location_cache::cmp_loc (const void *pa, const void *pb)
138 {
139 const cached_location *a = ((const cached_location *)pa);
140 const cached_location *b = ((const cached_location *)pb);
141 const char *current_file = current_cache->current_file;
142 int current_line = current_cache->current_line;
143
144 if (a->file == current_file && b->file != current_file)
145 return -1;
146 if (a->file != current_file && b->file == current_file)
147 return 1;
148 if (a->file == current_file && b->file == current_file)
149 {
150 if (a->line == current_line && b->line != current_line)
151 return -1;
152 if (a->line != current_line && b->line == current_line)
153 return 1;
154 }
155 if (a->file != b->file)
156 return strcmp (a->file, b->file);
157 if (a->sysp != b->sysp)
158 return a->sysp ? 1 : -1;
159 if (a->line != b->line)
160 return a->line - b->line;
161 return a->col - b->col;
162 }
163
164 /* Apply all changes in location cache. Add locations into linemap and patch
165 trees. */
166
167 bool
168 lto_location_cache::apply_location_cache ()
169 {
170 static const char *prev_file;
171 if (!loc_cache.length ())
172 return false;
173 if (loc_cache.length () > 1)
174 loc_cache.qsort (cmp_loc);
175
176 for (unsigned int i = 0; i < loc_cache.length (); i++)
177 {
178 struct cached_location loc = loc_cache[i];
179
180 if (current_file != loc.file)
181 linemap_add (line_table, prev_file ? LC_RENAME : LC_ENTER,
182 loc.sysp, loc.file, loc.line);
183 else if (current_line != loc.line)
184 {
185 int max = loc.col;
186
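/* Look ahead for the largest column used on this source line so that
   linemap_line_start gets an accurate maximum-column hint. */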
187 for (unsigned int j = i + 1; j < loc_cache.length (); j++)
188 if (loc.file != loc_cache[j].file
189 || loc.line != loc_cache[j].line)
190 break;
191 else if (max < loc_cache[j].col)
192 max = loc_cache[j].col;
193 linemap_line_start (line_table, loc.line, max + 1);
194 }
195 gcc_assert (*loc.loc == BUILTINS_LOCATION + 1);
196 if (current_file == loc.file && current_line == loc.line
197 && current_col == loc.col)
198 *loc.loc = current_loc;
199 else
200 current_loc = *loc.loc = linemap_position_for_column (line_table,
201 loc.col);
202 current_line = loc.line;
203 prev_file = current_file = loc.file;
204 current_col = loc.col;
205 }
206 loc_cache.truncate (0);
207 accepted_length = 0;
208 return true;
209 }
210
211 /* Tree merging did not succeed; mark all changes in the cache as accepted. */
212
213 void
214 lto_location_cache::accept_location_cache ()
215 {
216 gcc_assert (current_cache == this);
217 accepted_length = loc_cache.length ();
218 }
219
220 /* Tree merging did succeed; throw away recent changes. */
221
222 void
223 lto_location_cache::revert_location_cache ()
224 {
225 loc_cache.truncate (accepted_length);
226 }
227
228 /* Read a location bitpack from input block IB and either update *LOC directly
229 or add it to the location cache.
230 It is necessary to call apply_location_cache to get *LOC updated. */
231
232 void
233 lto_location_cache::input_location (location_t *loc, struct bitpack_d *bp,
234 struct data_in *data_in)
235 {
236 static const char *stream_file;
237 static int stream_line;
238 static int stream_col;
239 static bool stream_sysp;
240 bool file_change, line_change, column_change;
241
242 gcc_assert (current_cache == this);
243
244 *loc = bp_unpack_int_in_range (bp, "location", 0, RESERVED_LOCATION_COUNT);
245
246 if (*loc < RESERVED_LOCATION_COUNT)
247 return;
248
249 /* Keep value RESERVED_LOCATION_COUNT in *loc as linemap lookups will
250 ICE on it. */
251
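/* The writer streams three change bits and then only those location
   components (file, line, column) that differ from the previously
   streamed location. */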
252 file_change = bp_unpack_value (bp, 1);
253 line_change = bp_unpack_value (bp, 1);
254 column_change = bp_unpack_value (bp, 1);
255
256 if (file_change)
257 {
258 stream_file = canon_file_name (bp_unpack_string (data_in, bp));
259 stream_sysp = bp_unpack_value (bp, 1);
260 }
261
262 if (line_change)
263 stream_line = bp_unpack_var_len_unsigned (bp);
264
265 if (column_change)
266 stream_col = bp_unpack_var_len_unsigned (bp);
267
268 /* This optimization saves location cache operations during gimple
269 streaming. */
270
271 if (current_file == stream_file && current_line == stream_line
272 && current_col == stream_col && current_sysp == stream_sysp)
273 {
274 *loc = current_loc;
275 return;
276 }
277
278 struct cached_location entry
279 = {stream_file, loc, stream_line, stream_col, stream_sysp};
280 loc_cache.safe_push (entry);
281 }
282
283 /* Read a location bitpack from input block IB and either update *LOC directly
284 or add it to the location cache.
285 It is necessary to call apply_location_cache to get *LOC updated. */
286
287 void
288 lto_input_location (location_t *loc, struct bitpack_d *bp,
289 struct data_in *data_in)
290 {
291 data_in->location_cache.input_location (loc, bp, data_in);
292 }
293
294 /* Read location and return it instead of going through location caching.
295 This should be used only when the resulting location is not going to be
296 discarded. */
297
298 location_t
299 stream_input_location_now (struct bitpack_d *bp, struct data_in *data_in)
300 {
301 location_t loc;
302 stream_input_location (&loc, bp, data_in);
303 data_in->location_cache.apply_location_cache ();
304 return loc;
305 }
306
307 /* Read a reference to a tree node from DATA_IN using input block IB.
308 TAG is the expected node that should be found in IB, if TAG belongs
309 to one of the indexable trees, expect to read a reference index to
310 be looked up in one of the symbol tables, otherwise read the pysical
311 representation of the tree using stream_read_tree. FN is the
312 function scope for the read tree. */
313
314 tree
315 lto_input_tree_ref (struct lto_input_block *ib, struct data_in *data_in,
316 struct function *fn, enum LTO_tags tag)
317 {
318 unsigned HOST_WIDE_INT ix_u;
319 tree result = NULL_TREE;
320
321 lto_tag_check_range (tag, LTO_field_decl_ref, LTO_namelist_decl_ref);
322
323 switch (tag)
324 {
325 case LTO_type_ref:
326 ix_u = streamer_read_uhwi (ib);
327 result = lto_file_decl_data_get_type (data_in->file_data, ix_u);
328 break;
329
330 case LTO_ssa_name_ref:
331 ix_u = streamer_read_uhwi (ib);
332 result = (*SSANAMES (fn))[ix_u];
333 break;
334
335 case LTO_field_decl_ref:
336 ix_u = streamer_read_uhwi (ib);
337 result = lto_file_decl_data_get_field_decl (data_in->file_data, ix_u);
338 break;
339
340 case LTO_function_decl_ref:
341 ix_u = streamer_read_uhwi (ib);
342 result = lto_file_decl_data_get_fn_decl (data_in->file_data, ix_u);
343 break;
344
345 case LTO_type_decl_ref:
346 ix_u = streamer_read_uhwi (ib);
347 result = lto_file_decl_data_get_type_decl (data_in->file_data, ix_u);
348 break;
349
350 case LTO_namespace_decl_ref:
351 ix_u = streamer_read_uhwi (ib);
352 result = lto_file_decl_data_get_namespace_decl (data_in->file_data, ix_u);
353 break;
354
355 case LTO_global_decl_ref:
356 case LTO_result_decl_ref:
357 case LTO_const_decl_ref:
358 case LTO_imported_decl_ref:
359 case LTO_label_decl_ref:
360 case LTO_translation_unit_decl_ref:
361 case LTO_namelist_decl_ref:
362 ix_u = streamer_read_uhwi (ib);
363 result = lto_file_decl_data_get_var_decl (data_in->file_data, ix_u);
364 break;
365
366 default:
367 gcc_unreachable ();
368 }
369
370 gcc_assert (result);
371
372 return result;
373 }
374
375
376 /* Read and return a doubly-linked list of catch handlers from input
377 block IB, using descriptors in DATA_IN. */
378
379 static struct eh_catch_d *
380 lto_input_eh_catch_list (struct lto_input_block *ib, struct data_in *data_in,
381 eh_catch *last_p)
382 {
383 eh_catch first;
384 enum LTO_tags tag;
385
386 *last_p = first = NULL;
387 tag = streamer_read_record_start (ib);
388 while (tag)
389 {
390 tree list;
391 eh_catch n;
392
393 lto_tag_check_range (tag, LTO_eh_catch, LTO_eh_catch);
394
395 /* Read the catch node. */
396 n = ggc_cleared_alloc<eh_catch_d> ();
397 n->type_list = stream_read_tree (ib, data_in);
398 n->filter_list = stream_read_tree (ib, data_in);
399 n->label = stream_read_tree (ib, data_in);
400
401 /* Register all the types in N->FILTER_LIST. */
402 for (list = n->filter_list; list; list = TREE_CHAIN (list))
403 add_type_for_runtime (TREE_VALUE (list));
404
405 /* Chain N to the end of the list. */
406 if (*last_p)
407 (*last_p)->next_catch = n;
408 n->prev_catch = *last_p;
409 *last_p = n;
410
411 /* Set the head of the list the first time through the loop. */
412 if (first == NULL)
413 first = n;
414
415 tag = streamer_read_record_start (ib);
416 }
417
418 return first;
419 }
420
421
422 /* Read and return EH region IX from input block IB, using descriptors
423 in DATA_IN. */
424
425 static eh_region
426 input_eh_region (struct lto_input_block *ib, struct data_in *data_in, int ix)
427 {
428 enum LTO_tags tag;
429 eh_region r;
430
431 /* Read the region header. */
432 tag = streamer_read_record_start (ib);
433 if (tag == LTO_null)
434 return NULL;
435
436 r = ggc_cleared_alloc<eh_region_d> ();
437 r->index = streamer_read_hwi (ib);
438
439 gcc_assert (r->index == ix);
440
441 /* Read all the region pointers as region numbers. We'll fix up
442 the pointers once the whole array has been read. */
443 r->outer = (eh_region) (intptr_t) streamer_read_hwi (ib);
444 r->inner = (eh_region) (intptr_t) streamer_read_hwi (ib);
445 r->next_peer = (eh_region) (intptr_t) streamer_read_hwi (ib);
446
447 switch (tag)
448 {
449 case LTO_ert_cleanup:
450 r->type = ERT_CLEANUP;
451 break;
452
453 case LTO_ert_try:
454 {
455 struct eh_catch_d *last_catch;
456 r->type = ERT_TRY;
457 r->u.eh_try.first_catch = lto_input_eh_catch_list (ib, data_in,
458 &last_catch);
459 r->u.eh_try.last_catch = last_catch;
460 break;
461 }
462
463 case LTO_ert_allowed_exceptions:
464 {
465 tree l;
466
467 r->type = ERT_ALLOWED_EXCEPTIONS;
468 r->u.allowed.type_list = stream_read_tree (ib, data_in);
469 r->u.allowed.label = stream_read_tree (ib, data_in);
470 r->u.allowed.filter = streamer_read_uhwi (ib);
471
472 for (l = r->u.allowed.type_list; l ; l = TREE_CHAIN (l))
473 add_type_for_runtime (TREE_VALUE (l));
474 }
475 break;
476
477 case LTO_ert_must_not_throw:
478 {
479 r->type = ERT_MUST_NOT_THROW;
480 r->u.must_not_throw.failure_decl = stream_read_tree (ib, data_in);
481 bitpack_d bp = streamer_read_bitpack (ib);
482 r->u.must_not_throw.failure_loc
483 = stream_input_location_now (&bp, data_in);
484 }
485 break;
486
487 default:
488 gcc_unreachable ();
489 }
490
491 r->landing_pads = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
492
493 return r;
494 }
495
496
497 /* Read and return EH landing pad IX from input block IB, using descriptors
498 in DATA_IN. */
499
500 static eh_landing_pad
501 input_eh_lp (struct lto_input_block *ib, struct data_in *data_in, int ix)
502 {
503 enum LTO_tags tag;
504 eh_landing_pad lp;
505
506 /* Read the landing pad header. */
507 tag = streamer_read_record_start (ib);
508 if (tag == LTO_null)
509 return NULL;
510
511 lto_tag_check_range (tag, LTO_eh_landing_pad, LTO_eh_landing_pad);
512
513 lp = ggc_cleared_alloc<eh_landing_pad_d> ();
514 lp->index = streamer_read_hwi (ib);
515 gcc_assert (lp->index == ix);
516 lp->next_lp = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
517 lp->region = (eh_region) (intptr_t) streamer_read_hwi (ib);
518 lp->post_landing_pad = stream_read_tree (ib, data_in);
519
520 return lp;
521 }
522
523
524 /* After reading the EH regions, pointers to peer and child regions
525 are region numbers. This converts all these region numbers into
526 real pointers into the rematerialized regions for FN. ROOT_REGION
527 is the region number for the root EH region in FN. */
528
529 static void
530 fixup_eh_region_pointers (struct function *fn, HOST_WIDE_INT root_region)
531 {
532 unsigned i;
533 vec<eh_region, va_gc> *eh_array = fn->eh->region_array;
534 vec<eh_landing_pad, va_gc> *lp_array = fn->eh->lp_array;
535 eh_region r;
536 eh_landing_pad lp;
537
538 gcc_assert (eh_array && lp_array);
539
540 gcc_assert (root_region >= 0);
541 fn->eh->region_tree = (*eh_array)[root_region];
542
543 #define FIXUP_EH_REGION(r) (r) = (*eh_array)[(HOST_WIDE_INT) (intptr_t) (r)]
544 #define FIXUP_EH_LP(p) (p) = (*lp_array)[(HOST_WIDE_INT) (intptr_t) (p)]
545
546 /* Convert all the index numbers stored in pointer fields into
547 pointers to the corresponding slots in the EH region array. */
548 FOR_EACH_VEC_ELT (*eh_array, i, r)
549 {
550 /* The array may contain NULL regions. */
551 if (r == NULL)
552 continue;
553
554 gcc_assert (i == (unsigned) r->index);
555 FIXUP_EH_REGION (r->outer);
556 FIXUP_EH_REGION (r->inner);
557 FIXUP_EH_REGION (r->next_peer);
558 FIXUP_EH_LP (r->landing_pads);
559 }
560
561 /* Convert all the index numbers stored in pointer fields into
562 pointers to the corresponding slots in the EH landing pad array. */
563 FOR_EACH_VEC_ELT (*lp_array, i, lp)
564 {
565 /* The array may contain NULL landing pads. */
566 if (lp == NULL)
567 continue;
568
569 gcc_assert (i == (unsigned) lp->index);
570 FIXUP_EH_LP (lp->next_lp);
571 FIXUP_EH_REGION (lp->region);
572 }
573
574 #undef FIXUP_EH_REGION
575 #undef FIXUP_EH_LP
576 }
577
578
579 /* Initialize EH support. */
580
581 void
582 lto_init_eh (void)
583 {
584 static bool eh_initialized_p = false;
585
586 if (eh_initialized_p)
587 return;
588
589 /* Contrary to most other FEs, we only initialize EH support when at
590 least one of the files in the set contains exception regions in
591 it. Since this happens much later than the call to init_eh in
592 lang_dependent_init, we have to set flag_exceptions and call
593 init_eh again to initialize the EH tables. */
594 flag_exceptions = 1;
595 init_eh ();
596
597 eh_initialized_p = true;
598 }
599
600
601 /* Read the exception table for FN from IB using the data descriptors
602 in DATA_IN. */
603
604 static void
605 input_eh_regions (struct lto_input_block *ib, struct data_in *data_in,
606 struct function *fn)
607 {
608 HOST_WIDE_INT i, root_region, len;
609 enum LTO_tags tag;
610
611 tag = streamer_read_record_start (ib);
612 if (tag == LTO_null)
613 return;
614
615 lto_tag_check_range (tag, LTO_eh_table, LTO_eh_table);
616
617 /* If the file contains EH regions, then it was compiled with
618 -fexceptions. In that case, initialize the backend EH
619 machinery. */
620 lto_init_eh ();
621
622 gcc_assert (fn->eh);
623
624 root_region = streamer_read_hwi (ib);
625 gcc_assert (root_region == (int) root_region);
626
627 /* Read the EH region array. */
628 len = streamer_read_hwi (ib);
629 gcc_assert (len == (int) len);
630 if (len > 0)
631 {
632 vec_safe_grow_cleared (fn->eh->region_array, len);
633 for (i = 0; i < len; i++)
634 {
635 eh_region r = input_eh_region (ib, data_in, i);
636 (*fn->eh->region_array)[i] = r;
637 }
638 }
639
640 /* Read the landing pads. */
641 len = streamer_read_hwi (ib);
642 gcc_assert (len == (int) len);
643 if (len > 0)
644 {
645 vec_safe_grow_cleared (fn->eh->lp_array, len);
646 for (i = 0; i < len; i++)
647 {
648 eh_landing_pad lp = input_eh_lp (ib, data_in, i);
649 (*fn->eh->lp_array)[i] = lp;
650 }
651 }
652
653 /* Read the runtime type data. */
654 len = streamer_read_hwi (ib);
655 gcc_assert (len == (int) len);
656 if (len > 0)
657 {
658 vec_safe_grow_cleared (fn->eh->ttype_data, len);
659 for (i = 0; i < len; i++)
660 {
661 tree ttype = stream_read_tree (ib, data_in);
662 (*fn->eh->ttype_data)[i] = ttype;
663 }
664 }
665
666 /* Read the table of action chains. */
667 len = streamer_read_hwi (ib);
668 gcc_assert (len == (int) len);
669 if (len > 0)
670 {
671 if (targetm.arm_eabi_unwinder)
672 {
673 vec_safe_grow_cleared (fn->eh->ehspec_data.arm_eabi, len);
674 for (i = 0; i < len; i++)
675 {
676 tree t = stream_read_tree (ib, data_in);
677 (*fn->eh->ehspec_data.arm_eabi)[i] = t;
678 }
679 }
680 else
681 {
682 vec_safe_grow_cleared (fn->eh->ehspec_data.other, len);
683 for (i = 0; i < len; i++)
684 {
685 uchar c = streamer_read_uchar (ib);
686 (*fn->eh->ehspec_data.other)[i] = c;
687 }
688 }
689 }
690
691 /* Reconstruct the EH region tree by fixing up the peer/children
692 pointers. */
693 fixup_eh_region_pointers (fn, root_region);
694
695 tag = streamer_read_record_start (ib);
696 lto_tag_check_range (tag, LTO_null, LTO_null);
697 }
698
699
700 /* Make a new basic block with index INDEX in function FN. */
701
702 static basic_block
703 make_new_block (struct function *fn, unsigned int index)
704 {
705 basic_block bb = alloc_block ();
706 bb->index = index;
707 SET_BASIC_BLOCK_FOR_FN (fn, index, bb);
708 n_basic_blocks_for_fn (fn)++;
709 return bb;
710 }
711
712
713 /* Read a wide-int. */
714
715 static widest_int
716 streamer_read_wi (struct lto_input_block *ib)
717 {
718 HOST_WIDE_INT a[WIDE_INT_MAX_ELTS];
719 int i;
720 int prec ATTRIBUTE_UNUSED = streamer_read_uhwi (ib);
721 int len = streamer_read_uhwi (ib);
722 for (i = 0; i < len; i++)
723 a[i] = streamer_read_hwi (ib);
724 return widest_int::from_array (a, len);
725 }
726
727
728 /* Read the CFG for function FN from input block IB. */
729
730 static void
731 input_cfg (struct lto_input_block *ib, struct data_in *data_in,
732 struct function *fn,
733 int count_materialization_scale)
734 {
735 unsigned int bb_count;
736 basic_block p_bb;
737 unsigned int i;
738 int index;
739
740 init_empty_tree_cfg_for_function (fn);
741 init_ssa_operands (fn);
742
743 profile_status_for_fn (fn) = streamer_read_enum (ib, profile_status_d,
744 PROFILE_LAST);
745
746 bb_count = streamer_read_uhwi (ib);
747
748 last_basic_block_for_fn (fn) = bb_count;
749 if (bb_count > basic_block_info_for_fn (fn)->length ())
750 vec_safe_grow_cleared (basic_block_info_for_fn (fn), bb_count);
751
752 if (bb_count > label_to_block_map_for_fn (fn)->length ())
753 vec_safe_grow_cleared (label_to_block_map_for_fn (fn), bb_count);
754
755 index = streamer_read_hwi (ib);
756 while (index != -1)
757 {
758 basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
759 unsigned int edge_count;
760
761 if (bb == NULL)
762 bb = make_new_block (fn, index);
763
764 edge_count = streamer_read_uhwi (ib);
765
766 /* Connect up the CFG. */
767 for (i = 0; i < edge_count; i++)
768 {
769 unsigned int dest_index;
770 unsigned int edge_flags;
771 basic_block dest;
772 int probability;
773 gcov_type count;
774 edge e;
775
776 dest_index = streamer_read_uhwi (ib);
777 probability = (int) streamer_read_hwi (ib);
778 count = apply_scale ((gcov_type) streamer_read_gcov_count (ib),
779 count_materialization_scale);
780 edge_flags = streamer_read_uhwi (ib);
781
782 dest = BASIC_BLOCK_FOR_FN (fn, dest_index);
783
784 if (dest == NULL)
785 dest = make_new_block (fn, dest_index);
786
787 e = make_edge (bb, dest, edge_flags);
788 e->probability = probability;
789 e->count = count;
790 }
791
792 index = streamer_read_hwi (ib);
793 }
794
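/* Re-create the prev_bb/next_bb chain: the stream lists the block
   indices in the order in which the blocks are chained, starting
   from the entry block. */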
795 p_bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
796 index = streamer_read_hwi (ib);
797 while (index != -1)
798 {
799 basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
800 bb->prev_bb = p_bb;
801 p_bb->next_bb = bb;
802 p_bb = bb;
803 index = streamer_read_hwi (ib);
804 }
805
806 /* ??? The cfgloop interface is tied to cfun. */
807 gcc_assert (cfun == fn);
808
809 /* Input the loop tree. */
810 unsigned n_loops = streamer_read_uhwi (ib);
811 if (n_loops == 0)
812 return;
813
814 struct loops *loops = ggc_cleared_alloc<struct loops> ();
815 init_loops_structure (fn, loops, n_loops);
816 set_loops_for_fn (fn, loops);
817
818 /* Input each loop and associate it with its loop header so
819 flow_loops_find can rebuild the loop tree. */
820 for (unsigned i = 1; i < n_loops; ++i)
821 {
822 int header_index = streamer_read_hwi (ib);
823 if (header_index == -1)
824 {
825 loops->larray->quick_push (NULL);
826 continue;
827 }
828
829 struct loop *loop = alloc_loop ();
830 loop->header = BASIC_BLOCK_FOR_FN (fn, header_index);
831 loop->header->loop_father = loop;
832
833 /* Read everything copy_loop_info copies. */
834 loop->estimate_state = streamer_read_enum (ib, loop_estimation, EST_LAST);
835 loop->any_upper_bound = streamer_read_hwi (ib);
836 if (loop->any_upper_bound)
837 loop->nb_iterations_upper_bound = streamer_read_wi (ib);
838 loop->any_estimate = streamer_read_hwi (ib);
839 if (loop->any_estimate)
840 loop->nb_iterations_estimate = streamer_read_wi (ib);
841
842 /* Read OMP SIMD related info. */
843 loop->safelen = streamer_read_hwi (ib);
844 loop->dont_vectorize = streamer_read_hwi (ib);
845 loop->force_vectorize = streamer_read_hwi (ib);
846 loop->simduid = stream_read_tree (ib, data_in);
847
848 place_new_loop (fn, loop);
849
850 /* flow_loops_find doesn't like loops not in the tree; hook them
851 all as siblings of the tree root temporarily. */
852 flow_loop_tree_node_add (loops->tree_root, loop);
853 }
854
855 /* Rebuild the loop tree. */
856 flow_loops_find (loops);
857 }
858
859
860 /* Read the SSA names array for function FN from DATA_IN using input
861 block IB. */
862
863 static void
864 input_ssa_names (struct lto_input_block *ib, struct data_in *data_in,
865 struct function *fn)
866 {
867 unsigned int i, size;
868
869 size = streamer_read_uhwi (ib);
870 init_ssanames (fn, size);
871
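/* For each SSA name the stream holds its index in the SSA names array,
   a flag saying whether it is a default definition, and the variable or
   type it is based on; an index of zero terminates the list. */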
872 i = streamer_read_uhwi (ib);
873 while (i)
874 {
875 tree ssa_name, name;
876 bool is_default_def;
877
878 /* Skip over the elements that had been freed. */
879 while (SSANAMES (fn)->length () < i)
880 SSANAMES (fn)->quick_push (NULL_TREE);
881
882 is_default_def = (streamer_read_uchar (ib) != 0);
883 name = stream_read_tree (ib, data_in);
884 ssa_name = make_ssa_name_fn (fn, name, NULL);
885
886 if (is_default_def)
887 {
888 set_ssa_default_def (cfun, SSA_NAME_VAR (ssa_name), ssa_name);
889 SSA_NAME_DEF_STMT (ssa_name) = gimple_build_nop ();
890 }
891
892 i = streamer_read_uhwi (ib);
893 }
894 }
895
896
897 /* Go through all NODE edges and fix up call_stmt pointers
898 so they point to STMTS. */
899
900 static void
901 fixup_call_stmt_edges_1 (struct cgraph_node *node, gimple **stmts,
902 struct function *fn)
903 {
904 struct cgraph_edge *cedge;
905 struct ipa_ref *ref = NULL;
906 unsigned int i;
907
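/* Statement UIDs recorded on callgraph edges and references are 1-based
   indices into STMTS; a reference with a zero UID has no associated
   statement. */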
908 for (cedge = node->callees; cedge; cedge = cedge->next_callee)
909 {
910 if (gimple_stmt_max_uid (fn) < cedge->lto_stmt_uid)
911 fatal_error (input_location,
912 "Cgraph edge statement index out of range");
913 cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
914 if (!cedge->call_stmt)
915 fatal_error (input_location,
916 "Cgraph edge statement index not found");
917 }
918 for (cedge = node->indirect_calls; cedge; cedge = cedge->next_callee)
919 {
920 if (gimple_stmt_max_uid (fn) < cedge->lto_stmt_uid)
921 fatal_error (input_location,
922 "Cgraph edge statement index out of range");
923 cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
924 if (!cedge->call_stmt)
925 fatal_error (input_location, "Cgraph edge statement index not found");
926 }
927 for (i = 0; node->iterate_reference (i, ref); i++)
928 if (ref->lto_stmt_uid)
929 {
930 if (gimple_stmt_max_uid (fn) < ref->lto_stmt_uid)
931 fatal_error (input_location,
932 "Reference statement index out of range");
933 ref->stmt = stmts[ref->lto_stmt_uid - 1];
934 if (!ref->stmt)
935 fatal_error (input_location, "Reference statement index not found");
936 }
937 }
938
939
940 /* Fix up call_stmt pointers in NODE and all its clones. */
941
942 static void
943 fixup_call_stmt_edges (struct cgraph_node *orig, gimple **stmts)
944 {
945 struct cgraph_node *node;
946 struct function *fn;
947
948 while (orig->clone_of)
949 orig = orig->clone_of;
950 fn = DECL_STRUCT_FUNCTION (orig->decl);
951
952 fixup_call_stmt_edges_1 (orig, stmts, fn);
953 if (orig->clones)
954 for (node = orig->clones; node != orig;)
955 {
956 fixup_call_stmt_edges_1 (node, stmts, fn);
957 if (node->clones)
958 node = node->clones;
959 else if (node->next_sibling_clone)
960 node = node->next_sibling_clone;
961 else
962 {
963 while (node != orig && !node->next_sibling_clone)
964 node = node->clone_of;
965 if (node != orig)
966 node = node->next_sibling_clone;
967 }
968 }
969 }
970
971
972 /* Input the base body of struct function FN from DATA_IN
973 using input block IB. */
974
975 static void
976 input_struct_function_base (struct function *fn, struct data_in *data_in,
977 struct lto_input_block *ib)
978 {
979 struct bitpack_d bp;
980 int len;
981
982 /* Read the static chain and non-local goto save area. */
983 fn->static_chain_decl = stream_read_tree (ib, data_in);
984 fn->nonlocal_goto_save_area = stream_read_tree (ib, data_in);
985
986 /* Read all the local symbols. */
987 len = streamer_read_hwi (ib);
988 if (len > 0)
989 {
990 int i;
991 vec_safe_grow_cleared (fn->local_decls, len);
992 for (i = 0; i < len; i++)
993 {
994 tree t = stream_read_tree (ib, data_in);
995 (*fn->local_decls)[i] = t;
996 }
997 }
998
999 /* Input the current IL state of the function. */
1000 fn->curr_properties = streamer_read_uhwi (ib);
1001
1002 /* Read all the attributes for FN. */
1003 bp = streamer_read_bitpack (ib);
1004 fn->is_thunk = bp_unpack_value (&bp, 1);
1005 fn->has_local_explicit_reg_vars = bp_unpack_value (&bp, 1);
1006 fn->returns_pcc_struct = bp_unpack_value (&bp, 1);
1007 fn->returns_struct = bp_unpack_value (&bp, 1);
1008 fn->can_throw_non_call_exceptions = bp_unpack_value (&bp, 1);
1009 fn->can_delete_dead_exceptions = bp_unpack_value (&bp, 1);
1010 fn->always_inline_functions_inlined = bp_unpack_value (&bp, 1);
1011 fn->after_inlining = bp_unpack_value (&bp, 1);
1012 fn->stdarg = bp_unpack_value (&bp, 1);
1013 fn->has_nonlocal_label = bp_unpack_value (&bp, 1);
1014 fn->has_forced_label_in_static = bp_unpack_value (&bp, 1);
1015 fn->calls_alloca = bp_unpack_value (&bp, 1);
1016 fn->calls_setjmp = bp_unpack_value (&bp, 1);
1017 fn->has_force_vectorize_loops = bp_unpack_value (&bp, 1);
1018 fn->has_simduid_loops = bp_unpack_value (&bp, 1);
1019 fn->va_list_fpr_size = bp_unpack_value (&bp, 8);
1020 fn->va_list_gpr_size = bp_unpack_value (&bp, 8);
1021 fn->last_clique = bp_unpack_value (&bp, sizeof (short) * 8);
1022
1023 /* Input the function start and end loci. */
1024 fn->function_start_locus = stream_input_location_now (&bp, data_in);
1025 fn->function_end_locus = stream_input_location_now (&bp, data_in);
1026 }
1027
1028
1029 /* Read the body of function FN_DECL from DATA_IN using input block IB. */
1030
1031 static void
1032 input_function (tree fn_decl, struct data_in *data_in,
1033 struct lto_input_block *ib, struct lto_input_block *ib_cfg)
1034 {
1035 struct function *fn;
1036 enum LTO_tags tag;
1037 gimple **stmts;
1038 basic_block bb;
1039 struct cgraph_node *node;
1040
1041 tag = streamer_read_record_start (ib);
1042 lto_tag_check (tag, LTO_function);
1043
1044 /* Read decls for parameters and args. */
1045 DECL_RESULT (fn_decl) = stream_read_tree (ib, data_in);
1046 DECL_ARGUMENTS (fn_decl) = streamer_read_chain (ib, data_in);
1047
1048 /* Read the tree of lexical scopes for the function. */
1049 DECL_INITIAL (fn_decl) = stream_read_tree (ib, data_in);
1050
1051 if (!streamer_read_uhwi (ib))
1052 return;
1053
1054 push_struct_function (fn_decl);
1055 fn = DECL_STRUCT_FUNCTION (fn_decl);
1056 init_tree_ssa (fn);
1057 /* We input IL in SSA form. */
1058 cfun->gimple_df->in_ssa_p = true;
1059
1060 gimple_register_cfg_hooks ();
1061
1062 node = cgraph_node::get (fn_decl);
1063 if (!node)
1064 node = cgraph_node::create (fn_decl);
1065 input_struct_function_base (fn, data_in, ib);
1066 input_cfg (ib_cfg, data_in, fn, node->count_materialization_scale);
1067
1068 /* Read all the SSA names. */
1069 input_ssa_names (ib, data_in, fn);
1070
1071 /* Read the exception handling regions in the function. */
1072 input_eh_regions (ib, data_in, fn);
1073
1074 gcc_assert (DECL_INITIAL (fn_decl));
1075 DECL_SAVED_TREE (fn_decl) = NULL_TREE;
1076
1077 /* Read all the basic blocks. */
1078 tag = streamer_read_record_start (ib);
1079 while (tag)
1080 {
1081 input_bb (ib, tag, data_in, fn,
1082 node->count_materialization_scale);
1083 tag = streamer_read_record_start (ib);
1084 }
1085
1086 /* Fix up the call statements that are mentioned in the callgraph
1087 edges. */
1088 set_gimple_stmt_max_uid (cfun, 0);
1089 FOR_ALL_BB_FN (bb, cfun)
1090 {
1091 gimple_stmt_iterator gsi;
1092 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1093 {
1094 gimple *stmt = gsi_stmt (gsi);
1095 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1096 }
1097 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1098 {
1099 gimple *stmt = gsi_stmt (gsi);
1100 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1101 }
1102 }
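/* Build a UID -> statement map so that the callgraph edge and IPA
   fixups below can translate the statement indices read from the
   stream back into statements. */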
1103 stmts = (gimple **) xcalloc (gimple_stmt_max_uid (fn), sizeof (gimple *));
1104 FOR_ALL_BB_FN (bb, cfun)
1105 {
1106 gimple_stmt_iterator bsi = gsi_start_phis (bb);
1107 while (!gsi_end_p (bsi))
1108 {
1109 gimple *stmt = gsi_stmt (bsi);
1110 gsi_next (&bsi);
1111 stmts[gimple_uid (stmt)] = stmt;
1112 }
1113 bsi = gsi_start_bb (bb);
1114 while (!gsi_end_p (bsi))
1115 {
1116 gimple *stmt = gsi_stmt (bsi);
1117 /* If we're recompiling LTO objects with debug stmts but
1118 we're not supposed to have debug stmts, remove them now.
1119 We can't remove them earlier because this would cause uid
1120 mismatches in fixups, but we can do it at this point, as
1121 long as debug stmts don't require fixups. */
1122 if (!MAY_HAVE_DEBUG_STMTS && !flag_wpa && is_gimple_debug (stmt))
1123 {
1124 gimple_stmt_iterator gsi = bsi;
1125 gsi_next (&bsi);
1126 gsi_remove (&gsi, true);
1127 }
1128 else
1129 {
1130 gsi_next (&bsi);
1131 stmts[gimple_uid (stmt)] = stmt;
1132 }
1133 }
1134 }
1135
1136 /* Set the gimple body to the statement sequence in the entry
1137 basic block. FIXME lto, this is fairly hacky. The existence
1138 of a gimple body is used by the cgraph routines, but we should
1139 really use the presence of the CFG. */
1140 {
1141 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
1142 gimple_set_body (fn_decl, bb_seq (ei_edge (ei)->dest));
1143 }
1144
1145 fixup_call_stmt_edges (node, stmts);
1146 execute_all_ipa_stmt_fixups (node, stmts);
1147
1148 update_ssa (TODO_update_ssa_only_virtuals);
1149 free_dominance_info (CDI_DOMINATORS);
1150 free_dominance_info (CDI_POST_DOMINATORS);
1151 free (stmts);
1152 pop_cfun ();
1153 }
1154
1155 /* Read the initializer of variable VAR from DATA_IN using input block IB. */
1156
1157 static void
1158 input_constructor (tree var, struct data_in *data_in,
1159 struct lto_input_block *ib)
1160 {
1161 DECL_INITIAL (var) = stream_read_tree (ib, data_in);
1162 }
1163
1164
1165 /* Read the body from DATA for symbol NODE and fill it in.
1166 FILE_DATA holds the global decls and types. SECTION_TYPE is either
1167 LTO_section_function_body or LTO_section_static_initializer. If
1168 the section type is LTO_section_function_body, NODE must be the
1169 cgraph node for that function. */
1170
1171 static void
1172 lto_read_body_or_constructor (struct lto_file_decl_data *file_data, struct symtab_node *node,
1173 const char *data, enum lto_section_type section_type)
1174 {
1175 const struct lto_function_header *header;
1176 struct data_in *data_in;
1177 int cfg_offset;
1178 int main_offset;
1179 int string_offset;
1180 tree fn_decl = node->decl;
1181
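/* A function body section is laid out as header, CFG stream, main
   stream and string table; constructor sections have no CFG part. */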
1182 header = (const struct lto_function_header *) data;
1183 if (TREE_CODE (node->decl) == FUNCTION_DECL)
1184 {
1185 cfg_offset = sizeof (struct lto_function_header);
1186 main_offset = cfg_offset + header->cfg_size;
1187 string_offset = main_offset + header->main_size;
1188 }
1189 else
1190 {
1191 main_offset = sizeof (struct lto_function_header);
1192 string_offset = main_offset + header->main_size;
1193 }
1194
1195 data_in = lto_data_in_create (file_data, data + string_offset,
1196 header->string_size, vNULL);
1197
1198 if (section_type == LTO_section_function_body)
1199 {
1200 struct lto_in_decl_state *decl_state;
1201 unsigned from;
1202
1203 gcc_checking_assert (node);
1204
1205 /* Use the function's decl state. */
1206 decl_state = lto_get_function_in_decl_state (file_data, fn_decl);
1207 gcc_assert (decl_state);
1208 file_data->current_decl_state = decl_state;
1209
1210
1211 /* Set up the struct function. */
1212 from = data_in->reader_cache->nodes.length ();
1213 lto_input_block ib_main (data + main_offset, header->main_size,
1214 file_data->mode_table);
1215 if (TREE_CODE (node->decl) == FUNCTION_DECL)
1216 {
1217 lto_input_block ib_cfg (data + cfg_offset, header->cfg_size,
1218 file_data->mode_table);
1219 input_function (fn_decl, data_in, &ib_main, &ib_cfg);
1220 }
1221 else
1222 input_constructor (fn_decl, data_in, &ib_main);
1223 data_in->location_cache.apply_location_cache ();
1224 /* And fixup types we streamed locally. */
1225 {
1226 struct streamer_tree_cache_d *cache = data_in->reader_cache;
1227 unsigned len = cache->nodes.length ();
1228 unsigned i;
1229 for (i = len; i-- > from;)
1230 {
1231 tree t = streamer_tree_cache_get_tree (cache, i);
1232 if (t == NULL_TREE)
1233 continue;
1234
1235 if (TYPE_P (t))
1236 {
1237 gcc_assert (TYPE_CANONICAL (t) == NULL_TREE);
1238 if (type_with_alias_set_p (t)
1239 && canonical_type_used_p (t))
1240 TYPE_CANONICAL (t) = TYPE_MAIN_VARIANT (t);
1241 if (TYPE_MAIN_VARIANT (t) != t)
1242 {
1243 gcc_assert (TYPE_NEXT_VARIANT (t) == NULL_TREE);
1244 TYPE_NEXT_VARIANT (t)
1245 = TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t));
1246 TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t)) = t;
1247 }
1248 }
1249 }
1250 }
1251
1252 /* Restore decl state */
1253 file_data->current_decl_state = file_data->global_decl_state;
1254 }
1255
1256 lto_data_in_delete (data_in);
1257 }
1258
1259
1260 /* Read the body of NODE using DATA. FILE_DATA holds the global
1261 decls and types. */
1262
1263 void
1264 lto_input_function_body (struct lto_file_decl_data *file_data,
1265 struct cgraph_node *node, const char *data)
1266 {
1267 lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
1268 }
1269
1270 /* Read the constructor (initializer) of variable NODE using DATA. FILE_DATA
1271 holds the global decls and types. */
1272
1273 void
1274 lto_input_variable_constructor (struct lto_file_decl_data *file_data,
1275 struct varpool_node *node, const char *data)
1276 {
1277 lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
1278 }
1279
1280
1281 /* Read the physical representation of a tree node EXPR from
1282 input block IB using the per-file context in DATA_IN. */
1283
1284 static void
1285 lto_read_tree_1 (struct lto_input_block *ib, struct data_in *data_in, tree expr)
1286 {
1287 /* Read all the bitfield values in EXPR. Note that for LTO, we
1288 only write language-independent bitfields, so no more unpacking is
1289 needed. */
1290 streamer_read_tree_bitfields (ib, data_in, expr);
1291
1292 /* Read all the pointer fields in EXPR. */
1293 streamer_read_tree_body (ib, data_in, expr);
1294
1295 /* Read any LTO-specific data not read by the tree streamer. */
1296 if (DECL_P (expr)
1297 && TREE_CODE (expr) != FUNCTION_DECL
1298 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
1299 DECL_INITIAL (expr) = stream_read_tree (ib, data_in);
1300
1301 /* We should never try to instantiate an MD or NORMAL builtin here. */
1302 if (TREE_CODE (expr) == FUNCTION_DECL)
1303 gcc_assert (!streamer_handle_as_builtin_p (expr));
1304
1305 #ifdef LTO_STREAMER_DEBUG
1306 /* Remove the mapping to RESULT's original address set by
1307 streamer_alloc_tree. */
1308 lto_orig_address_remove (expr);
1309 #endif
1310 }
1311
1312 /* Read the physical representation of a tree node with tag TAG from
1313 input block IB using the per-file context in DATA_IN. */
1314
1315 static tree
1316 lto_read_tree (struct lto_input_block *ib, struct data_in *data_in,
1317 enum LTO_tags tag, hashval_t hash)
1318 {
1319 /* Instantiate a new tree node. */
1320 tree result = streamer_alloc_tree (ib, data_in, tag);
1321
1322 /* Enter RESULT in the reader cache. This will make RESULT
1323 available so that circular references in the rest of the tree
1324 structure can be resolved in subsequent calls to stream_read_tree. */
1325 streamer_tree_cache_append (data_in->reader_cache, result, hash);
1326
1327 lto_read_tree_1 (ib, data_in, result);
1328
1329 /* end_marker = */ streamer_read_uchar (ib);
1330
1331 return result;
1332 }
1333
1334
1335 /* Populate the reader cache with trees materialized from the SCC
1336 that follows in the IB/DATA_IN stream. */
1337
1338 hashval_t
1339 lto_input_scc (struct lto_input_block *ib, struct data_in *data_in,
1340 unsigned *len, unsigned *entry_len)
1341 {
1342 /* A blob of unnamed tree nodes; fill the cache from it and
1343 recurse. */
1344 unsigned size = streamer_read_uhwi (ib);
1345 hashval_t scc_hash = streamer_read_uhwi (ib);
1346 unsigned scc_entry_len = 1;
1347
1348 if (size == 1)
1349 {
1350 enum LTO_tags tag = streamer_read_record_start (ib);
1351 lto_input_tree_1 (ib, data_in, tag, scc_hash);
1352 }
1353 else
1354 {
1355 unsigned int first = data_in->reader_cache->nodes.length ();
1356 tree result;
1357
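/* For SCCs with more than one node the stream also records how many of
   the leading nodes are SCC entry points; this is handed back to the
   caller together with the SCC hash. */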
1358 scc_entry_len = streamer_read_uhwi (ib);
1359
1360 /* Materialize size trees by reading their headers. */
1361 for (unsigned i = 0; i < size; ++i)
1362 {
1363 enum LTO_tags tag = streamer_read_record_start (ib);
1364 if (tag == LTO_null
1365 || (tag >= LTO_field_decl_ref && tag <= LTO_global_decl_ref)
1366 || tag == LTO_tree_pickle_reference
1367 || tag == LTO_builtin_decl
1368 || tag == LTO_integer_cst
1369 || tag == LTO_tree_scc)
1370 gcc_unreachable ();
1371
1372 result = streamer_alloc_tree (ib, data_in, tag);
1373 streamer_tree_cache_append (data_in->reader_cache, result, 0);
1374 }
1375
1376 /* Read the tree bitpacks and references. */
1377 for (unsigned i = 0; i < size; ++i)
1378 {
1379 result = streamer_tree_cache_get_tree (data_in->reader_cache,
1380 first + i);
1381 lto_read_tree_1 (ib, data_in, result);
1382 /* end_marker = */ streamer_read_uchar (ib);
1383 }
1384 }
1385
1386 *len = size;
1387 *entry_len = scc_entry_len;
1388 return scc_hash;
1389 }
1390
1391
1392 /* Read a tree from input block IB using the per-file context in
1393 DATA_IN. This context is used, for example, to resolve references
1394 to previously read nodes. */
1395
1396 tree
1397 lto_input_tree_1 (struct lto_input_block *ib, struct data_in *data_in,
1398 enum LTO_tags tag, hashval_t hash)
1399 {
1400 tree result;
1401
1402 gcc_assert ((unsigned) tag < (unsigned) LTO_NUM_TAGS);
1403
1404 if (tag == LTO_null)
1405 result = NULL_TREE;
1406 else if (tag >= LTO_field_decl_ref && tag <= LTO_namelist_decl_ref)
1407 {
1408 /* If TAG is a reference to an indexable tree, the next value
1409 in IB is the index into the table where we expect to find
1410 that tree. */
1411 result = lto_input_tree_ref (ib, data_in, cfun, tag);
1412 }
1413 else if (tag == LTO_tree_pickle_reference)
1414 {
1415 /* If TAG is a reference to a previously read tree, look it up in
1416 the reader cache. */
1417 result = streamer_get_pickled_tree (ib, data_in);
1418 }
1419 else if (tag == LTO_builtin_decl)
1420 {
1421 /* If we are going to read a built-in function, all we need is
1422 the code and class. */
1423 result = streamer_get_builtin_tree (ib, data_in);
1424 }
1425 else if (tag == LTO_integer_cst)
1426 {
1427 /* For shared integer constants in singletons we can use the
1428 existing tree integer constant merging code. */
1429 tree type = stream_read_tree (ib, data_in);
1430 unsigned HOST_WIDE_INT len = streamer_read_uhwi (ib);
1431 unsigned HOST_WIDE_INT i;
1432 HOST_WIDE_INT a[WIDE_INT_MAX_ELTS];
1433
1434 for (i = 0; i < len; i++)
1435 a[i] = streamer_read_hwi (ib);
1436 gcc_assert (TYPE_PRECISION (type) <= MAX_BITSIZE_MODE_ANY_INT);
1437 result = wide_int_to_tree (type, wide_int::from_array
1438 (a, len, TYPE_PRECISION (type)));
1439 streamer_tree_cache_append (data_in->reader_cache, result, hash);
1440 }
1441 else if (tag == LTO_tree_scc)
1442 gcc_unreachable ();
1443 else
1444 {
1445 /* Otherwise, materialize a new node from IB. */
1446 result = lto_read_tree (ib, data_in, tag, hash);
1447 }
1448
1449 return result;
1450 }
1451
1452 tree
1453 lto_input_tree (struct lto_input_block *ib, struct data_in *data_in)
1454 {
1455 enum LTO_tags tag;
1456
1457 /* Input and skip SCCs. */
1458 while ((tag = streamer_read_record_start (ib)) == LTO_tree_scc)
1459 {
1460 unsigned len, entry_len;
1461 lto_input_scc (ib, data_in, &len, &entry_len);
1462 }
1463 return lto_input_tree_1 (ib, data_in, tag, 0);
1464 }
1465
1466
1467 /* Input toplevel asms. */
1468
1469 void
1470 lto_input_toplevel_asms (struct lto_file_decl_data *file_data, int order_base)
1471 {
1472 size_t len;
1473 const char *data = lto_get_section_data (file_data, LTO_section_asm,
1474 NULL, &len);
1475 const struct lto_simple_header_with_strings *header
1476 = (const struct lto_simple_header_with_strings *) data;
1477 int string_offset;
1478 struct data_in *data_in;
1479 tree str;
1480
1481 if (! data)
1482 return;
1483
1484 string_offset = sizeof (*header) + header->main_size;
1485
1486 lto_input_block ib (data + sizeof (*header), header->main_size,
1487 file_data->mode_table);
1488
1489 data_in = lto_data_in_create (file_data, data + string_offset,
1490 header->string_size, vNULL);
1491
1492 while ((str = streamer_read_string_cst (data_in, &ib)))
1493 {
1494 asm_node *node = symtab->finalize_toplevel_asm (str);
1495 node->order = streamer_read_hwi (&ib) + order_base;
1496 if (node->order >= symtab->order)
1497 symtab->order = node->order + 1;
1498 }
1499
1500 lto_data_in_delete (data_in);
1501
1502 lto_free_section_data (file_data, LTO_section_asm, NULL, data, len);
1503 }
1504
1505
1506 /* Input mode table. */
1507
1508 void
1509 lto_input_mode_table (struct lto_file_decl_data *file_data)
1510 {
1511 size_t len;
1512 const char *data = lto_get_section_data (file_data, LTO_section_mode_table,
1513 NULL, &len);
1514 if (! data)
1515 {
1516 internal_error ("cannot read LTO mode table from %s",
1517 file_data->file_name);
1518 return;
1519 }
1520
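/* The mode table maps the writer's machine mode numbering onto this
   host's modes; it is zero-initialized, so unmapped entries read as
   VOIDmode. */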
1521 unsigned char *table = ggc_cleared_vec_alloc<unsigned char> (1 << 8);
1522 file_data->mode_table = table;
1523 const struct lto_simple_header_with_strings *header
1524 = (const struct lto_simple_header_with_strings *) data;
1525 int string_offset;
1526 struct data_in *data_in;
1527 string_offset = sizeof (*header) + header->main_size;
1528
1529 lto_input_block ib (data + sizeof (*header), header->main_size, NULL);
1530 data_in = lto_data_in_create (file_data, data + string_offset,
1531 header->string_size, vNULL);
1532 bitpack_d bp = streamer_read_bitpack (&ib);
1533
1534 table[VOIDmode] = VOIDmode;
1535 table[BLKmode] = BLKmode;
1536 unsigned int m;
1537 while ((m = bp_unpack_value (&bp, 8)) != VOIDmode)
1538 {
1539 enum mode_class mclass
1540 = bp_unpack_enum (&bp, mode_class, MAX_MODE_CLASS);
1541 unsigned int size = bp_unpack_value (&bp, 8);
1542 unsigned int prec = bp_unpack_value (&bp, 16);
1543 machine_mode inner = (machine_mode) bp_unpack_value (&bp, 8);
1544 unsigned int nunits = bp_unpack_value (&bp, 8);
1545 unsigned int ibit = 0, fbit = 0;
1546 unsigned int real_fmt_len = 0;
1547 const char *real_fmt_name = NULL;
1548 switch (mclass)
1549 {
1550 case MODE_FRACT:
1551 case MODE_UFRACT:
1552 case MODE_ACCUM:
1553 case MODE_UACCUM:
1554 ibit = bp_unpack_value (&bp, 8);
1555 fbit = bp_unpack_value (&bp, 8);
1556 break;
1557 case MODE_FLOAT:
1558 case MODE_DECIMAL_FLOAT:
1559 real_fmt_name = bp_unpack_indexed_string (data_in, &bp,
1560 &real_fmt_len);
1561 break;
1562 default:
1563 break;
1564 }
1565 /* First search only from GET_CLASS_NARROWEST_MODE through the wider modes;
1566 if no match is found, fall back to scanning all modes. */
1567 int pass;
1568 for (pass = 0; pass < 2; pass++)
1569 for (machine_mode mr = pass ? VOIDmode
1570 : GET_CLASS_NARROWEST_MODE (mclass);
1571 pass ? mr < MAX_MACHINE_MODE : mr != VOIDmode;
1572 pass ? mr = (machine_mode) (mr + 1)
1573 : mr = GET_MODE_WIDER_MODE (mr))
1574 if (GET_MODE_CLASS (mr) != mclass
1575 || GET_MODE_SIZE (mr) != size
1576 || GET_MODE_PRECISION (mr) != prec
1577 || (inner == m
1578 ? GET_MODE_INNER (mr) != mr
1579 : GET_MODE_INNER (mr) != table[(int) inner])
1580 || GET_MODE_IBIT (mr) != ibit
1581 || GET_MODE_FBIT (mr) != fbit
1582 || GET_MODE_NUNITS (mr) != nunits)
1583 continue;
1584 else if ((mclass == MODE_FLOAT || mclass == MODE_DECIMAL_FLOAT)
1585 && strcmp (REAL_MODE_FORMAT (mr)->name, real_fmt_name) != 0)
1586 continue;
1587 else
1588 {
1589 table[m] = mr;
1590 pass = 2;
1591 break;
1592 }
1593 unsigned int mname_len;
1594 const char *mname = bp_unpack_indexed_string (data_in, &bp, &mname_len);
1595 if (pass == 2)
1596 {
1597 switch (mclass)
1598 {
1599 case MODE_VECTOR_INT:
1600 case MODE_VECTOR_FLOAT:
1601 case MODE_VECTOR_FRACT:
1602 case MODE_VECTOR_UFRACT:
1603 case MODE_VECTOR_ACCUM:
1604 case MODE_VECTOR_UACCUM:
1605 /* For unsupported vector modes just use BLKmode,
1606 if the scalar mode is supported. */
1607 if (table[(int) inner] != VOIDmode)
1608 {
1609 table[m] = BLKmode;
1610 break;
1611 }
1612 /* FALLTHRU */
1613 default:
1614 fatal_error (UNKNOWN_LOCATION, "unsupported mode %s\n", mname);
1615 break;
1616 }
1617 }
1618 }
1619 lto_data_in_delete (data_in);
1620
1621 lto_free_section_data (file_data, LTO_section_mode_table, NULL, data, len);
1622 }
1623
1624
1625 /* Initialization for the LTO reader. */
1626
1627 void
1628 lto_reader_init (void)
1629 {
1630 lto_streamer_init ();
1631 file_name_hash_table
1632 = new hash_table<freeing_string_slot_hasher> (37);
1633 }
1634
1635
1636 /* Create a new data_in object for FILE_DATA. STRINGS is the string
1637 table to use with LEN strings. RESOLUTIONS is the vector of linker
1638 resolutions (NULL if not using a linker plugin). */
1639
1640 struct data_in *
1641 lto_data_in_create (struct lto_file_decl_data *file_data, const char *strings,
1642 unsigned len,
1643 vec<ld_plugin_symbol_resolution_t> resolutions)
1644 {
1645 struct data_in *data_in = new (struct data_in);
1646 data_in->file_data = file_data;
1647 data_in->strings = strings;
1648 data_in->strings_len = len;
1649 data_in->globals_resolution = resolutions;
1650 data_in->reader_cache = streamer_tree_cache_create (false, false, true);
1651 return data_in;
1652 }
1653
1654
1655 /* Remove DATA_IN. */
1656
1657 void
1658 lto_data_in_delete (struct data_in *data_in)
1659 {
1660 data_in->globals_resolution.release ();
1661 streamer_tree_cache_delete (data_in->reader_cache);
1662 delete data_in;
1663 }