7bfefa9d 1/* Top-level LTO routines.
50ed40f5 2 Copyright 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
7bfefa9d 3 Contributed by CodeSourcery, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "opts.h"
25#include "toplev.h"
26#include "tree.h"
d534cab0 27#include "tree-flow.h"
852f689e 28#include "diagnostic-core.h"
7bfefa9d 29#include "tm.h"
7bfefa9d 30#include "cgraph.h"
31#include "ggc.h"
32#include "tree-ssa-operands.h"
33#include "tree-pass.h"
34#include "langhooks.h"
35#include "vec.h"
36#include "bitmap.h"
37#include "pointer-set.h"
38#include "ipa-prop.h"
39#include "common.h"
3c9e9cba 40#include "debug.h"
7bfefa9d 41#include "gimple.h"
42#include "lto.h"
43#include "lto-tree.h"
44#include "lto-streamer.h"
2541503d 45#include "tree-streamer.h"
f18bad33 46#include "splay-tree.h"
50ed40f5 47#include "lto-partition.h"
7bfefa9d 48
3c9e9cba 49static GTY(()) tree first_personality_decl;
50
654ca0f7 51/* Returns a hash code for P. */
52
53static hashval_t
54hash_name (const void *p)
55{
56 const struct lto_section_slot *ds = (const struct lto_section_slot *) p;
57 return (hashval_t) htab_hash_string (ds->name);
58}
59
60
61/* Returns nonzero if P1 and P2 are equal. */
62
63static int
64eq_name (const void *p1, const void *p2)
65{
66 const struct lto_section_slot *s1 =
67 (const struct lto_section_slot *) p1;
68 const struct lto_section_slot *s2 =
69 (const struct lto_section_slot *) p2;
70
71 return strcmp (s1->name, s2->name) == 0;
72}
73
74/* Free lto_section_slot */
75
76static void
77free_with_string (void *arg)
78{
79 struct lto_section_slot *s = (struct lto_section_slot *)arg;
80
81 free (CONST_CAST (char *, s->name));
82 free (arg);
83}
84
85/* Create section hash table */
86
87htab_t
88lto_obj_create_section_hash_table (void)
89{
90 return htab_create (37, hash_name, eq_name, free_with_string);
91}
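/* Illustrative sketch, not from the original sources: a caller could look
   up a section by name with a temporary slot whose NAME field points at
   the section name string being searched for, roughly

     struct lto_section_slot s_slot;
     s_slot.name = section_name;
     found = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);

   (SECTION_NAME, FOUND and SECTION_HASH_TABLE are placeholder names.)
   hash_name and eq_name above only consult the NAME field, so the other
   fields of the temporary slot are irrelevant.  The table owns its keys:
   free_with_string releases both the slot and its name string when entries
   are removed or the table is deleted.  */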
3c9e9cba 92
15c58191 93/* Delete an allocated integer KEY in the splay tree. */
94
95static void
96lto_splay_tree_delete_id (splay_tree_key key)
97{
98 free ((void *) key);
99}
100
101/* Compare splay tree node ids A and B. */
102
103static int
104lto_splay_tree_compare_ids (splay_tree_key a, splay_tree_key b)
105{
106 unsigned HOST_WIDE_INT ai;
107 unsigned HOST_WIDE_INT bi;
108
109 ai = *(unsigned HOST_WIDE_INT *) a;
110 bi = *(unsigned HOST_WIDE_INT *) b;
111
112 if (ai < bi)
113 return -1;
114 else if (ai > bi)
115 return 1;
116 return 0;
117}
118
119/* Look up splay tree node by ID in splay tree T. */
120
121static splay_tree_node
122lto_splay_tree_lookup (splay_tree t, unsigned HOST_WIDE_INT id)
123{
124 return splay_tree_lookup (t, (splay_tree_key) &id);
125}
126
127/* Check if KEY has ID. */
128
129static bool
130lto_splay_tree_id_equal_p (splay_tree_key key, unsigned HOST_WIDE_INT id)
131{
132 return *(unsigned HOST_WIDE_INT *) key == id;
133}
134
135/* Insert a splay tree node into tree T with ID as key and FILE_DATA as value.
136 The ID is allocated separately because we need HOST_WIDE_INTs which may
137 be wider than a splay_tree_key. */
138
139static void
140lto_splay_tree_insert (splay_tree t, unsigned HOST_WIDE_INT id,
141 struct lto_file_decl_data *file_data)
142{
143 unsigned HOST_WIDE_INT *idp = XCNEW (unsigned HOST_WIDE_INT);
144 *idp = id;
145 splay_tree_insert (t, (splay_tree_key) idp, (splay_tree_value) file_data);
146}
147
148/* Create a splay tree. */
149
150static splay_tree
151lto_splay_tree_new (void)
152{
153 return splay_tree_new (lto_splay_tree_compare_ids,
154 lto_splay_tree_delete_id,
155 NULL);
156}
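/* Illustrative sketch, not from the original sources: the splay-tree
   helpers above are meant to be used together, roughly as

     splay_tree file_ids = lto_splay_tree_new ();
     lto_splay_tree_insert (file_ids, id, file_data);
     ...
     splay_tree_node n = lto_splay_tree_lookup (file_ids, id);
     if (n)
       file_data = (struct lto_file_decl_data *) n->value;

   (FILE_IDS is a placeholder name.)  The key is a separately allocated
   HOST_WIDE_INT, freed by lto_splay_tree_delete_id, because a
   splay_tree_key may be too narrow to hold the id directly.  */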
157
97b862d3 158/* Return true when NODE has a clone that is analyzed (i.e. we need
159 to load its body even if the node itself is not needed). */
160
161static bool
162has_analyzed_clone_p (struct cgraph_node *node)
163{
164 struct cgraph_node *orig = node;
165 node = node->clones;
166 if (node)
167 while (node != orig)
168 {
169 if (node->analyzed)
170 return true;
171 if (node->clones)
172 node = node->clones;
173 else if (node->next_sibling_clone)
174 node = node->next_sibling_clone;
175 else
176 {
177 while (node != orig && !node->next_sibling_clone)
178 node = node->clone_of;
179 if (node != orig)
180 node = node->next_sibling_clone;
181 }
182 }
183 return false;
184}
185
3c9e9cba 186/* Read the function body for the function associated with NODE. */
7bfefa9d 187
188static void
189lto_materialize_function (struct cgraph_node *node)
190{
191 tree decl;
192 struct lto_file_decl_data *file_data;
193 const char *data, *name;
194 size_t len;
7bfefa9d 195
7d0d0ce1 196 decl = node->symbol.decl;
97b862d3 197 /* Read in functions with body (analyzed nodes)
198 and also functions that are needed to produce virtual clones. */
91bf9d9a 199 if (cgraph_function_with_gimple_body_p (node) || has_analyzed_clone_p (node))
7bfefa9d 200 {
c9aa6453 201 /* Clones don't need to be read. */
97b862d3 202 if (node->clone_of)
203 return;
7bfefa9d 204
205 /* Load the function body only if not operating in WPA mode. In
206 WPA mode, the body of the function is not needed. */
207 if (!flag_wpa)
208 {
7d0d0ce1 209 file_data = node->symbol.lto_file_data;
5cd33168 210 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
211
212 /* We may have renamed the declaration, e.g., a static function. */
213 name = lto_get_decl_name_mapping (file_data, name);
214
215 data = lto_get_section_data (file_data, LTO_section_function_body,
216 name, &len);
217 if (!data)
218 fatal_error ("%s: section %s is missing",
219 file_data->file_name,
220 name);
221
222 gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);
223
9078126c 224 push_struct_function (decl);
3c9e9cba 225 announce_function (decl);
7bfefa9d 226 lto_input_function_body (file_data, decl, data);
3c9e9cba 227 if (DECL_FUNCTION_PERSONALITY (decl) && !first_personality_decl)
228 first_personality_decl = DECL_FUNCTION_PERSONALITY (decl);
7bfefa9d 229 lto_stats.num_function_bodies++;
5cd33168 230 lto_free_section_data (file_data, LTO_section_function_body, name,
231 data, len);
9078126c 232 pop_cfun ();
5cd33168 233 ggc_collect ();
7bfefa9d 234 }
7bfefa9d 235 }
7bfefa9d 236
237 /* Let the middle end know about the function. */
238 rest_of_decl_compilation (decl, 1, 0);
7bfefa9d 239}
240
241
851d9296 242/* Decode the content of memory pointed to by DATA into the in-decl
 243 state object STATE.  DATA_IN points to a data_in structure for
244 decoding. Return the address after the decoded object in the
245 input. */
7bfefa9d 246
247static const uint32_t *
248lto_read_in_decl_state (struct data_in *data_in, const uint32_t *data,
249 struct lto_in_decl_state *state)
250{
251 uint32_t ix;
252 tree decl;
253 uint32_t i, j;
949e5786 254
7bfefa9d 255 ix = *data++;
7f385784 256 decl = streamer_tree_cache_get (data_in->reader_cache, ix);
7bfefa9d 257 if (TREE_CODE (decl) != FUNCTION_DECL)
258 {
259 gcc_assert (decl == void_type_node);
260 decl = NULL_TREE;
261 }
262 state->fn_decl = decl;
263
264 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
265 {
266 uint32_t size = *data++;
ba72912a 267 tree *decls = ggc_alloc_vec_tree (size);
7bfefa9d 268
269 for (j = 0; j < size; j++)
7f385784 270 decls[j] = streamer_tree_cache_get (data_in->reader_cache, data[j]);
d534cab0 271
272 state->streams[i].size = size;
273 state->streams[i].trees = decls;
274 data += size;
275 }
276
277 return data;
278}
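/* Illustrative note, derived from the loop above: the encoded in-decl
   state is a flat sequence of uint32_t words

     [index of the function decl in the reader cache]
     [size of stream 0] [stream 0 ref 0] ... [stream 0 ref size-1]
     ...repeated for each of the LTO_N_DECL_STREAMS streams...

   where every reference is an index into data_in->reader_cache, resolved
   with streamer_tree_cache_get.  */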
279
ed29d77f 280
281
bdaea387 282/* Global type table. FIXME, it should be possible to re-use some
283 of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
284 etc), but those assume that types were built with the various
285 build_*_type routines which is not the case with the streamer. */
286static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
287 htab_t gimple_types;
288static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
289 htab_t type_hash_cache;
290
bdaea387 291static hashval_t gimple_type_hash (const void *);
292
293/* Structure used to maintain a cache of some type pairs compared by
294 gimple_types_compatible_p when comparing aggregate types. There are
295 three possible values for SAME_P:
296
297 -2: The pair (T1, T2) has just been inserted in the table.
298 0: T1 and T2 are different types.
ed29d77f 299 1: T1 and T2 are the same type. */
bdaea387 300
301struct type_pair_d
302{
303 unsigned int uid1;
304 unsigned int uid2;
ed29d77f 305 signed char same_p;
bdaea387 306};
307typedef struct type_pair_d *type_pair_t;
bdaea387 308
309#define GIMPLE_TYPE_PAIR_SIZE 16381
310struct type_pair_d *type_pair_cache;
311
312
 313/* Lookup the pair of types T1 and T2 in the type pair cache.  Insert a
 314 new entry if none existed.  */
315
316static inline type_pair_t
317lookup_type_pair (tree t1, tree t2)
318{
319 unsigned int index;
320 unsigned int uid1, uid2;
321
bdaea387 322 if (TYPE_UID (t1) < TYPE_UID (t2))
323 {
324 uid1 = TYPE_UID (t1);
325 uid2 = TYPE_UID (t2);
326 }
327 else
328 {
329 uid1 = TYPE_UID (t2);
330 uid2 = TYPE_UID (t1);
331 }
332 gcc_checking_assert (uid1 != uid2);
333
 334  /* iterative_hash_hashval_t would imply a function call;
 335     we know that UIDs are in a limited range.  */
336 index = ((((unsigned HOST_WIDE_INT)uid1 << HOST_BITS_PER_WIDE_INT / 2) + uid2)
337 % GIMPLE_TYPE_PAIR_SIZE);
338 if (type_pair_cache [index].uid1 == uid1
339 && type_pair_cache [index].uid2 == uid2)
340 return &type_pair_cache[index];
341
342 type_pair_cache [index].uid1 = uid1;
343 type_pair_cache [index].uid2 = uid2;
ed29d77f 344 type_pair_cache [index].same_p = -2;
bdaea387 345
346 return &type_pair_cache[index];
347}
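/* Illustrative note, mirroring the callers below: a freshly initialized or
   evicted cache entry has same_p == -2; callers treat 0 and 1 as cached
   verdicts and only fall back to the full structural walk otherwise:

     type_pair_t p = lookup_type_pair (t1, t2);
     if (p->same_p == 0 || p->same_p == 1)
       return p->same_p == 1;
     ...otherwise run gimple_types_compatible_p_1 on the pair...  */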
348
349/* Per pointer state for the SCC finding. The on_sccstack flag
350 is not strictly required, it is true when there is no hash value
351 recorded for the type and false otherwise. But querying that
352 is slower. */
353
354struct sccs
355{
356 unsigned int dfsnum;
357 unsigned int low;
358 bool on_sccstack;
359 union {
360 hashval_t hash;
361 signed char same_p;
362 } u;
363};
364
365static unsigned int next_dfs_num;
366static unsigned int gtc_next_dfs_num;
367
368/* GIMPLE type merging cache. A direct-mapped cache based on TYPE_UID. */
369
370typedef struct GTY(()) gimple_type_leader_entry_s {
371 tree type;
372 tree leader;
373} gimple_type_leader_entry;
374
375#define GIMPLE_TYPE_LEADER_SIZE 16381
ed29d77f 376static GTY((length("GIMPLE_TYPE_LEADER_SIZE")))
bdaea387 377 gimple_type_leader_entry *gimple_type_leader;
378
379/* Lookup an existing leader for T and return it or NULL_TREE, if
380 there is none in the cache. */
381
382static inline tree
383gimple_lookup_type_leader (tree t)
384{
385 gimple_type_leader_entry *leader;
386
bdaea387 387 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
388 if (leader->type != t)
389 return NULL_TREE;
390
391 return leader->leader;
392}
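/* Illustrative note, not from the original sources: the leader cache is
   direct-mapped on TYPE_UID, so a lookup is two loads and a compare.
   gimple_register_type_1 below fills a slot roughly as

     leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
     leader->type = t;
     leader->leader = canonical;

   (CANONICAL standing for the prevailing type.)  A later type whose UID
   maps to the same slot simply evicts the entry; registration then
   recomputes it on demand.  */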
393
394
bdaea387 395/* Return true if T1 and T2 have the same name.  Two unnamed types
 396 compare equal; a named and an unnamed type compare unequal.  Names are
 397 compared by pointer equality of their IDENTIFIER_NODEs.  */
398
399static bool
400compare_type_names_p (tree t1, tree t2)
401{
402 tree name1 = TYPE_NAME (t1);
403 tree name2 = TYPE_NAME (t2);
404
405 if ((name1 != NULL_TREE) != (name2 != NULL_TREE))
406 return false;
407
408 if (name1 == NULL_TREE)
409 return true;
410
411 /* Either both should be a TYPE_DECL or both an IDENTIFIER_NODE. */
412 if (TREE_CODE (name1) != TREE_CODE (name2))
413 return false;
414
415 if (TREE_CODE (name1) == TYPE_DECL)
416 name1 = DECL_NAME (name1);
417 gcc_checking_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);
418
419 if (TREE_CODE (name2) == TYPE_DECL)
420 name2 = DECL_NAME (name2);
421 gcc_checking_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);
422
423 /* Identifiers can be compared with pointer equality rather
424 than a string comparison. */
425 if (name1 == name2)
426 return true;
427
428 return false;
429}
430
431static bool
432gimple_types_compatible_p_1 (tree, tree, type_pair_t,
f1f41a6c 433 vec<type_pair_t> *,
bdaea387 434 struct pointer_map_t *, struct obstack *);
435
436/* DFS visit the edge from the callers type pair with state *STATE to
437 the pair T1, T2 while operating in FOR_MERGING_P mode.
438 Update the merging status if it is not part of the SCC containing the
439 callers pair and return it.
440 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
441
442static bool
443gtc_visit (tree t1, tree t2,
444 struct sccs *state,
f1f41a6c 445 vec<type_pair_t> *sccstack,
bdaea387 446 struct pointer_map_t *sccstate,
447 struct obstack *sccstate_obstack)
448{
449 struct sccs *cstate = NULL;
450 type_pair_t p;
451 void **slot;
452 tree leader1, leader2;
453
454 /* Check first for the obvious case of pointer identity. */
455 if (t1 == t2)
456 return true;
457
458 /* Check that we have two types to compare. */
459 if (t1 == NULL_TREE || t2 == NULL_TREE)
460 return false;
461
462 /* Can't be the same type if the types don't have the same code. */
463 if (TREE_CODE (t1) != TREE_CODE (t2))
464 return false;
465
466 /* Can't be the same type if they have different CV qualifiers. */
467 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
468 return false;
469
470 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
471 return false;
472
473 /* Void types and nullptr types are always the same. */
474 if (TREE_CODE (t1) == VOID_TYPE
475 || TREE_CODE (t1) == NULLPTR_TYPE)
476 return true;
477
478 /* Can't be the same type if they have different alignment or mode. */
479 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
480 || TYPE_MODE (t1) != TYPE_MODE (t2))
481 return false;
482
483 /* Do some simple checks before doing three hashtable queries. */
484 if (INTEGRAL_TYPE_P (t1)
485 || SCALAR_FLOAT_TYPE_P (t1)
486 || FIXED_POINT_TYPE_P (t1)
487 || TREE_CODE (t1) == VECTOR_TYPE
488 || TREE_CODE (t1) == COMPLEX_TYPE
489 || TREE_CODE (t1) == OFFSET_TYPE
490 || POINTER_TYPE_P (t1))
491 {
492 /* Can't be the same type if they have different sign or precision. */
493 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
494 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
495 return false;
496
497 if (TREE_CODE (t1) == INTEGER_TYPE
498 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
499 return false;
500
501 /* That's all we need to check for float and fixed-point types. */
502 if (SCALAR_FLOAT_TYPE_P (t1)
503 || FIXED_POINT_TYPE_P (t1))
504 return true;
505
506 /* For other types fall through to more complex checks. */
507 }
508
509 /* If the types have been previously registered and found equal
510 they still are. */
511 leader1 = gimple_lookup_type_leader (t1);
512 leader2 = gimple_lookup_type_leader (t2);
513 if (leader1 == t2
514 || t1 == leader2
515 || (leader1 && leader1 == leader2))
516 return true;
517
518 /* If the hash values of t1 and t2 are different the types can't
 519 possibly be the same.  This helps keep the type-pair cache
520 small, only tracking comparisons for hash collisions. */
521 if (gimple_type_hash (t1) != gimple_type_hash (t2))
522 return false;
523
524 /* Allocate a new cache entry for this comparison. */
525 p = lookup_type_pair (t1, t2);
ed29d77f 526 if (p->same_p == 0 || p->same_p == 1)
bdaea387 527 {
528 /* We have already decided whether T1 and T2 are the
529 same, return the cached result. */
ed29d77f 530 return p->same_p == 1;
bdaea387 531 }
532
533 if ((slot = pointer_map_contains (sccstate, p)) != NULL)
534 cstate = (struct sccs *)*slot;
535 /* Not yet visited. DFS recurse. */
536 if (!cstate)
537 {
538 gimple_types_compatible_p_1 (t1, t2, p,
539 sccstack, sccstate, sccstate_obstack);
540 cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
541 state->low = MIN (state->low, cstate->low);
542 }
543 /* If the type is still on the SCC stack adjust the parents low. */
544 if (cstate->dfsnum < state->dfsnum
545 && cstate->on_sccstack)
546 state->low = MIN (cstate->dfsnum, state->low);
547
548 /* Return the current lattice value. We start with an equality
549 assumption so types part of a SCC will be optimistically
550 treated equal unless proven otherwise. */
551 return cstate->u.same_p;
552}
553
554/* Worker for gimple_types_compatible.
555 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
556
557static bool
558gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
f1f41a6c 559 vec<type_pair_t> *sccstack,
bdaea387 560 struct pointer_map_t *sccstate,
561 struct obstack *sccstate_obstack)
562{
563 struct sccs *state;
564
ed29d77f 565 gcc_assert (p->same_p == -2);
bdaea387 566
567 state = XOBNEW (sccstate_obstack, struct sccs);
568 *pointer_map_insert (sccstate, p) = state;
569
f1f41a6c 570 sccstack->safe_push (p);
bdaea387 571 state->dfsnum = gtc_next_dfs_num++;
572 state->low = state->dfsnum;
573 state->on_sccstack = true;
574 /* Start with an equality assumption. As we DFS recurse into child
575 SCCs this assumption may get revisited. */
576 state->u.same_p = 1;
577
578 /* The struct tags shall compare equal. */
579 if (!compare_type_names_p (t1, t2))
580 goto different_types;
581
a5693828 582 /* The main variant of both types should compare equal. */
583 if (TYPE_MAIN_VARIANT (t1) != t1
584 || TYPE_MAIN_VARIANT (t2) != t2)
585 {
586 if (!gtc_visit (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2),
587 state, sccstack, sccstate, sccstate_obstack))
588 goto different_types;
589 }
590
bdaea387 591 /* We may not merge typedef types to the same type in different
592 contexts. */
593 if (TYPE_NAME (t1)
594 && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
595 && DECL_CONTEXT (TYPE_NAME (t1))
596 && TYPE_P (DECL_CONTEXT (TYPE_NAME (t1))))
597 {
598 if (!gtc_visit (DECL_CONTEXT (TYPE_NAME (t1)),
599 DECL_CONTEXT (TYPE_NAME (t2)),
600 state, sccstack, sccstate, sccstate_obstack))
601 goto different_types;
602 }
603
604 /* If their attributes are not the same they can't be the same type. */
605 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
606 goto different_types;
607
608 /* Do type-specific comparisons. */
609 switch (TREE_CODE (t1))
610 {
611 case VECTOR_TYPE:
612 case COMPLEX_TYPE:
613 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
614 state, sccstack, sccstate, sccstate_obstack))
615 goto different_types;
616 goto same_types;
617
618 case ARRAY_TYPE:
619 /* Array types are the same if the element types are the same and
 620 the number of elements is the same.  */
621 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
622 state, sccstack, sccstate, sccstate_obstack)
623 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
624 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
625 goto different_types;
626 else
627 {
628 tree i1 = TYPE_DOMAIN (t1);
629 tree i2 = TYPE_DOMAIN (t2);
630
631 /* For an incomplete external array, the type domain can be
632 NULL_TREE. Check this condition also. */
633 if (i1 == NULL_TREE && i2 == NULL_TREE)
634 goto same_types;
635 else if (i1 == NULL_TREE || i2 == NULL_TREE)
636 goto different_types;
637 else
638 {
639 tree min1 = TYPE_MIN_VALUE (i1);
640 tree min2 = TYPE_MIN_VALUE (i2);
641 tree max1 = TYPE_MAX_VALUE (i1);
642 tree max2 = TYPE_MAX_VALUE (i2);
643
644 /* The minimum/maximum values have to be the same. */
645 if ((min1 == min2
646 || (min1 && min2
647 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
648 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
649 || operand_equal_p (min1, min2, 0))))
650 && (max1 == max2
651 || (max1 && max2
652 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
653 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
654 || operand_equal_p (max1, max2, 0)))))
655 goto same_types;
656 else
657 goto different_types;
658 }
659 }
660
661 case METHOD_TYPE:
662 /* Method types should belong to the same class. */
663 if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
664 state, sccstack, sccstate, sccstate_obstack))
665 goto different_types;
666
667 /* Fallthru */
668
669 case FUNCTION_TYPE:
 670 /* Function types are the same if the return type and argument types
671 are the same. */
672 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
673 state, sccstack, sccstate, sccstate_obstack))
674 goto different_types;
675
676 if (!comp_type_attributes (t1, t2))
677 goto different_types;
678
679 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
680 goto same_types;
681 else
682 {
683 tree parms1, parms2;
684
685 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
686 parms1 && parms2;
687 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
688 {
689 if (!gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2),
690 state, sccstack, sccstate, sccstate_obstack))
691 goto different_types;
692 }
693
694 if (parms1 || parms2)
695 goto different_types;
696
697 goto same_types;
698 }
699
700 case OFFSET_TYPE:
701 {
702 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
703 state, sccstack, sccstate, sccstate_obstack)
704 || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
705 TYPE_OFFSET_BASETYPE (t2),
706 state, sccstack, sccstate, sccstate_obstack))
707 goto different_types;
708
709 goto same_types;
710 }
711
712 case POINTER_TYPE:
713 case REFERENCE_TYPE:
714 {
715 /* If the two pointers have different ref-all attributes,
716 they can't be the same type. */
717 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
718 goto different_types;
719
720 /* Otherwise, pointer and reference types are the same if the
721 pointed-to types are the same. */
722 if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
723 state, sccstack, sccstate, sccstate_obstack))
724 goto same_types;
725
726 goto different_types;
727 }
728
729 case INTEGER_TYPE:
730 case BOOLEAN_TYPE:
731 {
732 tree min1 = TYPE_MIN_VALUE (t1);
733 tree max1 = TYPE_MAX_VALUE (t1);
734 tree min2 = TYPE_MIN_VALUE (t2);
735 tree max2 = TYPE_MAX_VALUE (t2);
736 bool min_equal_p = false;
737 bool max_equal_p = false;
738
739 /* If either type has a minimum value, the other type must
740 have the same. */
741 if (min1 == NULL_TREE && min2 == NULL_TREE)
742 min_equal_p = true;
743 else if (min1 && min2 && operand_equal_p (min1, min2, 0))
744 min_equal_p = true;
745
746 /* Likewise, if either type has a maximum value, the other
747 type must have the same. */
748 if (max1 == NULL_TREE && max2 == NULL_TREE)
749 max_equal_p = true;
750 else if (max1 && max2 && operand_equal_p (max1, max2, 0))
751 max_equal_p = true;
752
753 if (!min_equal_p || !max_equal_p)
754 goto different_types;
755
756 goto same_types;
757 }
758
759 case ENUMERAL_TYPE:
760 {
761 /* FIXME lto, we cannot check bounds on enumeral types because
762 different front ends will produce different values.
763 In C, enumeral types are integers, while in C++ each element
764 will have its own symbolic value. We should decide how enums
765 are to be represented in GIMPLE and have each front end lower
766 to that. */
767 tree v1, v2;
768
769 /* For enumeral types, all the values must be the same. */
770 if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
771 goto same_types;
772
773 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
774 v1 && v2;
775 v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
776 {
777 tree c1 = TREE_VALUE (v1);
778 tree c2 = TREE_VALUE (v2);
779
780 if (TREE_CODE (c1) == CONST_DECL)
781 c1 = DECL_INITIAL (c1);
782
783 if (TREE_CODE (c2) == CONST_DECL)
784 c2 = DECL_INITIAL (c2);
785
786 if (tree_int_cst_equal (c1, c2) != 1)
787 goto different_types;
788
789 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
790 goto different_types;
791 }
792
793 /* If one enumeration has more values than the other, they
794 are not the same. */
795 if (v1 || v2)
796 goto different_types;
797
798 goto same_types;
799 }
800
801 case RECORD_TYPE:
802 case UNION_TYPE:
803 case QUAL_UNION_TYPE:
804 {
805 tree f1, f2;
806
807 /* For aggregate types, all the fields must be the same. */
808 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
809 f1 && f2;
810 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
811 {
812 /* Different field kinds are not compatible. */
813 if (TREE_CODE (f1) != TREE_CODE (f2))
814 goto different_types;
815 /* Field decls must have the same name and offset. */
816 if (TREE_CODE (f1) == FIELD_DECL
817 && (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
818 || !gimple_compare_field_offset (f1, f2)))
819 goto different_types;
820 /* All entities should have the same name and type. */
821 if (DECL_NAME (f1) != DECL_NAME (f2)
822 || !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2),
823 state, sccstack, sccstate, sccstate_obstack))
824 goto different_types;
825 }
826
827 /* If one aggregate has more fields than the other, they
828 are not the same. */
829 if (f1 || f2)
830 goto different_types;
831
832 goto same_types;
833 }
834
835 default:
836 gcc_unreachable ();
837 }
838
839 /* Common exit path for types that are not compatible. */
840different_types:
841 state->u.same_p = 0;
842 goto pop;
843
844 /* Common exit path for types that are compatible. */
845same_types:
846 gcc_assert (state->u.same_p == 1);
847
848pop:
849 if (state->low == state->dfsnum)
850 {
851 type_pair_t x;
852
853 /* Pop off the SCC and set its cache values to the final
854 comparison result. */
855 do
856 {
857 struct sccs *cstate;
f1f41a6c 858 x = sccstack->pop ();
bdaea387 859 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
860 cstate->on_sccstack = false;
ed29d77f 861 x->same_p = state->u.same_p;
bdaea387 862 }
863 while (x != p);
864 }
865
866 return state->u.same_p;
867}
868
869/* Return true iff T1 and T2 are structurally identical. When
870 FOR_MERGING_P is true the an incomplete type and a complete type
871 are considered different, otherwise they are considered compatible. */
872
873static bool
874gimple_types_compatible_p (tree t1, tree t2)
875{
f1f41a6c 876 vec<type_pair_t> sccstack = vec<type_pair_t>();
bdaea387 877 struct pointer_map_t *sccstate;
878 struct obstack sccstate_obstack;
879 type_pair_t p = NULL;
880 bool res;
881 tree leader1, leader2;
882
883 /* Before starting to set up the SCC machinery handle simple cases. */
884
885 /* Check first for the obvious case of pointer identity. */
886 if (t1 == t2)
887 return true;
888
889 /* Check that we have two types to compare. */
890 if (t1 == NULL_TREE || t2 == NULL_TREE)
891 return false;
892
893 /* Can't be the same type if the types don't have the same code. */
894 if (TREE_CODE (t1) != TREE_CODE (t2))
895 return false;
896
897 /* Can't be the same type if they have different CV qualifiers. */
898 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
899 return false;
900
901 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
902 return false;
903
904 /* Void types and nullptr types are always the same. */
905 if (TREE_CODE (t1) == VOID_TYPE
906 || TREE_CODE (t1) == NULLPTR_TYPE)
907 return true;
908
909 /* Can't be the same type if they have different alignment or mode. */
910 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
911 || TYPE_MODE (t1) != TYPE_MODE (t2))
912 return false;
913
914 /* Do some simple checks before doing three hashtable queries. */
915 if (INTEGRAL_TYPE_P (t1)
916 || SCALAR_FLOAT_TYPE_P (t1)
917 || FIXED_POINT_TYPE_P (t1)
918 || TREE_CODE (t1) == VECTOR_TYPE
919 || TREE_CODE (t1) == COMPLEX_TYPE
920 || TREE_CODE (t1) == OFFSET_TYPE
921 || POINTER_TYPE_P (t1))
922 {
923 /* Can't be the same type if they have different sign or precision. */
924 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
925 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
926 return false;
927
928 if (TREE_CODE (t1) == INTEGER_TYPE
929 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
930 return false;
931
932 /* That's all we need to check for float and fixed-point types. */
933 if (SCALAR_FLOAT_TYPE_P (t1)
934 || FIXED_POINT_TYPE_P (t1))
935 return true;
936
937 /* For other types fall through to more complex checks. */
938 }
939
940 /* If the types have been previously registered and found equal
941 they still are. */
942 leader1 = gimple_lookup_type_leader (t1);
943 leader2 = gimple_lookup_type_leader (t2);
944 if (leader1 == t2
945 || t1 == leader2
946 || (leader1 && leader1 == leader2))
947 return true;
948
949 /* If the hash values of t1 and t2 are different the types can't
 950 possibly be the same.  This helps keep the type-pair cache
951 small, only tracking comparisons for hash collisions. */
952 if (gimple_type_hash (t1) != gimple_type_hash (t2))
953 return false;
954
955 /* If we've visited this type pair before (in the case of aggregates
956 with self-referential types), and we made a decision, return it. */
957 p = lookup_type_pair (t1, t2);
ed29d77f 958 if (p->same_p == 0 || p->same_p == 1)
bdaea387 959 {
960 /* We have already decided whether T1 and T2 are the
961 same, return the cached result. */
ed29d77f 962 return p->same_p == 1;
bdaea387 963 }
964
965 /* Now set up the SCC machinery for the comparison. */
966 gtc_next_dfs_num = 1;
967 sccstate = pointer_map_create ();
968 gcc_obstack_init (&sccstate_obstack);
969 res = gimple_types_compatible_p_1 (t1, t2, p,
970 &sccstack, sccstate, &sccstate_obstack);
f1f41a6c 971 sccstack.release ();
bdaea387 972 pointer_map_destroy (sccstate);
973 obstack_free (&sccstate_obstack, NULL);
974
975 return res;
976}
977
978static hashval_t
f1f41a6c 979iterative_hash_gimple_type (tree, hashval_t, vec<tree> *,
bdaea387 980 struct pointer_map_t *, struct obstack *);
981
982/* DFS visit the edge from the callers type with state *STATE to T.
983 Update the callers type hash V with the hash for T if it is not part
984 of the SCC containing the callers type and return it.
985 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
986
987static hashval_t
988visit (tree t, struct sccs *state, hashval_t v,
f1f41a6c 989 vec<tree> *sccstack,
bdaea387 990 struct pointer_map_t *sccstate,
991 struct obstack *sccstate_obstack)
992{
993 struct sccs *cstate = NULL;
994 struct tree_int_map m;
995 void **slot;
996
997 /* If there is a hash value recorded for this type then it can't
998 possibly be part of our parent SCC. Simply mix in its hash. */
999 m.base.from = t;
1000 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
1001 && *slot)
1002 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);
1003
1004 if ((slot = pointer_map_contains (sccstate, t)) != NULL)
1005 cstate = (struct sccs *)*slot;
1006 if (!cstate)
1007 {
1008 hashval_t tem;
1009 /* Not yet visited. DFS recurse. */
1010 tem = iterative_hash_gimple_type (t, v,
1011 sccstack, sccstate, sccstate_obstack);
1012 if (!cstate)
1013 cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
1014 state->low = MIN (state->low, cstate->low);
1015 /* If the type is no longer on the SCC stack and thus is not part
 1016 of the parent's SCC, mix in its hash value.  Otherwise we will
1017 ignore the type for hashing purposes and return the unaltered
1018 hash value. */
1019 if (!cstate->on_sccstack)
1020 return tem;
1021 }
1022 if (cstate->dfsnum < state->dfsnum
1023 && cstate->on_sccstack)
1024 state->low = MIN (cstate->dfsnum, state->low);
1025
 1026 /* We are part of our parent's SCC, skip this type during hashing
1027 and return the unaltered hash value. */
1028 return v;
1029}
1030
bdaea387 1031/* Hash NAME with the previous hash value V and return it. */
1032
1033static hashval_t
1034iterative_hash_name (tree name, hashval_t v)
1035{
1036 if (!name)
1037 return v;
1038 v = iterative_hash_hashval_t (TREE_CODE (name), v);
1039 if (TREE_CODE (name) == TYPE_DECL)
1040 name = DECL_NAME (name);
1041 if (!name)
1042 return v;
1043 gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
1044 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
1045}
1046
1047/* A type, hashvalue pair for sorting SCC members. */
1048
1049struct type_hash_pair {
1050 tree type;
1051 hashval_t hash;
1052};
1053
1054/* Compare two type, hashvalue pairs. */
1055
1056static int
1057type_hash_pair_compare (const void *p1_, const void *p2_)
1058{
1059 const struct type_hash_pair *p1 = (const struct type_hash_pair *) p1_;
1060 const struct type_hash_pair *p2 = (const struct type_hash_pair *) p2_;
1061 if (p1->hash < p2->hash)
1062 return -1;
1063 else if (p1->hash > p2->hash)
1064 return 1;
1065 return 0;
1066}
1067
1068/* Returning a hash value for gimple type TYPE combined with VAL.
1069 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.
1070
1071 To hash a type we end up hashing in types that are reachable.
 1072 Through pointers we can end up with cycles, which mess up the
1073 required property that we need to compute the same hash value
1074 for structurally equivalent types. To avoid this we have to
1075 hash all types in a cycle (the SCC) in a commutative way. The
1076 easiest way is to not mix in the hashes of the SCC members at
1077 all. To make this work we have to delay setting the hash
1078 values of the SCC until it is complete. */
1079
1080static hashval_t
1081iterative_hash_gimple_type (tree type, hashval_t val,
f1f41a6c 1082 vec<tree> *sccstack,
bdaea387 1083 struct pointer_map_t *sccstate,
1084 struct obstack *sccstate_obstack)
1085{
1086 hashval_t v;
1087 void **slot;
1088 struct sccs *state;
1089
1090 /* Not visited during this DFS walk. */
1091 gcc_checking_assert (!pointer_map_contains (sccstate, type));
1092 state = XOBNEW (sccstate_obstack, struct sccs);
1093 *pointer_map_insert (sccstate, type) = state;
1094
f1f41a6c 1095 sccstack->safe_push (type);
bdaea387 1096 state->dfsnum = next_dfs_num++;
1097 state->low = state->dfsnum;
1098 state->on_sccstack = true;
1099
1100 /* Combine a few common features of types so that types are grouped into
1101 smaller sets; when searching for existing matching types to merge,
1102 only existing types having the same features as the new type will be
1103 checked. */
1104 v = iterative_hash_name (TYPE_NAME (type), 0);
1105 if (TYPE_NAME (type)
1106 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1107 && DECL_CONTEXT (TYPE_NAME (type))
1108 && TYPE_P (DECL_CONTEXT (TYPE_NAME (type))))
1109 v = visit (DECL_CONTEXT (TYPE_NAME (type)), state, v,
1110 sccstack, sccstate, sccstate_obstack);
a5693828 1111
1112 /* Factor in the variant structure. */
1113 if (TYPE_MAIN_VARIANT (type) != type)
1114 v = visit (TYPE_MAIN_VARIANT (type), state, v,
1115 sccstack, sccstate, sccstate_obstack);
1116
bdaea387 1117 v = iterative_hash_hashval_t (TREE_CODE (type), v);
1118 v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
1119 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
1120
 1121 /* Do not hash the type's size as this will cause differences in
1122 hash values for the complete vs. the incomplete type variant. */
1123
1124 /* Incorporate common features of numerical types. */
1125 if (INTEGRAL_TYPE_P (type)
1126 || SCALAR_FLOAT_TYPE_P (type)
1127 || FIXED_POINT_TYPE_P (type))
1128 {
1129 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
1130 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
1131 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
1132 }
1133
1134 /* For pointer and reference types, fold in information about the type
1135 pointed to. */
1136 if (POINTER_TYPE_P (type))
1137 v = visit (TREE_TYPE (type), state, v,
1138 sccstack, sccstate, sccstate_obstack);
1139
 1140 /* For integer types hash the type's min/max values and the string flag. */
1141 if (TREE_CODE (type) == INTEGER_TYPE)
1142 {
1143 /* OMP lowering can introduce error_mark_node in place of
1144 random local decls in types. */
1145 if (TYPE_MIN_VALUE (type) != error_mark_node)
1146 v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
1147 if (TYPE_MAX_VALUE (type) != error_mark_node)
1148 v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
1149 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
1150 }
1151
1152 /* For array types hash the domain and the string flag. */
1153 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
1154 {
1155 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
1156 v = visit (TYPE_DOMAIN (type), state, v,
1157 sccstack, sccstate, sccstate_obstack);
1158 }
1159
1160 /* Recurse for aggregates with a single element type. */
1161 if (TREE_CODE (type) == ARRAY_TYPE
1162 || TREE_CODE (type) == COMPLEX_TYPE
1163 || TREE_CODE (type) == VECTOR_TYPE)
1164 v = visit (TREE_TYPE (type), state, v,
1165 sccstack, sccstate, sccstate_obstack);
1166
1167 /* Incorporate function return and argument types. */
1168 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
1169 {
1170 unsigned na;
1171 tree p;
1172
1173 /* For method types also incorporate their parent class. */
1174 if (TREE_CODE (type) == METHOD_TYPE)
1175 v = visit (TYPE_METHOD_BASETYPE (type), state, v,
1176 sccstack, sccstate, sccstate_obstack);
1177
1178 /* Check result and argument types. */
1179 v = visit (TREE_TYPE (type), state, v,
1180 sccstack, sccstate, sccstate_obstack);
1181 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
1182 {
1183 v = visit (TREE_VALUE (p), state, v,
1184 sccstack, sccstate, sccstate_obstack);
1185 na++;
1186 }
1187
1188 v = iterative_hash_hashval_t (na, v);
1189 }
1190
1191 if (RECORD_OR_UNION_TYPE_P (type))
1192 {
1193 unsigned nf;
1194 tree f;
1195
1196 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
1197 {
1198 v = iterative_hash_name (DECL_NAME (f), v);
1199 v = visit (TREE_TYPE (f), state, v,
1200 sccstack, sccstate, sccstate_obstack);
1201 nf++;
1202 }
1203
1204 v = iterative_hash_hashval_t (nf, v);
1205 }
1206
1207 /* Record hash for us. */
1208 state->u.hash = v;
1209
1210 /* See if we found an SCC. */
1211 if (state->low == state->dfsnum)
1212 {
1213 tree x;
1214 struct tree_int_map *m;
1215
1216 /* Pop off the SCC and set its hash values. */
f1f41a6c 1217 x = sccstack->pop ();
bdaea387 1218 /* Optimize SCC size one. */
1219 if (x == type)
1220 {
1221 state->on_sccstack = false;
1222 m = ggc_alloc_cleared_tree_int_map ();
1223 m->base.from = x;
1224 m->to = v;
1225 slot = htab_find_slot (type_hash_cache, m, INSERT);
1226 gcc_assert (!*slot);
1227 *slot = (void *) m;
1228 }
1229 else
1230 {
1231 struct sccs *cstate;
1232 unsigned first, i, size, j;
1233 struct type_hash_pair *pairs;
1234 /* Pop off the SCC and build an array of type, hash pairs. */
f1f41a6c 1235 first = sccstack->length () - 1;
1236 while ((*sccstack)[first] != type)
bdaea387 1237 --first;
f1f41a6c 1238 size = sccstack->length () - first + 1;
bdaea387 1239 pairs = XALLOCAVEC (struct type_hash_pair, size);
1240 i = 0;
1241 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
1242 cstate->on_sccstack = false;
1243 pairs[i].type = x;
1244 pairs[i].hash = cstate->u.hash;
1245 do
1246 {
f1f41a6c 1247 x = sccstack->pop ();
bdaea387 1248 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
1249 cstate->on_sccstack = false;
1250 ++i;
1251 pairs[i].type = x;
1252 pairs[i].hash = cstate->u.hash;
1253 }
1254 while (x != type);
1255 gcc_assert (i + 1 == size);
 1256 /* Sort the array of type, hash pairs so that when we mix in
 1257 all members of the SCC the hash value becomes independent of
1258 the order we visited the SCC. Disregard hashes equal to
1259 the hash of the type we mix into because we cannot guarantee
1260 a stable sort for those across different TUs. */
1261 qsort (pairs, size, sizeof (struct type_hash_pair),
1262 type_hash_pair_compare);
1263 for (i = 0; i < size; ++i)
1264 {
1265 hashval_t hash;
1266 m = ggc_alloc_cleared_tree_int_map ();
1267 m->base.from = pairs[i].type;
1268 hash = pairs[i].hash;
1269 /* Skip same hashes. */
1270 for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j)
1271 ;
1272 for (; j < size; ++j)
1273 hash = iterative_hash_hashval_t (pairs[j].hash, hash);
1274 for (j = 0; pairs[j].hash != pairs[i].hash; ++j)
1275 hash = iterative_hash_hashval_t (pairs[j].hash, hash);
1276 m->to = hash;
1277 if (pairs[i].type == type)
1278 v = hash;
1279 slot = htab_find_slot (type_hash_cache, m, INSERT);
1280 gcc_assert (!*slot);
1281 *slot = (void *) m;
1282 }
1283 }
1284 }
1285
1286 return iterative_hash_hashval_t (v, val);
1287}
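/* Worked example for the SCC case above (not from the original sources):
   for an SCC {A, B} whose members hash to hA < hB after the qsort, A's
   recorded hash is iterative_hash_hashval_t (hB, hA) and B's is
   iterative_hash_hashval_t (hA, hB).  In general each member starts from
   its own hash and mixes in, in sorted order, the hashes of all members
   whose hash differs from its own, so the result depends only on the
   multiset of member hashes and not on the order in which the DFS
   entered the SCC.  */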
1288
1289/* Returns a hash value for P (assumed to be a type). The hash value
1290 is computed using some distinguishing features of the type. Note
1291 that we cannot use pointer hashing here as we may be dealing with
1292 two distinct instances of the same type.
1293
1294 This function should produce the same hash value for two compatible
1295 types according to gimple_types_compatible_p. */
1296
1297static hashval_t
1298gimple_type_hash (const void *p)
1299{
1300 const_tree t = (const_tree) p;
f1f41a6c 1301 vec<tree> sccstack = vec<tree>();
bdaea387 1302 struct pointer_map_t *sccstate;
1303 struct obstack sccstate_obstack;
1304 hashval_t val;
1305 void **slot;
1306 struct tree_int_map m;
1307
bdaea387 1308 m.base.from = CONST_CAST_TREE (t);
1309 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
1310 && *slot)
1311 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);
1312
1313 /* Perform a DFS walk and pre-hash all reachable types. */
1314 next_dfs_num = 1;
1315 sccstate = pointer_map_create ();
1316 gcc_obstack_init (&sccstate_obstack);
1317 val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
1318 &sccstack, sccstate, &sccstate_obstack);
f1f41a6c 1319 sccstack.release ();
bdaea387 1320 pointer_map_destroy (sccstate);
1321 obstack_free (&sccstate_obstack, NULL);
1322
1323 return val;
1324}
ed29d77f 1325
bdaea387 1326/* Returns nonzero if P1 and P2 are equal. */
1327
1328static int
1329gimple_type_eq (const void *p1, const void *p2)
1330{
1331 const_tree t1 = (const_tree) p1;
1332 const_tree t2 = (const_tree) p2;
1333 return gimple_types_compatible_p (CONST_CAST_TREE (t1),
1334 CONST_CAST_TREE (t2));
1335}
1336
1337
1338/* Worker for gimple_register_type.
1339 Register type T in the global type table gimple_types.
1340 When REGISTERING_MV is false first recurse for the main variant of T. */
1341
1342static tree
1343gimple_register_type_1 (tree t, bool registering_mv)
1344{
1345 void **slot;
1346 gimple_type_leader_entry *leader;
1347
1348 /* If we registered this type before return the cached result. */
1349 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
1350 if (leader->type == t)
1351 return leader->leader;
1352
1353 /* Always register the main variant first. This is important so we
1354 pick up the non-typedef variants as canonical, otherwise we'll end
1355 up taking typedef ids for structure tags during comparison.
1356 It also makes sure that main variants will be merged to main variants.
1357 As we are operating on a possibly partially fixed up type graph
1358 do not bother to recurse more than once, otherwise we may end up
1359 walking in circles.
1360 If we are registering a main variant it will either remain its
1361 own main variant or it will be merged to something else in which
1362 case we do not care for the main variant leader. */
1363 if (!registering_mv
1364 && TYPE_MAIN_VARIANT (t) != t)
1365 gimple_register_type_1 (TYPE_MAIN_VARIANT (t), true);
1366
1367 /* See if we already have an equivalent type registered. */
1368 slot = htab_find_slot (gimple_types, t, INSERT);
1369 if (*slot
1370 && *(tree *)slot != t)
1371 {
1372 tree new_type = (tree) *((tree *) slot);
1373 leader->type = t;
1374 leader->leader = new_type;
1375 return new_type;
1376 }
1377
1378 /* If not, insert it to the cache and the hash. */
1379 leader->type = t;
1380 leader->leader = t;
1381 *slot = (void *) t;
1382 return t;
1383}
1384
1385/* Register type T in the global type table gimple_types.
1386 If another type T', compatible with T, already existed in
1387 gimple_types then return T', otherwise return T. This is used by
1388 LTO to merge identical types read from different TUs. */
1389
1390static tree
1391gimple_register_type (tree t)
1392{
1393 gcc_assert (TYPE_P (t));
bdaea387 1394 return gimple_register_type_1 (t, false);
1395}
1396
1397#define GIMPLE_REGISTER_TYPE(tt) \
1398 (TREE_VISITED (tt) ? gimple_register_type (tt) : tt)
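/* Illustrative note, mirroring uniquify_nodes below: TREE_VISITED is used
   here as a cheap "this type lost type merging" flag.  It is set when
   registration returns a different, prevailing type:

     tree newt = gimple_register_type (t);
     if (newt != t)
       TREE_VISITED (t) = 1;

   so GIMPLE_REGISTER_TYPE only pays for the leader lookup on types that
   are known to need replacing.  */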
1399
1400
1401
d534cab0 1402/* A hashtable of trees that potentially refer to variables or functions
1403 that must be replaced with their prevailing variant. */
1404static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node))) htab_t
1405 tree_with_vars;
1406
1407/* Remember that T is a tree that (potentially) refers to a variable
1408 or function decl that may be replaced with its prevailing variant. */
1409static void
1410remember_with_vars (tree t)
1411{
1412 *(tree *) htab_find_slot (tree_with_vars, t, INSERT) = t;
1413}
1414
1415#define LTO_FIXUP_TREE(tt) \
1416 do \
1417 { \
1418 if (tt) \
1419 { \
1420 if (TYPE_P (tt)) \
a7a11a02 1421 (tt) = GIMPLE_REGISTER_TYPE (tt); \
d534cab0 1422 if (VAR_OR_FUNCTION_DECL_P (tt) && TREE_PUBLIC (tt)) \
1423 remember_with_vars (t); \
35d3a6eb 1424 if (TREE_CODE (tt) == INTEGER_CST) \
1425 (tt) = fixup_integer_cst (tt); \
d534cab0 1426 } \
1427 } while (0)
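/* Illustrative note, not from the original sources: the lto_ft_* routines
   below simply apply LTO_FIXUP_TREE to every pointer field of a tree, e.g.
   for a tree_common node

     LTO_FIXUP_TREE (TREE_TYPE (t));
     LTO_FIXUP_TREE (TREE_CHAIN (t));

   which replaces types with their prevailing variant, queues trees that
   mention public variables or functions for later decl merging, and
   rebuilds INTEGER_CSTs whose type was replaced.  */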
1428
1429static void lto_fixup_types (tree);
1430
35d3a6eb 1431/* Return integer_cst T with updated type. */
1432
1433static tree
1434fixup_integer_cst (tree t)
1435{
1436 tree type = GIMPLE_REGISTER_TYPE (TREE_TYPE (t));
1437
1438 if (type == TREE_TYPE (t))
1439 return t;
1440
1441 /* If overflow was set, streamer_read_integer_cst
1442 produced local copy of T. */
1443 if (TREE_OVERFLOW (t))
1444 {
1445 TREE_TYPE (t) = type;
1446 return t;
1447 }
1448 else
1449 /* Otherwise produce new shared node for the new type. */
1450 return build_int_cst_wide (type, TREE_INT_CST_LOW (t),
1451 TREE_INT_CST_HIGH (t));
1452}
1453
1e184c62 1454/* Fix up fields of a tree_typed T. */
d534cab0 1455
1456static void
1e184c62 1457lto_ft_typed (tree t)
d534cab0 1458{
d534cab0 1459 LTO_FIXUP_TREE (TREE_TYPE (t));
1e184c62 1460}
1461
1462/* Fix up fields of a tree_common T. */
d534cab0 1463
1e184c62 1464static void
1465lto_ft_common (tree t)
1466{
1467 lto_ft_typed (t);
d534cab0 1468 LTO_FIXUP_TREE (TREE_CHAIN (t));
1469}
1470
1471/* Fix up fields of a decl_minimal T. */
1472
1473static void
1474lto_ft_decl_minimal (tree t)
1475{
1476 lto_ft_common (t);
1477 LTO_FIXUP_TREE (DECL_NAME (t));
1478 LTO_FIXUP_TREE (DECL_CONTEXT (t));
1479}
1480
1481/* Fix up fields of a decl_common T. */
1482
1483static void
1484lto_ft_decl_common (tree t)
1485{
1486 lto_ft_decl_minimal (t);
1487 LTO_FIXUP_TREE (DECL_SIZE (t));
1488 LTO_FIXUP_TREE (DECL_SIZE_UNIT (t));
1489 LTO_FIXUP_TREE (DECL_INITIAL (t));
1490 LTO_FIXUP_TREE (DECL_ATTRIBUTES (t));
1491 LTO_FIXUP_TREE (DECL_ABSTRACT_ORIGIN (t));
1492}
1493
1494/* Fix up fields of a decl_with_vis T. */
1495
1496static void
1497lto_ft_decl_with_vis (tree t)
1498{
1499 lto_ft_decl_common (t);
1500
1501 /* Accessor macro has side-effects, use field-name here. */
1502 LTO_FIXUP_TREE (t->decl_with_vis.assembler_name);
1503 LTO_FIXUP_TREE (DECL_SECTION_NAME (t));
1504}
1505
1506/* Fix up fields of a decl_non_common T. */
1507
1508static void
1509lto_ft_decl_non_common (tree t)
1510{
1511 lto_ft_decl_with_vis (t);
1512 LTO_FIXUP_TREE (DECL_ARGUMENT_FLD (t));
1513 LTO_FIXUP_TREE (DECL_RESULT_FLD (t));
1514 LTO_FIXUP_TREE (DECL_VINDEX (t));
6cd6787c 1515 /* The C frontends may create exact duplicates for DECL_ORIGINAL_TYPE
 1516 like for 'typedef enum foo foo'.  We have no way to avoid
 1517 merging them, and dwarf2out.c cannot deal with this,
1518 so fix this up by clearing DECL_ORIGINAL_TYPE in this case. */
1519 if (TREE_CODE (t) == TYPE_DECL
1520 && DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
1521 DECL_ORIGINAL_TYPE (t) = NULL_TREE;
d534cab0 1522}
1523
 1524/* Fix up fields of a FUNCTION_DECL T. */
1525
1526static void
1527lto_ft_function (tree t)
1528{
1529 lto_ft_decl_non_common (t);
1530 LTO_FIXUP_TREE (DECL_FUNCTION_PERSONALITY (t));
1531}
1532
1533/* Fix up fields of a field_decl T. */
1534
1535static void
1536lto_ft_field_decl (tree t)
1537{
1538 lto_ft_decl_common (t);
1539 LTO_FIXUP_TREE (DECL_FIELD_OFFSET (t));
1540 LTO_FIXUP_TREE (DECL_BIT_FIELD_TYPE (t));
1541 LTO_FIXUP_TREE (DECL_QUALIFIER (t));
1542 LTO_FIXUP_TREE (DECL_FIELD_BIT_OFFSET (t));
1543 LTO_FIXUP_TREE (DECL_FCONTEXT (t));
1544}
1545
1546/* Fix up fields of a type T. */
1547
1548static void
1549lto_ft_type (tree t)
1550{
d534cab0 1551 lto_ft_common (t);
1552 LTO_FIXUP_TREE (TYPE_CACHED_VALUES (t));
1553 LTO_FIXUP_TREE (TYPE_SIZE (t));
1554 LTO_FIXUP_TREE (TYPE_SIZE_UNIT (t));
1555 LTO_FIXUP_TREE (TYPE_ATTRIBUTES (t));
1556 LTO_FIXUP_TREE (TYPE_NAME (t));
1557
1558 /* Accessors are for derived node types only. */
1559 if (!POINTER_TYPE_P (t))
8f2eb9e1 1560 LTO_FIXUP_TREE (TYPE_MINVAL (t));
1561 LTO_FIXUP_TREE (TYPE_MAXVAL (t));
d534cab0 1562
1563 /* Accessor is for derived node types only. */
8f2eb9e1 1564 LTO_FIXUP_TREE (t->type_non_common.binfo);
d534cab0 1565
1566 LTO_FIXUP_TREE (TYPE_CONTEXT (t));
d534cab0 1567}
1568
1569/* Fix up fields of a BINFO T. */
1570
1571static void
1572lto_ft_binfo (tree t)
1573{
1574 unsigned HOST_WIDE_INT i, n;
1575 tree base, saved_base;
1576
1577 lto_ft_common (t);
1578 LTO_FIXUP_TREE (BINFO_VTABLE (t));
1579 LTO_FIXUP_TREE (BINFO_OFFSET (t));
1580 LTO_FIXUP_TREE (BINFO_VIRTUALS (t));
1581 LTO_FIXUP_TREE (BINFO_VPTR_FIELD (t));
f1f41a6c 1582 n = vec_safe_length (BINFO_BASE_ACCESSES (t));
d534cab0 1583 for (i = 0; i < n; i++)
1584 {
1585 saved_base = base = BINFO_BASE_ACCESS (t, i);
1586 LTO_FIXUP_TREE (base);
1587 if (base != saved_base)
f1f41a6c 1588 (*BINFO_BASE_ACCESSES (t))[i] = base;
d534cab0 1589 }
1590 LTO_FIXUP_TREE (BINFO_INHERITANCE_CHAIN (t));
1591 LTO_FIXUP_TREE (BINFO_SUBVTT_INDEX (t));
1592 LTO_FIXUP_TREE (BINFO_VPTR_INDEX (t));
1593 n = BINFO_N_BASE_BINFOS (t);
1594 for (i = 0; i < n; i++)
1595 {
1596 saved_base = base = BINFO_BASE_BINFO (t, i);
1597 LTO_FIXUP_TREE (base);
1598 if (base != saved_base)
f1f41a6c 1599 (*BINFO_BASE_BINFOS (t))[i] = base;
d534cab0 1600 }
1601}
1602
1603/* Fix up fields of a CONSTRUCTOR T. */
1604
1605static void
1606lto_ft_constructor (tree t)
1607{
1608 unsigned HOST_WIDE_INT idx;
1609 constructor_elt *ce;
1610
1e184c62 1611 lto_ft_typed (t);
d534cab0 1612
f1f41a6c 1613 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
d534cab0 1614 {
1615 LTO_FIXUP_TREE (ce->index);
1616 LTO_FIXUP_TREE (ce->value);
1617 }
1618}
1619
1620/* Fix up fields of an expression tree T. */
1621
1622static void
1623lto_ft_expr (tree t)
1624{
1625 int i;
1e184c62 1626 lto_ft_typed (t);
d534cab0 1627 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
1628 LTO_FIXUP_TREE (TREE_OPERAND (t, i));
1629}
1630
 1631/* Given a tree T, fix up its fields by replacing types with their merged
 1632 variant and other entities with an equal entity from an earlier compilation
 1633 unit, or with an entity that is canonical in a different way.  This includes
1634 for instance integer or string constants. */
1635
1636static void
1637lto_fixup_types (tree t)
1638{
1639 switch (TREE_CODE (t))
1640 {
1641 case IDENTIFIER_NODE:
1642 break;
1643
1644 case TREE_LIST:
1645 LTO_FIXUP_TREE (TREE_VALUE (t));
1646 LTO_FIXUP_TREE (TREE_PURPOSE (t));
1647 LTO_FIXUP_TREE (TREE_CHAIN (t));
1648 break;
1649
1650 case FIELD_DECL:
1651 lto_ft_field_decl (t);
1652 break;
1653
1654 case LABEL_DECL:
1655 case CONST_DECL:
1656 case PARM_DECL:
1657 case RESULT_DECL:
1658 case IMPORTED_DECL:
1659 lto_ft_decl_common (t);
1660 break;
1661
1662 case VAR_DECL:
1663 lto_ft_decl_with_vis (t);
1664 break;
1665
1666 case TYPE_DECL:
1667 lto_ft_decl_non_common (t);
1668 break;
1669
1670 case FUNCTION_DECL:
1671 lto_ft_function (t);
1672 break;
1673
1674 case TREE_BINFO:
1675 lto_ft_binfo (t);
1676 break;
1677
1678 case PLACEHOLDER_EXPR:
1679 lto_ft_common (t);
1680 break;
1681
1682 case BLOCK:
1683 case TRANSLATION_UNIT_DECL:
1684 case OPTIMIZATION_NODE:
1685 case TARGET_OPTION_NODE:
1686 break;
1687
1688 default:
1689 if (TYPE_P (t))
1690 lto_ft_type (t);
1691 else if (TREE_CODE (t) == CONSTRUCTOR)
1692 lto_ft_constructor (t);
1693 else if (CONSTANT_CLASS_P (t))
1694 LTO_FIXUP_TREE (TREE_TYPE (t));
1695 else if (EXPR_P (t))
1696 {
1697 lto_ft_expr (t);
1698 }
1699 else
1700 {
1701 remember_with_vars (t);
1702 }
1703 }
1704}
1705
f05c9dfb 1706
1707/* Return the resolution for the decl with index INDEX from DATA_IN. */
1708
1709static enum ld_plugin_symbol_resolution
1710get_resolution (struct data_in *data_in, unsigned index)
1711{
f1f41a6c 1712 if (data_in->globals_resolution.exists ())
f05c9dfb 1713 {
1714 ld_plugin_symbol_resolution_t ret;
 1715 /* We can have references to functions that were not emitted, in
 1716 DECL_FUNCTION_PERSONALITY at least.  So we can, and indeed have
 1717 to, return LDPR_UNKNOWN in some cases.  */
f1f41a6c 1718 if (data_in->globals_resolution.length () <= index)
f05c9dfb 1719 return LDPR_UNKNOWN;
f1f41a6c 1720 ret = data_in->globals_resolution[index];
f05c9dfb 1721 return ret;
1722 }
1723 else
1724 /* Delay resolution finding until decl merging. */
1725 return LDPR_UNKNOWN;
1726}
1727
18ff06f9 1728/* Map assigning declarations their resolutions. */
1729static pointer_map_t *resolution_map;
1730
1731/* We need to record resolutions until symbol table is read. */
1732static void
1733register_resolution (tree decl, enum ld_plugin_symbol_resolution resolution)
1734{
1735 if (resolution == LDPR_UNKNOWN)
1736 return;
1737 if (!resolution_map)
1738 resolution_map = pointer_map_create ();
1739 *pointer_map_insert (resolution_map, decl) = (void *)(size_t)resolution;
1740}
f05c9dfb 1741
1742/* Register DECL with the global symbol table and change its
1743 name if necessary to avoid name clashes for static globals across
1744 different files. */
1745
1746static void
1747lto_register_var_decl_in_symtab (struct data_in *data_in, tree decl)
1748{
1749 tree context;
1750
1751 /* Variable has file scope, not local. Need to ensure static variables
1752 between different files don't clash unexpectedly. */
1753 if (!TREE_PUBLIC (decl)
1754 && !((context = decl_function_context (decl))
1755 && auto_var_in_fn_p (decl, context)))
1756 {
1757 /* ??? We normally pre-mangle names before we serialize them
1758 out. Here, in lto1, we do not know the language, and
1759 thus cannot do the mangling again. Instead, we just
1760 append a suffix to the mangled name. The resulting name,
1761 however, is not a properly-formed mangled name, and will
1762 confuse any attempt to unmangle it. */
1763 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
1764 char *label;
1765
1766 ASM_FORMAT_PRIVATE_NAME (label, name, DECL_UID (decl));
1767 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (label));
1768 rest_of_decl_compilation (decl, 1, 0);
f1f41a6c 1769 vec_safe_push (lto_global_var_decls, decl);
f05c9dfb 1770 }
1771
1772 /* If this variable has already been declared, queue the
1773 declaration for merging. */
1774 if (TREE_PUBLIC (decl))
1775 {
1776 unsigned ix;
7f385784 1777 if (!streamer_tree_cache_lookup (data_in->reader_cache, decl, &ix))
f05c9dfb 1778 gcc_unreachable ();
18ff06f9 1779 register_resolution (decl, get_resolution (data_in, ix));
f05c9dfb 1780 }
1781}
1782
1783
1784/* Register DECL with the global symbol table and change its
1785 name if necessary to avoid name clashes for static globals across
1786 different files. DATA_IN contains descriptors and tables for the
1787 file being read. */
1788
1789static void
1790lto_register_function_decl_in_symtab (struct data_in *data_in, tree decl)
1791{
1792 /* Need to ensure static entities between different files
1793 don't clash unexpectedly. */
1794 if (!TREE_PUBLIC (decl))
1795 {
1796 /* We must not use the DECL_ASSEMBLER_NAME macro here, as it
1797 may set the assembler name where it was previously empty. */
1798 tree old_assembler_name = decl->decl_with_vis.assembler_name;
1799
1800 /* FIXME lto: We normally pre-mangle names before we serialize
1801 them out. Here, in lto1, we do not know the language, and
1802 thus cannot do the mangling again. Instead, we just append a
1803 suffix to the mangled name. The resulting name, however, is
1804 not a properly-formed mangled name, and will confuse any
1805 attempt to unmangle it. */
1806 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
1807 char *label;
1808
1809 ASM_FORMAT_PRIVATE_NAME (label, name, DECL_UID (decl));
1810 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (label));
1811
1812 /* We may arrive here with the old assembler name not set
1813 if the function body is not needed, e.g., it has been
1814 inlined away and does not appear in the cgraph. */
1815 if (old_assembler_name)
1816 {
1817 tree new_assembler_name = DECL_ASSEMBLER_NAME (decl);
1818
1819 /* Make the original assembler name available for later use.
1820 We may have used it to indicate the section within its
1821 object file where the function body may be found.
1822 FIXME lto: Find a better way to maintain the function decl
1823 to body section mapping so we don't need this hack. */
1824 lto_record_renamed_decl (data_in->file_data,
1825 IDENTIFIER_POINTER (old_assembler_name),
1826 IDENTIFIER_POINTER (new_assembler_name));
f05c9dfb 1827 }
1828 }
1829
1830  /* If this function has already been declared, queue the
1831 declaration for merging. */
1832 if (TREE_PUBLIC (decl) && !DECL_ABSTRACT (decl))
1833 {
1834 unsigned ix;
7f385784 1835 if (!streamer_tree_cache_lookup (data_in->reader_cache, decl, &ix))
f05c9dfb 1836 gcc_unreachable ();
18ff06f9 1837 register_resolution (decl, get_resolution (data_in, ix));
f05c9dfb 1838 }
1839}
1840
1841
d534cab0 1842/* Given a streamer cache structure DATA_IN (holding a sequence of trees
1843   for one compilation unit), go over all trees starting at index FROM until the
1844   end of the sequence and replace the fields of those trees, and the trees
1845   themselves, with their canonical variants as per gimple_register_type.  */
1846
1847static void
1848uniquify_nodes (struct data_in *data_in, unsigned from)
1849{
7f385784 1850 struct streamer_tree_cache_d *cache = data_in->reader_cache;
f1f41a6c 1851 unsigned len = cache->nodes.length ();
d534cab0 1852 unsigned i;
bddb3763 1853
f05c9dfb 1854 /* Go backwards because children streamed for the first time come
bddb3763 1855 as part of their parents, and hence are created after them. */
4d83607a 1856
bbcef2a7 1857 /* First register all the types in the cache. This makes sure to
1858 have the original structure in the type cycles when registering
1859 them and computing hashes. */
bddb3763 1860 for (i = len; i-- > from;)
1861 {
f1f41a6c 1862 tree t = cache->nodes[i];
bbcef2a7 1863 if (t && TYPE_P (t))
a7a11a02 1864 {
1865 tree newt = gimple_register_type (t);
1866 /* Mark non-prevailing types so we fix them up. No need
1867 to reset that flag afterwards - nothing that refers
1868 to those types is left and they are collected. */
1869 if (newt != t)
1870 TREE_VISITED (t) = 1;
1871 }
bddb3763 1872 }
1873
4d83607a 1874 /* Second fixup all trees in the new cache entries. */
d534cab0 1875 for (i = len; i-- > from;)
1876 {
f1f41a6c 1877 tree t = cache->nodes[i];
d534cab0 1878 tree oldt = t;
1879 if (!t)
1880 continue;
1881
1882 /* First fixup the fields of T. */
1883 lto_fixup_types (t);
1884
4d83607a 1885 if (!TYPE_P (t))
1886 continue;
1887
d534cab0 1888 /* Now try to find a canonical variant of T itself. */
a7a11a02 1889 t = GIMPLE_REGISTER_TYPE (t);
4d83607a 1890
1891 if (t == oldt)
d534cab0 1892 {
4d83607a 1893 /* The following re-creates proper variant lists while fixing up
1894 the variant leaders. We do not stream TYPE_NEXT_VARIANT so the
1895 variant list state before fixup is broken. */
1896 tree tem, mv;
1897
8873e58c 1898#ifdef ENABLE_CHECKING
4d83607a 1899	  /* Check that we are no longer on our main variant's list if we
1900	     are not the variant leader.  */
1901 if (TYPE_MAIN_VARIANT (t) != t)
d534cab0 1902 {
4d83607a 1903 tem = TYPE_MAIN_VARIANT (t);
1904 while (tem && TYPE_NEXT_VARIANT (tem) != t)
1905 tem = TYPE_NEXT_VARIANT (tem);
8873e58c 1906 gcc_assert (!tem && !TYPE_NEXT_VARIANT (t));
d534cab0 1907 }
8873e58c 1908#endif
4d83607a 1909
1910 /* Query our new main variant. */
a7a11a02 1911 mv = GIMPLE_REGISTER_TYPE (TYPE_MAIN_VARIANT (t));
4d83607a 1912
1913 /* If we were the variant leader and we get replaced ourselves drop
1914 all variants from our list. */
1915 if (TYPE_MAIN_VARIANT (t) == t
1916 && mv != t)
d534cab0 1917 {
4d83607a 1918 tem = t;
1919 while (tem)
1920 {
1921 tree tem2 = TYPE_NEXT_VARIANT (tem);
1922 TYPE_NEXT_VARIANT (tem) = NULL_TREE;
1923 tem = tem2;
1924 }
d534cab0 1925 }
7bfefa9d 1926
4d83607a 1927	  /* If we are not our own variant leader, link us into our new leader's
1928	     variant list.  */
1929 if (mv != t)
1930 {
1931 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
1932 TYPE_NEXT_VARIANT (mv) = t;
b6c200c3 1933 if (RECORD_OR_UNION_TYPE_P (t))
1934 TYPE_BINFO (t) = TYPE_BINFO (mv);
90f56810 1935 /* Preserve the invariant that type variants share their
1936 TYPE_FIELDS. */
1937 if (RECORD_OR_UNION_TYPE_P (t)
1938 && TYPE_FIELDS (mv) != TYPE_FIELDS (t))
1939 {
1940 tree f1, f2;
1941 for (f1 = TYPE_FIELDS (mv), f2 = TYPE_FIELDS (t);
1942 f1 && f2; f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1943 {
1944 unsigned ix;
1945 gcc_assert (f1 != f2
1946 && DECL_NAME (f1) == DECL_NAME (f2));
1947 if (!streamer_tree_cache_lookup (cache, f2, &ix))
1948 gcc_unreachable ();
1949		  /* If we're going to replace an element which we'd
1950		     still visit in later iterations, we wouldn't
1951		     handle it then, so do it here.  We do have to handle it
1952		     even though the field_decl itself will be removed,
1953		     as it could refer to e.g. an integer_cst which we
1954		     wouldn't reach any other way, and which (together with
1955		     its type) would stay uncollected.  */
1956 /* ??? We should rather make sure to replace all
1957 references to f2 with f1. That means handling
1958 COMPONENT_REFs and CONSTRUCTOR elements in
1959 lto_fixup_types and special-case the field-decl
1960 operand handling. */
1961 /* ??? Not sure the above is all relevant in this
1962 path canonicalizing TYPE_FIELDS to that of the
1963 main variant. */
1964 if (ix < i)
1965 lto_fixup_types (f2);
1966 streamer_tree_cache_insert_at (cache, f1, ix);
1967 }
1968 TYPE_FIELDS (t) = TYPE_FIELDS (mv);
1969 }
4d83607a 1970 }
1971
1972 /* Finally adjust our main variant and fix it up. */
1973 TYPE_MAIN_VARIANT (t) = mv;
1974
1975 /* The following reconstructs the pointer chains
1976 of the new pointed-to type if we are a main variant. We do
1977 not stream those so they are broken before fixup. */
1978 if (TREE_CODE (t) == POINTER_TYPE
1979 && TYPE_MAIN_VARIANT (t) == t)
1980 {
1981 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (TREE_TYPE (t));
1982 TYPE_POINTER_TO (TREE_TYPE (t)) = t;
1983 }
1984 else if (TREE_CODE (t) == REFERENCE_TYPE
1985 && TYPE_MAIN_VARIANT (t) == t)
1986 {
1987 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (TREE_TYPE (t));
1988 TYPE_REFERENCE_TO (TREE_TYPE (t)) = t;
1989 }
1990 }
1991
8f2b914f 1992 else
4d83607a 1993 {
8f2b914f 1994 if (RECORD_OR_UNION_TYPE_P (t))
1995 {
1996 tree f1, f2;
1997 if (TYPE_FIELDS (t) != TYPE_FIELDS (oldt))
1998 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (oldt);
1999 f1 && f2; f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
2000 {
2001 unsigned ix;
2002 gcc_assert (f1 != f2 && DECL_NAME (f1) == DECL_NAME (f2));
7f385784 2003 if (!streamer_tree_cache_lookup (cache, f2, &ix))
8f2b914f 2004 gcc_unreachable ();
2005		  /* If we're going to replace an element which we'd
2006		     still visit in later iterations, we wouldn't
2007		     handle it then, so do it here.  We do have to handle it
2008		     even though the field_decl itself will be removed,
2009		     as it could refer to e.g. an integer_cst which we
2010		     wouldn't reach any other way, and which (together with
2011		     its type) would stay uncollected.  */
2012 /* ??? We should rather make sure to replace all
2013 references to f2 with f1. That means handling
2014 COMPONENT_REFs and CONSTRUCTOR elements in
2015 lto_fixup_types and special-case the field-decl
2016 operand handling. */
2017 if (ix < i)
2018 lto_fixup_types (f2);
7f385784 2019 streamer_tree_cache_insert_at (cache, f1, ix);
8f2b914f 2020 }
2021 }
4d83607a 2022
d534cab0 2023 /* If we found a tree that is equal to oldt replace it in the
2024 cache, so that further users (in the various LTO sections)
2025 make use of it. */
7f385784 2026 streamer_tree_cache_insert_at (cache, t, i);
7bfefa9d 2027 }
7bfefa9d 2028 }
4d83607a 2029
bbcef2a7 2030 /* Finally compute the canonical type of all TREE_TYPEs and register
2031 VAR_DECL and FUNCTION_DECL nodes in the symbol table.
2032 From this point there are no longer any types with
2033 TYPE_STRUCTURAL_EQUALITY_P and its type-based alias problems.
2034     This step requires the TYPE_POINTER_TO lists to be present, so
2035 make sure it is done last. */
4d83607a 2036 for (i = len; i-- > from;)
2037 {
f1f41a6c 2038 tree t = cache->nodes[i];
bbcef2a7 2039 if (t == NULL_TREE)
4d83607a 2040 continue;
2041
bbcef2a7 2042 if (TREE_CODE (t) == VAR_DECL)
2043 lto_register_var_decl_in_symtab (data_in, t);
2044 else if (TREE_CODE (t) == FUNCTION_DECL && !DECL_BUILT_IN (t))
2045 lto_register_function_decl_in_symtab (data_in, t);
69fe045c 2046 else if (!flag_wpa
2047 && TREE_CODE (t) == TYPE_DECL)
2048 debug_hooks->type_decl (t, !DECL_FILE_SCOPE_P (t));
bbcef2a7 2049 else if (TYPE_P (t) && !TYPE_CANONICAL (t))
4d83607a 2050 TYPE_CANONICAL (t) = gimple_register_canonical_type (t);
2051 }
7bfefa9d 2052}
2053
f05c9dfb 2054
7bfefa9d 2055/* Read all the symbols from buffer DATA, using descriptors in DECL_DATA.
2056 RESOLUTIONS is the set of symbols picked by the linker (read from the
2057 resolution file when the linker plugin is being used). */
2058
2059static void
2060lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
f1f41a6c 2061 vec<ld_plugin_symbol_resolution_t> resolutions)
7bfefa9d 2062{
2063 const struct lto_decl_header *header = (const struct lto_decl_header *) data;
949e5786 2064 const int decl_offset = sizeof (struct lto_decl_header);
2065 const int main_offset = decl_offset + header->decl_state_size;
2066 const int string_offset = main_offset + header->main_size;
7bfefa9d 2067 struct lto_input_block ib_main;
2068 struct data_in *data_in;
2069 unsigned int i;
2070 const uint32_t *data_ptr, *data_end;
2071 uint32_t num_decl_states;
2072
2073 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
2074 header->main_size);
2075
2076 data_in = lto_data_in_create (decl_data, (const char *) data + string_offset,
2077 header->string_size, resolutions);
2078
a7a11a02 2079 /* We do not uniquify the pre-loaded cache entries, those are middle-end
2080 internal types that should not be merged. */
2081
7bfefa9d 2082 /* Read the global declarations and types. */
2083 while (ib_main.p < ib_main.len)
2084 {
d534cab0 2085 tree t;
f1f41a6c 2086 unsigned from = data_in->reader_cache->nodes.length ();
515cf651 2087 t = stream_read_tree (&ib_main, data_in);
7bfefa9d 2088 gcc_assert (t && ib_main.p <= ib_main.len);
d534cab0 2089 uniquify_nodes (data_in, from);
7bfefa9d 2090 }
2091
2092 /* Read in lto_in_decl_state objects. */
2093 data_ptr = (const uint32_t *) ((const char*) data + decl_offset);
2094 data_end =
2095 (const uint32_t *) ((const char*) data_ptr + header->decl_state_size);
2096 num_decl_states = *data_ptr++;
2097
2098 gcc_assert (num_decl_states > 0);
2099 decl_data->global_decl_state = lto_new_in_decl_state ();
2100 data_ptr = lto_read_in_decl_state (data_in, data_ptr,
2101 decl_data->global_decl_state);
2102
2103 /* Read in per-function decl states and enter them in hash table. */
2104 decl_data->function_decl_states =
57305941 2105 htab_create_ggc (37, lto_hash_in_decl_state, lto_eq_in_decl_state, NULL);
7bfefa9d 2106
2107 for (i = 1; i < num_decl_states; i++)
2108 {
2109 struct lto_in_decl_state *state = lto_new_in_decl_state ();
2110 void **slot;
2111
2112 data_ptr = lto_read_in_decl_state (data_in, data_ptr, state);
2113 slot = htab_find_slot (decl_data->function_decl_states, state, INSERT);
2114 gcc_assert (*slot == NULL);
2115 *slot = state;
2116 }
2117
2118 if (data_ptr != data_end)
2119 internal_error ("bytecode stream: garbage at the end of symbols section");
949e5786 2120
7bfefa9d 2121 /* Set the current decl state to be the global state. */
2122 decl_data->current_decl_state = decl_data->global_decl_state;
2123
7bfefa9d 2124 lto_data_in_delete (data_in);
2125}
2126
949e5786 2127/* Custom version of strtoll, since strtoll itself is not portable.  */
2128
2129static HOST_WIDEST_INT
2130lto_parse_hex (const char *p)
2131{
2132 HOST_WIDEST_INT ret = 0;
2133
81897669 2134 for (; *p != '\0'; ++p)
2135 {
2136 char c = *p;
2137 unsigned char part;
2138 ret <<= 4;
2139 if (c >= '0' && c <= '9')
2140 part = c - '0';
2141 else if (c >= 'a' && c <= 'f')
2142 part = c - 'a' + 10;
2143 else if (c >= 'A' && c <= 'F')
2144 part = c - 'A' + 10;
2145 else
2146 internal_error ("could not parse hex number");
2147 ret |= part;
2148 }
949e5786 2149
81897669 2150 return ret;
2151}
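
/* Illustrative example, not part of the original sources: lto_parse_hex ("1a2F")
   returns (HOST_WIDEST_INT) 0x1a2f, while any character outside [0-9a-fA-F]
   aborts through internal_error.  */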
2152
7bfefa9d 2153/* Read resolution for file named FILE_NAME. The resolution is read from
f18bad33 2154 RESOLUTION. */
7bfefa9d 2155
f18bad33 2156static void
2157lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file)
7bfefa9d 2158{
2159 /* We require that objects in the resolution file are in the same
2160 order as the lto1 command line. */
2161 unsigned int name_len;
2162 char *obj_name;
2163 unsigned int num_symbols;
2164 unsigned int i;
f18bad33 2165 struct lto_file_decl_data *file_data;
f18bad33 2166 splay_tree_node nd = NULL;
7bfefa9d 2167
2168 if (!resolution)
f18bad33 2169 return;
7bfefa9d 2170
b7fedf62 2171 name_len = strlen (file->filename);
7bfefa9d 2172 obj_name = XNEWVEC (char, name_len + 1);
2173 fscanf (resolution, " "); /* Read white space. */
2174
2175 fread (obj_name, sizeof (char), name_len, resolution);
2176 obj_name[name_len] = '\0';
82715bcd 2177 if (filename_cmp (obj_name, file->filename) != 0)
7bfefa9d 2178 internal_error ("unexpected file name %s in linker resolution file. "
b7fedf62 2179 "Expected %s", obj_name, file->filename);
2180 if (file->offset != 0)
2181 {
2182 int t;
81897669 2183 char offset_p[17];
949e5786 2184 HOST_WIDEST_INT offset;
81897669 2185 t = fscanf (resolution, "@0x%16s", offset_p);
b7fedf62 2186 if (t != 1)
2187 internal_error ("could not parse file offset");
81897669 2188 offset = lto_parse_hex (offset_p);
b7fedf62 2189 if (offset != file->offset)
2190 internal_error ("unexpected offset");
2191 }
7bfefa9d 2192
2193 free (obj_name);
2194
2195 fscanf (resolution, "%u", &num_symbols);
2196
2197 for (i = 0; i < num_symbols; i++)
2198 {
d405c5a4 2199 int t;
15c58191 2200 unsigned index;
2201 unsigned HOST_WIDE_INT id;
7bfefa9d 2202 char r_str[27];
6c8c5385 2203 enum ld_plugin_symbol_resolution r = (enum ld_plugin_symbol_resolution) 0;
7bfefa9d 2204 unsigned int j;
2205 unsigned int lto_resolution_str_len =
2206 sizeof (lto_resolution_str) / sizeof (char *);
b5c89d58 2207 res_pair rp;
7bfefa9d 2208
15c58191 2209 t = fscanf (resolution, "%u " HOST_WIDE_INT_PRINT_HEX_PURE " %26s %*[^\n]\n",
2210 &index, &id, r_str);
f18bad33 2211 if (t != 3)
bf776685 2212 internal_error ("invalid line in the resolution file");
7bfefa9d 2213
2214 for (j = 0; j < lto_resolution_str_len; j++)
2215 {
2216 if (strcmp (lto_resolution_str[j], r_str) == 0)
2217 {
2218 r = (enum ld_plugin_symbol_resolution) j;
2219 break;
2220 }
2221 }
d405c5a4 2222 if (j == lto_resolution_str_len)
bf776685 2223 internal_error ("invalid resolution in the resolution file");
7bfefa9d 2224
15c58191 2225 if (!(nd && lto_splay_tree_id_equal_p (nd->key, id)))
f18bad33 2226 {
15c58191 2227 nd = lto_splay_tree_lookup (file_ids, id);
f18bad33 2228 if (nd == NULL)
15c58191 2229 internal_error ("resolution sub id " HOST_WIDE_INT_PRINT_HEX_PURE
2230 " not in object file", id);
f18bad33 2231 }
2232
2233 file_data = (struct lto_file_decl_data *)nd->value;
b5c89d58 2234      /* The indexes are very sparse.  To save memory, save them in a compact
2235	 format that is only unpacked later, when the subfile is processed.  */
2236 rp.res = r;
2237 rp.index = index;
f1f41a6c 2238 file_data->respairs.safe_push (rp);
b5c89d58 2239 if (file_data->max_index < index)
2240 file_data->max_index = index;
7bfefa9d 2241 }
f18bad33 2242}
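
/* Illustrative sketch, not part of the original sources, of the resolution
   file layout assumed by the parser above and by read_cgraph_and_symbols
   (exact whitespace layout is not significant to the fscanf-based parser):

     2
     a.o 3
     5 1bf2 PREVAILING_DEF foo
     9 1bf2 RESOLVED_IR bar
     12 1bf2 UNDEF baz
     b.o 1
     4 77e1 PREVAILING_DEF_IRONLY main

   i.e. the number of object files, then for each object its name (with an
   "@0x<offset>" suffix for objects read from a non-zero offset within a
   file), the number of symbols, and one line per symbol giving the symbol
   index, the sub-module id in hex, the resolution string and the symbol
   name.  */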
7bfefa9d 2243
805389b2 2244/* List of file_decl_datas */
2245struct file_data_list
2246 {
2247 struct lto_file_decl_data *first, *last;
2248 };
2249
f18bad33 2250/* Is the name for an id'ed LTO section?  */
2251
2252static int
badc6cfa 2253lto_section_with_id (const char *name, unsigned HOST_WIDE_INT *id)
f18bad33 2254{
eaa13879 2255 const char *s;
f18bad33 2256
2257 if (strncmp (name, LTO_SECTION_NAME_PREFIX, strlen (LTO_SECTION_NAME_PREFIX)))
2258 return 0;
2259 s = strrchr (name, '.');
badc6cfa 2260 return s && sscanf (s, "." HOST_WIDE_INT_PRINT_HEX_PURE, id) == 1;
f18bad33 2261}
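
/* Illustrative example, not part of the original sources: a section named
   ".gnu.lto_foo.1f2a" starts with LTO_SECTION_NAME_PREFIX, so *ID is set to
   0x1f2a, taken from the text following the final '.'.  */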
2262
2263/* Create a file_data for each sub file id.  */
2264
2265static int
805389b2 2266create_subid_section_table (struct lto_section_slot *ls, splay_tree file_ids,
2267 struct file_data_list *list)
f18bad33 2268{
2269 struct lto_section_slot s_slot, *new_slot;
badc6cfa 2270 unsigned HOST_WIDE_INT id;
f18bad33 2271 splay_tree_node nd;
2272 void **hash_slot;
2273 char *new_name;
2274 struct lto_file_decl_data *file_data;
2275
2276 if (!lto_section_with_id (ls->name, &id))
2277 return 1;
2278
2279 /* Find hash table of sub module id */
15c58191 2280 nd = lto_splay_tree_lookup (file_ids, id);
f18bad33 2281 if (nd != NULL)
2282 {
2283 file_data = (struct lto_file_decl_data *)nd->value;
2284 }
2285 else
2286 {
2287 file_data = ggc_alloc_lto_file_decl_data ();
2288      memset (file_data, 0, sizeof (struct lto_file_decl_data));
2289 file_data->id = id;
2290      file_data->section_hash_table = lto_obj_create_section_hash_table ();
15c58191 2291 lto_splay_tree_insert (file_ids, id, file_data);
805389b2 2292
2293 /* Maintain list in linker order */
2294 if (!list->first)
2295 list->first = file_data;
2296 if (list->last)
2297 list->last->next = file_data;
2298 list->last = file_data;
f18bad33 2299 }
2300
2301 /* Copy section into sub module hash table */
2302 new_name = XDUPVEC (char, ls->name, strlen (ls->name) + 1);
2303 s_slot.name = new_name;
2304 hash_slot = htab_find_slot (file_data->section_hash_table, &s_slot, INSERT);
2305 gcc_assert (*hash_slot == NULL);
2306
2307 new_slot = XDUP (struct lto_section_slot, ls);
2308 new_slot->name = new_name;
2309 *hash_slot = new_slot;
2310 return 1;
2311}
2312
2313/* Read declarations and other initializations for a FILE_DATA. */
2314
2315static void
2316lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file)
2317{
2318 const char *data;
2319 size_t len;
f1f41a6c 2320 vec<ld_plugin_symbol_resolution_t>
2321 resolutions = vec<ld_plugin_symbol_resolution_t>();
b5c89d58 2322 int i;
2323 res_pair *rp;
2324
2325 /* Create vector for fast access of resolution. We do this lazily
2326 to save memory. */
f1f41a6c 2327 resolutions.safe_grow_cleared (file_data->max_index + 1);
2328 for (i = 0; file_data->respairs.iterate (i, &rp); i++)
2329 resolutions[rp->index] = rp->res;
2330 file_data->respairs.release ();
f18bad33 2331
2332 file_data->renaming_hash_table = lto_create_renaming_table ();
2333 file_data->file_name = file->filename;
2334 data = lto_get_section_data (file_data, LTO_section_decls, NULL, &len);
fd30d60a 2335 if (data == NULL)
2336 {
bf776685 2337 internal_error ("cannot read LTO decls from %s", file_data->file_name);
fd30d60a 2338 return;
2339 }
b5c89d58 2340 /* Frees resolutions */
2341 lto_read_decls (file_data, data, resolutions);
f18bad33 2342 lto_free_section_data (file_data, LTO_section_decls, NULL, data, len);
2343}
2344
805389b2 2345/* Finalize FILE_DATA in FILE and increase COUNT. */
f18bad33 2346
805389b2 2347static int
f1f41a6c 2348lto_create_files_from_ids (lto_file *file, struct lto_file_decl_data *file_data,
805389b2 2349 int *count)
f18bad33 2350{
805389b2 2351 lto_file_finalize (file_data, file);
f18bad33 2352 if (cgraph_dump_file)
badc6cfa 2353 fprintf (cgraph_dump_file, "Creating file %s with sub id " HOST_WIDE_INT_PRINT_HEX "\n",
f18bad33 2354 file_data->file_name, file_data->id);
805389b2 2355 (*count)++;
f18bad33 2356 return 0;
7bfefa9d 2357}
2358
2359/* Generate a TREE representation for all types and external decls
2360 entities in FILE.
2361
2362 Read all of the globals out of the file. Then read the cgraph
2363 and process the .o index into the cgraph nodes so that it can open
2364 the .o file to load the functions and ipa information. */
2365
2366static struct lto_file_decl_data *
f18bad33 2367lto_file_read (lto_file *file, FILE *resolution_file, int *count)
7bfefa9d 2368{
f18bad33 2369 struct lto_file_decl_data *file_data = NULL;
2370 splay_tree file_ids;
2371 htab_t section_hash_table;
805389b2 2372 struct lto_section_slot *section;
2373 struct file_data_list file_list;
2374 struct lto_section_list section_list;
2375
2376 memset (&section_list, 0, sizeof (struct lto_section_list));
2377 section_hash_table = lto_obj_build_section_table (file, &section_list);
7bfefa9d 2378
f18bad33 2379 /* Find all sub modules in the object and put their sections into new hash
2380 tables in a splay tree. */
15c58191 2381 file_ids = lto_splay_tree_new ();
805389b2 2382 memset (&file_list, 0, sizeof (struct file_data_list));
2383 for (section = section_list.first; section != NULL; section = section->next)
2384 create_subid_section_table (section, file_ids, &file_list);
2385
f18bad33 2386 /* Add resolutions to file ids */
2387 lto_resolution_read (file_ids, resolution_file, file);
2388
805389b2 2389 /* Finalize each lto file for each submodule in the merged object */
2390 for (file_data = file_list.first; file_data != NULL; file_data = file_data->next)
2391 lto_create_files_from_ids (file, file_data, count);
2392
f18bad33 2393 splay_tree_delete (file_ids);
2394 htab_delete (section_hash_table);
7bfefa9d 2395
805389b2 2396 return file_list.first;
7bfefa9d 2397}
2398
2399#if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
2400#define LTO_MMAP_IO 1
2401#endif
2402
2403#if LTO_MMAP_IO
2404/* Page size of machine is used for mmap and munmap calls. */
2405static size_t page_mask;
2406#endif
2407
2408/* Get the section data of length LEN from FILENAME starting at
2409 OFFSET. The data segment must be freed by the caller when the
2410 caller is finished. Returns NULL if all was not well. */
2411
2412static char *
2413lto_read_section_data (struct lto_file_decl_data *file_data,
2414 intptr_t offset, size_t len)
2415{
2416 char *result;
f5a023c5 2417 static int fd = -1;
2418 static char *fd_name;
7bfefa9d 2419#if LTO_MMAP_IO
2420 intptr_t computed_len;
2421 intptr_t computed_offset;
2422 intptr_t diff;
2423#endif
2424
f5a023c5 2425 /* Keep a single-entry file-descriptor cache. The last file we
2426 touched will get closed at exit.
2427 ??? Eventually we want to add a more sophisticated larger cache
2428 or rather fix function body streaming to not stream them in
2429 practically random order. */
2430 if (fd != -1
82715bcd 2431 && filename_cmp (fd_name, file_data->file_name) != 0)
f5a023c5 2432 {
2433 free (fd_name);
2434 close (fd);
2435 fd = -1;
2436 }
2437 if (fd == -1)
2438 {
27721832 2439 fd = open (file_data->file_name, O_RDONLY|O_BINARY);
f5a023c5 2440 if (fd == -1)
a5f12044 2441 {
2442 fatal_error ("Cannot open %s", file_data->file_name);
2443 return NULL;
2444 }
fad71d4f 2445 fd_name = xstrdup (file_data->file_name);
f5a023c5 2446 }
7bfefa9d 2447
2448#if LTO_MMAP_IO
2449 if (!page_mask)
2450 {
2451 size_t page_size = sysconf (_SC_PAGE_SIZE);
2452 page_mask = ~(page_size - 1);
2453 }
2454
2455 computed_offset = offset & page_mask;
2456 diff = offset - computed_offset;
2457 computed_len = len + diff;
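
  /* Illustrative example: with 4096-byte pages, an OFFSET of 10000 rounds
     down to computed_offset 8192 and diff 1808, so LEN + 1808 bytes are
     mapped and the returned pointer is advanced by 1808.  */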
2458
2459 result = (char *) mmap (NULL, computed_len, PROT_READ, MAP_PRIVATE,
f5a023c5 2460 fd, computed_offset);
7bfefa9d 2461 if (result == MAP_FAILED)
a5f12044 2462 {
2463 fatal_error ("Cannot map %s", file_data->file_name);
2464 return NULL;
2465 }
7bfefa9d 2466
2467 return result + diff;
2468#else
2469 result = (char *) xmalloc (len);
f5a023c5 2470 if (lseek (fd, offset, SEEK_SET) != offset
2471 || read (fd, result, len) != (ssize_t) len)
7bfefa9d 2472 {
2473 free (result);
a5f12044 2474 fatal_error ("Cannot read %s", file_data->file_name);
fad71d4f 2475 result = NULL;
7bfefa9d 2476 }
fad71d4f 2477#ifdef __MINGW32__
2478  /* Native Windows doesn't support delayed unlink of an opened file, so
2479     we close the file here again.  This produces a higher I/O load, but at
2480     least it prevents dangling file handles from blocking the unlink.  */
2481 free (fd_name);
2482 fd_name = NULL;
2483 close (fd);
2484 fd = -1;
2485#endif
7bfefa9d 2486 return result;
2487#endif
2488}
2489
2490
2491/* Get the section data from FILE_DATA of SECTION_TYPE with NAME.
2492 NAME will be NULL unless the section type is for a function
2493 body. */
2494
2495static const char *
2496get_section_data (struct lto_file_decl_data *file_data,
2497 enum lto_section_type section_type,
2498 const char *name,
2499 size_t *len)
2500{
2501 htab_t section_hash_table = file_data->section_hash_table;
2502 struct lto_section_slot *f_slot;
2503 struct lto_section_slot s_slot;
f18bad33 2504 const char *section_name = lto_get_section_name (section_type, name, file_data);
7bfefa9d 2505 char *data = NULL;
2506
2507 *len = 0;
2508 s_slot.name = section_name;
2509 f_slot = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);
2510 if (f_slot)
2511 {
2512 data = lto_read_section_data (file_data, f_slot->start, f_slot->len);
2513 *len = f_slot->len;
2514 }
2515
2516 free (CONST_CAST (char *, section_name));
2517 return data;
2518}
2519
2520
2521/* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
2522 starts at OFFSET and has LEN bytes. */
2523
2524static void
f5a023c5 2525free_section_data (struct lto_file_decl_data *file_data ATTRIBUTE_UNUSED,
7bfefa9d 2526 enum lto_section_type section_type ATTRIBUTE_UNUSED,
2527 const char *name ATTRIBUTE_UNUSED,
2528 const char *offset, size_t len ATTRIBUTE_UNUSED)
2529{
2530#if LTO_MMAP_IO
2531 intptr_t computed_len;
2532 intptr_t computed_offset;
2533 intptr_t diff;
2534#endif
2535
7bfefa9d 2536#if LTO_MMAP_IO
2537 computed_offset = ((intptr_t) offset) & page_mask;
2538 diff = (intptr_t) offset - computed_offset;
2539 computed_len = len + diff;
2540
2541 munmap ((caddr_t) computed_offset, computed_len);
2542#else
2543 free (CONST_CAST(char *, offset));
2544#endif
2545}
2546
7bfefa9d 2547static lto_file *current_lto_file;
2548
68b8d829 2549/* Helper for qsort; compare partitions by size (number of instructions).
2550   We sort from greatest to smallest so a parallel build doesn't stall on the
2551   longest compilation being started too late.  */
2552
2553static int
bc2cc717 2554cmp_partitions_size (const void *a, const void *b)
68b8d829 2555{
2556 const struct ltrans_partition_def *pa
2557 = *(struct ltrans_partition_def *const *)a;
2558 const struct ltrans_partition_def *pb
2559 = *(struct ltrans_partition_def *const *)b;
2560 return pb->insns - pa->insns;
2561}
7bfefa9d 2562
bc2cc717 2563/* Helper for qsort; compare partitions by the order of their first symbol.  */
2564
2565static int
2566cmp_partitions_order (const void *a, const void *b)
2567{
2568 const struct ltrans_partition_def *pa
2569 = *(struct ltrans_partition_def *const *)a;
2570 const struct ltrans_partition_def *pb
2571 = *(struct ltrans_partition_def *const *)b;
2572 int ordera = -1, orderb = -1;
2573
5cf7e051 2574 if (lto_symtab_encoder_size (pa->encoder))
2575 ordera = lto_symtab_encoder_deref (pa->encoder, 0)->symbol.order;
2576 if (lto_symtab_encoder_size (pb->encoder))
2577 orderb = lto_symtab_encoder_deref (pb->encoder, 0)->symbol.order;
bc2cc717 2578 return orderb - ordera;
2579}
2580
2024fa7d 2581/* Write all output files in WPA mode and the file with the list of
2582 LTRANS units. */
7bfefa9d 2583
2024fa7d 2584static void
7bfefa9d 2585lto_wpa_write_files (void)
2586{
2024fa7d 2587 unsigned i, n_sets;
7bfefa9d 2588 lto_file *file;
68b8d829 2589 ltrans_partition part;
2024fa7d 2590 FILE *ltrans_output_list_stream;
2591 char *temp_filename;
2592 size_t blen;
2593
2594 /* Open the LTRANS output list. */
2595 if (!ltrans_output_list)
2596 fatal_error ("no LTRANS output list filename provided");
2597 ltrans_output_list_stream = fopen (ltrans_output_list, "w");
2598 if (ltrans_output_list_stream == NULL)
2599 fatal_error ("opening LTRANS output list %s: %m", ltrans_output_list);
7bfefa9d 2600
2601 timevar_push (TV_WHOPR_WPA);
2602
f1f41a6c 2603 FOR_EACH_VEC_ELT (ltrans_partitions, i, part)
5cf7e051 2604 lto_stats.num_output_symtab_nodes += lto_symtab_encoder_size (part->encoder);
7bfefa9d 2605
c5cc4842 2606 /* Find out statics that need to be promoted
2607 to globals with hidden visibility because they are accessed from multiple
2608 partitions. */
7bfefa9d 2609 lto_promote_cross_file_statics ();
2610
2611 timevar_pop (TV_WHOPR_WPA);
2612
2613 timevar_push (TV_WHOPR_WPA_IO);
2614
2024fa7d 2615 /* Generate a prefix for the LTRANS unit files. */
2616 blen = strlen (ltrans_output_list);
2617 temp_filename = (char *) xmalloc (blen + sizeof ("2147483648.o"));
2618 strcpy (temp_filename, ltrans_output_list);
2619 if (blen > sizeof (".out")
2620 && strcmp (temp_filename + blen - sizeof (".out") + 1,
2621 ".out") == 0)
2622 temp_filename[blen - sizeof (".out") + 1] = '\0';
2623 blen = strlen (temp_filename);
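
  /* Illustrative example: an ltrans_output_list of "a.ltrans.out" yields the
     prefix "a.ltrans", so the partitions below are written to "a.ltrans0.o",
     "a.ltrans1.o", and so on.  */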
7bfefa9d 2624
f1f41a6c 2625 n_sets = ltrans_partitions.length ();
bc2cc717 2626
2627 /* Sort partitions by size so small ones are compiled last.
2628 FIXME: Even when not reordering we may want to output one list for parallel make
2629     and another for the final link command.  */
f1f41a6c 2630 ltrans_partitions.qsort (flag_toplevel_reorder
2631 ? cmp_partitions_size
2632 : cmp_partitions_order);
7bfefa9d 2633 for (i = 0; i < n_sets; i++)
2634 {
2024fa7d 2635 size_t len;
f1f41a6c 2636 ltrans_partition part = ltrans_partitions[i];
7bfefa9d 2637
2024fa7d 2638 /* Write all the nodes in SET. */
2639 sprintf (temp_filename + blen, "%u.o", i);
2640 file = lto_obj_file_open (temp_filename, true);
2641 if (!file)
2642 fatal_error ("lto_obj_file_open() failed");
7bfefa9d 2643
2024fa7d 2644 if (!quiet_flag)
68b8d829 2645 fprintf (stderr, " %s (%s %i insns)", temp_filename, part->name, part->insns);
ecb08119 2646 if (cgraph_dump_file)
2647 {
5cf7e051 2648 lto_symtab_encoder_iterator lsei;
2649
d534cab0 2650 fprintf (cgraph_dump_file, "Writing partition %s to file %s, %i insns\n",
ecb08119 2651 part->name, temp_filename, part->insns);
0851d795 2652 fprintf (cgraph_dump_file, " Symbols in partition: ");
5cf7e051 2653 for (lsei = lsei_start_in_partition (part->encoder); !lsei_end_p (lsei);
2654 lsei_next_in_partition (&lsei))
2655 {
2656 symtab_node node = lsei_node (lsei);
0851d795 2657 fprintf (cgraph_dump_file, "%s ", symtab_node_asm_name (node));
2658 }
2659 fprintf (cgraph_dump_file, "\n Symbols in boundary: ");
2660 for (lsei = lsei_start (part->encoder); !lsei_end_p (lsei);
2661 lsei_next (&lsei))
2662 {
2663 symtab_node node = lsei_node (lsei);
2664 if (!lto_symtab_encoder_in_partition_p (part->encoder, node))
2665 {
2666 fprintf (cgraph_dump_file, "%s ", symtab_node_asm_name (node));
2dc9831f 2667 cgraph_node *cnode = dyn_cast <cgraph_node> (node);
2668 if (cnode
2669 && lto_symtab_encoder_encode_body_p (part->encoder, cnode))
0851d795 2670 fprintf (cgraph_dump_file, "(body included)");
2dc9831f 2671 else
2672 {
2673 varpool_node *vnode = dyn_cast <varpool_node> (node);
2674 if (vnode
2675 && lto_symtab_encoder_encode_initializer_p (part->encoder, vnode))
2676 fprintf (cgraph_dump_file, "(initializer included)");
2677 }
0851d795 2678 }
5cf7e051 2679 }
2680 fprintf (cgraph_dump_file, "\n");
ecb08119 2681 }
5cf7e051 2682 gcc_checking_assert (lto_symtab_encoder_size (part->encoder) || !i);
7bfefa9d 2683
2024fa7d 2684 lto_set_current_out_file (file);
264cbb51 2685
5cf7e051 2686 ipa_write_optimization_summaries (part->encoder);
7bfefa9d 2687
2024fa7d 2688 lto_set_current_out_file (NULL);
2689 lto_obj_file_close (file);
14c7dd36 2690 free (file);
5cf7e051 2691 part->encoder = NULL;
7bfefa9d 2692
2024fa7d 2693 len = strlen (temp_filename);
2694 if (fwrite (temp_filename, 1, len, ltrans_output_list_stream) < len
264cbb51 2695 || fwrite ("\n", 1, 1, ltrans_output_list_stream) < 1)
2024fa7d 2696 fatal_error ("writing to LTRANS output list %s: %m",
2697 ltrans_output_list);
7bfefa9d 2698 }
2699
2024fa7d 2700 lto_stats.num_output_files += n_sets;
2701
7bfefa9d 2702 /* Close the LTRANS output list. */
264cbb51 2703 if (fclose (ltrans_output_list_stream))
2024fa7d 2704 fatal_error ("closing LTRANS output list %s: %m", ltrans_output_list);
2705
19ad01f7 2706 free_ltrans_partitions();
14c7dd36 2707 free (temp_filename);
19ad01f7 2708
2024fa7d 2709 timevar_pop (TV_WHOPR_WPA_IO);
7bfefa9d 2710}
2711
2712
d534cab0 2713/* If TT is a variable or function decl replace it with its
2714 prevailing variant. */
2715#define LTO_SET_PREVAIL(tt) \
2716 do {\
2717 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt)) \
2718 tt = lto_symtab_prevailing_decl (tt); \
2719 } while (0)
7bfefa9d 2720
d534cab0 2721/* Ensure that TT isn't a replaceable var or function decl.  */
2722#define LTO_NO_PREVAIL(tt) \
2723 gcc_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
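
/* For example (illustrative), DECL_INITIAL of a variable may refer to another
   static variable and is therefore run through LTO_SET_PREVAIL below, whereas
   a field such as DECL_NAME can never be a variable or function decl, which
   LTO_NO_PREVAIL asserts.  */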
7bfefa9d 2724
d534cab0 2725/* Given a tree T replace all fields referring to variables or functions
2726 with their prevailing variant. */
7bfefa9d 2727static void
d534cab0 2728lto_fixup_prevailing_decls (tree t)
7bfefa9d 2729{
d534cab0 2730 enum tree_code code = TREE_CODE (t);
2731 LTO_NO_PREVAIL (TREE_TYPE (t));
1e184c62 2732 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
2733 LTO_NO_PREVAIL (TREE_CHAIN (t));
d534cab0 2734 if (DECL_P (t))
7bfefa9d 2735 {
d534cab0 2736 LTO_NO_PREVAIL (DECL_NAME (t));
2737 LTO_SET_PREVAIL (DECL_CONTEXT (t));
2738 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
7bfefa9d 2739 {
d534cab0 2740 LTO_SET_PREVAIL (DECL_SIZE (t));
2741 LTO_SET_PREVAIL (DECL_SIZE_UNIT (t));
2742 LTO_SET_PREVAIL (DECL_INITIAL (t));
2743 LTO_NO_PREVAIL (DECL_ATTRIBUTES (t));
2744 LTO_SET_PREVAIL (DECL_ABSTRACT_ORIGIN (t));
7bfefa9d 2745 }
d534cab0 2746 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
7bfefa9d 2747 {
d534cab0 2748 LTO_NO_PREVAIL (t->decl_with_vis.assembler_name);
2749 LTO_NO_PREVAIL (DECL_SECTION_NAME (t));
7bfefa9d 2750 }
d534cab0 2751 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
7bfefa9d 2752 {
d534cab0 2753 LTO_NO_PREVAIL (DECL_ARGUMENT_FLD (t));
2754 LTO_NO_PREVAIL (DECL_RESULT_FLD (t));
2755 LTO_NO_PREVAIL (DECL_VINDEX (t));
2756 }
2757 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
2758 LTO_SET_PREVAIL (DECL_FUNCTION_PERSONALITY (t));
2759 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
2760 {
2761 LTO_NO_PREVAIL (DECL_FIELD_OFFSET (t));
2762 LTO_NO_PREVAIL (DECL_BIT_FIELD_TYPE (t));
2763 LTO_NO_PREVAIL (DECL_QUALIFIER (t));
2764 LTO_NO_PREVAIL (DECL_FIELD_BIT_OFFSET (t));
2765 LTO_NO_PREVAIL (DECL_FCONTEXT (t));
7bfefa9d 2766 }
2767 }
d534cab0 2768 else if (TYPE_P (t))
7bfefa9d 2769 {
d534cab0 2770 LTO_NO_PREVAIL (TYPE_CACHED_VALUES (t));
2771 LTO_SET_PREVAIL (TYPE_SIZE (t));
2772 LTO_SET_PREVAIL (TYPE_SIZE_UNIT (t));
2773 LTO_NO_PREVAIL (TYPE_ATTRIBUTES (t));
2774 LTO_NO_PREVAIL (TYPE_NAME (t));
7bfefa9d 2775
8f2eb9e1 2776 LTO_SET_PREVAIL (TYPE_MINVAL (t));
2777 LTO_SET_PREVAIL (TYPE_MAXVAL (t));
2778 LTO_SET_PREVAIL (t->type_non_common.binfo);
7bfefa9d 2779
d534cab0 2780 LTO_SET_PREVAIL (TYPE_CONTEXT (t));
7bfefa9d 2781
d534cab0 2782 LTO_NO_PREVAIL (TYPE_CANONICAL (t));
2783 LTO_NO_PREVAIL (TYPE_MAIN_VARIANT (t));
2784 LTO_NO_PREVAIL (TYPE_NEXT_VARIANT (t));
7bfefa9d 2785 }
d534cab0 2786 else if (EXPR_P (t))
7bfefa9d 2787 {
d534cab0 2788 int i;
d534cab0 2789 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
2790 LTO_SET_PREVAIL (TREE_OPERAND (t, i));
7bfefa9d 2791 }
d534cab0 2792 else
7bfefa9d 2793 {
d534cab0 2794 switch (code)
7bfefa9d 2795 {
d534cab0 2796 case TREE_LIST:
2797 LTO_SET_PREVAIL (TREE_VALUE (t));
2798 LTO_SET_PREVAIL (TREE_PURPOSE (t));
2799 break;
2800 default:
2801 gcc_unreachable ();
7bfefa9d 2802 }
2803 }
7bfefa9d 2804}
d534cab0 2805#undef LTO_SET_PREVAIL
2806#undef LTO_NO_PREVAIL
7bfefa9d 2807
2808/* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE,
d534cab0 2809 replaces var and function decls with the corresponding prevailing def. */
7bfefa9d 2810
2811static void
d534cab0 2812lto_fixup_state (struct lto_in_decl_state *state)
7bfefa9d 2813{
2814 unsigned i, si;
2815 struct lto_tree_ref_table *table;
2816
2817 /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs,
2818 we still need to walk from all DECLs to find the reachable
2819 FUNCTION_DECLs and VAR_DECLs. */
2820 for (si = 0; si < LTO_N_DECL_STREAMS; si++)
2821 {
2822 table = &state->streams[si];
2823 for (i = 0; i < table->size; i++)
d534cab0 2824 {
2825 tree *tp = table->trees + i;
2826 if (VAR_OR_FUNCTION_DECL_P (*tp))
2827 *tp = lto_symtab_prevailing_decl (*tp);
2828 }
7bfefa9d 2829 }
2830}
2831
d534cab0 2832/* A callback of htab_traverse. Just extracts a state from SLOT
2833 and calls lto_fixup_state. */
7bfefa9d 2834
2835static int
d534cab0 2836lto_fixup_state_aux (void **slot, void *aux ATTRIBUTE_UNUSED)
7bfefa9d 2837{
2838 struct lto_in_decl_state *state = (struct lto_in_decl_state *) *slot;
d534cab0 2839 lto_fixup_state (state);
7bfefa9d 2840 return 1;
2841}
2842
7bfefa9d 2843/* Fix the decls from all FILES. Replaces each decl with the corresponding
2844 prevailing one. */
2845
2846static void
2847lto_fixup_decls (struct lto_file_decl_data **files)
2848{
2849 unsigned int i;
d534cab0 2850 htab_iterator hi;
2851 tree t;
2852
2853 FOR_EACH_HTAB_ELEMENT (tree_with_vars, t, tree, hi)
2854 lto_fixup_prevailing_decls (t);
7bfefa9d 2855
7bfefa9d 2856 for (i = 0; files[i]; i++)
2857 {
2858 struct lto_file_decl_data *file = files[i];
2859 struct lto_in_decl_state *state = file->global_decl_state;
d534cab0 2860 lto_fixup_state (state);
7bfefa9d 2861
d534cab0 2862 htab_traverse (file->function_decl_states, lto_fixup_state_aux, NULL);
7bfefa9d 2863 }
7bfefa9d 2864}
2865
57305941 2866static GTY((length ("lto_stats.num_input_files + 1"))) struct lto_file_decl_data **all_file_decl_data;
7bfefa9d 2867
f18bad33 2868/* Turn file datas for sub files into a single array, so that they look
2869 like separate files for further passes. */
2870
2871static void
2872lto_flatten_files (struct lto_file_decl_data **orig, int count, int last_file_ix)
2873{
2874 struct lto_file_decl_data *n, *next;
2875 int i, k;
2876
2877 lto_stats.num_input_files = count;
2878 all_file_decl_data
2879 = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (count + 1);
2880 /* Set the hooks so that all of the ipa passes can read in their data. */
2881 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2882 for (i = 0, k = 0; i < last_file_ix; i++)
2883 {
2884 for (n = orig[i]; n != NULL; n = next)
2885 {
2886 all_file_decl_data[k++] = n;
2887 next = n->next;
2888 n->next = NULL;
2889 }
2890 }
2891 all_file_decl_data[k] = NULL;
2892 gcc_assert (k == count);
2893}
2894
3210ebe5 2895/* Input file data before flattening (i.e. splitting them into subfiles to
2896   support incremental linking).  */
2897static int real_file_count;
2898static GTY((length ("real_file_count + 1"))) struct lto_file_decl_data **real_file_decl_data;
2899
7bfefa9d 2900/* Read all the symbols from the input files FNAMES. NFILES is the
2901 number of files requested in the command line. Instantiate a
2902 global call graph by aggregating all the sub-graphs found in each
2903 file. */
2904
2905static void
2906read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
2907{
2908 unsigned int i, last_file_ix;
7bfefa9d 2909 FILE *resolution;
fd193bcd 2910 struct cgraph_node *node;
f18bad33 2911 int count = 0;
2912 struct lto_file_decl_data **decl_data;
7bfefa9d 2913
01ec0a6c 2914 init_cgraph ();
7bfefa9d 2915
e2050933 2916 timevar_push (TV_IPA_LTO_DECL_IN);
7bfefa9d 2917
3210ebe5 2918 real_file_decl_data
2919 = decl_data = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (nfiles + 1);
2920 real_file_count = nfiles;
7bfefa9d 2921
2922 /* Read the resolution file. */
2923 resolution = NULL;
2924 if (resolution_file_name)
2925 {
2926 int t;
2927 unsigned num_objects;
2928
2929 resolution = fopen (resolution_file_name, "r");
c515f146 2930 if (resolution == NULL)
8fb69344 2931 fatal_error ("could not open symbol resolution file: %m");
c515f146 2932
7bfefa9d 2933 t = fscanf (resolution, "%u", &num_objects);
2934 gcc_assert (t == 1);
2935
2936 /* True, since the plugin splits the archives. */
2937 gcc_assert (num_objects == nfiles);
2938 }
2939
d534cab0 2940 tree_with_vars = htab_create_ggc (101, htab_hash_pointer, htab_eq_pointer,
2941 NULL);
ed29d77f 2942 type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
2943 tree_int_map_eq, NULL);
2944 type_pair_cache = XCNEWVEC (struct type_pair_d, GIMPLE_TYPE_PAIR_SIZE);
2945 gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
2946 (GIMPLE_TYPE_LEADER_SIZE);
2947 gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);
d534cab0 2948
ddc90d88 2949 if (!quiet_flag)
2950 fprintf (stderr, "Reading object files:");
2951
7bfefa9d 2952 /* Read all of the object files specified on the command line. */
2953 for (i = 0, last_file_ix = 0; i < nfiles; ++i)
2954 {
2955 struct lto_file_decl_data *file_data = NULL;
ddc90d88 2956 if (!quiet_flag)
2957 {
2958 fprintf (stderr, " %s", fnames[i]);
2959 fflush (stderr);
2960 }
7bfefa9d 2961
3ba0ce47 2962 current_lto_file = lto_obj_file_open (fnames[i], false);
7bfefa9d 2963 if (!current_lto_file)
2964 break;
2965
f18bad33 2966 file_data = lto_file_read (current_lto_file, resolution, &count);
7bfefa9d 2967 if (!file_data)
fad71d4f 2968 {
2969 lto_obj_file_close (current_lto_file);
14c7dd36 2970 free (current_lto_file);
fad71d4f 2971 current_lto_file = NULL;
2972 break;
2973 }
7bfefa9d 2974
f18bad33 2975 decl_data[last_file_ix++] = file_data;
7bfefa9d 2976
3ba0ce47 2977 lto_obj_file_close (current_lto_file);
14c7dd36 2978 free (current_lto_file);
7bfefa9d 2979 current_lto_file = NULL;
7a52b640 2980 ggc_collect ();
7bfefa9d 2981 }
2982
f18bad33 2983 lto_flatten_files (decl_data, count, last_file_ix);
2984 lto_stats.num_input_files = count;
3210ebe5 2985 ggc_free(decl_data);
2986 real_file_decl_data = NULL;
f18bad33 2987
7bfefa9d 2988 if (resolution_file_name)
2989 fclose (resolution);
2990
b8925abd 2991 /* Free gimple type merging datastructures. */
2992 htab_delete (gimple_types);
2993 gimple_types = NULL;
2994 htab_delete (type_hash_cache);
2995 type_hash_cache = NULL;
2996 free (type_pair_cache);
2997 type_pair_cache = NULL;
2998 gimple_type_leader = NULL;
2999 free_gimple_type_tables ();
3000 ggc_collect ();
3001
7bfefa9d 3002 /* Set the hooks so that all of the ipa passes can read in their data. */
3003 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
3004
e2050933 3005 timevar_pop (TV_IPA_LTO_DECL_IN);
7bfefa9d 3006
ddc90d88 3007 if (!quiet_flag)
3008 fprintf (stderr, "\nReading the callgraph\n");
3009
57305941 3010 timevar_push (TV_IPA_LTO_CGRAPH_IO);
02b699d5 3011 /* Read the symtab. */
3012 input_symtab ();
18ff06f9 3013
3014 /* Store resolutions into the symbol table. */
3015 if (resolution_map)
3016 {
3017 void **res;
3018 symtab_node snode;
3019
3020 FOR_EACH_SYMBOL (snode)
3021 if (symtab_real_symbol_p (snode)
3022 && (res = pointer_map_contains (resolution_map,
3023 snode->symbol.decl)))
3024 snode->symbol.resolution
3025 = (enum ld_plugin_symbol_resolution)(size_t)*res;
3026
3027 pointer_map_destroy (resolution_map);
3028 resolution_map = NULL;
3029 }
3030
57305941 3031 timevar_pop (TV_IPA_LTO_CGRAPH_IO);
7bfefa9d 3032
ddc90d88 3033 if (!quiet_flag)
3034 fprintf (stderr, "Merging declarations\n");
3035
57305941 3036 timevar_push (TV_IPA_LTO_DECL_MERGE);
21ce3cc7 3037 /* Merge global decls. */
3038 lto_symtab_merge_decls ();
3039
4f3aea0d 3040  /* If there were errors during symbol merging, bail out; we have no
3041 good way to recover here. */
3042 if (seen_error ())
baa6eec4 3043 fatal_error ("errors during merging of translation units");
4f3aea0d 3044
b8925abd 3045 /* Fixup all decls. */
21ce3cc7 3046 lto_fixup_decls (all_file_decl_data);
d534cab0 3047 htab_delete (tree_with_vars);
3048 tree_with_vars = NULL;
57305941 3049 ggc_collect ();
3050
3051 timevar_pop (TV_IPA_LTO_DECL_MERGE);
3052 /* Each pass will set the appropriate timer. */
21ce3cc7 3053
ddc90d88 3054 if (!quiet_flag)
3055 fprintf (stderr, "Reading summaries\n");
3056
fd193bcd 3057 /* Read the IPA summary data. */
ddc90d88 3058 if (flag_ltrans)
3059 ipa_read_optimization_summaries ();
3060 else
3061 ipa_read_summaries ();
7bfefa9d 3062
b8925abd 3063 for (i = 0; all_file_decl_data[i]; i++)
3064 {
3065 gcc_assert (all_file_decl_data[i]->symtab_node_encoder);
3066 lto_symtab_encoder_delete (all_file_decl_data[i]->symtab_node_encoder);
3067 all_file_decl_data[i]->symtab_node_encoder = NULL;
3068 }
3069
21ce3cc7 3070 /* Finally merge the cgraph according to the decl merging decisions. */
57305941 3071 timevar_push (TV_IPA_LTO_CGRAPH_MERGE);
01ec0a6c 3072 if (cgraph_dump_file)
3073 {
3074 fprintf (cgraph_dump_file, "Before merging:\n");
3075 dump_cgraph (cgraph_dump_file);
3076 dump_varpool (cgraph_dump_file);
3077 }
21ce3cc7 3078 lto_symtab_merge_cgraph_nodes ();
57305941 3079 ggc_collect ();
7bfefa9d 3080
7c455d87 3081  /* FIXME: ipa_transforms_to_apply holds the list of passes that have
3082     optimization summaries computed and need to apply changes.  At the moment
3083     WHOPR only supports inlining, so we can push it here by hand.  In the
3084     future we need to stream this field into the ltrans compilation.  */
59dd4830 3085 if (flag_ltrans)
7c455d87 3086 FOR_EACH_DEFINED_FUNCTION (node)
f1f41a6c 3087 node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass)&pass_ipa_inline);
25429dc2 3088
57305941 3089 timevar_pop (TV_IPA_LTO_CGRAPH_MERGE);
7bfefa9d 3090
57305941 3091 timevar_push (TV_IPA_LTO_DECL_INIT_IO);
7bfefa9d 3092
7bfefa9d 3093 /* Indicate that the cgraph is built and ready. */
3094 cgraph_function_flags_ready = true;
3095
57305941 3096 timevar_pop (TV_IPA_LTO_DECL_INIT_IO);
3097 ggc_free (all_file_decl_data);
3098 all_file_decl_data = NULL;
7bfefa9d 3099}
3100
3101
3102/* Materialize all the bodies for all the nodes in the callgraph. */
3103
3104static void
3105materialize_cgraph (void)
3106{
3107 tree decl;
3108 struct cgraph_node *node;
3109 unsigned i;
3110 timevar_id_t lto_timer;
3111
ddc90d88 3112 if (!quiet_flag)
3113 fprintf (stderr,
3114 flag_wpa ? "Materializing decls:" : "Reading function bodies:");
3115
7bfefa9d 3116 /* Now that we have input the cgraph, we need to clear all of the aux
3117 nodes and read the functions if we are not running in WPA mode. */
e2050933 3118 timevar_push (TV_IPA_LTO_GIMPLE_IN);
7bfefa9d 3119
7c455d87 3120 FOR_EACH_FUNCTION (node)
7bfefa9d 3121 {
7d0d0ce1 3122 if (node->symbol.lto_file_data)
7bfefa9d 3123 {
3124 lto_materialize_function (node);
3125 lto_stats.num_input_cgraph_nodes++;
3126 }
3127 }
3128
e2050933 3129 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
7bfefa9d 3130
3131 /* Start the appropriate timer depending on the mode that we are
3132 operating in. */
3133 lto_timer = (flag_wpa) ? TV_WHOPR_WPA
3134 : (flag_ltrans) ? TV_WHOPR_LTRANS
3135 : TV_LTO;
3136 timevar_push (lto_timer);
3137
3138 current_function_decl = NULL;
3139 set_cfun (NULL);
3140
3141 /* Inform the middle end about the global variables we have seen. */
f1f41a6c 3142 FOR_EACH_VEC_ELT (*lto_global_var_decls, i, decl)
7bfefa9d 3143 rest_of_decl_compilation (decl, 1, 0);
3144
ddc90d88 3145 if (!quiet_flag)
3146 fprintf (stderr, "\n");
7bfefa9d 3147
3148 timevar_pop (lto_timer);
3149}
3150
3151
bdaea387 3152/* Show various memory usage statistics related to LTO. */
3153static void
3154print_lto_report_1 (void)
3155{
3156 const char *pfx = (flag_lto) ? "LTO" : (flag_wpa) ? "WPA" : "LTRANS";
3157 fprintf (stderr, "%s statistics\n", pfx);
3158
3159 if (gimple_types)
3160 fprintf (stderr, "[%s] GIMPLE type table: size %ld, %ld elements, "
3161 "%ld searches, %ld collisions (ratio: %f)\n", pfx,
3162 (long) htab_size (gimple_types),
3163 (long) htab_elements (gimple_types),
3164 (long) gimple_types->searches,
3165 (long) gimple_types->collisions,
3166 htab_collisions (gimple_types));
3167 else
3168 fprintf (stderr, "[%s] GIMPLE type table is empty\n", pfx);
3169 if (type_hash_cache)
3170 fprintf (stderr, "[%s] GIMPLE type hash table: size %ld, %ld elements, "
3171 "%ld searches, %ld collisions (ratio: %f)\n", pfx,
3172 (long) htab_size (type_hash_cache),
3173 (long) htab_elements (type_hash_cache),
3174 (long) type_hash_cache->searches,
3175 (long) type_hash_cache->collisions,
3176 htab_collisions (type_hash_cache));
3177 else
3178 fprintf (stderr, "[%s] GIMPLE type hash table is empty\n", pfx);
3179
3180 print_gimple_types_stats (pfx);
3181 print_lto_report (pfx);
3182}
3183
7bfefa9d 3184/* Perform whole program analysis (WPA) on the callgraph and write out the
3185 optimization plan. */
3186
3187static void
3188do_whole_program_analysis (void)
3189{
0851d795 3190 symtab_node node;
3191
161121a9 3192 timevar_start (TV_PHASE_OPT_GEN);
3193
7bfefa9d 3194 /* Note that since we are in WPA mode, materialize_cgraph will not
3195 actually read in all the function bodies. It only materializes
3196 the decls and cgraph nodes so that analysis can be performed. */
3197 materialize_cgraph ();
3198
3199 /* Reading in the cgraph uses different timers, start timing WPA now. */
3200 timevar_push (TV_WHOPR_WPA);
3201
57305941 3202 if (pre_ipa_mem_report)
3203 {
3204 fprintf (stderr, "Memory consumption before IPA\n");
3205 dump_memory_report (false);
3206 }
3207
4037a4c1 3208 cgraph_function_flags_ready = true;
3209
ecb08119 3210 if (cgraph_dump_file)
3211 {
3212 dump_cgraph (cgraph_dump_file);
3213 dump_varpool (cgraph_dump_file);
3214 }
7bfefa9d 3215 bitmap_obstack_initialize (NULL);
e7eaba7b 3216 cgraph_state = CGRAPH_STATE_IPA_SSA;
7bfefa9d 3217
e7eaba7b 3218 execute_ipa_pass_list (all_regular_ipa_passes);
7bfefa9d 3219
ecb08119 3220 if (cgraph_dump_file)
3221 {
3222 fprintf (cgraph_dump_file, "Optimized ");
3223 dump_cgraph (cgraph_dump_file);
3224 dump_varpool (cgraph_dump_file);
3225 }
0851d795 3226#ifdef ENABLE_CHECKING
7bfefa9d 3227 verify_cgraph ();
0851d795 3228#endif
7bfefa9d 3229 bitmap_obstack_release (NULL);
3230
3231 /* We are about to launch the final LTRANS phase, stop the WPA timer. */
3232 timevar_pop (TV_WHOPR_WPA);
3233
0851d795 3234 timevar_push (TV_WHOPR_PARTITIONING);
48e3ea52 3235 if (flag_lto_partition_1to1)
3236 lto_1_to_1_map ();
0851d795 3237 else if (flag_lto_partition_max)
3238 lto_max_map ();
48e3ea52 3239 else
3240 lto_balanced_map ();
e7eaba7b 3241
0851d795 3242  /* AUX pointers are used by the partitioning code to keep track of the
3243     number of partitions a symbol is in.  This is no longer needed.  */
3244 FOR_EACH_SYMBOL (node)
3245 node->symbol.aux = NULL;
3246
f1f41a6c 3247 lto_stats.num_cgraph_partitions += ltrans_partitions.length ();
0851d795 3248 timevar_pop (TV_WHOPR_PARTITIONING);
3249
161121a9 3250 timevar_stop (TV_PHASE_OPT_GEN);
3251 timevar_start (TV_PHASE_STREAM_OUT);
3252
ddc90d88 3253 if (!quiet_flag)
3254 {
3255 fprintf (stderr, "\nStreaming out");
3256 fflush (stderr);
3257 }
2024fa7d 3258 lto_wpa_write_files ();
ddc90d88 3259 if (!quiet_flag)
3260 fprintf (stderr, "\n");
7bfefa9d 3261
161121a9 3262 timevar_stop (TV_PHASE_STREAM_OUT);
3263
3264 ggc_collect ();
57305941 3265 if (post_ipa_mem_report)
3266 {
3267 fprintf (stderr, "Memory consumption after IPA\n");
3268 dump_memory_report (false);
3269 }
3270
7bfefa9d 3271 /* Show the LTO report before launching LTRANS. */
3272 if (flag_lto_report)
bdaea387 3273 print_lto_report_1 ();
93e5f148 3274 if (mem_report_wpa)
0d7fa088 3275 dump_memory_report (true);
7bfefa9d 3276}
3277
3278
3c9e9cba 3279static GTY(()) tree lto_eh_personality_decl;
3280
3281/* Return the LTO personality function decl. */
3282
3283tree
3284lto_eh_personality (void)
3285{
3286 if (!lto_eh_personality_decl)
3287 {
3288 /* Use the first personality DECL for our personality if we don't
3289 support multiple ones. This ensures that we don't artificially
3290 create the need for them in a single-language program. */
3291 if (first_personality_decl && !dwarf2out_do_cfi_asm ())
3292 lto_eh_personality_decl = first_personality_decl;
3293 else
3294 lto_eh_personality_decl = lhd_gcc_personality ();
3295 }
3296
3297 return lto_eh_personality_decl;
3298}
3299
a3de1f55 3300/* Set the process name based on the LTO mode. */
3301
3302static void
3303lto_process_name (void)
3304{
3305 if (flag_lto)
3306 setproctitle ("lto1-lto");
3307 if (flag_wpa)
3308 setproctitle ("lto1-wpa");
3309 if (flag_ltrans)
3310 setproctitle ("lto1-ltrans");
3311}
3c9e9cba 3312
a0605d65 3313
3314/* Initialize the LTO front end. */
3315
3316static void
3317lto_init (void)
3318{
3319 lto_process_name ();
3320 lto_streamer_hooks_init ();
3321 lto_reader_init ();
954825c9 3322 lto_set_in_hooks (NULL, get_section_data, free_section_data);
a0605d65 3323 memset (&lto_stats, 0, sizeof (lto_stats));
3324 bitmap_obstack_initialize (NULL);
3325 gimple_register_cfg_hooks ();
3326}
3327
3328
7bfefa9d 3329/* Main entry point for the GIMPLE front end. This front end has
3330 three main personalities:
3331
3332 - LTO (-flto). All the object files on the command line are
3333 loaded in memory and processed as a single translation unit.
3334 This is the traditional link-time optimization behavior.
3335
3336 - WPA (-fwpa). Only the callgraph and summary information for
3337 files in the command file are loaded. A single callgraph
3338 (without function bodies) is instantiated for the whole set of
3339 files. IPA passes are only allowed to analyze the call graph
3340 and make transformation decisions. The callgraph is
3341 partitioned, each partition is written to a new object file
3342 together with the transformation decisions.
3343
3344 - LTRANS (-fltrans). Similar to -flto but it prevents the IPA
3345 summary files from running again. Since WPA computed summary
3346 information and decided what transformations to apply, LTRANS
3347 simply applies them. */
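
/* Illustrative note, not part of the original sources: a typical LTO build is

     gcc -flto -O2 -c a.c b.c
     gcc -flto -O2 a.o b.o

   where the link step re-invokes this front end (through the driver and
   lto-wrapper) in one of the modes described above.  */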
3348
3349void
b8ba44e7 3350lto_main (void)
7bfefa9d 3351{
161121a9 3352 /* LTO is called as a front end, even though it is not a front end.
3353 Because it is called as a front end, TV_PHASE_PARSING and
3354 TV_PARSE_GLOBAL are active, and we need to turn them off while
3355 doing LTO. Later we turn them back on so they are active up in
3356 toplev.c. */
3357 timevar_pop (TV_PARSE_GLOBAL);
3358 timevar_stop (TV_PHASE_PARSING);
3359
3360 timevar_start (TV_PHASE_SETUP);
3361
a0605d65 3362 /* Initialize the LTO front end. */
3363 lto_init ();
7bfefa9d 3364
161121a9 3365 timevar_stop (TV_PHASE_SETUP);
3366 timevar_start (TV_PHASE_STREAM_IN);
3367
7bfefa9d 3368 /* Read all the symbols and call graph from all the files in the
3369 command line. */
3370 read_cgraph_and_symbols (num_in_fnames, in_fnames);
3371
161121a9 3372 timevar_stop (TV_PHASE_STREAM_IN);
3373
852f689e 3374 if (!seen_error ())
7bfefa9d 3375 {
3376 /* If WPA is enabled analyze the whole call graph and create an
3377 optimization plan. Otherwise, read in all the function
3378 bodies and continue with optimization. */
3379 if (flag_wpa)
3380 do_whole_program_analysis ();
3381 else
3382 {
161121a9 3383 timevar_start (TV_PHASE_OPT_GEN);
3384
7bfefa9d 3385 materialize_cgraph ();
3386
3387 /* Let the middle end know that we have read and merged all of
3388 the input files. */
cf951b1a 3389 compile ();
161121a9 3390
3391 timevar_stop (TV_PHASE_OPT_GEN);
7bfefa9d 3392
3393 /* FIXME lto, if the processes spawned by WPA fail, we miss
3394 the chance to print WPA's report, so WPA will call
3395 print_lto_report before launching LTRANS. If LTRANS was
3396 launched directly by the driver we would not need to do
3397 this. */
3398 if (flag_lto_report)
bdaea387 3399 print_lto_report_1 ();
7bfefa9d 3400 }
3401 }
161121a9 3402
3403 /* Here we make LTO pretend to be a parser. */
3404 timevar_start (TV_PHASE_PARSING);
3405 timevar_push (TV_PARSE_GLOBAL);
7bfefa9d 3406}
3407
3408#include "gt-lto-lto.h"