/* Basic IPA utilities for type inheritance graph construction and
   devirtualization.
   Copyright (C) 2013-2014 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Brief vocabulary:
     ODR = One Definition Rule
        In short, the ODR states that:
        1 In any translation unit, a template, type, function, or object can
          have no more than one definition.  Some of these can have any number
          of declarations.  A definition provides an instance.
        2 In the entire program, an object or non-inline function cannot have
          more than one definition; if an object or function is used, it must
          have exactly one definition.  You can declare an object or function
          that is never used, in which case you don't have to provide
          a definition.  In no event can there be more than one definition.
        3 Some things, like types, templates, and extern inline functions, can
          be defined in more than one translation unit.  For a given entity,
          each definition must be the same.  Non-extern objects and functions
          in different translation units are different entities, even if their
          names and types are the same.

     OTR = OBJ_TYPE_REF
       This is the GIMPLE representation of type information of a polymorphic call.
       It contains two parameters:
         otr_type is the type of class whose method is called.
         otr_token is the index into the virtual table where the address is taken.

     BINFO
       This is the type inheritance information attached to each tree
       RECORD_TYPE by the C++ frontend.  It provides information about base
       types and virtual tables.

       BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
       BINFO also links to its type by BINFO_TYPE and to the virtual table by
       BINFO_VTABLE.

       Base types of a given type are enumerated by the BINFO_BASE_BINFO
       vector.  Members of this vector are not the BINFOs associated
       with the base types.  Rather they are new copies of BINFOs
       (base BINFOs).  Their virtual tables may differ from the
       virtual table of the base type.  Also BINFO_OFFSET specifies the
       offset of the base within the type.

       In the case of single inheritance, the virtual table is shared
       and BINFO_VTABLE of the base BINFO is NULL.  In the case of multiple
       inheritance the individual virtual tables are pointed to by
       BINFO_VTABLE of the base BINFOs (which differ from BINFO_VTABLE of
       the BINFO associated with the base type).

       BINFO lookup for a given base type and offset can be done by
       get_binfo_at_offset.  It returns the proper BINFO whose virtual table
       can be used for lookup of virtual methods associated with the
       base type.

     token
       This is an index of a virtual method in the virtual table associated
       with the type defining it.  The token can be looked up from an
       OBJ_TYPE_REF or from DECL_VINDEX of a given virtual method.

     polymorphic (indirect) call
       This is the callgraph representation of a virtual method call.  Every
       polymorphic call contains otr_type and otr_token taken from the
       original OBJ_TYPE_REF at callgraph construction time.

   What we do here:

   build_type_inheritance_graph triggers a construction of the type inheritance
   graph.

     We reconstruct it based on types of methods we see in the unit.
     This means that the graph is not complete.  Types with no methods are not
     inserted into the graph.  Also types without virtual methods are not
     represented at all, though it may be easy to add this.

     The inheritance graph is represented as follows:

       Vertices are structures odr_type.  Every odr_type may correspond
       to one or more tree type nodes that are equivalent by ODR rule.
       (the multiple type nodes appear only with linktime optimization)

       Edges are represented by odr_type->base and odr_type->derived_types.
       At the moment we do not track offsets of types for multiple inheritance.
       Adding this is easy.

  possible_polymorphic_call_targets returns, given the parameters found in
    an indirect polymorphic edge, all possible polymorphic call targets of
    the call.

  pass_ipa_devirt performs simple speculative devirtualization.
*/
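
/* An illustrative sketch (not part of GCC itself) of the kind of C++ source
   the machinery above reasons about; the class names and the token value
   below are hypothetical:

     struct A { virtual int f (); virtual int g (); };
     struct B final : A { int g () { return 42; } };

     int call (A *a) { return a->g (); }

   The call a->g () is represented in GIMPLE as an OBJ_TYPE_REF with
   otr_type = A and otr_token = 1 (the index of g in A's virtual table).
   build_type_inheritance_graph records A and B as odr_type nodes with an
   edge between them; possible_polymorphic_call_targets can then enumerate
   the candidate targets {A::g, B::g}, and pass_ipa_devirt may turn the
   indirect call into a speculative direct call to the likely target.  */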
107 | ||
108 | #include "config.h" | |
109 | #include "system.h" | |
110 | #include "coretypes.h" | |
111 | #include "tm.h" | |
112 | #include "tree.h" | |
113 | #include "print-tree.h" | |
114 | #include "calls.h" | |
115 | #include "cgraph.h" | |
116 | #include "expr.h" | |
117 | #include "tree-pass.h" | |
118 | #include "hash-set.h" | |
119 | #include "target.h" | |
120 | #include "hash-table.h" | |
121 | #include "inchash.h" | |
122 | #include "tree-pretty-print.h" | |
123 | #include "ipa-utils.h" | |
124 | #include "tree-ssa-alias.h" | |
125 | #include "internal-fn.h" | |
126 | #include "gimple-fold.h" | |
127 | #include "gimple-expr.h" | |
128 | #include "gimple.h" | |
129 | #include "ipa-inline.h" | |
130 | #include "diagnostic.h" | |
131 | #include "tree-dfa.h" | |
132 | #include "demangle.h" | |
133 | #include "dbgcnt.h" | |
134 | #include "gimple-pretty-print.h" | |
135 | #include "stor-layout.h" | |
136 | #include "intl.h" | |
137 | #include "hash-map.h" | |
138 | ||
139 | static bool odr_types_equivalent_p (tree, tree, bool, bool *, | |
140 | hash_set<tree> *); | |
141 | ||
142 | static bool odr_violation_reported = false; | |
143 | ||
144 | /* Dummy polymorphic call context. */ | |
145 | ||
146 | const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context | |
147 | = {0, 0, NULL, NULL, false, true, true}; | |
148 | ||
149 | /* Pointer set of all call targets appearing in the cache. */ | |
150 | static hash_set<cgraph_node *> *cached_polymorphic_call_targets; | |
151 | ||
/* The node of the type inheritance graph.  For each type unique in the
   One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */
155 | ||
struct GTY(()) odr_type_d
{
  /* Leader type.  */
  tree type;
  /* All bases; built only for main variants of types.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in the unit;
     built only for main variants of types.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  */
  hash_set<tree> * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace? */
  bool anonymous_namespace;
  /* Do we know about all derivations of given type? */
  bool all_derivations_known;
  /* Did we report ODR violation here? */
  bool odr_violated;
};
180 | ||
181 | static bool contains_type_p (tree, HOST_WIDE_INT, tree); | |
182 | ||
183 | ||
/* Return true if BINFO corresponds to a type with virtual methods.

   Every type has several BINFOs.  One is the BINFO associated with the
   type itself, while the others represent it as a base of derived types.
   The BINFOs representing bases do not have BINFO_VTABLE pointer set when
   this is the single inheritance (because vtables are shared).  Look up
   the BINFO of the type and check presence of its vtable.  */

static inline bool
polymorphic_type_binfo_p (tree binfo)
{
  /* See if BINFO's type has a virtual table associated with it.  */
  return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
}
198 | ||
/* Return TRUE if all derived types of T are known and thus
   we may consider the walk of derived types complete.

   This is typically true only for final anonymous namespace types and types
   defined within functions (that may be COMDAT and thus shared across units,
   but with the same set of derived types).  */
205 | ||
206 | static bool | |
207 | type_all_derivations_known_p (tree t) | |
208 | { | |
209 | if (TYPE_FINAL_P (t)) | |
210 | return true; | |
211 | if (flag_ltrans) | |
212 | return false; | |
213 | if (type_in_anonymous_namespace_p (t)) | |
214 | return true; | |
215 | return (decl_function_context (TYPE_NAME (t)) != NULL); | |
216 | } | |
217 | ||
/* Return TRUE if type's constructors are all visible.  */
219 | ||
220 | static bool | |
221 | type_all_ctors_visible_p (tree t) | |
222 | { | |
223 | return !flag_ltrans | |
224 | && cgraph_state >= CGRAPH_STATE_CONSTRUCTION | |
225 | /* We can not always use type_all_derivations_known_p. | |
226 | For function local types we must assume case where | |
227 | the function is COMDAT and shared in between units. | |
228 | ||
229 | TODO: These cases are quite easy to get, but we need | |
230 | to keep track of C++ privatizing via -Wno-weak | |
231 | as well as the IPA privatizing. */ | |
232 | && type_in_anonymous_namespace_p (t); | |
233 | } | |
234 | ||
/* Return TRUE if type may have an instance.  */
236 | ||
237 | static bool | |
238 | type_possibly_instantiated_p (tree t) | |
239 | { | |
240 | tree vtable; | |
241 | varpool_node *vnode; | |
242 | ||
243 | /* TODO: Add abstract types here. */ | |
244 | if (!type_all_ctors_visible_p (t)) | |
245 | return true; | |
246 | ||
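  /* BINFO_VTABLE is either the virtual table VAR_DECL itself or a
     POINTER_PLUS_EXPR of its address and the offset of the address
     point within it; strip the latter form to get at the VAR_DECL.  */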
247 | vtable = BINFO_VTABLE (TYPE_BINFO (t)); | |
248 | if (TREE_CODE (vtable) == POINTER_PLUS_EXPR) | |
249 | vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0); | |
250 | vnode = varpool_node::get (vtable); | |
251 | return vnode && vnode->definition; | |
252 | } | |
253 | ||
254 | /* One Definition Rule hashtable helpers. */ | |
255 | ||
256 | struct odr_hasher | |
257 | { | |
258 | typedef odr_type_d value_type; | |
259 | typedef union tree_node compare_type; | |
260 | static inline hashval_t hash (const value_type *); | |
261 | static inline bool equal (const value_type *, const compare_type *); | |
262 | static inline void remove (value_type *); | |
263 | }; | |
264 | ||
/* Return the type that was declared with T's name so that T is a
   qualified variant of it.  */
267 | ||
268 | static inline tree | |
269 | main_odr_variant (const_tree t) | |
270 | { | |
271 | if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL) | |
272 | return TREE_TYPE (TYPE_NAME (t)); | |
273 | /* Unnamed types and non-C++ produced types can be compared by variants. */ | |
274 | else | |
275 | return TYPE_MAIN_VARIANT (t); | |
276 | } | |
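
/* For example (an illustrative case): for T = `const S', where S is a class
   type, TYPE_NAME yields the TYPE_DECL of S and its TREE_TYPE is the type as
   originally declared, so all cv-qualified variants of S map back to the one
   declared type used for ODR hashing and comparison.  */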
277 | ||
278 | /* Produce hash based on type name. */ | |
279 | ||
280 | static hashval_t | |
281 | hash_type_name (tree t) | |
282 | { | |
283 | gcc_checking_assert (main_odr_variant (t) == t); | |
284 | ||
285 | /* If not in LTO, all main variants are unique, so we can do | |
286 | pointer hash. */ | |
287 | if (!in_lto_p) | |
288 | return htab_hash_pointer (t); | |
289 | ||
290 | /* Anonymous types are unique. */ | |
291 | if (type_in_anonymous_namespace_p (t)) | |
292 | return htab_hash_pointer (t); | |
293 | ||
294 | /* For polymorphic types, we can simply hash the virtual table. */ | |
295 | if (TREE_CODE (t) == RECORD_TYPE | |
296 | && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t))) | |
297 | { | |
298 | tree v = BINFO_VTABLE (TYPE_BINFO (t)); | |
299 | hashval_t hash = 0; | |
300 | ||
301 | if (TREE_CODE (v) == POINTER_PLUS_EXPR) | |
302 | { | |
303 | hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1)); | |
304 | v = TREE_OPERAND (TREE_OPERAND (v, 0), 0); | |
305 | } | |
306 | ||
307 | v = DECL_ASSEMBLER_NAME (v); | |
308 | hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v)); | |
309 | return hash; | |
310 | } | |
311 | ||
312 | /* Rest is not implemented yet. */ | |
313 | gcc_unreachable (); | |
314 | } | |
315 | ||
316 | /* Return the computed hashcode for ODR_TYPE. */ | |
317 | ||
318 | inline hashval_t | |
319 | odr_hasher::hash (const value_type *odr_type) | |
320 | { | |
321 | return hash_type_name (odr_type->type); | |
322 | } | |
323 | ||
/* For languages with One Definition Rule, work out if
   types are the same based on their name.

   This is non-trivial for LTO where minor differences in
   the type representation may have prevented type merging
   from merging two copies of otherwise equivalent type.

   Until we start streaming mangled type names, this function works
   only for polymorphic types.  */
333 | ||
334 | bool | |
335 | types_same_for_odr (const_tree type1, const_tree type2) | |
336 | { | |
337 | gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2)); | |
338 | ||
339 | type1 = main_odr_variant (type1); | |
340 | type2 = main_odr_variant (type2); | |
341 | ||
342 | if (type1 == type2) | |
343 | return true; | |
344 | ||
345 | if (!in_lto_p) | |
346 | return false; | |
347 | ||
348 | /* Check for anonymous namespaces. Those have !TREE_PUBLIC | |
349 | on the corresponding TYPE_STUB_DECL. */ | |
350 | if (type_in_anonymous_namespace_p (type1) | |
351 | || type_in_anonymous_namespace_p (type2)) | |
352 | return false; | |
353 | ||
  /* See if types are obviously different (i.e. different codes
     or polymorphic wrt non-polymorphic).  This is not strictly correct
     for ODR violating programs, but we can't do better without streaming
     ODR names.  */
  if (TREE_CODE (type1) != TREE_CODE (type2))
    return false;
  if (TREE_CODE (type1) == RECORD_TYPE
      && (TYPE_BINFO (type1) == NULL_TREE) != (TYPE_BINFO (type2) == NULL_TREE))
    return false;
  if (TREE_CODE (type1) == RECORD_TYPE && TYPE_BINFO (type1)
      && (BINFO_VTABLE (TYPE_BINFO (type1)) == NULL_TREE)
         != (BINFO_VTABLE (TYPE_BINFO (type2)) == NULL_TREE))
    return false;
367 | ||
  /* At the moment we have no way to establish ODR equivalence at LTO
     other than comparing virtual table pointers of polymorphic types.
     Eventually we should start saving mangled names in TYPE_NAME.
     Then this condition will become non-trivial.  */
372 | ||
373 | if (TREE_CODE (type1) == RECORD_TYPE | |
374 | && TYPE_BINFO (type1) && TYPE_BINFO (type2) | |
375 | && BINFO_VTABLE (TYPE_BINFO (type1)) | |
376 | && BINFO_VTABLE (TYPE_BINFO (type2))) | |
377 | { | |
378 | tree v1 = BINFO_VTABLE (TYPE_BINFO (type1)); | |
379 | tree v2 = BINFO_VTABLE (TYPE_BINFO (type2)); | |
380 | gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR | |
381 | && TREE_CODE (v2) == POINTER_PLUS_EXPR); | |
382 | return (operand_equal_p (TREE_OPERAND (v1, 1), | |
383 | TREE_OPERAND (v2, 1), 0) | |
384 | && DECL_ASSEMBLER_NAME | |
385 | (TREE_OPERAND (TREE_OPERAND (v1, 0), 0)) | |
386 | == DECL_ASSEMBLER_NAME | |
387 | (TREE_OPERAND (TREE_OPERAND (v2, 0), 0))); | |
388 | } | |
389 | gcc_unreachable (); | |
390 | } | |
391 | ||
392 | ||
393 | /* Compare types T1 and T2 and return true if they are | |
394 | equivalent. */ | |
395 | ||
396 | inline bool | |
397 | odr_hasher::equal (const value_type *t1, const compare_type *ct2) | |
398 | { | |
399 | tree t2 = const_cast <tree> (ct2); | |
400 | ||
401 | gcc_checking_assert (main_odr_variant (t2) == t2); | |
402 | if (t1->type == t2) | |
403 | return true; | |
404 | if (!in_lto_p) | |
405 | return false; | |
406 | return types_same_for_odr (t1->type, t2); | |
407 | } | |
408 | ||
409 | /* Free ODR type V. */ | |
410 | ||
411 | inline void | |
412 | odr_hasher::remove (value_type *v) | |
413 | { | |
414 | v->bases.release (); | |
415 | v->derived_types.release (); | |
416 | if (v->types_set) | |
417 | delete v->types_set; | |
418 | ggc_free (v); | |
419 | } | |
420 | ||
421 | /* ODR type hash used to lookup ODR type based on tree type node. */ | |
422 | ||
423 | typedef hash_table<odr_hasher> odr_hash_type; | |
424 | static odr_hash_type *odr_hash; | |
425 | ||
426 | /* ODR types are also stored into ODR_TYPE vector to allow consistent | |
427 | walking. Bases appear before derived types. Vector is garbage collected | |
428 | so we won't end up visiting empty types. */ | |
429 | ||
430 | static GTY(()) vec <odr_type, va_gc> *odr_types_ptr; | |
431 | #define odr_types (*odr_types_ptr) | |
432 | ||
433 | /* Set TYPE_BINFO of TYPE and its variants to BINFO. */ | |
434 | void | |
435 | set_type_binfo (tree type, tree binfo) | |
436 | { | |
437 | for (; type; type = TYPE_NEXT_VARIANT (type)) | |
438 | if (COMPLETE_TYPE_P (type)) | |
439 | TYPE_BINFO (type) = binfo; | |
440 | else | |
441 | gcc_assert (!TYPE_BINFO (type)); | |
442 | } | |
443 | ||
/* Compare T1 and T2 based on name or structure.  */
445 | ||
446 | static bool | |
447 | odr_subtypes_equivalent_p (tree t1, tree t2, hash_set<tree> *visited) | |
448 | { | |
449 | bool an1, an2; | |
450 | ||
451 | /* This can happen in incomplete types that should be handled earlier. */ | |
452 | gcc_assert (t1 && t2); | |
453 | ||
454 | t1 = main_odr_variant (t1); | |
455 | t2 = main_odr_variant (t2); | |
456 | if (t1 == t2) | |
457 | return true; | |
458 | if (TREE_CODE (t1) != TREE_CODE (t2)) | |
459 | return false; | |
460 | if ((TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE)) | |
461 | return false; | |
462 | if (TYPE_NAME (t1) && DECL_NAME (TYPE_NAME (t1)) != DECL_NAME (TYPE_NAME (t2))) | |
463 | return false; | |
464 | ||
465 | /* Anonymous namespace types must match exactly. */ | |
466 | an1 = type_in_anonymous_namespace_p (t1); | |
467 | an2 = type_in_anonymous_namespace_p (t2); | |
468 | if (an1 != an2 || an1) | |
469 | return false; | |
470 | ||
471 | /* For types where we can not establish ODR equivalency, recurse and deeply | |
472 | compare. */ | |
473 | if (TREE_CODE (t1) != RECORD_TYPE | |
474 | || !TYPE_BINFO (t1) || !TYPE_BINFO (t2) | |
475 | || !polymorphic_type_binfo_p (TYPE_BINFO (t1)) | |
476 | || !polymorphic_type_binfo_p (TYPE_BINFO (t2))) | |
477 | { | |
478 | /* This should really be a pair hash, but for the moment we do not need | |
479 | 100% reliability and it would be better to compare all ODR types so | |
480 | recursion here is needed only for component types. */ | |
481 | if (visited->add (t1)) | |
482 | return true; | |
483 | return odr_types_equivalent_p (t1, t2, false, NULL, visited); | |
484 | } | |
485 | return types_same_for_odr (t1, t2); | |
486 | } | |
487 | ||
/* Compare two virtual tables, PREVAILING and VTABLE, and output ODR
   violation warnings.  */
490 | ||
491 | void | |
492 | compare_virtual_tables (varpool_node *prevailing, varpool_node *vtable) | |
493 | { | |
494 | int n1, n2; | |
495 | if (DECL_VIRTUAL_P (prevailing->decl) != DECL_VIRTUAL_P (vtable->decl)) | |
496 | { | |
497 | odr_violation_reported = true; | |
498 | if (DECL_VIRTUAL_P (prevailing->decl)) | |
499 | { | |
500 | varpool_node *tmp = prevailing; | |
501 | prevailing = vtable; | |
502 | vtable = tmp; | |
503 | } | |
504 | if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (vtable->decl))), | |
505 | OPT_Wodr, | |
506 | "virtual table of type %qD violates one definition rule", | |
507 | DECL_CONTEXT (vtable->decl))) | |
508 | inform (DECL_SOURCE_LOCATION (prevailing->decl), | |
509 | "variable of same assembler name as the virtual table is " | |
510 | "defined in another translation unit"); | |
511 | return; | |
512 | } | |
513 | if (!prevailing->definition || !vtable->definition) | |
514 | return; | |
515 | for (n1 = 0, n2 = 0; true; n1++, n2++) | |
516 | { | |
517 | struct ipa_ref *ref1, *ref2; | |
518 | bool end1, end2; | |
519 | end1 = !prevailing->iterate_reference (n1, ref1); | |
520 | end2 = !vtable->iterate_reference (n2, ref2); | |
521 | if (end1 && end2) | |
522 | return; | |
523 | if (!end1 && !end2 | |
524 | && DECL_ASSEMBLER_NAME (ref1->referred->decl) | |
525 | != DECL_ASSEMBLER_NAME (ref2->referred->decl) | |
526 | && !n2 | |
527 | && !DECL_VIRTUAL_P (ref2->referred->decl) | |
528 | && DECL_VIRTUAL_P (ref1->referred->decl)) | |
529 | { | |
530 | if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0, | |
531 | "virtual table of type %qD contains RTTI information", | |
532 | DECL_CONTEXT (vtable->decl))) | |
533 | { | |
534 | inform (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (prevailing->decl))), | |
535 | "but is prevailed by one without from other translation unit"); | |
536 | inform (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (prevailing->decl))), | |
537 | "RTTI will not work on this type"); | |
538 | } | |
539 | n2++; | |
540 | end2 = !vtable->iterate_reference (n2, ref2); | |
541 | } | |
      /* Symmetrically, skip the RTTI slot when only the prevailing
         vtable contains RTTI information.  */
      if (!end1 && !end2
          && DECL_ASSEMBLER_NAME (ref1->referred->decl)
             != DECL_ASSEMBLER_NAME (ref2->referred->decl)
          && !n1
          && !DECL_VIRTUAL_P (ref1->referred->decl)
          && DECL_VIRTUAL_P (ref2->referred->decl))
        {
          n1++;
          end1 = !prevailing->iterate_reference (n1, ref1);
        }
552 | if (end1 || end2) | |
553 | { | |
554 | if (end1) | |
555 | { | |
556 | varpool_node *tmp = prevailing; | |
557 | prevailing = vtable; | |
558 | vtable = tmp; | |
559 | ref1 = ref2; | |
560 | } | |
561 | if (warning_at (DECL_SOURCE_LOCATION | |
562 | (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0, | |
563 | "virtual table of type %qD violates " | |
564 | "one definition rule", | |
565 | DECL_CONTEXT (vtable->decl))) | |
566 | { | |
567 | inform (DECL_SOURCE_LOCATION | |
568 | (TYPE_NAME (DECL_CONTEXT (prevailing->decl))), | |
569 | "the conflicting type defined in another translation " | |
570 | "unit"); | |
571 | inform (DECL_SOURCE_LOCATION | |
572 | (TYPE_NAME (DECL_CONTEXT (ref1->referring->decl))), | |
573 | "contains additional virtual method %qD", | |
574 | ref1->referred->decl); | |
575 | } | |
576 | return; | |
577 | } | |
578 | if (DECL_ASSEMBLER_NAME (ref1->referred->decl) | |
579 | != DECL_ASSEMBLER_NAME (ref2->referred->decl)) | |
580 | { | |
581 | if (warning_at (DECL_SOURCE_LOCATION | |
582 | (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0, | |
583 | "virtual table of type %qD violates " | |
584 | "one definition rule ", | |
585 | DECL_CONTEXT (vtable->decl))) | |
586 | { | |
587 | inform (DECL_SOURCE_LOCATION | |
588 | (TYPE_NAME (DECL_CONTEXT (prevailing->decl))), | |
589 | "the conflicting type defined in another translation " | |
590 | "unit"); | |
591 | inform (DECL_SOURCE_LOCATION (ref1->referred->decl), | |
592 | "virtual method %qD", ref1->referred->decl); | |
593 | inform (DECL_SOURCE_LOCATION (ref2->referred->decl), | |
594 | "ought to match virtual method %qD but does not", | |
595 | ref2->referred->decl); | |
596 | return; | |
597 | } | |
598 | } | |
599 | } | |
600 | } | |
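
/* An illustrative example (hypothetical source, not taken from GCC) of the
   kind of mismatch the comparison above diagnoses with -Wodr at LTO link
   time:

     // a.C
     struct S { virtual void f (); };
     // b.C
     struct S { virtual void g (); };

   The two definitions of S produce virtual tables that refer to different
   virtual methods, so the prevailing vtable and the discarded one disagree
   and a warning pointing at both translation units is emitted.  */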
601 | ||
602 | /* Output ODR violation warning about T1 and T2 with REASON. | |
603 | Display location of ST1 and ST2 if REASON speaks about field or | |
604 | method of the type. | |
605 | If WARN is false, do nothing. Set WARNED if warning was indeed | |
606 | output. */ | |
607 | ||
608 | void | |
609 | warn_odr (tree t1, tree t2, tree st1, tree st2, | |
610 | bool warn, bool *warned, const char *reason) | |
611 | { | |
612 | tree decl2 = TYPE_NAME (t2); | |
613 | ||
614 | if (!warn) | |
615 | return; | |
616 | if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), OPT_Wodr, | |
617 | "type %qT violates one definition rule", | |
618 | t1)) | |
619 | return; | |
620 | if (!st1) | |
621 | ; | |
622 | else if (TREE_CODE (st1) == FIELD_DECL) | |
623 | { | |
624 | inform (DECL_SOURCE_LOCATION (decl2), | |
625 | "a different type is defined in another translation unit"); | |
626 | inform (DECL_SOURCE_LOCATION (st1), | |
627 | "the first difference of corresponding definitions is field %qD", | |
628 | st1); | |
629 | decl2 = st2; | |
630 | } | |
631 | else if (TREE_CODE (st1) == FUNCTION_DECL) | |
632 | { | |
633 | inform (DECL_SOURCE_LOCATION (decl2), | |
634 | "a different type is defined in another translation unit"); | |
635 | inform (DECL_SOURCE_LOCATION (st1), | |
636 | "the first difference of corresponding definitions is method %qD", | |
637 | st1); | |
638 | decl2 = st2; | |
639 | } | |
640 | else | |
641 | return; | |
642 | inform (DECL_SOURCE_LOCATION (decl2), reason); | |
643 | ||
644 | if (warned) | |
645 | *warned = true; | |
646 | } | |
647 | ||
/* We already warned about ODR mismatch.  T1 and T2 ought to be equivalent
   because they are used in the same place in ODR matching types.
   They are not; inform the user.  */
651 | ||
652 | void | |
653 | warn_types_mismatch (tree t1, tree t2) | |
654 | { | |
655 | if (!TYPE_NAME (t1) || !TYPE_NAME (t2)) | |
656 | return; | |
657 | /* In Firefox it is a common bug to have same types but in | |
658 | different namespaces. Be a bit more informative on | |
659 | this. */ | |
660 | if (TYPE_CONTEXT (t1) && TYPE_CONTEXT (t2) | |
661 | && (((TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL) | |
662 | != (TREE_CODE (TYPE_CONTEXT (t2)) == NAMESPACE_DECL)) | |
663 | || (TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL | |
664 | && (DECL_NAME (TYPE_CONTEXT (t1)) != | |
665 | DECL_NAME (TYPE_CONTEXT (t2)))))) | |
666 | inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), | |
667 | "type %qT should match type %qT but is defined " | |
668 | "in different namespace ", | |
669 | t1, t2); | |
670 | else | |
671 | inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), | |
672 | "type %qT should match type %qT", | |
673 | t1, t2); | |
674 | inform (DECL_SOURCE_LOCATION (TYPE_NAME (t2)), | |
675 | "the incompatible type is defined here"); | |
676 | } | |
677 | ||
678 | /* Compare T1 and T2, report ODR violations if WARN is true and set | |
679 | WARNED to true if anything is reported. Return true if types match. | |
680 | If true is returned, the types are also compatible in the sense of | |
681 | gimple_canonical_types_compatible_p. */ | |
682 | ||
683 | static bool | |
684 | odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned, hash_set<tree> *visited) | |
685 | { | |
686 | /* Check first for the obvious case of pointer identity. */ | |
687 | if (t1 == t2) | |
688 | return true; | |
689 | gcc_assert (!type_in_anonymous_namespace_p (t1)); | |
690 | gcc_assert (!type_in_anonymous_namespace_p (t2)); | |
691 | ||
692 | /* Can't be the same type if the types don't have the same code. */ | |
693 | if (TREE_CODE (t1) != TREE_CODE (t2)) | |
694 | { | |
695 | warn_odr (t1, t2, NULL, NULL, warn, warned, | |
696 | G_("a different type is defined in another translation unit")); | |
697 | return false; | |
698 | } | |
699 | ||
700 | if (TYPE_QUALS (t1) != TYPE_QUALS (t2)) | |
701 | { | |
702 | warn_odr (t1, t2, NULL, NULL, warn, warned, | |
703 | G_("a type with different qualifiers is defined in another " | |
704 | "translation unit")); | |
705 | return false; | |
706 | } | |
707 | ||
708 | if (comp_type_attributes (t1, t2) != 1) | |
709 | { | |
710 | warn_odr (t1, t2, NULL, NULL, warn, warned, | |
711 | G_("a type with attributes " | |
712 | "is defined in another translation unit")); | |
713 | return false; | |
714 | } | |
715 | ||
716 | if (TREE_CODE (t1) == ENUMERAL_TYPE) | |
717 | { | |
718 | tree v1, v2; | |
719 | for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2); | |
720 | v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2)) | |
721 | { | |
722 | if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2)) | |
723 | { | |
724 | warn_odr (t1, t2, NULL, NULL, warn, warned, | |
725 | G_("an enum with different value name" | |
726 | " is defined in another translation unit")); | |
727 | return false; | |
728 | } | |
729 | if (TREE_VALUE (v1) != TREE_VALUE (v2) | |
730 | && !operand_equal_p (DECL_INITIAL (TREE_VALUE (v1)), | |
731 | DECL_INITIAL (TREE_VALUE (v2)), 0)) | |
732 | { | |
733 | warn_odr (t1, t2, NULL, NULL, warn, warned, | |
734 | G_("an enum with different values is defined" | |
735 | " in another translation unit")); | |
736 | return false; | |
737 | } | |
738 | } | |
739 | if (v1 || v2) | |
740 | { | |
741 | warn_odr (t1, t2, NULL, NULL, warn, warned, | |
742 | G_("an enum with mismatching number of values " | |
743 | "is defined in another translation unit")); | |
744 | return false; | |
745 | } | |
746 | } | |
747 | ||
748 | /* Non-aggregate types can be handled cheaply. */ | |
749 | if (INTEGRAL_TYPE_P (t1) | |
750 | || SCALAR_FLOAT_TYPE_P (t1) | |
751 | || FIXED_POINT_TYPE_P (t1) | |
752 | || TREE_CODE (t1) == VECTOR_TYPE | |
753 | || TREE_CODE (t1) == COMPLEX_TYPE | |
754 | || TREE_CODE (t1) == OFFSET_TYPE | |
755 | || POINTER_TYPE_P (t1)) | |
756 | { | |
757 | if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)) | |
758 | { | |
759 | warn_odr (t1, t2, NULL, NULL, warn, warned, | |
760 | G_("a type with different precision is defined " | |
761 | "in another translation unit")); | |
762 | return false; | |
763 | } | |
764 | if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)) | |
765 | { | |
766 | warn_odr (t1, t2, NULL, NULL, warn, warned, | |
767 | G_("a type with different signedness is defined " | |
768 | "in another translation unit")); | |
769 | return false; | |
770 | } | |
771 | ||
772 | if (TREE_CODE (t1) == INTEGER_TYPE | |
773 | && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)) | |
774 | { | |
          /* char WRT uint8_t?  */
776 | warn_odr (t1, t2, NULL, NULL, warn, warned, | |
777 | G_("a different type is defined in another " | |
778 | "translation unit")); | |
779 | return false; | |
780 | } | |
781 | ||
782 | /* For canonical type comparisons we do not want to build SCCs | |
783 | so we cannot compare pointed-to types. But we can, for now, | |
784 | require the same pointed-to type kind and match what | |
785 | useless_type_conversion_p would do. */ | |
786 | if (POINTER_TYPE_P (t1)) | |
787 | { | |
788 | if (TYPE_ADDR_SPACE (TREE_TYPE (t1)) | |
789 | != TYPE_ADDR_SPACE (TREE_TYPE (t2))) | |
790 | { | |
791 | warn_odr (t1, t2, NULL, NULL, warn, warned, | |
792 | G_("it is defined as a pointer in different address " | |
793 | "space in another translation unit")); | |
794 | return false; | |
795 | } | |
796 | ||
797 | if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited)) | |
798 | { | |
799 | warn_odr (t1, t2, NULL, NULL, warn, warned, | |
800 | G_("it is defined as a pointer to different type " | |
801 | "in another translation unit")); | |
802 | if (warn && warned) | |
803 | warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2)); | |
804 | return false; | |
805 | } | |
806 | } | |
807 | ||
808 | /* Tail-recurse to components. */ | |
809 | if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE) | |
810 | && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited)) | |
811 | { | |
812 | /* Probably specific enough. */ | |
813 | warn_odr (t1, t2, NULL, NULL, warn, warned, | |
814 | G_("a different type is defined " | |
815 | "in another translation unit")); | |
816 | if (warn && warned) | |
817 | warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2)); | |
818 | return false; | |
819 | } | |
820 | ||
821 | gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0)); | |
822 | gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1), | |
823 | TYPE_SIZE_UNIT (t2), 0)); | |
824 | gcc_assert (TYPE_MODE (t1) == TYPE_MODE (t2)); | |
825 | ||
826 | return true; | |
827 | } | |
828 | ||
829 | /* Do type-specific comparisons. */ | |
830 | switch (TREE_CODE (t1)) | |
831 | { | |
832 | case ARRAY_TYPE: | |
833 | { | |
834 | /* Array types are the same if the element types are the same and | |
835 | the number of elements are the same. */ | |
836 | if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited)) | |
837 | { | |
838 | warn_odr (t1, t2, NULL, NULL, warn, warned, | |
839 | G_("a different type is defined in another " | |
840 | "translation unit")); | |
841 | if (warn && warned) | |
842 | warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2)); | |
843 | } | |
844 | gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2)); | |
845 | gcc_assert (TYPE_NONALIASED_COMPONENT (t1) | |
846 | == TYPE_NONALIASED_COMPONENT (t2)); | |
847 | ||
848 | tree i1 = TYPE_DOMAIN (t1); | |
849 | tree i2 = TYPE_DOMAIN (t2); | |
850 | ||
851 | /* For an incomplete external array, the type domain can be | |
852 | NULL_TREE. Check this condition also. */ | |
853 | if (i1 == NULL_TREE || i2 == NULL_TREE) | |
854 | return true; | |
855 | ||
856 | tree min1 = TYPE_MIN_VALUE (i1); | |
857 | tree min2 = TYPE_MIN_VALUE (i2); | |
858 | tree max1 = TYPE_MAX_VALUE (i1); | |
859 | tree max2 = TYPE_MAX_VALUE (i2); | |
860 | ||
      /* In C++, minimums should always be 0.  */
862 | gcc_assert (min1 == min2); | |
863 | if (!operand_equal_p (max1, max2, 0)) | |
864 | { | |
865 | warn_odr (t1, t2, NULL, NULL, warn, warned, | |
866 | G_("an array of different size is defined " | |
867 | "in another translation unit")); | |
868 | return false; | |
869 | } | |
870 | gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0)); | |
871 | gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1), | |
872 | TYPE_SIZE_UNIT (t2), 0)); | |
873 | } | |
874 | return true; | |
875 | ||
876 | case METHOD_TYPE: | |
877 | case FUNCTION_TYPE: | |
878 | /* Function types are the same if the return type and arguments types | |
879 | are the same. */ | |
880 | if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited)) | |
881 | { | |
882 | warn_odr (t1, t2, NULL, NULL, warn, warned, | |
883 | G_("has different return value " | |
884 | "in another translation unit")); | |
885 | if (warn && warned) | |
886 | warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2)); | |
887 | return false; | |
888 | } | |
889 | ||
890 | if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)) | |
891 | return true; | |
892 | else | |
893 | { | |
894 | tree parms1, parms2; | |
895 | ||
896 | for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2); | |
897 | parms1 && parms2; | |
898 | parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2)) | |
899 | { | |
900 | if (!odr_subtypes_equivalent_p | |
901 | (TREE_VALUE (parms1), TREE_VALUE (parms2), visited)) | |
902 | { | |
903 | warn_odr (t1, t2, NULL, NULL, warn, warned, | |
904 | G_("has different parameters in another " | |
905 | "translation unit")); | |
906 | if (warn && warned) | |
907 | warn_types_mismatch (TREE_VALUE (parms1), | |
908 | TREE_VALUE (parms2)); | |
909 | return false; | |
910 | } | |
911 | } | |
912 | ||
913 | if (parms1 || parms2) | |
914 | { | |
915 | warn_odr (t1, t2, NULL, NULL, warn, warned, | |
916 | G_("has different parameters " | |
917 | "in another translation unit")); | |
918 | return false; | |
919 | } | |
920 | ||
921 | return true; | |
922 | } | |
923 | ||
924 | case RECORD_TYPE: | |
925 | case UNION_TYPE: | |
926 | case QUAL_UNION_TYPE: | |
927 | { | |
928 | tree f1, f2; | |
929 | ||
930 | /* For aggregate types, all the fields must be the same. */ | |
931 | if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2)) | |
932 | { | |
933 | for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2); | |
934 | f1 || f2; | |
935 | f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2)) | |
936 | { | |
937 | /* Skip non-fields. */ | |
938 | while (f1 && TREE_CODE (f1) != FIELD_DECL) | |
939 | f1 = TREE_CHAIN (f1); | |
940 | while (f2 && TREE_CODE (f2) != FIELD_DECL) | |
941 | f2 = TREE_CHAIN (f2); | |
942 | if (!f1 || !f2) | |
943 | break; | |
944 | if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2)) | |
945 | break; | |
946 | if (DECL_NAME (f1) != DECL_NAME (f2) | |
947 | && !DECL_ARTIFICIAL (f1)) | |
948 | { | |
949 | warn_odr (t1, t2, f1, f2, warn, warned, | |
950 | G_("a field with different name is defined " | |
951 | "in another translation unit")); | |
952 | return false; | |
953 | } | |
954 | if (!odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited)) | |
955 | { | |
956 | /* Do not warn about artificial fields and just go into generic | |
957 | field mismatch warning. */ | |
958 | if (DECL_ARTIFICIAL (f1)) | |
959 | break; | |
960 | ||
961 | warn_odr (t1, t2, f1, f2, warn, warned, | |
962 | G_("a field of same name but different type " | |
963 | "is defined in another translation unit")); | |
964 | if (warn && warned) | |
965 | warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2)); | |
966 | return false; | |
967 | } | |
968 | if (!gimple_compare_field_offset (f1, f2)) | |
969 | { | |
970 | /* Do not warn about artificial fields and just go into generic | |
971 | field mismatch warning. */ | |
972 | if (DECL_ARTIFICIAL (f1)) | |
973 | break; | |
                  warn_odr (t1, t2, t1, t2, warn, warned,
                            G_("fields have different layout "
                               "in another translation unit"));
977 | return false; | |
978 | } | |
979 | gcc_assert (DECL_NONADDRESSABLE_P (f1) | |
980 | == DECL_NONADDRESSABLE_P (f2)); | |
981 | } | |
982 | ||
983 | /* If one aggregate has more fields than the other, they | |
984 | are not the same. */ | |
985 | if (f1 || f2) | |
986 | { | |
987 | warn_odr (t1, t2, NULL, NULL, warn, warned, | |
988 | G_("a type with different number of fields " | |
989 | "is defined in another translation unit")); | |
990 | return false; | |
991 | } | |
992 | if ((TYPE_MAIN_VARIANT (t1) == t1 || TYPE_MAIN_VARIANT (t2) == t2) | |
993 | && (TYPE_METHODS (TYPE_MAIN_VARIANT (t1)) | |
994 | != TYPE_METHODS (TYPE_MAIN_VARIANT (t2)))) | |
995 | { | |
996 | for (f1 = TYPE_METHODS (TYPE_MAIN_VARIANT (t1)), | |
997 | f2 = TYPE_METHODS (TYPE_MAIN_VARIANT (t2)); | |
998 | f1 && f2 ; f1 = DECL_CHAIN (f1), f2 = DECL_CHAIN (f2)) | |
999 | { | |
1000 | if (DECL_ASSEMBLER_NAME (f1) != DECL_ASSEMBLER_NAME (f2)) | |
1001 | { | |
1002 | warn_odr (t1, t2, f1, f2, warn, warned, | |
1003 | G_("a different method of same type " | |
1004 | "is defined in another translation unit")); | |
1005 | return false; | |
1006 | } | |
1007 | if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2)) | |
1008 | { | |
1009 | warn_odr (t1, t2, f1, f2, warn, warned, | |
1010 | G_("s definition that differs by virtual " | |
1011 | "keyword in another translation unit")); | |
1012 | return false; | |
1013 | } | |
1014 | if (DECL_VINDEX (f1) != DECL_VINDEX (f2)) | |
1015 | { | |
1016 | warn_odr (t1, t2, f1, f2, warn, warned, | |
1017 | G_("virtual table layout differs in another " | |
1018 | "translation unit")); | |
1019 | return false; | |
1020 | } | |
                  if (!odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
1022 | { | |
1023 | warn_odr (t1, t2, f1, f2, warn, warned, | |
1024 | G_("method with incompatible type is defined " | |
1025 | "in another translation unit")); | |
1026 | return false; | |
1027 | } | |
1028 | } | |
1029 | if (f1 || f2) | |
1030 | { | |
1031 | warn_odr (t1, t2, NULL, NULL, warn, warned, | |
1032 | G_("a type with different number of methods " | |
1033 | "is defined in another translation unit")); | |
1034 | return false; | |
1035 | } | |
1036 | } | |
1037 | gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0)); | |
1038 | gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1), | |
1039 | TYPE_SIZE_UNIT (t2), 0)); | |
1040 | } | |
1041 | ||
1042 | return true; | |
1043 | } | |
1044 | ||
1045 | default: | |
1046 | gcc_unreachable (); | |
1047 | } | |
1048 | } | |
1049 | ||
/* TYPE is equivalent to VAL by ODR, but its tree representation differs
   from VAL->type.  This may happen in LTO where tree merging did not merge
   all variants of the same type.  It may or may not mean an ODR violation.
   Add it to the list of duplicates and warn on some violations.  */
1054 | ||
1055 | static bool | |
1056 | add_type_duplicate (odr_type val, tree type) | |
1057 | { | |
1058 | bool build_bases = false; | |
1059 | if (!val->types_set) | |
1060 | val->types_set = new hash_set<tree>; | |
1061 | ||
1062 | /* Always prefer complete type to be the leader. */ | |
1063 | if (!COMPLETE_TYPE_P (val->type) | |
1064 | && COMPLETE_TYPE_P (type)) | |
1065 | { | |
1066 | tree tmp = type; | |
1067 | ||
1068 | build_bases = true; | |
1069 | type = val->type; | |
1070 | val->type = tmp; | |
1071 | } | |
1072 | ||
1073 | /* See if this duplicate is new. */ | |
1074 | if (!val->types_set->add (type)) | |
1075 | { | |
1076 | bool merge = true; | |
1077 | bool base_mismatch = false; | |
1078 | unsigned int i,j; | |
1079 | bool warned = false; | |
1080 | hash_set<tree> visited; | |
1081 | ||
1082 | gcc_assert (in_lto_p); | |
1083 | vec_safe_push (val->types, type); | |
1084 | ||
1085 | /* First we compare memory layout. */ | |
1086 | if (!odr_types_equivalent_p (val->type, type, !flag_ltrans && !val->odr_violated, | |
1087 | &warned, &visited)) | |
1088 | { | |
1089 | merge = false; | |
1090 | odr_violation_reported = true; | |
1091 | val->odr_violated = true; | |
1092 | if (cgraph_dump_file) | |
1093 | { | |
1094 | fprintf (cgraph_dump_file, "ODR violation\n"); | |
1095 | ||
1096 | print_node (cgraph_dump_file, "", val->type, 0); | |
1097 | putc ('\n',cgraph_dump_file); | |
1098 | print_node (cgraph_dump_file, "", type, 0); | |
1099 | putc ('\n',cgraph_dump_file); | |
1100 | } | |
1101 | } | |
1102 | ||
1103 | /* Next sanity check that bases are the same. If not, we will end | |
1104 | up producing wrong answers. */ | |
1105 | if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type) | |
1106 | && TREE_CODE (val->type) == RECORD_TYPE | |
1107 | && TREE_CODE (type) == RECORD_TYPE | |
1108 | && TYPE_BINFO (val->type) && TYPE_BINFO (type)) | |
1109 | { | |
1110 | for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++) | |
1111 | if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i))) | |
1112 | { | |
1113 | odr_type base = get_odr_type | |
1114 | (BINFO_TYPE | |
1115 | (BINFO_BASE_BINFO (TYPE_BINFO (type), | |
1116 | i)), | |
1117 | true); | |
1118 | if (val->bases.length () <= j || val->bases[j] != base) | |
1119 | base_mismatch = true; | |
1120 | j++; | |
1121 | } | |
1122 | if (base_mismatch) | |
1123 | { | |
1124 | merge = false; | |
1125 | odr_violation_reported = true; | |
1126 | ||
1127 | if (!warned && !val->odr_violated) | |
1128 | warn_odr (type, val->type, NULL, NULL, !warned, &warned, | |
1129 | "a type with the same name but different bases is " | |
1130 | "defined in another translation unit"); | |
1131 | val->odr_violated = true; | |
1132 | if (cgraph_dump_file) | |
1133 | { | |
              fprintf (cgraph_dump_file, "ODR base violation or merging bug?\n");
1135 | ||
1136 | print_node (cgraph_dump_file, "", val->type, 0); | |
1137 | putc ('\n',cgraph_dump_file); | |
1138 | print_node (cgraph_dump_file, "", type, 0); | |
1139 | putc ('\n',cgraph_dump_file); | |
1140 | } | |
1141 | } | |
1142 | } | |
1143 | ||
1144 | /* Regularize things a little. During LTO same types may come with | |
1145 | different BINFOs. Either because their virtual table was | |
1146 | not merged by tree merging and only later at decl merging or | |
1147 | because one type comes with external vtable, while other | |
1148 | with internal. We want to merge equivalent binfos to conserve | |
1149 | memory and streaming overhead. | |
1150 | ||
1151 | The external vtables are more harmful: they contain references | |
1152 | to external declarations of methods that may be defined in the | |
1153 | merged LTO unit. For this reason we absolutely need to remove | |
1154 | them and replace by internal variants. Not doing so will lead | |
1155 | to incomplete answers from possible_polymorphic_call_targets. */ | |
1156 | if (!flag_ltrans && merge | |
1157 | && TREE_CODE (val->type) == RECORD_TYPE | |
1158 | && TREE_CODE (type) == RECORD_TYPE | |
1159 | && TYPE_BINFO (val->type) && TYPE_BINFO (type) | |
1160 | && TYPE_MAIN_VARIANT (type) == type | |
1161 | && TYPE_MAIN_VARIANT (val->type) == val->type | |
1162 | && BINFO_VTABLE (TYPE_BINFO (val->type)) | |
1163 | && BINFO_VTABLE (TYPE_BINFO (type))) | |
1164 | { | |
1165 | tree master_binfo = TYPE_BINFO (val->type); | |
1166 | tree v1 = BINFO_VTABLE (master_binfo); | |
1167 | tree v2 = BINFO_VTABLE (TYPE_BINFO (type)); | |
1168 | ||
1169 | if (TREE_CODE (v1) == POINTER_PLUS_EXPR) | |
1170 | { | |
1171 | gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR | |
1172 | && operand_equal_p (TREE_OPERAND (v1, 1), | |
1173 | TREE_OPERAND (v2, 1), 0)); | |
1174 | v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0); | |
1175 | v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0); | |
1176 | } | |
1177 | gcc_assert (DECL_ASSEMBLER_NAME (v1) | |
1178 | == DECL_ASSEMBLER_NAME (v2)); | |
1179 | ||
1180 | if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2)) | |
1181 | { | |
1182 | unsigned int i; | |
1183 | ||
1184 | set_type_binfo (val->type, TYPE_BINFO (type)); | |
1185 | for (i = 0; i < val->types->length (); i++) | |
1186 | { | |
1187 | if (TYPE_BINFO ((*val->types)[i]) | |
1188 | == master_binfo) | |
1189 | set_type_binfo ((*val->types)[i], TYPE_BINFO (type)); | |
1190 | } | |
1191 | BINFO_TYPE (TYPE_BINFO (type)) = val->type; | |
1192 | } | |
1193 | else | |
1194 | set_type_binfo (type, master_binfo); | |
1195 | } | |
1196 | } | |
1197 | return build_bases; | |
1198 | } | |
1199 | ||
1200 | /* Get ODR type hash entry for TYPE. If INSERT is true, create | |
1201 | possibly new entry. */ | |
1202 | ||
1203 | odr_type | |
1204 | get_odr_type (tree type, bool insert) | |
1205 | { | |
1206 | odr_type_d **slot; | |
1207 | odr_type val; | |
1208 | hashval_t hash; | |
1209 | bool build_bases = false; | |
1210 | bool insert_to_odr_array = false; | |
1211 | int base_id = -1; | |
1212 | ||
1213 | type = main_odr_variant (type); | |
1214 | ||
1215 | hash = hash_type_name (type); | |
1216 | slot | |
1217 | = odr_hash->find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT); | |
1218 | if (!slot) | |
1219 | return NULL; | |
1220 | ||
1221 | /* See if we already have entry for type. */ | |
1222 | if (*slot) | |
1223 | { | |
1224 | val = *slot; | |
1225 | ||
1226 | /* With LTO we need to support multiple tree representation of | |
1227 | the same ODR type. */ | |
1228 | if (val->type != type) | |
1229 | build_bases = add_type_duplicate (val, type); | |
1230 | } | |
1231 | else | |
1232 | { | |
1233 | val = ggc_cleared_alloc<odr_type_d> (); | |
1234 | val->type = type; | |
1235 | val->bases = vNULL; | |
1236 | val->derived_types = vNULL; | |
1237 | val->anonymous_namespace = type_in_anonymous_namespace_p (type); | |
1238 | build_bases = COMPLETE_TYPE_P (val->type); | |
1239 | insert_to_odr_array = true; | |
1240 | } | |
1241 | ||
1242 | if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type) | |
1243 | && type == TYPE_MAIN_VARIANT (type)) | |
1244 | { | |
1245 | tree binfo = TYPE_BINFO (type); | |
1246 | unsigned int i; | |
1247 | ||
      gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);
1249 | ||
1250 | val->all_derivations_known = type_all_derivations_known_p (type); | |
1251 | *slot = val; | |
1252 | for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++) | |
        /* For now record only polymorphic types.  Others are
           pointless for devirtualization and we can not precisely
           determine ODR equivalency of these during LTO.  */
1256 | if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i))) | |
1257 | { | |
1258 | odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo, | |
1259 | i)), | |
1260 | true); | |
1261 | gcc_assert (TYPE_MAIN_VARIANT (base->type) == base->type); | |
1262 | base->derived_types.safe_push (val); | |
1263 | val->bases.safe_push (base); | |
1264 | if (base->id > base_id) | |
1265 | base_id = base->id; | |
1266 | } | |
1267 | } | |
1268 | /* Ensure that type always appears after bases. */ | |
1269 | if (insert_to_odr_array) | |
1270 | { | |
1271 | if (odr_types_ptr) | |
1272 | val->id = odr_types.length (); | |
1273 | vec_safe_push (odr_types_ptr, val); | |
1274 | } | |
1275 | else if (base_id > val->id) | |
1276 | { | |
1277 | odr_types[val->id] = 0; | |
      /* Be sure we did not record any derived types; these may need
         renumbering too.  */
1280 | gcc_assert (val->derived_types.length() == 0); | |
1281 | if (odr_types_ptr) | |
1282 | val->id = odr_types.length (); | |
1283 | vec_safe_push (odr_types_ptr, val); | |
1284 | } | |
1285 | return val; | |
1286 | } | |
1287 | ||
/* Dump ODR type T and all its derived types.  INDENT specifies indentation
   for recursive printing.  */
1290 | ||
1291 | static void | |
1292 | dump_odr_type (FILE *f, odr_type t, int indent=0) | |
1293 | { | |
1294 | unsigned int i; | |
1295 | fprintf (f, "%*s type %i: ", indent * 2, "", t->id); | |
1296 | print_generic_expr (f, t->type, TDF_SLIM); | |
1297 | fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":""); | |
1298 | fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":""); | |
1299 | if (TYPE_NAME (t->type)) | |
1300 | { | |
1301 | fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "", | |
1302 | DECL_SOURCE_FILE (TYPE_NAME (t->type)), | |
1303 | DECL_SOURCE_LINE (TYPE_NAME (t->type))); | |
1304 | } | |
1305 | if (t->bases.length ()) | |
1306 | { | |
1307 | fprintf (f, "%*s base odr type ids: ", indent * 2, ""); | |
1308 | for (i = 0; i < t->bases.length (); i++) | |
1309 | fprintf (f, " %i", t->bases[i]->id); | |
1310 | fprintf (f, "\n"); | |
1311 | } | |
1312 | if (t->derived_types.length ()) | |
1313 | { | |
1314 | fprintf (f, "%*s derived types:\n", indent * 2, ""); | |
1315 | for (i = 0; i < t->derived_types.length (); i++) | |
1316 | dump_odr_type (f, t->derived_types[i], indent + 1); | |
1317 | } | |
1318 | fprintf (f, "\n"); | |
1319 | } | |
1320 | ||
1321 | /* Dump the type inheritance graph. */ | |
1322 | ||
1323 | static void | |
1324 | dump_type_inheritance_graph (FILE *f) | |
1325 | { | |
1326 | unsigned int i; | |
1327 | if (!odr_types_ptr) | |
1328 | return; | |
1329 | fprintf (f, "\n\nType inheritance graph:\n"); | |
1330 | for (i = 0; i < odr_types.length (); i++) | |
1331 | { | |
1332 | if (odr_types[i] && odr_types[i]->bases.length () == 0) | |
1333 | dump_odr_type (f, odr_types[i]); | |
1334 | } | |
1335 | for (i = 0; i < odr_types.length (); i++) | |
1336 | { | |
1337 | if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ()) | |
1338 | { | |
1339 | unsigned int j; | |
1340 | fprintf (f, "Duplicate tree types for odr type %i\n", i); | |
1341 | print_node (f, "", odr_types[i]->type, 0); | |
1342 | for (j = 0; j < odr_types[i]->types->length (); j++) | |
1343 | { | |
1344 | tree t; | |
1345 | fprintf (f, "duplicate #%i\n", j); | |
1346 | print_node (f, "", (*odr_types[i]->types)[j], 0); | |
1347 | t = (*odr_types[i]->types)[j]; | |
1348 | while (TYPE_P (t) && TYPE_CONTEXT (t)) | |
1349 | { | |
1350 | t = TYPE_CONTEXT (t); | |
1351 | print_node (f, "", t, 0); | |
1352 | } | |
1353 | putc ('\n',f); | |
1354 | } | |
1355 | } | |
1356 | } | |
1357 | } | |
1358 | ||
/* Given method type T, return the type of the class it belongs to.
   Look up the `this' parameter and get its type.  */
1361 | ||
1362 | tree | |
1363 | method_class_type (const_tree t) | |
1364 | { | |
1365 | tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t)); | |
1366 | gcc_assert (TREE_CODE (t) == METHOD_TYPE); | |
1367 | ||
1368 | return TREE_TYPE (first_parm_type); | |
1369 | } | |
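
/* For example, for a member function declared as `int A::foo (int)', the
   METHOD_TYPE argument list starts with the implicit `this' parameter of
   type `A *', so the function above returns the class type A.  */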
1370 | ||
1371 | /* Initialize IPA devirt and build inheritance tree graph. */ | |
1372 | ||
1373 | void | |
1374 | build_type_inheritance_graph (void) | |
1375 | { | |
1376 | struct symtab_node *n; | |
1377 | FILE *inheritance_dump_file; | |
1378 | int flags; | |
1379 | ||
1380 | if (odr_hash) | |
1381 | return; | |
1382 | timevar_push (TV_IPA_INHERITANCE); | |
1383 | inheritance_dump_file = dump_begin (TDI_inheritance, &flags); | |
1384 | odr_hash = new odr_hash_type (23); | |
1385 | ||
  /* We reconstruct the graph starting from the types of all methods seen
     in the unit.  */
1388 | FOR_EACH_SYMBOL (n) | |
1389 | if (is_a <cgraph_node *> (n) | |
1390 | && DECL_VIRTUAL_P (n->decl) | |
1391 | && n->real_symbol_p ()) | |
1392 | get_odr_type (TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (n->decl))), | |
1393 | true); | |
1394 | ||
  /* Look also for virtual tables of types that do not define any methods.

     We need it in a case where class B has a virtual base of class A
     re-defining its virtual method and there is class C with no virtual
     methods with B as a virtual base.

     Here we output B's virtual method in two variants - for non-virtual
     and virtual inheritance.  B's virtual table has the non-virtual version,
     while C's has the virtual one.

     For this reason we need to know about C in order to include both
     variants of B.  More correctly, record_target_from_binfo should
     add both variants of the method when walking B, but we have no
     link in between them.

     We rely on the fact that either the method is exported and thus we
     assume it is called externally or C is in anonymous namespace and
     thus we will see the vtable.  */
1413 | ||
1414 | else if (is_a <varpool_node *> (n) | |
1415 | && DECL_VIRTUAL_P (n->decl) | |
1416 | && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE | |
1417 | && TYPE_BINFO (DECL_CONTEXT (n->decl)) | |
1418 | && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl)))) | |
1419 | get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true); | |
1420 | if (inheritance_dump_file) | |
1421 | { | |
1422 | dump_type_inheritance_graph (inheritance_dump_file); | |
1423 | dump_end (TDI_inheritance, inheritance_dump_file); | |
1424 | } | |
1425 | timevar_pop (TV_IPA_INHERITANCE); | |
1426 | } | |
1427 | ||
/* Return true if NODE has a reference from a live virtual table
   (and thus can be a destination of a polymorphic call).
   Be conservatively correct when the callgraph is not built or
   if the method may be referred to externally.  */
1432 | ||
1433 | static bool | |
1434 | referenced_from_vtable_p (struct cgraph_node *node) | |
1435 | { | |
1436 | int i; | |
1437 | struct ipa_ref *ref; | |
1438 | bool found = false; | |
1439 | ||
1440 | if (node->externally_visible | |
1441 | || DECL_EXTERNAL (node->decl) | |
1442 | || node->used_from_other_partition) | |
1443 | return true; | |
1444 | ||
1445 | /* Keep this test constant time. | |
1446 | It is unlikely this can happen except for the case where speculative | |
1447 | devirtualization introduced many speculative edges to this node. | |
1448 | In this case the target is very likely alive anyway. */ | |
1449 | if (node->ref_list.referring.length () > 100) | |
1450 | return true; | |
1451 | ||
1452 | /* We need references built. */ | |
1453 | if (cgraph_state <= CGRAPH_STATE_CONSTRUCTION) | |
1454 | return true; | |
1455 | ||
1456 | for (i = 0; node->iterate_referring (i, ref); i++) | |
1457 | ||
1458 | if ((ref->use == IPA_REF_ALIAS | |
1459 | && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring))) | |
1460 | || (ref->use == IPA_REF_ADDR | |
1461 | && TREE_CODE (ref->referring->decl) == VAR_DECL | |
1462 | && DECL_VIRTUAL_P (ref->referring->decl))) | |
1463 | { | |
1464 | found = true; | |
1465 | break; | |
1466 | } | |
1467 | return found; | |
1468 | } | |
1469 | ||
/* If TARGET has an associated node, record it in the NODES array.
   CAN_REFER specifies whether the program can refer to the target
   directly.  If TARGET is unknown (NULL) or if it can not be inserted
   (for example because its body was already removed and there is no
   way to refer to it), clear COMPLETEP.  */
1475 | ||
1476 | static void | |
1477 | maybe_record_node (vec <cgraph_node *> &nodes, | |
1478 | tree target, hash_set<tree> *inserted, | |
1479 | bool can_refer, | |
1480 | bool *completep) | |
1481 | { | |
1482 | struct cgraph_node *target_node, *alias_target; | |
1483 | enum availability avail; | |
1484 | ||
1485 | /* cxa_pure_virtual and __builtin_unreachable do not need to be added into | |
1486 | the list of targets; the runtime effect of calling them is undefined. | |
1487 | Only "real" virtual methods should be accounted for. */ | |
1488 | if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE) | |
1489 | return; | |
1490 | ||
1491 | if (!can_refer) | |
1492 | { | |
1493 | /* The only case when a method of an anonymous namespace becomes unreferable | |
1494 | is when we have completely optimized it out. */ | |
1495 | if (flag_ltrans | |
1496 | || !target | |
1497 | || !type_in_anonymous_namespace_p (DECL_CONTEXT (target))) | |
1498 | *completep = false; | |
1499 | return; | |
1500 | } | |
1501 | ||
1502 | if (!target) | |
1503 | return; | |
1504 | ||
1505 | target_node = cgraph_node::get (target); | |
1506 | ||
1507 | /* Prefer the alias target over aliases, so we do not get confused by | |
1508 | fake duplicates. */ | |
1509 | if (target_node) | |
1510 | { | |
1511 | alias_target = target_node->ultimate_alias_target (&avail); | |
1512 | if (target_node != alias_target | |
1513 | && avail >= AVAIL_AVAILABLE | |
1514 | && target_node->get_availability ()) | |
1515 | target_node = alias_target; | |
1516 | } | |
1517 | ||
1518 | /* A method can only be called by a polymorphic call if any | |
1519 | of the vtables referring to it are alive. | |
1520 | ||
1521 | While this holds for non-anonymous functions, too, there are | |
1522 | cases where we want to keep them in the list; for example | |
1523 | inline functions with -fno-weak are static, but we still | |
1524 | may devirtualize them when the instance comes from another unit. | |
1525 | The same holds for LTO. | |
1526 | ||
1527 | Currently we ignore these functions in speculative devirtualization. | |
1528 | ??? Maybe it would make sense to be more aggressive for LTO even | |
1529 | elsewhere. */ | |
1530 | if (!flag_ltrans | |
1531 | && type_in_anonymous_namespace_p (DECL_CONTEXT (target)) | |
1532 | && (!target_node | |
1533 | || !referenced_from_vtable_p (target_node))) | |
1534 | ; | |
1535 | /* See if TARGET is a useful function we can deal with. */ | |
1536 | else if (target_node != NULL | |
1537 | && (TREE_PUBLIC (target) | |
1538 | || DECL_EXTERNAL (target) | |
1539 | || target_node->definition) | |
1540 | && target_node->real_symbol_p ()) | |
1541 | { | |
1542 | gcc_assert (!target_node->global.inlined_to); | |
1543 | gcc_assert (target_node->real_symbol_p ()); | |
1544 | if (!inserted->add (target)) | |
1545 | { | |
1546 | cached_polymorphic_call_targets->add (target_node); | |
1547 | nodes.safe_push (target_node); | |
1548 | } | |
1549 | } | |
1550 | else if (completep | |
1551 | && (!type_in_anonymous_namespace_p | |
1552 | (DECL_CONTEXT (target)) | |
1553 | || flag_ltrans)) | |
1554 | *completep = false; | |
1555 | } | |
1556 | ||
1557 | /* See if BINFO's type matches OUTER_TYPE. If so, look up the | |
1558 | BINFO of the subtype of OTR_TYPE at OFFSET and in that BINFO find the | |
1559 | method in the vtable and insert the method into the NODES array, | |
1560 | or into BASES_TO_CONSIDER if this array is non-NULL. | |
1561 | Otherwise recurse into base BINFOs. | |
1562 | This matches what get_binfo_at_offset does, but with the offset | |
1563 | being unknown. | |
1564 | ||
1565 | TYPE_BINFOS is a stack of BINFOs of types with defined | |
1566 | virtual tables seen on the way from the class type to BINFO. | |
1567 | ||
1568 | MATCHED_VTABLES tracks virtual tables in which we already looked up | |
1569 | the virtual function. INSERTED tracks nodes we already | |
1570 | inserted. | |
1571 | ||
1572 | ANONYMOUS is true if BINFO is part of an anonymous namespace. | |
1573 | ||
1574 | Clear COMPLETEP when we hit an unreferable target. | |
1575 | */ | |
1576 | ||
1577 | static void | |
1578 | record_target_from_binfo (vec <cgraph_node *> &nodes, | |
1579 | vec <tree> *bases_to_consider, | |
1580 | tree binfo, | |
1581 | tree otr_type, | |
1582 | vec <tree> &type_binfos, | |
1583 | HOST_WIDE_INT otr_token, | |
1584 | tree outer_type, | |
1585 | HOST_WIDE_INT offset, | |
1586 | hash_set<tree> *inserted, | |
1587 | hash_set<tree> *matched_vtables, | |
1588 | bool anonymous, | |
1589 | bool *completep) | |
1590 | { | |
1591 | tree type = BINFO_TYPE (binfo); | |
1592 | int i; | |
1593 | tree base_binfo; | |
1594 | ||
1595 | ||
1596 | if (BINFO_VTABLE (binfo)) | |
1597 | type_binfos.safe_push (binfo); | |
1598 | if (types_same_for_odr (type, outer_type)) | |
1599 | { | |
1600 | int i; | |
1601 | tree type_binfo = NULL; | |
1602 | ||
1603 | /* Look up the BINFO with a virtual table. For normal types it is always the last | |
1604 | binfo on the stack. */ | |
1605 | for (i = type_binfos.length () - 1; i >= 0; i--) | |
1606 | if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo)) | |
1607 | { | |
1608 | type_binfo = type_binfos[i]; | |
1609 | break; | |
1610 | } | |
1611 | if (BINFO_VTABLE (binfo)) | |
1612 | type_binfos.pop (); | |
1613 | /* If this is a duplicated BINFO for a base shared by virtual inheritance, | |
1614 | we may not have its associated vtable. This is not a problem, since | |
1615 | we will walk it on the other path. */ | |
1616 | if (!type_binfo) | |
1617 | return; | |
1618 | tree inner_binfo = get_binfo_at_offset (type_binfo, | |
1619 | offset, otr_type); | |
1620 | if (!inner_binfo) | |
1621 | { | |
1622 | gcc_assert (odr_violation_reported); | |
1623 | return; | |
1624 | } | |
1625 | /* For types in an anonymous namespace, first check if the respective vtable | |
1626 | is alive. If not, we know the type can't be called. */ | |
1627 | if (!flag_ltrans && anonymous) | |
1628 | { | |
1629 | tree vtable = BINFO_VTABLE (inner_binfo); | |
1630 | varpool_node *vnode; | |
1631 | ||
1632 | if (TREE_CODE (vtable) == POINTER_PLUS_EXPR) | |
1633 | vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0); | |
1634 | vnode = varpool_node::get (vtable); | |
1635 | if (!vnode || !vnode->definition) | |
1636 | return; | |
1637 | } | |
1638 | gcc_assert (inner_binfo); | |
1639 | if (bases_to_consider | |
1640 | ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo)) | |
1641 | : !matched_vtables->add (BINFO_VTABLE (inner_binfo))) | |
1642 | { | |
1643 | bool can_refer; | |
1644 | tree target = gimple_get_virt_method_for_binfo (otr_token, | |
1645 | inner_binfo, | |
1646 | &can_refer); | |
1647 | if (!bases_to_consider) | |
1648 | maybe_record_node (nodes, target, inserted, can_refer, completep); | |
1649 | /* Destructors are never called via construction vtables. */ | |
1650 | else if (!target || !DECL_CXX_DESTRUCTOR_P (target)) | |
1651 | bases_to_consider->safe_push (target); | |
1652 | } | |
1653 | return; | |
1654 | } | |
1655 | ||
1656 | /* Walk bases. */ | |
1657 | for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++) | |
1658 | /* Walking bases that have no virtual method is a pointless exercise. */ | |
1659 | if (polymorphic_type_binfo_p (base_binfo)) | |
1660 | record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type, | |
1661 | type_binfos, | |
1662 | otr_token, outer_type, offset, inserted, | |
1663 | matched_vtables, anonymous, completep); | |
1664 | if (BINFO_VTABLE (binfo)) | |
1665 | type_binfos.pop (); | |
1666 | } | |
1667 | ||
1668 | /* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN) | |
1669 | of TYPE, insert them into NODES, and recurse into derived types. | |
1670 | INSERTED is used to avoid duplicate insertions of methods into NODES. | |
1671 | MATCHED_VTABLES is used to avoid walking the same vtables twice. | |
1672 | Clear COMPLETEP if an unreferable target is found. | |
1673 | ||
1674 | If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER | |
1675 | all cases where BASE_SKIPPED is true (because the base is an abstract | |
1676 | class). */ | |
1677 | ||
1678 | static void | |
1679 | possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes, | |
1680 | hash_set<tree> *inserted, | |
1681 | hash_set<tree> *matched_vtables, | |
1682 | tree otr_type, | |
1683 | odr_type type, | |
1684 | HOST_WIDE_INT otr_token, | |
1685 | tree outer_type, | |
1686 | HOST_WIDE_INT offset, | |
1687 | bool *completep, | |
1688 | vec <tree> &bases_to_consider, | |
1689 | bool consider_construction) | |
1690 | { | |
1691 | tree binfo = TYPE_BINFO (type->type); | |
1692 | unsigned int i; | |
1693 | vec <tree> type_binfos = vNULL; | |
1694 | bool possibly_instantiated = type_possibly_instantiated_p (type->type); | |
1695 | ||
1696 | /* We may need to consider types w/o instances because of possible derived | |
1697 | types using their methods either directly or via construction vtables. | |
1698 | We are safe to skip them when all derivations are known, since we will | |
1699 | handle them later. | |
1700 | This is done by recording them in the BASES_TO_CONSIDER array. */ | |
1701 | if (possibly_instantiated || consider_construction) | |
1702 | { | |
1703 | record_target_from_binfo (nodes, | |
1704 | (!possibly_instantiated | |
1705 | && type_all_derivations_known_p (type->type)) | |
1706 | ? &bases_to_consider : NULL, | |
1707 | binfo, otr_type, type_binfos, otr_token, | |
1708 | outer_type, offset, | |
1709 | inserted, matched_vtables, | |
1710 | type->anonymous_namespace, completep); | |
1711 | } | |
1712 | type_binfos.release (); | |
1713 | for (i = 0; i < type->derived_types.length (); i++) | |
1714 | possible_polymorphic_call_targets_1 (nodes, inserted, | |
1715 | matched_vtables, | |
1716 | otr_type, | |
1717 | type->derived_types[i], | |
1718 | otr_token, outer_type, offset, completep, | |
1719 | bases_to_consider, consider_construction); | |
1720 | } | |
1721 | ||
1722 | /* Cache of queries for polymorphic call targets. | |
1723 | ||
1724 | Enumerating all call targets may get expensive when there are many | |
1725 | polymorphic calls in the program, so we memoize all the previous | |
1726 | queries and avoid duplicated work. */ | |
1727 | ||
1728 | struct polymorphic_call_target_d | |
1729 | { | |
1730 | HOST_WIDE_INT otr_token; | |
1731 | ipa_polymorphic_call_context context; | |
1732 | odr_type type; | |
1733 | vec <cgraph_node *> targets; | |
1734 | int speculative_targets; | |
1735 | bool complete; | |
1736 | int type_warning; | |
1737 | tree decl_warning; | |
1738 | }; | |
1739 | ||
1740 | /* Polymorphic call target cache helpers. */ | |
1741 | ||
1742 | struct polymorphic_call_target_hasher | |
1743 | { | |
1744 | typedef polymorphic_call_target_d value_type; | |
1745 | typedef polymorphic_call_target_d compare_type; | |
1746 | static inline hashval_t hash (const value_type *); | |
1747 | static inline bool equal (const value_type *, const compare_type *); | |
1748 | static inline void remove (value_type *); | |
1749 | }; | |
1750 | ||
1751 | /* Return the computed hashcode for ODR_QUERY. */ | |
1752 | ||
1753 | inline hashval_t | |
1754 | polymorphic_call_target_hasher::hash (const value_type *odr_query) | |
1755 | { | |
1756 | inchash::hash hstate (odr_query->otr_token); | |
1757 | ||
1758 | hstate.add_wide_int (odr_query->type->id); | |
1759 | hstate.merge_hash (TYPE_UID (odr_query->context.outer_type)); | |
1760 | hstate.add_wide_int (odr_query->context.offset); | |
1761 | ||
1762 | if (odr_query->context.speculative_outer_type) | |
1763 | { | |
1764 | hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type)); | |
1765 | hstate.add_wide_int (odr_query->context.speculative_offset); | |
1766 | } | |
1767 | hstate.add_flag (odr_query->context.maybe_in_construction); | |
1768 | hstate.add_flag (odr_query->context.maybe_derived_type); | |
1769 | hstate.add_flag (odr_query->context.speculative_maybe_derived_type); | |
1770 | hstate.commit_flag (); | |
1771 | return hstate.end (); | |
1772 | } | |
1773 | ||
1774 | /* Compare cache entries T1 and T2. */ | |
1775 | ||
1776 | inline bool | |
1777 | polymorphic_call_target_hasher::equal (const value_type *t1, | |
1778 | const compare_type *t2) | |
1779 | { | |
1780 | return (t1->type == t2->type && t1->otr_token == t2->otr_token | |
1781 | && t1->context.offset == t2->context.offset | |
1782 | && t1->context.speculative_offset == t2->context.speculative_offset | |
1783 | && t1->context.outer_type == t2->context.outer_type | |
1784 | && t1->context.speculative_outer_type == t2->context.speculative_outer_type | |
1785 | && t1->context.maybe_in_construction | |
1786 | == t2->context.maybe_in_construction | |
1787 | && t1->context.maybe_derived_type == t2->context.maybe_derived_type | |
1788 | && (t1->context.speculative_maybe_derived_type | |
1789 | == t2->context.speculative_maybe_derived_type)); | |
1790 | } | |
1791 | ||
1792 | /* Remove entry in polymorphic call target cache hash. */ | |
1793 | ||
1794 | inline void | |
1795 | polymorphic_call_target_hasher::remove (value_type *v) | |
1796 | { | |
1797 | v->targets.release (); | |
1798 | free (v); | |
1799 | } | |
1800 | ||
1801 | /* Polymorphic call target query cache. */ | |
1802 | ||
1803 | typedef hash_table<polymorphic_call_target_hasher> | |
1804 | polymorphic_call_target_hash_type; | |
1805 | static polymorphic_call_target_hash_type *polymorphic_call_target_hash; | |
1806 | ||
1807 | /* Destroy polymorphic call target query cache. */ | |
1808 | ||
1809 | static void | |
1810 | free_polymorphic_call_targets_hash () | |
1811 | { | |
1812 | if (cached_polymorphic_call_targets) | |
1813 | { | |
1814 | delete polymorphic_call_target_hash; | |
1815 | polymorphic_call_target_hash = NULL; | |
1816 | delete cached_polymorphic_call_targets; | |
1817 | cached_polymorphic_call_targets = NULL; | |
1818 | } | |
1819 | } | |
1820 | ||
1821 | /* When virtual function is removed, we may need to flush the cache. */ | |
1822 | ||
1823 | static void | |
1824 | devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED) | |
1825 | { | |
1826 | if (cached_polymorphic_call_targets | |
1827 | && cached_polymorphic_call_targets->contains (n)) | |
1828 | free_polymorphic_call_targets_hash (); | |
1829 | } | |
1830 | ||
1831 | /* Return true when TYPE contains a polymorphic type and thus is interesting | |
1832 | for the devirtualization machinery. */ | |
1833 | ||
1834 | bool | |
1835 | contains_polymorphic_type_p (const_tree type) | |
1836 | { | |
1837 | type = TYPE_MAIN_VARIANT (type); | |
1838 | ||
1839 | if (RECORD_OR_UNION_TYPE_P (type)) | |
1840 | { | |
1841 | if (TYPE_BINFO (type) | |
1842 | && polymorphic_type_binfo_p (TYPE_BINFO (type))) | |
1843 | return true; | |
1844 | for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld)) | |
1845 | if (TREE_CODE (fld) == FIELD_DECL | |
1846 | && !DECL_ARTIFICIAL (fld) | |
1847 | && contains_polymorphic_type_p (TREE_TYPE (fld))) | |
1848 | return true; | |
1849 | return false; | |
1850 | } | |
1851 | if (TREE_CODE (type) == ARRAY_TYPE) | |
1852 | return contains_polymorphic_type_p (TREE_TYPE (type)); | |
1853 | return false; | |
1854 | } | |
1855 | ||
1856 | /* Clear speculative info from CONTEXT. */ | |
1857 | ||
1858 | static void | |
1859 | clear_speculation (ipa_polymorphic_call_context *context) | |
1860 | { | |
1861 | context->speculative_outer_type = NULL; | |
1862 | context->speculative_offset = 0; | |
1863 | context->speculative_maybe_derived_type = false; | |
1864 | } | |
1865 | ||
1866 | /* CONTEXT->OUTER_TYPE is the type of the memory object in which an object of | |
1867 | EXPECTED_TYPE is contained at CONTEXT->OFFSET. Walk the memory representation of | |
1868 | CONTEXT->OUTER_TYPE and find the outermost class type that matches | |
1869 | EXPECTED_TYPE or contain EXPECTED_TYPE as a base. Update CONTEXT | |
1870 | to represent it. | |
1871 | ||
1872 | For example when CONTEXT represents type | |
1873 | class A | |
1874 | { | |
1875 | int a; | |
1876 | class B b; | |
1877 | } | |
1878 | and we look for type at offset sizeof(int), we end up with B and offset 0. | |
1879 | If the same is produced by multiple inheritance, we end up with A and offset | |
1880 | sizeof(int). | |
1881 | ||
1882 | If we cannot find the corresponding class, give up by setting | |
1883 | CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to 0. | |
1884 | Return true when the lookup was successful. */ | |
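/* A hypothetical contrast to the example above (assuming the usual
   Itanium C++ ABI layout):

     class X { virtual void g (); };     // primary base, at offset 0
     class B { virtual void f (); };     // secondary base
     class A : public X, public B { };

   The sub-object of type B now sits at a non-zero offset but is
   represented by an artificial FIELD_DECL (a base), so the walk below
   keeps OUTER_TYPE == A and the original offset rather than narrowing
   down to B as it does for the member field case.  */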
1885 | ||
1886 | static bool | |
1887 | get_class_context (ipa_polymorphic_call_context *context, | |
1888 | tree expected_type) | |
1889 | { | |
1890 | tree type = context->outer_type; | |
1891 | HOST_WIDE_INT offset = context->offset; | |
1892 | bool speculative = false; | |
1893 | bool speculation_valid = false; | |
1894 | bool valid = false; | |
1895 | ||
1896 | if (!context->outer_type) | |
1897 | { | |
1898 | type = context->outer_type = expected_type; | |
1899 | context->offset = offset = 0; | |
1900 | } | |
1901 | ||
1902 | if (context->speculative_outer_type == context->outer_type | |
1903 | && (!context->maybe_derived_type | |
1904 | || context->speculative_maybe_derived_type)) | |
1905 | { | |
1906 | context->speculative_outer_type = NULL; | |
1907 | context->speculative_offset = 0; | |
1908 | context->speculative_maybe_derived_type = false; | |
1909 | } | |
1910 | ||
1911 | /* See if the speculative type seems to be derived from outer_type. | |
1912 | Then the speculation is valid only if it really is a derived type and derived | |
1913 | types are allowed. | |
1914 | ||
1915 | The test does not strictly check for a derived type; it also accepts the case where | |
1916 | outer_type is a field of speculative_outer_type. In this case either | |
1917 | MAYBE_DERIVED_TYPE is false and we have full non-speculative information, or | |
1918 | the loop below will correctly update SPECULATIVE_OUTER_TYPE | |
1919 | and SPECULATIVE_MAYBE_DERIVED_TYPE. */ | |
1920 | if (context->speculative_outer_type | |
1921 | && context->speculative_offset >= context->offset | |
1922 | && contains_type_p (context->speculative_outer_type, | |
1923 | context->offset - context->speculative_offset, | |
1924 | context->outer_type)) | |
1925 | speculation_valid = context->maybe_derived_type; | |
1926 | else | |
1927 | clear_speculation (context); | |
1928 | ||
1929 | /* Find the sub-object the constant actually refers to and mark whether it is | |
1930 | an artificial one (as opposed to a user-defined one). | |
1931 | ||
1932 | This loop is performed twice; first time for outer_type and second time | |
1933 | for speculative_outer_type. The second iteration has SPECULATIVE set. */ | |
1934 | while (true) | |
1935 | { | |
1936 | HOST_WIDE_INT pos, size; | |
1937 | tree fld; | |
1938 | ||
1939 | /* On a match, just return what we found. */ | |
1940 | if (TREE_CODE (type) == TREE_CODE (expected_type) | |
1941 | && (!in_lto_p | |
1942 | || (TREE_CODE (type) == RECORD_TYPE | |
1943 | && TYPE_BINFO (type) | |
1944 | && polymorphic_type_binfo_p (TYPE_BINFO (type)))) | |
1945 | && types_same_for_odr (type, expected_type)) | |
1946 | { | |
1947 | if (speculative) | |
1948 | { | |
1949 | gcc_assert (speculation_valid); | |
1950 | gcc_assert (valid); | |
1951 | ||
1952 | /* If we did not match the offset, just give up on speculation. */ | |
1953 | if (offset != 0 | |
1954 | || (types_same_for_odr (context->speculative_outer_type, | |
1955 | context->outer_type) | |
1956 | && (context->maybe_derived_type | |
1957 | == context->speculative_maybe_derived_type))) | |
1958 | clear_speculation (context); | |
1959 | return true; | |
1960 | } | |
1961 | else | |
1962 | { | |
1963 | /* A type cannot contain itself at a non-zero offset. In that case | |
1964 | just give up. */ | |
1965 | if (offset != 0) | |
1966 | { | |
1967 | valid = false; | |
1968 | goto give_up; | |
1969 | } | |
1970 | valid = true; | |
1971 | /* If speculation is not valid or we determined type precisely, | |
1972 | we are done. */ | |
1973 | if (!speculation_valid | |
1974 | || !context->maybe_derived_type) | |
1975 | { | |
1976 | clear_speculation (context); | |
1977 | return true; | |
1978 | } | |
1979 | /* Otherwise look into speculation now. */ | |
1980 | else | |
1981 | { | |
1982 | speculative = true; | |
1983 | type = context->speculative_outer_type; | |
1984 | offset = context->speculative_offset; | |
1985 | continue; | |
1986 | } | |
1987 | } | |
1988 | } | |
1989 | ||
1990 | /* Walk the fields and find the corresponding one at OFFSET. */ | |
1991 | if (TREE_CODE (type) == RECORD_TYPE) | |
1992 | { | |
1993 | for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld)) | |
1994 | { | |
1995 | if (TREE_CODE (fld) != FIELD_DECL) | |
1996 | continue; | |
1997 | ||
1998 | pos = int_bit_position (fld); | |
1999 | size = tree_to_uhwi (DECL_SIZE (fld)); | |
2000 | if (pos <= offset && (pos + size) > offset) | |
2001 | break; | |
2002 | } | |
2003 | ||
2004 | if (!fld) | |
2005 | goto give_up; | |
2006 | ||
2007 | type = TYPE_MAIN_VARIANT (TREE_TYPE (fld)); | |
2008 | offset -= pos; | |
2009 | /* DECL_ARTIFICIAL represents a basetype. */ | |
2010 | if (!DECL_ARTIFICIAL (fld)) | |
2011 | { | |
2012 | if (!speculative) | |
2013 | { | |
2014 | context->outer_type = type; | |
2015 | context->offset = offset; | |
2016 | /* As soon as we see a field containing the type, | |
2017 | we know we are not looking for derivations. */ | |
2018 | context->maybe_derived_type = false; | |
2019 | } | |
2020 | else | |
2021 | { | |
2022 | context->speculative_outer_type = type; | |
2023 | context->speculative_offset = offset; | |
2024 | context->speculative_maybe_derived_type = false; | |
2025 | } | |
2026 | } | |
2027 | } | |
2028 | else if (TREE_CODE (type) == ARRAY_TYPE) | |
2029 | { | |
2030 | tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type)); | |
2031 | ||
2032 | /* Give up if we don't know the array element size. */ | |
2033 | if (!tree_fits_shwi_p (TYPE_SIZE (subtype)) | |
2034 | || tree_to_shwi (TYPE_SIZE (subtype)) <= 0) | |
2035 | goto give_up; | |
2036 | offset = offset % tree_to_shwi (TYPE_SIZE (subtype)); | |
2037 | type = subtype; | |
2038 | if (!speculative) | |
2039 | { | |
2040 | context->outer_type = type; | |
2041 | context->offset = offset; | |
2042 | context->maybe_derived_type = false; | |
2043 | } | |
2044 | else | |
2045 | { | |
2046 | context->speculative_outer_type = type; | |
2047 | context->speculative_offset = offset; | |
2048 | context->speculative_maybe_derived_type = false; | |
2049 | } | |
2050 | } | |
2051 | /* Give up on anything else. */ | |
2052 | else | |
2053 | goto give_up; | |
2054 | } | |
2055 | ||
2056 | /* If we failed to find the subtype we were looking for, give up and fall back to the | |
2057 | most generic query. */ | |
2058 | give_up: | |
2059 | clear_speculation (context); | |
2060 | if (valid) | |
2061 | return true; | |
2062 | context->outer_type = expected_type; | |
2063 | context->offset = 0; | |
2064 | context->maybe_derived_type = true; | |
2065 | context->maybe_in_construction = true; | |
2066 | /* POD can be changed to an instance of a polymorphic type by | |
2067 | placement new. Here we play safe and assume that any | |
2068 | non-polymorphic type is POD. */ | |
2069 | if ((TREE_CODE (type) != RECORD_TYPE | |
2070 | || !TYPE_BINFO (type) | |
2071 | || !polymorphic_type_binfo_p (TYPE_BINFO (type))) | |
2072 | && (!TYPE_SIZE (type) | |
2073 | || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST | |
2074 | || (offset + tree_to_uhwi (TYPE_SIZE (expected_type)) <= | |
2075 | tree_to_uhwi (TYPE_SIZE (type))))) | |
2076 | return true; | |
2077 | return false; | |
2078 | } | |
2079 | ||
2080 | /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */ | |
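/* For instance, with the hypothetical layout from the get_class_context
   comment above (class A { int a; class B b; }),
   contains_type_p (A, sizeof (int) * BITS_PER_UNIT, B) returns true,
   because get_class_context can narrow A at that bit offset down to B.  */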
2081 | ||
2082 | static bool | |
2083 | contains_type_p (tree outer_type, HOST_WIDE_INT offset, | |
2084 | tree otr_type) | |
2085 | { | |
2086 | ipa_polymorphic_call_context context = {offset, 0, | |
2087 | TYPE_MAIN_VARIANT (outer_type), | |
2088 | NULL, false, true, false}; | |
2089 | return get_class_context (&context, otr_type); | |
2090 | } | |
2091 | ||
2092 | /* Look up the base of BINFO that has virtual table VTABLE at OFFSET. */ | |
2093 | ||
2094 | static tree | |
2095 | subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset, | |
2096 | tree vtable) | |
2097 | { | |
2098 | tree v = BINFO_VTABLE (binfo); | |
2099 | int i; | |
2100 | tree base_binfo; | |
2101 | unsigned HOST_WIDE_INT this_offset; | |
2102 | ||
2103 | if (v) | |
2104 | { | |
2105 | if (!vtable_pointer_value_to_vtable (v, &v, &this_offset)) | |
2106 | gcc_unreachable (); | |
2107 | ||
2108 | if (offset == this_offset | |
2109 | && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable)) | |
2110 | return binfo; | |
2111 | } | |
2112 | ||
2113 | for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++) | |
2114 | if (polymorphic_type_binfo_p (base_binfo)) | |
2115 | { | |
2116 | base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable); | |
2117 | if (base_binfo) | |
2118 | return base_binfo; | |
2119 | } | |
2120 | return NULL; | |
2121 | } | |
2122 | ||
2123 | /* T is a known constant value of a virtual table pointer. | |
2124 | Store the virtual table to V and its offset to OFFSET. | |
2125 | Return false if T does not look like a virtual table reference. */ | |
2126 | ||
2127 | bool | |
2128 | vtable_pointer_value_to_vtable (const_tree t, tree *v, | |
2129 | unsigned HOST_WIDE_INT *offset) | |
2130 | { | |
2131 | /* We expect &MEM[(void *)&virtual_table + 16B]. | |
2132 | We obtain the object's BINFO from the context of the virtual table. | |
2133 | This one contains a pointer to the virtual table represented via | |
2134 | POINTER_PLUS_EXPR. Verify that this pointer matches what | |
2135 | we propagated through. | |
2136 | ||
2137 | In the case of virtual inheritance, the virtual tables may | |
2138 | be nested, i.e. the offset may be different from 16 and we may | |
2139 | need to dive into the type representation. */ | |
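/* As a hypothetical illustration, for

     struct A { virtual void f (); };
     A a;

   the vptr of "a" is initialized to something like &_ZTV1A + 16 on a
   64-bit target (past the offset-to-top and RTTI slots), which is the
   "16B" in the expected pattern above; with virtual inheritance the
   constant can be different.  */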
2140 | if (TREE_CODE (t) == ADDR_EXPR | |
2141 | && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF | |
2142 | && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR | |
2143 | && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST | |
2144 | && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0)) | |
2145 | == VAR_DECL) | |
2146 | && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND | |
2147 | (TREE_OPERAND (t, 0), 0), 0))) | |
2148 | { | |
2149 | *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0); | |
2150 | *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1)); | |
2151 | return true; | |
2152 | } | |
2153 | ||
2154 | /* An alternative representation, used by the C++ frontend, is POINTER_PLUS_EXPR. | |
2155 | We need to handle it when T comes from a static variable initializer or | |
2156 | a BINFO. */ | |
2157 | if (TREE_CODE (t) == POINTER_PLUS_EXPR) | |
2158 | { | |
2159 | *offset = tree_to_uhwi (TREE_OPERAND (t, 1)); | |
2160 | t = TREE_OPERAND (t, 0); | |
2161 | } | |
2162 | else | |
2163 | *offset = 0; | |
2164 | ||
2165 | if (TREE_CODE (t) != ADDR_EXPR) | |
2166 | return false; | |
2167 | *v = TREE_OPERAND (t, 0); | |
2168 | return true; | |
2169 | } | |
2170 | ||
2171 | /* T is a known constant value of a virtual table pointer. Return the BINFO of | |
2172 | the instance type. */ | |
2173 | ||
2174 | tree | |
2175 | vtable_pointer_value_to_binfo (const_tree t) | |
2176 | { | |
2177 | tree vtable; | |
2178 | unsigned HOST_WIDE_INT offset; | |
2179 | ||
2180 | if (!vtable_pointer_value_to_vtable (t, &vtable, &offset)) | |
2181 | return NULL_TREE; | |
2182 | ||
2183 | /* FIXME: for stores of construction vtables we return NULL, | |
2184 | because we do not have BINFO for those. Eventually we should fix | |
2185 | our representation to allow this case to be handled, too. | |
2186 | In the case we see a store of a BINFO, however, we may assume | |
2187 | that standard folding will be able to cope with it. */ | |
2188 | return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)), | |
2189 | offset, vtable); | |
2190 | } | |
2191 | ||
2192 | /* We know that the instance is stored in a variable or parameter | |
2193 | (not dynamically allocated) and we want to disprove the fact | |
2194 | that it may be in construction at the invocation of CALL. | |
2195 | ||
2196 | For the variable to be in construction, we either need to | |
2197 | be in the constructor of the corresponding global variable, or | |
2198 | the inline stack of CALL must contain the constructor. | |
2199 | Check this condition. This check works safely only before | |
2200 | IPA passes, because inline stacks may become out of date | |
2201 | later. */ | |
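/* A minimal hypothetical example of what this tries to disprove:

     struct A { virtual int f (); int call () { return f (); } };
     A a;                      // global instance

   When we analyze the devirtualizable call to f inside A::call (possibly
   early-inlined into another function), the instance "a" can be in
   construction only if A's constructor appears on the inline stack of
   the call, or if we are compiling A::A itself.  */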
2202 | ||
2203 | bool | |
2204 | decl_maybe_in_construction_p (tree base, tree outer_type, | |
2205 | gimple call, tree function) | |
2206 | { | |
2207 | outer_type = TYPE_MAIN_VARIANT (outer_type); | |
2208 | gcc_assert (DECL_P (base)); | |
2209 | ||
2210 | /* After inlining the code unification optimizations may invalidate | |
2211 | inline stacks. Also we need to give up on global variables after | |
2212 | IPA, because addresses of these may have been propagated to their | |
2213 | constructors. */ | |
2214 | if (DECL_STRUCT_FUNCTION (function)->after_inlining) | |
2215 | return true; | |
2216 | ||
2217 | /* Pure functions cannot make any changes to the dynamic type; | |
2218 | that requires writing to memory. */ | |
2219 | if (!auto_var_in_fn_p (base, function) | |
2220 | && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST)) | |
2221 | return false; | |
2222 | ||
2223 | for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK; | |
2224 | block = BLOCK_SUPERCONTEXT (block)) | |
2225 | if (BLOCK_ABSTRACT_ORIGIN (block) | |
2226 | && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL) | |
2227 | { | |
2228 | tree fn = BLOCK_ABSTRACT_ORIGIN (block); | |
2229 | ||
2230 | if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE | |
2231 | || (!DECL_CXX_CONSTRUCTOR_P (fn) | |
2232 | && !DECL_CXX_DESTRUCTOR_P (fn))) | |
2233 | { | |
2234 | /* Watch for clones where we constant propagated the first | |
2235 | argument (pointer to the instance). */ | |
2236 | fn = DECL_ABSTRACT_ORIGIN (fn); | |
2237 | if (!fn | |
2238 | || !is_global_var (base) | |
2239 | || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE | |
2240 | || (!DECL_CXX_CONSTRUCTOR_P (fn) | |
2241 | && !DECL_CXX_DESTRUCTOR_P (fn))) | |
2242 | continue; | |
2243 | } | |
2244 | if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST)) | |
2245 | continue; | |
2246 | ||
2247 | /* FIXME: this can go away once we have ODR types equivalency on | |
2248 | LTO level. */ | |
2249 | if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type))) | |
2250 | return true; | |
2251 | tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (fn))); | |
2252 | if (types_same_for_odr (type, outer_type)) | |
2253 | return true; | |
2254 | } | |
2255 | ||
2256 | if (TREE_CODE (base) == VAR_DECL | |
2257 | && is_global_var (base)) | |
2258 | { | |
2259 | if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE | |
2260 | || (!DECL_CXX_CONSTRUCTOR_P (function) | |
2261 | && !DECL_CXX_DESTRUCTOR_P (function))) | |
2262 | { | |
2263 | if (!DECL_ABSTRACT_ORIGIN (function)) | |
2264 | return false; | |
2265 | /* Watch for clones where we constant propagated the first | |
2266 | argument (pointer to the instance). */ | |
2267 | function = DECL_ABSTRACT_ORIGIN (function); | |
2268 | if (!function | |
2269 | || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE | |
2270 | || (!DECL_CXX_CONSTRUCTOR_P (function) | |
2271 | && !DECL_CXX_DESTRUCTOR_P (function))) | |
2272 | return false; | |
2273 | } | |
2274 | /* FIXME: this can go away once we have ODR types equivalency on | |
2275 | LTO level. */ | |
2276 | if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type))) | |
2277 | return true; | |
2278 | tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (function))); | |
2279 | if (types_same_for_odr (type, outer_type)) | |
2280 | return true; | |
2281 | } | |
2282 | return false; | |
2283 | } | |
2284 | ||
2285 | /* Produce a polymorphic call context for a call to a method of an instance | |
2286 | that is located within BASE (which is assumed to be a decl) at OFFSET. */ | |
2287 | ||
2288 | static void | |
2289 | get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context, | |
2290 | tree base, HOST_WIDE_INT offset) | |
2291 | { | |
2292 | gcc_assert (DECL_P (base)); | |
2293 | ||
2294 | context->outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base)); | |
2295 | context->offset = offset; | |
2296 | context->speculative_outer_type = NULL; | |
2297 | context->speculative_offset = 0; | |
2298 | context->speculative_maybe_derived_type = true; | |
2299 | /* Make very conservative assumption that all objects | |
2300 | may be in construction. | |
2301 | TODO: ipa-prop already contains code to tell better. | |
2302 | merge it later. */ | |
2303 | context->maybe_in_construction = true; | |
2304 | context->maybe_derived_type = false; | |
2305 | } | |
2306 | ||
2307 | /* CST is an invariant (the address of a decl); try to get a meaningful | |
2308 | polymorphic call context for a polymorphic call of a method | |
2309 | of an instance of OTR_TYPE that is located at OFFSET of this invariant. | |
2310 | Return FALSE if nothing meaningful can be found. */ | |
2311 | ||
2312 | bool | |
2313 | get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context, | |
2314 | tree cst, | |
2315 | tree otr_type, | |
2316 | HOST_WIDE_INT offset) | |
2317 | { | |
2318 | HOST_WIDE_INT offset2, size, max_size; | |
2319 | tree base; | |
2320 | ||
2321 | if (TREE_CODE (cst) != ADDR_EXPR) | |
2322 | return false; | |
2323 | ||
2324 | cst = TREE_OPERAND (cst, 0); | |
2325 | base = get_ref_base_and_extent (cst, &offset2, &size, &max_size); | |
2326 | if (!DECL_P (base) || max_size == -1 || max_size != size) | |
2327 | return false; | |
2328 | ||
2329 | /* Only type inconsistent programs can have otr_type that is | |
2330 | not part of outer type. */ | |
2331 | if (!contains_type_p (TREE_TYPE (base), offset, otr_type)) | |
2332 | return false; | |
2333 | ||
2334 | get_polymorphic_call_info_for_decl (context, base, offset); | |
2335 | return true; | |
2336 | } | |
2337 | ||
2338 | /* See if OP is an SSA name initialized as a copy or by a single assignment. | |
2339 | If so, walk the SSA graph upwards. */ | |
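/* For example, in a hypothetical GIMPLE sequence

     b_2 = a_1;
     c_3 = b_2;

   walk_ssa_copies (c_3) returns a_1; default definitions (such as
   incoming parameter values) and loads from memory terminate the walk.  */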
2340 | ||
2341 | static tree | |
2342 | walk_ssa_copies (tree op) | |
2343 | { | |
2344 | STRIP_NOPS (op); | |
2345 | while (TREE_CODE (op) == SSA_NAME | |
2346 | && !SSA_NAME_IS_DEFAULT_DEF (op) | |
2347 | && SSA_NAME_DEF_STMT (op) | |
2348 | && gimple_assign_single_p (SSA_NAME_DEF_STMT (op))) | |
2349 | { | |
2350 | if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op))) | |
2351 | return op; | |
2352 | op = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op)); | |
2353 | STRIP_NOPS (op); | |
2354 | } | |
2355 | return op; | |
2356 | } | |
2357 | ||
2358 | /* Given a REF call in FNDECL, determine the class of the polymorphic | |
2359 | call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT. | |
2360 | CALL is an optional argument giving the actual statement (usually a call) where | |
2361 | the context is used. | |
2362 | Return a pointer to the object described by the context, or a declaration if | |
2363 | we found the instance to be stored in static storage. */ | |
2364 | ||
2365 | tree | |
2366 | get_polymorphic_call_info (tree fndecl, | |
2367 | tree ref, | |
2368 | tree *otr_type, | |
2369 | HOST_WIDE_INT *otr_token, | |
2370 | ipa_polymorphic_call_context *context, | |
2371 | gimple call) | |
2372 | { | |
2373 | tree base_pointer; | |
2374 | *otr_type = obj_type_ref_class (ref); | |
2375 | *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref)); | |
2376 | ||
2377 | /* Set up basic info in case we find nothing interesting in the analysis. */ | |
2378 | context->speculative_outer_type = NULL; | |
2379 | context->speculative_offset = 0; | |
2380 | context->speculative_maybe_derived_type = true; | |
2381 | context->outer_type = TYPE_MAIN_VARIANT (*otr_type); | |
2382 | context->offset = 0; | |
2383 | base_pointer = OBJ_TYPE_REF_OBJECT (ref); | |
2384 | context->maybe_derived_type = true; | |
2385 | context->maybe_in_construction = true; | |
2386 | ||
2387 | /* Walk SSA for outer object. */ | |
2388 | do | |
2389 | { | |
2390 | base_pointer = walk_ssa_copies (base_pointer); | |
2391 | if (TREE_CODE (base_pointer) == ADDR_EXPR) | |
2392 | { | |
2393 | HOST_WIDE_INT size, max_size; | |
2394 | HOST_WIDE_INT offset2; | |
2395 | tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0), | |
2396 | &offset2, &size, &max_size); | |
2397 | ||
2398 | /* If this is a varying address, punt. */ | |
2399 | if ((TREE_CODE (base) == MEM_REF || DECL_P (base)) | |
2400 | && max_size != -1 | |
2401 | && max_size == size) | |
2402 | { | |
2403 | /* We found a dereference of a pointer. The type of the pointer | |
2404 | and of the MEM_REF is meaningless, but we can look further. */ | |
2405 | if (TREE_CODE (base) == MEM_REF) | |
2406 | { | |
2407 | base_pointer = TREE_OPERAND (base, 0); | |
2408 | context->offset | |
2409 | += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT; | |
2410 | context->outer_type = NULL; | |
2411 | } | |
2412 | /* We found base object. In this case the outer_type | |
2413 | is known. */ | |
2414 | else if (DECL_P (base)) | |
2415 | { | |
2416 | gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base))); | |
2417 | ||
2418 | /* Only type inconsistent programs can have otr_type that is | |
2419 | not part of outer type. */ | |
2420 | if (!contains_type_p (TREE_TYPE (base), | |
2421 | context->offset + offset2, *otr_type)) | |
2422 | { | |
2423 | /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent | |
2424 | code sequences; we arrange the calls to be builtin_unreachable | |
2425 | later. */ | |
2426 | *otr_token = INT_MAX; | |
2427 | return base_pointer; | |
2428 | } | |
2429 | get_polymorphic_call_info_for_decl (context, base, | |
2430 | context->offset + offset2); | |
2431 | if (context->maybe_in_construction && call) | |
2432 | context->maybe_in_construction | |
2433 | = decl_maybe_in_construction_p (base, | |
2434 | context->outer_type, | |
2435 | call, | |
2436 | fndecl); | |
2437 | return base; | |
2438 | } | |
2439 | else | |
2440 | break; | |
2441 | } | |
2442 | else | |
2443 | break; | |
2444 | } | |
2445 | else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR | |
2446 | && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1))) | |
2447 | { | |
2448 | context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1)) | |
2449 | * BITS_PER_UNIT; | |
2450 | base_pointer = TREE_OPERAND (base_pointer, 0); | |
2451 | } | |
2452 | else | |
2453 | break; | |
2454 | } | |
2455 | while (true); | |
2456 | ||
2457 | /* Try to determine type of the outer object. */ | |
2458 | if (TREE_CODE (base_pointer) == SSA_NAME | |
2459 | && SSA_NAME_IS_DEFAULT_DEF (base_pointer) | |
2460 | && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL) | |
2461 | { | |
2462 | /* See if parameter is THIS pointer of a method. */ | |
2463 | if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE | |
2464 | && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl)) | |
2465 | { | |
2466 | context->outer_type | |
2467 | = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer))); | |
2468 | gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE); | |
2469 | ||
2470 | /* Dynamic casting has possibly upcast the type | |
2471 | in the hierarchy. In this case the outer type is less | |
2472 | informative than the inner type and we should forget | |
2473 | about it. */ | |
2474 | if (!contains_type_p (context->outer_type, context->offset, | |
2475 | *otr_type)) | |
2476 | { | |
2477 | context->outer_type = NULL; | |
2478 | return base_pointer; | |
2479 | } | |
2480 | ||
2481 | /* If the function is constructor or destructor, then | |
2482 | the type is possibly in construction, but we know | |
2483 | it is not derived type. */ | |
2484 | if (DECL_CXX_CONSTRUCTOR_P (fndecl) | |
2485 | || DECL_CXX_DESTRUCTOR_P (fndecl)) | |
2486 | { | |
2487 | context->maybe_in_construction = true; | |
2488 | context->maybe_derived_type = false; | |
2489 | } | |
2490 | else | |
2491 | { | |
2492 | context->maybe_derived_type = true; | |
2493 | context->maybe_in_construction = false; | |
2494 | } | |
2495 | return base_pointer; | |
2496 | } | |
2497 | /* Non-PODs passed by value are really passed by invisible | |
2498 | reference. In this case we also know the type of the | |
2499 | object. */ | |
2500 | if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer))) | |
2501 | { | |
2502 | context->outer_type | |
2503 | = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer))); | |
2504 | gcc_assert (!POINTER_TYPE_P (context->outer_type)); | |
2505 | /* Only type inconsistent programs can have otr_type that is | |
2506 | not part of outer type. */ | |
2507 | if (!contains_type_p (context->outer_type, context->offset, | |
2508 | *otr_type)) | |
2509 | { | |
2510 | /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent | |
2511 | code sequences; we arrange the calls to be builtin_unreachable | |
2512 | later. */ | |
2513 | *otr_token = INT_MAX; | |
2514 | return base_pointer; | |
2515 | } | |
2516 | context->maybe_derived_type = false; | |
2517 | context->maybe_in_construction = false; | |
2518 | return base_pointer; | |
2519 | } | |
2520 | } | |
2521 | ||
2522 | tree base_type = TREE_TYPE (base_pointer); | |
2523 | ||
2524 | if (TREE_CODE (base_pointer) == SSA_NAME | |
2525 | && SSA_NAME_IS_DEFAULT_DEF (base_pointer) | |
2526 | && TREE_CODE (SSA_NAME_VAR (base_pointer)) != PARM_DECL) | |
2527 | { | |
2528 | /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent | |
2529 | code sequences; we arrange the calls to be builtin_unreachable | |
2530 | later. */ | |
2531 | *otr_token = INT_MAX; | |
2532 | return base_pointer; | |
2533 | } | |
2534 | if (TREE_CODE (base_pointer) == SSA_NAME | |
2535 | && SSA_NAME_DEF_STMT (base_pointer) | |
2536 | && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer))) | |
2537 | base_type = TREE_TYPE (gimple_assign_rhs1 | |
2538 | (SSA_NAME_DEF_STMT (base_pointer))); | |
2539 | ||
2540 | if (POINTER_TYPE_P (base_type) | |
2541 | && contains_type_p (TYPE_MAIN_VARIANT (TREE_TYPE (base_type)), | |
2542 | context->offset, | |
2543 | *otr_type)) | |
2544 | { | |
2545 | context->speculative_outer_type = TYPE_MAIN_VARIANT | |
2546 | (TREE_TYPE (base_type)); | |
2547 | context->speculative_offset = context->offset; | |
2548 | context->speculative_maybe_derived_type = true; | |
2549 | } | |
2550 | /* TODO: There are multiple ways to derive a type. For instance, | |
2551 | BASE_POINTER may be passed to a constructor call prior to our reference. | |
2552 | We do not do this kind of flow-sensitive analysis yet. */ | |
2553 | return base_pointer; | |
2554 | } | |
2555 | ||
2556 | /* Structure to be passed in between detect_type_change and | |
2557 | check_stmt_for_type_change. */ | |
2558 | ||
2559 | struct type_change_info | |
2560 | { | |
2561 | /* Offset into the object where there is the virtual method pointer we are | |
2562 | looking for. */ | |
2563 | HOST_WIDE_INT offset; | |
2564 | /* The declaration or SSA_NAME pointer of the base that we are checking for | |
2565 | type change. */ | |
2566 | tree instance; | |
2567 | /* The reference to virtual table pointer used. */ | |
2568 | tree vtbl_ptr_ref; | |
2569 | tree otr_type; | |
2570 | /* If we actually can tell the type that the object has changed to, it is | |
2571 | stored in this field. Otherwise it remains NULL_TREE. */ | |
2572 | tree known_current_type; | |
2573 | HOST_WIDE_INT known_current_offset; | |
2574 | ||
2575 | /* Set to true if dynamic type change has been detected. */ | |
2576 | bool type_maybe_changed; | |
2577 | /* Set to true if multiple types have been encountered. known_current_type | |
2578 | must be disregarded in that case. */ | |
2579 | bool multiple_types_encountered; | |
2580 | /* Set to true if we possibly missed some dynamic type changes and we should | |
2581 | consider the set to be speculative. */ | |
2582 | bool speculative; | |
2583 | bool seen_unanalyzed_store; | |
2584 | }; | |
2585 | ||
2586 | /* Return true if STMT is not a call and can modify a virtual method table pointer. | |
2587 | We take advantage of the fact that vtable stores must appear within constructor | |
2588 | and destructor functions. */ | |
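/* A hypothetical example of a statement this predicate is meant to
   accept: inside

     struct A { virtual void f (); A (); };
     A::A () { }

   the compiler-generated store of the form

     this->_vptr.A = &_ZTV1A + <offset>;

   is a vtbl pointer store, while stores that cannot touch a vptr (for
   example non-pointer scalar stores under strict aliasing, or stores to
   non-virtual fields) are rejected.  */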
2589 | ||
2590 | bool | |
2591 | noncall_stmt_may_be_vtbl_ptr_store (gimple stmt) | |
2592 | { | |
2593 | if (is_gimple_assign (stmt)) | |
2594 | { | |
2595 | tree lhs = gimple_assign_lhs (stmt); | |
2596 | ||
2597 | if (gimple_clobber_p (stmt)) | |
2598 | return false; | |
2599 | if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs))) | |
2600 | { | |
2601 | if (flag_strict_aliasing | |
2602 | && !POINTER_TYPE_P (TREE_TYPE (lhs))) | |
2603 | return false; | |
2604 | ||
2605 | if (TREE_CODE (lhs) == COMPONENT_REF | |
2606 | && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))) | |
2607 | return false; | |
2608 | /* In the future we might want to use get_base_ref_and_offset to find | |
2609 | if there is a field corresponding to the offset and if so, proceed | |
2610 | almost like if it was a component ref. */ | |
2611 | } | |
2612 | } | |
2613 | ||
2614 | /* Code unification may mess with inline stacks. */ | |
2615 | if (cfun->after_inlining) | |
2616 | return true; | |
2617 | ||
2618 | /* Walk the inline stack and watch out for ctors/dtors. | |
2619 | TODO: Maybe we can require the store to appear in toplevel | |
2620 | block of CTOR/DTOR. */ | |
2621 | for (tree block = gimple_block (stmt); block && TREE_CODE (block) == BLOCK; | |
2622 | block = BLOCK_SUPERCONTEXT (block)) | |
2623 | if (BLOCK_ABSTRACT_ORIGIN (block) | |
2624 | && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL) | |
2625 | { | |
2626 | tree fn = BLOCK_ABSTRACT_ORIGIN (block); | |
2627 | ||
2628 | if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST)) | |
2629 | return false; | |
2630 | return (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE | |
2631 | && (DECL_CXX_CONSTRUCTOR_P (fn) | |
2632 | || DECL_CXX_DESTRUCTOR_P (fn))); | |
2633 | } | |
2634 | return (TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE | |
2635 | && (DECL_CXX_CONSTRUCTOR_P (current_function_decl) | |
2636 | || DECL_CXX_DESTRUCTOR_P (current_function_decl))); | |
2637 | } | |
2638 | ||
2639 | /* If STMT can be proved to be an assignment to the virtual method table | |
2640 | pointer of ANALYZED_OBJ and the type associated with the new table can be | |
2641 | identified, return the type. Otherwise return NULL_TREE. */ | |
2642 | ||
2643 | static tree | |
2644 | extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci, | |
2645 | HOST_WIDE_INT *type_offset) | |
2646 | { | |
2647 | HOST_WIDE_INT offset, size, max_size; | |
2648 | tree lhs, rhs, base, binfo; | |
2649 | ||
2650 | if (!gimple_assign_single_p (stmt)) | |
2651 | return NULL_TREE; | |
2652 | ||
2653 | lhs = gimple_assign_lhs (stmt); | |
2654 | rhs = gimple_assign_rhs1 (stmt); | |
2655 | if (TREE_CODE (lhs) != COMPONENT_REF | |
2656 | || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))) | |
2657 | return NULL_TREE; | |
2658 | ||
2659 | if (tci->vtbl_ptr_ref && operand_equal_p (lhs, tci->vtbl_ptr_ref, 0)) | |
2660 | ; | |
2661 | else | |
2662 | { | |
2663 | base = get_ref_base_and_extent (lhs, &offset, &size, &max_size); | |
2664 | if (offset != tci->offset | |
2665 | || size != POINTER_SIZE | |
2666 | || max_size != POINTER_SIZE) | |
2667 | return NULL_TREE; | |
2668 | if (DECL_P (tci->instance)) | |
2669 | { | |
2670 | if (base != tci->instance) | |
2671 | return NULL_TREE; | |
2672 | } | |
2673 | else if (TREE_CODE (base) == MEM_REF) | |
2674 | { | |
2675 | if (!operand_equal_p (tci->instance, TREE_OPERAND (base, 0), 0) | |
2676 | || !integer_zerop (TREE_OPERAND (base, 1))) | |
2677 | return NULL_TREE; | |
2678 | } | |
2679 | else if (!operand_equal_p (tci->instance, base, 0) | |
2680 | || tci->offset) | |
2681 | return NULL_TREE; | |
2682 | } | |
2683 | ||
2684 | binfo = vtable_pointer_value_to_binfo (rhs); | |
2685 | ||
2686 | if (!binfo) | |
2687 | return NULL; | |
2688 | *type_offset = tree_to_shwi (BINFO_OFFSET (binfo)) * BITS_PER_UNIT; | |
2689 | if (TYPE_BINFO (BINFO_TYPE (binfo)) == binfo) | |
2690 | return BINFO_TYPE (binfo); | |
2691 | ||
2692 | /* TODO: Figure out the type containing BINFO. */ | |
2693 | return NULL; | |
2694 | } | |
2695 | ||
2696 | /* Record dynamic type change of TCI to TYPE. */ | |
2697 | ||
2698 | void | |
2699 | record_known_type (struct type_change_info *tci, tree type, HOST_WIDE_INT offset) | |
2700 | { | |
2701 | if (dump_file) | |
2702 | { | |
2703 | if (type) | |
2704 | { | |
2705 | fprintf (dump_file, " Recording type: "); | |
2706 | print_generic_expr (dump_file, type, TDF_SLIM); | |
2707 | fprintf (dump_file, " at offset %i\n", (int)offset); | |
2708 | } | |
2709 | else | |
2710 | fprintf (dump_file, " Recording unknown type\n"); | |
2711 | } | |
2712 | if (tci->type_maybe_changed | |
2713 | && (type != tci->known_current_type | |
2714 | || offset != tci->known_current_offset)) | |
2715 | tci->multiple_types_encountered = true; | |
2716 | tci->known_current_type = type; | |
2717 | tci->known_current_offset = offset; | |
2718 | tci->type_maybe_changed = true; | |
2719 | } | |
2720 | ||
2721 | /* Callback of walk_aliased_vdefs and a helper function for | |
2722 | detect_type_change to check whether a particular statement may modify | |
2723 | the virtual table pointer, and if possible also determine the new type of | |
2724 | the (sub-)object. It stores its result into DATA, which points to a | |
2725 | type_change_info structure. */ | |
2726 | ||
2727 | static bool | |
2728 | check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data) | |
2729 | { | |
2730 | gimple stmt = SSA_NAME_DEF_STMT (vdef); | |
2731 | struct type_change_info *tci = (struct type_change_info *) data; | |
2732 | tree fn; | |
2733 | ||
2734 | /* If we already gave up, just terminate the rest of walk. */ | |
2735 | if (tci->multiple_types_encountered) | |
2736 | return true; | |
2737 | ||
2738 | if (is_gimple_call (stmt)) | |
2739 | { | |
2740 | if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE)) | |
2741 | return false; | |
2742 | ||
2743 | /* Check for a constructor call. */ | |
2744 | if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE | |
2745 | && DECL_CXX_CONSTRUCTOR_P (fn) | |
2746 | && TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE | |
2747 | && gimple_call_num_args (stmt)) | |
2748 | { | |
2749 | tree op = walk_ssa_copies (gimple_call_arg (stmt, 0)); | |
2750 | tree type = method_class_type (TREE_TYPE (fn)); | |
2751 | HOST_WIDE_INT offset = 0, size, max_size; | |
2752 | ||
2753 | if (dump_file) | |
2754 | { | |
2755 | fprintf (dump_file, " Checking constructor call: "); | |
2756 | print_gimple_stmt (dump_file, stmt, 0, 0); | |
2757 | } | |
2758 | ||
2759 | /* See if THIS parameter seems like instance pointer. */ | |
2760 | if (TREE_CODE (op) == ADDR_EXPR) | |
2761 | { | |
2762 | op = get_ref_base_and_extent (TREE_OPERAND (op, 0), | |
2763 | &offset, &size, &max_size); | |
2764 | if (size != max_size || max_size == -1) | |
2765 | { | |
2766 | tci->speculative = true; | |
2767 | return false; | |
2768 | } | |
2769 | if (op && TREE_CODE (op) == MEM_REF) | |
2770 | { | |
2771 | if (!tree_fits_shwi_p (TREE_OPERAND (op, 1))) | |
2772 | { | |
2773 | tci->speculative = true; | |
2774 | return false; | |
2775 | } | |
2776 | offset += tree_to_shwi (TREE_OPERAND (op, 1)) | |
2777 | * BITS_PER_UNIT; | |
2778 | op = TREE_OPERAND (op, 0); | |
2779 | } | |
2780 | else if (DECL_P (op)) | |
2781 | ; | |
2782 | else | |
2783 | { | |
2784 | tci->speculative = true; | |
2785 | return false; | |
2786 | } | |
2787 | op = walk_ssa_copies (op); | |
2788 | } | |
2789 | if (operand_equal_p (op, tci->instance, 0) | |
2790 | && TYPE_SIZE (type) | |
2791 | && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST | |
2792 | && tree_fits_shwi_p (TYPE_SIZE (type)) | |
2793 | && tree_to_shwi (TYPE_SIZE (type)) + offset > tci->offset) | |
2794 | { | |
2795 | record_known_type (tci, type, tci->offset - offset); | |
2796 | return true; | |
2797 | } | |
2798 | } | |
2799 | /* Calls may possibly change dynamic type by placement new. Assume | |
2800 | it will not happen, but make result speculative only. */ | |
2801 | if (dump_file) | |
2802 | { | |
2803 | fprintf (dump_file, " Function call may change dynamic type:"); | |
2804 | print_gimple_stmt (dump_file, stmt, 0, 0); | |
2805 | } | |
2806 | tci->speculative = true; | |
2807 | return false; | |
2808 | } | |
2809 | /* Check for inlined virtual table store. */ | |
2810 | else if (noncall_stmt_may_be_vtbl_ptr_store (stmt)) | |
2811 | { | |
2812 | tree type; | |
2813 | HOST_WIDE_INT offset = 0; | |
2814 | if (dump_file) | |
2815 | { | |
2816 | fprintf (dump_file, " Checking vtbl store: "); | |
2817 | print_gimple_stmt (dump_file, stmt, 0, 0); | |
2818 | } | |
2819 | ||
2820 | type = extr_type_from_vtbl_ptr_store (stmt, tci, &offset); | |
2821 | gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type); | |
2822 | if (!type) | |
2823 | { | |
2824 | if (dump_file) | |
2825 | fprintf (dump_file, " Unanalyzed store may change type.\n"); | |
2826 | tci->seen_unanalyzed_store = true; | |
2827 | tci->speculative = true; | |
2828 | } | |
2829 | else | |
2830 | record_known_type (tci, type, offset); | |
2831 | return true; | |
2832 | } | |
2833 | else | |
2834 | return false; | |
2835 | } | |
2836 | ||
2837 | /* CONTEXT is the polymorphic call context obtained from get_polymorphic_context. | |
2838 | OTR_OBJECT is a pointer to the instance returned by OBJ_TYPE_REF_OBJECT. | |
2839 | INSTANCE is a pointer to the outer instance as returned by | |
2840 | get_polymorphic_context. To avoid the creation of temporary expressions, | |
2841 | INSTANCE may also be a declaration if get_polymorphic_context found the | |
2842 | value to be in static storage. | |
2843 | ||
2844 | If the type of instance is not fully determined | |
2845 | (either OUTER_TYPE is unknown or MAYBE_IN_CONSTRUCTION/INCLUDE_DERIVED_TYPES | |
2846 | is set), try to walk memory writes and find the actual construction of the | |
2847 | instance. | |
2848 | ||
2849 | We do not include this analysis in the context analysis itself, because | |
2850 | it needs memory SSA to be fully built and the walk may be expensive. | |
2851 | So it is not suitable for use within fold_stmt and similar uses. */ | |
2852 | ||
2853 | bool | |
2854 | get_dynamic_type (tree instance, | |
2855 | ipa_polymorphic_call_context *context, | |
2856 | tree otr_object, | |
2857 | tree otr_type, | |
2858 | gimple call) | |
2859 | { | |
2860 | struct type_change_info tci; | |
2861 | ao_ref ao; | |
2862 | bool function_entry_reached = false; | |
2863 | tree instance_ref = NULL; | |
2864 | gimple stmt = call; | |
2865 | ||
2866 | if (!context->maybe_in_construction && !context->maybe_derived_type) | |
2867 | return false; | |
2868 | ||
2869 | /* We need to obtain a reference to the virtual table pointer. It is better | |
2870 | to look it up in the code rather than build our own. This requires a bit | |
2871 | of pattern matching, but we end up verifying that what we found is | |
2872 | correct. | |
2873 | ||
2874 | What we pattern match is: | |
2875 | ||
2876 | tmp = instance->_vptr.A; // vtbl ptr load | |
2877 | tmp2 = tmp[otr_token]; // vtable lookup | |
2878 | OBJ_TYPE_REF(tmp2;instance->0) (instance); | |
2879 | ||
2880 | We want to start alias oracle walk from vtbl pointer load, | |
2881 | but we may not be able to identify it, for example, when PRE moved the | |
2882 | load around. */ | |
2883 | ||
2884 | if (gimple_code (call) == GIMPLE_CALL) | |
2885 | { | |
2886 | tree ref = gimple_call_fn (call); | |
2887 | HOST_WIDE_INT offset2, size, max_size; | |
2888 | ||
2889 | if (TREE_CODE (ref) == OBJ_TYPE_REF) | |
2890 | { | |
2891 | ref = OBJ_TYPE_REF_EXPR (ref); | |
2892 | ref = walk_ssa_copies (ref); | |
2893 | ||
2894 | /* Check if definition looks like vtable lookup. */ | |
2895 | if (TREE_CODE (ref) == SSA_NAME | |
2896 | && !SSA_NAME_IS_DEFAULT_DEF (ref) | |
2897 | && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref)) | |
2898 | && TREE_CODE (gimple_assign_rhs1 | |
2899 | (SSA_NAME_DEF_STMT (ref))) == MEM_REF) | |
2900 | { | |
2901 | ref = get_base_address | |
2902 | (TREE_OPERAND (gimple_assign_rhs1 | |
2903 | (SSA_NAME_DEF_STMT (ref)), 0)); | |
2904 | ref = walk_ssa_copies (ref); | |
2905 | /* Find base address of the lookup and see if it looks like | |
2906 | vptr load. */ | |
2907 | if (TREE_CODE (ref) == SSA_NAME | |
2908 | && !SSA_NAME_IS_DEFAULT_DEF (ref) | |
2909 | && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref))) | |
2910 | { | |
2911 | tree ref_exp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref)); | |
2912 | tree base_ref = get_ref_base_and_extent | |
2913 | (ref_exp, &offset2, &size, &max_size); | |
2914 | ||
2915 | /* Finally verify that what we found looks like read from OTR_OBJECT | |
2916 | or from INSTANCE with offset OFFSET. */ | |
2917 | if (base_ref | |
2918 | && ((TREE_CODE (base_ref) == MEM_REF | |
2919 | && ((offset2 == context->offset | |
2920 | && TREE_OPERAND (base_ref, 0) == instance) | |
2921 | || (!offset2 && TREE_OPERAND (base_ref, 0) == otr_object))) | |
2922 | || (DECL_P (instance) && base_ref == instance | |
2923 | && offset2 == context->offset))) | |
2924 | { | |
2925 | stmt = SSA_NAME_DEF_STMT (ref); | |
2926 | instance_ref = ref_exp; | |
2927 | } | |
2928 | } | |
2929 | } | |
2930 | } | |
2931 | } | |
2932 | ||
2933 | /* If we failed to look up the reference in code, build our own. */ | |
2934 | if (!instance_ref) | |
2935 | { | |
2936 | /* If the statement in question does not use memory, we can't tell | |
2937 | anything. */ | |
2938 | if (!gimple_vuse (stmt)) | |
2939 | return false; | |
2940 | ao_ref_init_from_ptr_and_size (&ao, otr_object, NULL); | |
2941 | } | |
2942 | else | |
2943 | /* Otherwise use the real reference. */ | |
2944 | ao_ref_init (&ao, instance_ref); | |
2945 | ||
2946 | /* We look for vtbl pointer read. */ | |
2947 | ao.size = POINTER_SIZE; | |
2948 | ao.max_size = ao.size; | |
2949 | ao.ref_alias_set | |
2950 | = get_deref_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type)))); | |
2951 | ||
2952 | if (dump_file) | |
2953 | { | |
2954 | fprintf (dump_file, "Determining dynamic type for call: "); | |
2955 | print_gimple_stmt (dump_file, call, 0, 0); | |
2956 | fprintf (dump_file, " Starting walk at: "); | |
2957 | print_gimple_stmt (dump_file, stmt, 0, 0); | |
2958 | fprintf (dump_file, " instance pointer: "); | |
2959 | print_generic_expr (dump_file, otr_object, TDF_SLIM); | |
2960 | fprintf (dump_file, " Outer instance pointer: "); | |
2961 | print_generic_expr (dump_file, instance, TDF_SLIM); | |
2962 | fprintf (dump_file, " offset: %i (bits)", (int)context->offset); | |
2963 | fprintf (dump_file, " vtbl reference: "); | |
2964 | print_generic_expr (dump_file, instance_ref, TDF_SLIM); | |
2965 | fprintf (dump_file, "\n"); | |
2966 | } | |
2967 | ||
2968 | tci.offset = context->offset; | |
2969 | tci.instance = instance; | |
2970 | tci.vtbl_ptr_ref = instance_ref; | |
2971 | gcc_assert (TREE_CODE (instance) != MEM_REF); | |
2972 | tci.known_current_type = NULL_TREE; | |
2973 | tci.known_current_offset = 0; | |
2974 | tci.otr_type = otr_type; | |
2975 | tci.type_maybe_changed = false; | |
2976 | tci.multiple_types_encountered = false; | |
2977 | tci.speculative = false; | |
2978 | tci.seen_unanalyzed_store = false; | |
2979 | ||
2980 | walk_aliased_vdefs (&ao, gimple_vuse (stmt), check_stmt_for_type_change, | |
2981 | &tci, NULL, &function_entry_reached); | |
2982 | ||
2983 | /* If we did not find any type changing statements, we may still drop | |
2984 | maybe_in_construction flag if the context already has an outer type. | |
2985 | ||
2986 | Here we make special assumptions about both constructors and | |
2987 | destructors which are all the functions that are allowed to alter the | |
2988 | VMT pointers. It assumes that destructors begin with assignment into | |
2989 | all VMT pointers and that constructors essentially look as | |
2990 | follows: | |
2991 | ||
2992 | 1) The very first thing they do is that they call constructors of | |
2993 | ancestor sub-objects that have them. | |
2994 | ||
2995 | 2) Then the VMT pointers of this type and all its ancestors are set to | |
2996 | new values corresponding to the type of the constructor. | |
2997 | ||
2998 | 3) Only afterwards, other things such as constructors of member | |
2999 | sub-objects and the code written by the user are run. Only this may | |
3000 | include calling virtual functions, directly or indirectly. | |
3001 | ||
3002 | 4) placement new cannot be used to change the type of non-POD statically | |
3003 | allocated variables. | |
3004 | ||
3005 | There is no way to call a constructor of an ancestor sub-object in any | |
3006 | other way. | |
3007 | ||
3008 | This means that we do not have to care whether constructors get the | |
3009 | correct type information because they will always change it (in fact, | |
3010 | if we define the type to be given by the VMT pointer, it is undefined). | |
3011 | ||
3012 | The most important fact to derive from the above is that if, for some | |
3013 | statement in the section 3, we try to detect whether the dynamic type | |
3014 | has changed, we can safely ignore all calls as we examine the function | |
3015 | body backwards until we reach statements in section 2 because these | |
3016 | calls cannot be ancestor constructors or destructors (if the input is | |
3017 | not bogus) and so do not change the dynamic type (this holds true only | |
3018 | for automatically allocated objects but at the moment we devirtualize | |
3019 | only these). We then must detect that statements in section 2 change | |
3020 | the dynamic type and can try to derive the new type. That is enough | |
3021 | and we can stop, we will never see the calls into constructors of | |
3022 | sub-objects in this code. | |
3023 | ||
3024 | Therefore if the static outer type was found (context->outer_type) | |
3025 | we can safely ignore tci.speculative that is set on calls and give up | |
3026 | only if there was a dynamic type store that may affect the given variable | |
3027 | (seen_unanalyzed_store). */ | |
3028 | ||
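 | /* A sketch of the constructor layout assumed above, using hypothetical | |
 | types (not part of this unit): | |
 | ||
 | struct B { virtual void f (); }; | |
 | struct D : B { int m; D () : m (0) {} virtual void f (); }; | |
 | ||
 | D::D () first calls B::B () (section 1), then stores D's virtual table | |
 | pointer into the object (section 2), and only then initializes M and runs | |
 | any user-written code (section 3). */ | |
 | ||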
3029 | if (!tci.type_maybe_changed) | |
3030 | { | |
3031 | if (!context->outer_type || tci.seen_unanalyzed_store) | |
3032 | return false; | |
3033 | if (context->maybe_in_construction) | |
3034 | context->maybe_in_construction = false; | |
3035 | if (dump_file) | |
3036 | fprintf (dump_file, " No dynamic type change found.\n"); | |
3037 | return true; | |
3038 | } | |
3039 | ||
3040 | if (tci.known_current_type | |
3041 | && !function_entry_reached | |
3042 | && !tci.multiple_types_encountered) | |
3043 | { | |
3044 | if (!tci.speculative | |
3045 | /* Again in instances located in static storage we are interested only | |
3046 | in constructor stores. */ | |
3047 | || (context->outer_type | |
3048 | && !tci.seen_unanalyzed_store | |
3049 | && context->offset == tci.offset | |
3050 | && types_same_for_odr (tci.known_current_type, | |
3051 | context->outer_type))) | |
3052 | { | |
3053 | context->outer_type = tci.known_current_type; | |
3054 | context->offset = tci.known_current_offset; | |
3055 | context->maybe_in_construction = false; | |
3056 | context->maybe_derived_type = false; | |
3057 | if (dump_file) | |
3058 | fprintf (dump_file, " Determined dynamic type.\n"); | |
3059 | } | |
3060 | else if (!context->speculative_outer_type | |
3061 | || context->speculative_maybe_derived_type) | |
3062 | { | |
3063 | context->speculative_outer_type = tci.known_current_type; | |
3064 | context->speculative_offset = tci.known_current_offset; | |
3065 | context->speculative_maybe_derived_type = false; | |
3066 | if (dump_file) | |
3067 | fprintf (dump_file, " Determined speculative dynamic type.\n"); | |
3068 | } | |
3069 | } | |
3070 | else if (dump_file) | |
3071 | fprintf (dump_file, " Found multiple types.\n"); | |
3072 | ||
3073 | return true; | |
3074 | } | |
3075 | ||
3076 | /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET. | |
3077 | Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE | |
3078 | and insert them into NODES. | |
3079 | ||
3080 | MATCHED_VTABLES and INSERTED are used to avoid duplicated work. */ | |
3081 | ||
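 | /* Hypothetical illustration (types not from this unit): given | |
 | ||
 | struct A { virtual void f (); }; | |
 | struct B : A { virtual void f (); }; | |
 | struct C : B { virtual void f (); }; | |
 | ||
 | while C's constructor or destructor runs, the object may temporarily use | |
 | A's or B's (construction) virtual table, so for a call through an object | |
 | of type C that may be in construction, A::f and B::f are recorded as | |
 | possible targets in addition to C::f. */ | |
 | ||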
3082 | static void | |
3083 | record_targets_from_bases (tree otr_type, | |
3084 | HOST_WIDE_INT otr_token, | |
3085 | tree outer_type, | |
3086 | HOST_WIDE_INT offset, | |
3087 | vec <cgraph_node *> &nodes, | |
3088 | hash_set<tree> *inserted, | |
3089 | hash_set<tree> *matched_vtables, | |
3090 | bool *completep) | |
3091 | { | |
3092 | while (true) | |
3093 | { | |
3094 | HOST_WIDE_INT pos, size; | |
3095 | tree base_binfo; | |
3096 | tree fld; | |
3097 | ||
3098 | if (types_same_for_odr (outer_type, otr_type)) | |
3099 | return; | |
3100 | ||
3101 | for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld)) | |
3102 | { | |
3103 | if (TREE_CODE (fld) != FIELD_DECL) | |
3104 | continue; | |
3105 | ||
3106 | pos = int_bit_position (fld); | |
3107 | size = tree_to_shwi (DECL_SIZE (fld)); | |
3108 | if (pos <= offset && (pos + size) > offset | |
3109 | /* Do not get confused by zero sized bases. */ | |
3110 | && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld)))) | |
3111 | break; | |
3112 | } | |
3113 | /* Within a class type we should always find a corresponding field. */ | |
3114 | gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE); | |
3115 | ||
3116 | /* Non-base types should have been stripped by outer_class_type. */ | |
3117 | gcc_assert (DECL_ARTIFICIAL (fld)); | |
3118 | ||
3119 | outer_type = TREE_TYPE (fld); | |
3120 | offset -= pos; | |
3121 | ||
3122 | base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type), | |
3123 | offset, otr_type); | |
3124 | if (!base_binfo) | |
3125 | { | |
3126 | gcc_assert (odr_violation_reported); | |
3127 | return; | |
3128 | } | |
3129 | gcc_assert (base_binfo); | |
3130 | if (!matched_vtables->add (BINFO_VTABLE (base_binfo))) | |
3131 | { | |
3132 | bool can_refer; | |
3133 | tree target = gimple_get_virt_method_for_binfo (otr_token, | |
3134 | base_binfo, | |
3135 | &can_refer); | |
3136 | if (!target || ! DECL_CXX_DESTRUCTOR_P (target)) | |
3137 | maybe_record_node (nodes, target, inserted, can_refer, completep); | |
3138 | matched_vtables->add (BINFO_VTABLE (base_binfo)); | |
3139 | } | |
3140 | } | |
3141 | } | |
3142 | ||
3143 | /* When a virtual table is removed, we may need to flush the cache. */ | |
3144 | ||
3145 | static void | |
3146 | devirt_variable_node_removal_hook (varpool_node *n, | |
3147 | void *d ATTRIBUTE_UNUSED) | |
3148 | { | |
3149 | if (cached_polymorphic_call_targets | |
3150 | && DECL_VIRTUAL_P (n->decl) | |
3151 | && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl))) | |
3152 | free_polymorphic_call_targets_hash (); | |
3153 | } | |
3154 | ||
3155 | /* Record of how many calls would benefit from a given type being final. */ | |
3156 | ||
3157 | struct odr_type_warn_count | |
3158 | { | |
3159 | tree type; | |
3160 | int count; | |
3161 | gcov_type dyn_count; | |
3162 | }; | |
3163 | ||
3164 | /* Record of how many calls would benefit from a given method being final. */ | |
3165 | ||
3166 | struct decl_warn_count | |
3167 | { | |
3168 | tree decl; | |
3169 | int count; | |
3170 | gcov_type dyn_count; | |
3171 | }; | |
3172 | ||
3173 | /* Information about type and decl warnings. */ | |
3174 | ||
3175 | struct final_warning_record | |
3176 | { | |
3177 | gcov_type dyn_count; | |
3178 | vec<odr_type_warn_count> type_warnings; | |
3179 | hash_map<tree, decl_warn_count> decl_warnings; | |
3180 | }; | |
3181 | struct final_warning_record *final_warning_records; | |
3182 | ||
3183 | /* Return a vector containing possible targets of a polymorphic call of type | |
3184 | OTR_TYPE calling method OTR_TOKEN within type OTR_OUTER_TYPE at OFFSET. | |
3185 | If INCLUDE_BASES is true, also walk base types of OUTER_TYPE containing | |
3186 | OTR_TYPE and include their virtual methods. This is useful for types | |
3187 | possibly in construction or destruction where the virtual table may | |
3188 | temporarily change to one of the base types. INCLUDE_DERIVED_TYPES makes | |
3189 | us walk the inheritance graph for all derivations. | |
3190 | ||
3191 | OTR_TOKEN == INT_MAX is used to mark calls that are provably | |
3192 | undefined and should be redirected to unreachable. | |
3193 | ||
3194 | If COMPLETEP is non-NULL, store true if the list is complete. | |
3195 | CACHE_TOKEN (if non-NULL) will get set to a unique ID of the entry | |
3196 | in the target cache. If the caller needs to visit every target list | |
3197 | just once, it can memoize them. | |
3198 | ||
3199 | SPECULATION_TARGETS specifies the number of targets that are speculatively | |
3200 | likely. These include targets specified by the speculative part | |
3201 | of the polymorphic call context and exclude all targets for classes | |
3202 | in construction. | |
3203 | ||
3204 | The returned vector is placed into the cache. It is NOT the caller's | |
3205 | responsibility to free it. The vector can be freed on a cgraph_remove_node | |
3206 | call if the particular node is a virtual function present in the cache. */ | |
3207 | ||
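 | /* A minimal caller sketch, assuming OTR_TYPE, OTR_TOKEN and CTX already | |
 | describe the call (hypothetical local variable names): | |
 | ||
 | bool final; | |
 | void *cache_token; | |
 | int speculative; | |
 | vec <cgraph_node *> targets | |
 | = possible_polymorphic_call_targets (otr_type, otr_token, ctx, | |
 | &final, &cache_token, &speculative); | |
 | ||
 | Entries 0 .. SPECULATIVE - 1 of TARGETS are the speculatively likely ones; | |
 | FINAL tells whether the list is known to be complete. */ | |
 | ||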
3208 | vec <cgraph_node *> | |
3209 | possible_polymorphic_call_targets (tree otr_type, | |
3210 | HOST_WIDE_INT otr_token, | |
3211 | ipa_polymorphic_call_context context, | |
3212 | bool *completep, | |
3213 | void **cache_token, | |
3214 | int *speculative_targetsp) | |
3215 | { | |
3216 | static struct cgraph_node_hook_list *node_removal_hook_holder; | |
3217 | vec <cgraph_node *> nodes = vNULL; | |
3218 | vec <tree> bases_to_consider = vNULL; | |
3219 | odr_type type, outer_type; | |
3220 | polymorphic_call_target_d key; | |
3221 | polymorphic_call_target_d **slot; | |
3222 | unsigned int i; | |
3223 | tree binfo, target; | |
3224 | bool complete; | |
3225 | bool can_refer; | |
3226 | bool skipped = false; | |
3227 | ||
3228 | otr_type = TYPE_MAIN_VARIANT (otr_type); | |
3229 | ||
3230 | /* If ODR is not initialized, return empty incomplete list. */ | |
3231 | if (!odr_hash) | |
3232 | { | |
3233 | if (completep) | |
3234 | *completep = false; | |
3235 | if (cache_token) | |
3236 | *cache_token = NULL; | |
3237 | if (speculative_targetsp) | |
3238 | *speculative_targetsp = 0; | |
3239 | return nodes; | |
3240 | } | |
3241 | ||
3242 | /* If we hit type inconsistency, just return empty list of targets. */ | |
3243 | if (otr_token == INT_MAX) | |
3244 | { | |
3245 | if (completep) | |
3246 | *completep = true; | |
3247 | if (cache_token) | |
3248 | *cache_token = NULL; | |
3249 | if (speculative_targetsp) | |
3250 | *speculative_targetsp = 0; | |
3251 | return nodes; | |
3252 | } | |
3253 | ||
3254 | /* Do not bother to compute speculative info when the user does not ask for it. */ | |
3255 | if (!speculative_targetsp || !context.speculative_outer_type) | |
3256 | clear_speculation (&context); | |
3257 | ||
3258 | type = get_odr_type (otr_type, true); | |
3259 | ||
3260 | /* Recording type variants would waste the results cache. */ | |
3261 | gcc_assert (!context.outer_type | |
3262 | || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type); | |
3263 | ||
3264 | /* Lookup the outer class type we want to walk. */ | |
3265 | if ((context.outer_type || context.speculative_outer_type) | |
3266 | && !get_class_context (&context, otr_type)) | |
3267 | { | |
3268 | if (completep) | |
3269 | *completep = false; | |
3270 | if (cache_token) | |
3271 | *cache_token = NULL; | |
3272 | if (speculative_targetsp) | |
3273 | *speculative_targetsp = 0; | |
3274 | return nodes; | |
3275 | } | |
3276 | ||
3277 | /* Check that get_class_context kept the main variant. */ | |
3278 | gcc_assert (!context.outer_type | |
3279 | || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type); | |
3280 | ||
3281 | /* We canonicalize our query, so we do not need extra hashtable entries. */ | |
3282 | ||
3283 | /* Without an outer type, we have no use for the offset. Just do the | |
3284 | basic search from the inner type. */ | |
3285 | if (!context.outer_type) | |
3286 | { | |
3287 | context.outer_type = otr_type; | |
3288 | context.offset = 0; | |
3289 | } | |
3290 | /* We need to update our hierarchy if the type does not exist. */ | |
3291 | outer_type = get_odr_type (context.outer_type, true); | |
3292 | /* If the type is final, there are no derivations. */ | |
3293 | if (TYPE_FINAL_P (outer_type->type)) | |
3294 | context.maybe_derived_type = false; | |
3295 | ||
3296 | /* Initialize query cache. */ | |
3297 | if (!cached_polymorphic_call_targets) | |
3298 | { | |
3299 | cached_polymorphic_call_targets = new hash_set<cgraph_node *>; | |
3300 | polymorphic_call_target_hash | |
3301 | = new polymorphic_call_target_hash_type (23); | |
3302 | if (!node_removal_hook_holder) | |
3303 | { | |
3304 | node_removal_hook_holder = | |
3305 | cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL); | |
3306 | varpool_add_node_removal_hook (&devirt_variable_node_removal_hook, | |
3307 | NULL); | |
3308 | } | |
3309 | } | |
3310 | ||
3311 | /* Lookup cached answer. */ | |
3312 | key.type = type; | |
3313 | key.otr_token = otr_token; | |
3314 | key.context = context; | |
3315 | slot = polymorphic_call_target_hash->find_slot (&key, INSERT); | |
3316 | if (cache_token) | |
3317 | *cache_token = (void *)*slot; | |
3318 | if (*slot) | |
3319 | { | |
3320 | if (completep) | |
3321 | *completep = (*slot)->complete; | |
3322 | if (speculative_targetsp) | |
3323 | *speculative_targetsp = (*slot)->speculative_targets; | |
3324 | if ((*slot)->type_warning && final_warning_records) | |
3325 | { | |
3326 | final_warning_records->type_warnings[(*slot)->type_warning - 1].count++; | |
3327 | final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count | |
3328 | += final_warning_records->dyn_count; | |
3329 | } | |
3330 | if ((*slot)->decl_warning && final_warning_records) | |
3331 | { | |
3332 | struct decl_warn_count *c = | |
3333 | final_warning_records->decl_warnings.get ((*slot)->decl_warning); | |
3334 | c->count++; | |
3335 | c->dyn_count += final_warning_records->dyn_count; | |
3336 | } | |
3337 | return (*slot)->targets; | |
3338 | } | |
3339 | ||
3340 | complete = true; | |
3341 | ||
3342 | /* Do actual search. */ | |
3343 | timevar_push (TV_IPA_VIRTUAL_CALL); | |
3344 | *slot = XCNEW (polymorphic_call_target_d); | |
3345 | if (cache_token) | |
3346 | *cache_token = (void *)*slot; | |
3347 | (*slot)->type = type; | |
3348 | (*slot)->otr_token = otr_token; | |
3349 | (*slot)->context = context; | |
3350 | (*slot)->speculative_targets = 0; | |
3351 | ||
3352 | hash_set<tree> inserted; | |
3353 | hash_set<tree> matched_vtables; | |
3354 | ||
3355 | /* First insert targets we speculatively identified as likely. */ | |
3356 | if (context.speculative_outer_type) | |
3357 | { | |
3358 | odr_type speculative_outer_type; | |
3359 | bool speculation_complete = true; | |
3360 | ||
3361 | /* First insert the target from the type itself and check if it may have derived types. */ | |
3362 | speculative_outer_type = get_odr_type (context.speculative_outer_type, true); | |
3363 | if (TYPE_FINAL_P (speculative_outer_type->type)) | |
3364 | context.speculative_maybe_derived_type = false; | |
3365 | binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type), | |
3366 | context.speculative_offset, otr_type); | |
3367 | if (binfo) | |
3368 | target = gimple_get_virt_method_for_binfo (otr_token, binfo, | |
3369 | &can_refer); | |
3370 | else | |
3371 | target = NULL; | |
3372 | ||
3373 | /* In case we get a final method, we don't need | |
3374 | to walk derivations. */ | |
3375 | if (target && DECL_FINAL_P (target)) | |
3376 | context.speculative_maybe_derived_type = false; | |
3377 | if (type_possibly_instantiated_p (speculative_outer_type->type)) | |
3378 | maybe_record_node (nodes, target, &inserted, can_refer, &speculation_complete); | |
3379 | if (binfo) | |
3380 | matched_vtables.add (BINFO_VTABLE (binfo)); | |
3381 | ||
3382 | ||
3383 | /* Next walk recursively all derived types. */ | |
3384 | if (context.speculative_maybe_derived_type) | |
3385 | for (i = 0; i < speculative_outer_type->derived_types.length(); i++) | |
3386 | possible_polymorphic_call_targets_1 (nodes, &inserted, | |
3387 | &matched_vtables, | |
3388 | otr_type, | |
3389 | speculative_outer_type->derived_types[i], | |
3390 | otr_token, speculative_outer_type->type, | |
3391 | context.speculative_offset, | |
3392 | &speculation_complete, | |
3393 | bases_to_consider, | |
3394 | false); | |
3395 | (*slot)->speculative_targets = nodes.length(); | |
3396 | } | |
3397 | ||
3398 | /* First see the virtual method of the type itself. */ | |
3399 | binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type), | |
3400 | context.offset, otr_type); | |
3401 | if (binfo) | |
3402 | target = gimple_get_virt_method_for_binfo (otr_token, binfo, | |
3403 | &can_refer); | |
3404 | else | |
3405 | { | |
3406 | gcc_assert (odr_violation_reported); | |
3407 | target = NULL; | |
3408 | } | |
3409 | ||
3410 | /* Destructors are never called through construction virtual tables, | |
3411 | because the type is always known. */ | |
3412 | if (target && DECL_CXX_DESTRUCTOR_P (target)) | |
3413 | context.maybe_in_construction = false; | |
3414 | ||
3415 | if (target) | |
3416 | { | |
3417 | /* In case we get a final method, we don't need | |
3418 | to walk derivations. */ | |
3419 | if (DECL_FINAL_P (target)) | |
3420 | context.maybe_derived_type = false; | |
3421 | } | |
3422 | ||
3423 | /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */ | |
3424 | if (type_possibly_instantiated_p (outer_type->type)) | |
3425 | maybe_record_node (nodes, target, &inserted, can_refer, &complete); | |
3426 | else | |
3427 | { | |
3428 | skipped = true; | |
3429 | gcc_assert (in_lto_p || context.maybe_derived_type); | |
3430 | } | |
3431 | ||
3432 | if (binfo) | |
3433 | matched_vtables.add (BINFO_VTABLE (binfo)); | |
3434 | ||
3435 | /* Next walk recursively all derived types. */ | |
3436 | if (context.maybe_derived_type) | |
3437 | { | |
3438 | for (i = 0; i < outer_type->derived_types.length(); i++) | |
3439 | possible_polymorphic_call_targets_1 (nodes, &inserted, | |
3440 | &matched_vtables, | |
3441 | otr_type, | |
3442 | outer_type->derived_types[i], | |
3443 | otr_token, outer_type->type, | |
3444 | context.offset, &complete, | |
3445 | bases_to_consider, | |
3446 | context.maybe_in_construction); | |
3447 | ||
3448 | if (!outer_type->all_derivations_known) | |
3449 | { | |
3450 | if (final_warning_records) | |
3451 | { | |
3452 | if (complete | |
3453 | && nodes.length () == 1 | |
3454 | && warn_suggest_final_types | |
3455 | && !outer_type->derived_types.length ()) | |
3456 | { | |
3457 | if (outer_type->id >= (int)final_warning_records->type_warnings.length ()) | |
3458 | final_warning_records->type_warnings.safe_grow_cleared | |
3459 | (odr_types.length ()); | |
3460 | final_warning_records->type_warnings[outer_type->id].count++; | |
3461 | final_warning_records->type_warnings[outer_type->id].dyn_count | |
3462 | += final_warning_records->dyn_count; | |
3463 | final_warning_records->type_warnings[outer_type->id].type | |
3464 | = outer_type->type; | |
3465 | (*slot)->type_warning = outer_type->id + 1; | |
3466 | } | |
3467 | if (complete | |
3468 | && warn_suggest_final_methods | |
3469 | && nodes.length () == 1 | |
3470 | && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl), | |
3471 | outer_type->type)) | |
3472 | { | |
3473 | bool existed; | |
3474 | struct decl_warn_count &c = | |
3475 | final_warning_records->decl_warnings.get_or_insert | |
3476 | (nodes[0]->decl, &existed); | |
3477 | ||
3478 | if (existed) | |
3479 | { | |
3480 | c.count++; | |
3481 | c.dyn_count += final_warning_records->dyn_count; | |
3482 | } | |
3483 | else | |
3484 | { | |
3485 | c.count = 1; | |
3486 | c.dyn_count = final_warning_records->dyn_count; | |
3487 | c.decl = nodes[0]->decl; | |
3488 | } | |
3489 | (*slot)->decl_warning = nodes[0]->decl; | |
3490 | } | |
3491 | } | |
3492 | complete = false; | |
3493 | } | |
3494 | } | |
3495 | ||
3496 | /* Finally walk bases, if asked to. */ | |
3497 | if (!(*slot)->speculative_targets) | |
3498 | (*slot)->speculative_targets = nodes.length(); | |
3499 | ||
3500 | /* Destructors are never called through construction virtual tables, | |
3501 | because the type is always known. One of the entries may be cxa_pure_virtual, | |
3502 | so look at least at the first two of them. */ | |
3503 | if (context.maybe_in_construction) | |
3504 | for (i = 0; i < MIN (nodes.length (), 2); i++) | |
3505 | if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl)) | |
3506 | context.maybe_in_construction = false; | |
3507 | if (context.maybe_in_construction) | |
3508 | { | |
3509 | if (type != outer_type | |
3510 | && (!skipped | |
3511 | || (context.maybe_derived_type | |
3512 | && !type_all_derivations_known_p (outer_type->type)))) | |
3513 | record_targets_from_bases (otr_type, otr_token, outer_type->type, | |
3514 | context.offset, nodes, &inserted, | |
3515 | &matched_vtables, &complete); | |
3516 | if (skipped) | |
3517 | maybe_record_node (nodes, target, &inserted, can_refer, &complete); | |
3518 | for (i = 0; i < bases_to_consider.length(); i++) | |
3519 | maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete); | |
3520 | } | |
3521 | bases_to_consider.release(); | |
3522 | ||
3523 | (*slot)->targets = nodes; | |
3524 | (*slot)->complete = complete; | |
3525 | if (completep) | |
3526 | *completep = complete; | |
3527 | if (speculative_targetsp) | |
3528 | *speculative_targetsp = (*slot)->speculative_targets; | |
3529 | ||
3530 | timevar_pop (TV_IPA_VIRTUAL_CALL); | |
3531 | return nodes; | |
3532 | } | |
3533 | ||
3534 | bool | |
3535 | add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value, | |
3536 | vec<const decl_warn_count*> *vec) | |
3537 | { | |
3538 | vec->safe_push (&value); | |
3539 | return true; | |
3540 | } | |
3541 | ||
3542 | /* Dump all possible targets of a polymorphic call. */ | |
3543 | ||
3544 | void | |
3545 | dump_possible_polymorphic_call_targets (FILE *f, | |
3546 | tree otr_type, | |
3547 | HOST_WIDE_INT otr_token, | |
3548 | const ipa_polymorphic_call_context &ctx) | |
3549 | { | |
3550 | vec <cgraph_node *> targets; | |
3551 | bool final; | |
3552 | odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false); | |
3553 | unsigned int i; | |
3554 | int speculative; | |
3555 | ||
3556 | if (!type) | |
3557 | return; | |
3558 | targets = possible_polymorphic_call_targets (otr_type, otr_token, | |
3559 | ctx, | |
3560 | &final, NULL, &speculative); | |
3561 | fprintf (f, " Targets of polymorphic call of type %i:", type->id); | |
3562 | print_generic_expr (f, type->type, TDF_SLIM); | |
3563 | fprintf (f, " token %i\n", (int)otr_token); | |
3564 | if (ctx.outer_type || ctx.offset) | |
3565 | { | |
3566 | fprintf (f, " Contained in type:"); | |
3567 | print_generic_expr (f, ctx.outer_type, TDF_SLIM); | |
3568 | fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n", | |
3569 | ctx.offset); | |
3570 | } | |
3571 | if (ctx.speculative_outer_type) | |
3572 | { | |
3573 | fprintf (f, " Speculatively contained in type:"); | |
3574 | print_generic_expr (f, ctx.speculative_outer_type, TDF_SLIM); | |
3575 | fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n", | |
3576 | ctx.speculative_offset); | |
3577 | } | |
3578 | ||
3579 | fprintf (f, " %s%s%s%s\n ", | |
3580 | final ? "This is a complete list." : | |
3581 | "This is partial list; extra targets may be defined in other units.", | |
3582 | ctx.maybe_in_construction ? " (base types included)" : "", | |
3583 | ctx.maybe_derived_type ? " (derived types included)" : "", | |
3584 | ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : ""); | |
3585 | for (i = 0; i < targets.length (); i++) | |
3586 | { | |
3587 | char *name = NULL; | |
3588 | if (i == (unsigned)speculative) | |
3589 | fprintf (f, "\n Targets that are not likely:\n" | |
3590 | " "); | |
3591 | if (in_lto_p) | |
3592 | name = cplus_demangle_v3 (targets[i]->asm_name (), 0); | |
3593 | fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order); | |
3594 | if (in_lto_p) | |
3595 | free (name); | |
3596 | if (!targets[i]->definition) | |
3597 | fprintf (f, " (no definition%s)", | |
3598 | DECL_DECLARED_INLINE_P (targets[i]->decl) | |
3599 | ? " inline" : ""); | |
3600 | } | |
3601 | fprintf (f, "\n\n"); | |
3602 | } | |
3603 | ||
3604 | ||
3605 | /* Return true if N can be possibly target of a polymorphic call of | |
3606 | OTR_TYPE/OTR_TOKEN. */ | |
3607 | ||
3608 | bool | |
3609 | possible_polymorphic_call_target_p (tree otr_type, | |
3610 | HOST_WIDE_INT otr_token, | |
3611 | const ipa_polymorphic_call_context &ctx, | |
3612 | struct cgraph_node *n) | |
3613 | { | |
3614 | vec <cgraph_node *> targets; | |
3615 | unsigned int i; | |
3616 | enum built_in_function fcode; | |
3617 | bool final; | |
3618 | ||
3619 | if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE | |
3620 | && ((fcode = DECL_FUNCTION_CODE (n->decl)) | |
3621 | == BUILT_IN_UNREACHABLE | |
3622 | || fcode == BUILT_IN_TRAP)) | |
3623 | return true; | |
3624 | ||
3625 | if (!odr_hash) | |
3626 | return true; | |
3627 | targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final); | |
3628 | for (i = 0; i < targets.length (); i++) | |
3629 | if (n->semantically_equivalent_p (targets[i])) | |
3630 | return true; | |
3631 | ||
3632 | /* At the moment we allow the middle end to dig out new external declarations | |
3633 | as targets of polymorphic calls. */ | |
3634 | if (!final && !n->definition) | |
3635 | return true; | |
3636 | return false; | |
3637 | } | |
3638 | ||
3639 | ||
3640 | /* After callgraph construction new external nodes may appear. | |
3641 | Add them into the graph. */ | |
3642 | ||
3643 | void | |
3644 | update_type_inheritance_graph (void) | |
3645 | { | |
3646 | struct cgraph_node *n; | |
3647 | ||
3648 | if (!odr_hash) | |
3649 | return; | |
3650 | free_polymorphic_call_targets_hash (); | |
3651 | timevar_push (TV_IPA_INHERITANCE); | |
3652 | /* We reconstruct the graph starting from types of all methods seen in | |
3653 | the unit. */ | |
3654 | FOR_EACH_FUNCTION (n) | |
3655 | if (DECL_VIRTUAL_P (n->decl) | |
3656 | && !n->definition | |
3657 | && n->real_symbol_p ()) | |
3658 | get_odr_type (method_class_type (TYPE_MAIN_VARIANT (TREE_TYPE (n->decl))), | |
3659 | true); | |
3660 | timevar_pop (TV_IPA_INHERITANCE); | |
3661 | } | |
3662 | ||
3663 | ||
3664 | /* Return true if N looks like a likely target of a polymorphic call. | |
3665 | Rule out cxa_pure_virtual, noreturn functions, functions declared cold and | |
3666 | other obvious cases. */ | |
3667 | ||
3668 | bool | |
3669 | likely_target_p (struct cgraph_node *n) | |
3670 | { | |
3671 | int flags; | |
3672 | /* cxa_pure_virtual and similar things are not likely. */ | |
3673 | if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE) | |
3674 | return false; | |
3675 | flags = flags_from_decl_or_type (n->decl); | |
3676 | if (flags & ECF_NORETURN) | |
3677 | return false; | |
3678 | if (lookup_attribute ("cold", | |
3679 | DECL_ATTRIBUTES (n->decl))) | |
3680 | return false; | |
3681 | if (n->frequency < NODE_FREQUENCY_NORMAL) | |
3682 | return false; | |
3683 | /* If there are no live virtual tables referring to the target, | |
3684 | the only way the target can be called is via an instance coming from another | |
3685 | compilation unit; speculative devirtualization is built around the | |
3686 | assumption that this won't happen. */ | |
3687 | if (!referenced_from_vtable_p (n)) | |
3688 | return false; | |
3689 | return true; | |
3690 | } | |
3691 | ||
3692 | /* Compare type warning records P1 and P2 and choose the one with the larger | |
3693 | count; helper for qsort. */ | |
3694 | ||
3695 | int | |
3696 | type_warning_cmp (const void *p1, const void *p2) | |
3697 | { | |
3698 | const odr_type_warn_count *t1 = (const odr_type_warn_count *)p1; | |
3699 | const odr_type_warn_count *t2 = (const odr_type_warn_count *)p2; | |
3700 | ||
3701 | if (t1->dyn_count < t2->dyn_count) | |
3702 | return 1; | |
3703 | if (t1->dyn_count > t2->dyn_count) | |
3704 | return -1; | |
3705 | return t2->count - t1->count; | |
3706 | } | |
3707 | ||
3708 | /* Compare decl warning records P1 and P2 and choose the one with the larger | |
3709 | count; helper for qsort. */ | |
3710 | ||
3711 | int | |
3712 | decl_warning_cmp (const void *p1, const void *p2) | |
3713 | { | |
3714 | const decl_warn_count *t1 = *(const decl_warn_count * const *)p1; | |
3715 | const decl_warn_count *t2 = *(const decl_warn_count * const *)p2; | |
3716 | ||
3717 | if (t1->dyn_count < t2->dyn_count) | |
3718 | return 1; | |
3719 | if (t1->dyn_count > t2->dyn_count) | |
3720 | return -1; | |
3721 | return t2->count - t1->count; | |
3722 | } | |
3723 | ||
3724 | /* The ipa-devirt pass. | |
3725 | When a polymorphic call has only one likely target in the unit, | |
3726 | turn it into a speculative call. */ | |
3727 | ||
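 | /* Conceptually (hypothetical C++-level view; the actual transformation is | |
 | performed on the call graph by cgraph_turn_edge_to_speculative): | |
 | ||
 | obj->f (); // polymorphic call with a single likely target B::f | |
 | ||
 | becomes | |
 | ||
 | if (obj points to B's virtual table) // speculation guard | |
 | B::f (obj); // direct call, may be inlined | |
 | else | |
 | obj->f (); // fallback indirect call */ | |
 | ||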
3728 | static unsigned int | |
3729 | ipa_devirt (void) | |
3730 | { | |
3731 | struct cgraph_node *n; | |
3732 | hash_set<void *> bad_call_targets; | |
3733 | struct cgraph_edge *e; | |
3734 | ||
3735 | int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0; | |
3736 | int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0; | |
3737 | int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0; | |
3738 | ||
3739 | /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings. | |
3740 | This is implemented by setting up final_warning_records that are updated | |
3741 | by possible_polymorphic_call_targets. | |
3742 | We need to clear the cache in this case to trigger recomputation of all | |
3743 | entries. */ | |
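 | /* For example (hypothetical type): if no type derived from "struct S" | |
 | exists anywhere in the program, declaring it "struct S final { ... };" | |
 | lets calls through "S *" be devirtualized; -Wsuggest-final-types counts | |
 | how many calls would benefit and suggests this. */ | |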
3744 | if (warn_suggest_final_methods || warn_suggest_final_types) | |
3745 | { | |
3746 | final_warning_records = new (final_warning_record); | |
3747 | final_warning_records->type_warnings = vNULL; | |
3748 | final_warning_records->type_warnings.safe_grow_cleared (odr_types.length ()); | |
3749 | free_polymorphic_call_targets_hash (); | |
3750 | } | |
3751 | ||
3752 | FOR_EACH_DEFINED_FUNCTION (n) | |
3753 | { | |
3754 | bool update = false; | |
3755 | if (dump_file && n->indirect_calls) | |
3756 | fprintf (dump_file, "\n\nProcessing function %s/%i\n", | |
3757 | n->name (), n->order); | |
3758 | for (e = n->indirect_calls; e; e = e->next_callee) | |
3759 | if (e->indirect_info->polymorphic) | |
3760 | { | |
3761 | struct cgraph_node *likely_target = NULL; | |
3762 | void *cache_token; | |
3763 | bool final; | |
3764 | int speculative_targets; | |
3765 | ||
3766 | if (final_warning_records) | |
3767 | final_warning_records->dyn_count = e->count; | |
3768 | ||
3769 | vec <cgraph_node *>targets | |
3770 | = possible_polymorphic_call_targets | |
3771 | (e, &final, &cache_token, &speculative_targets); | |
3772 | unsigned int i; | |
3773 | ||
3774 | if (dump_file) | |
3775 | dump_possible_polymorphic_call_targets | |
3776 | (dump_file, e); | |
3777 | ||
3778 | npolymorphic++; | |
3779 | ||
3780 | if (!flag_devirtualize_speculatively) | |
3781 | continue; | |
3782 | ||
3783 | if (!cgraph_maybe_hot_edge_p (e)) | |
3784 | { | |
3785 | if (dump_file) | |
3786 | fprintf (dump_file, "Call is cold\n\n"); | |
3787 | ncold++; | |
3788 | continue; | |
3789 | } | |
3790 | if (e->speculative) | |
3791 | { | |
3792 | if (dump_file) | |
3793 | fprintf (dump_file, "Call is aready speculated\n\n"); | |
3794 | nspeculated++; | |
3795 | ||
3796 | /* When dumping see if we agree with speculation. */ | |
3797 | if (!dump_file) | |
3798 | continue; | |
3799 | } | |
3800 | if (bad_call_targets.contains (cache_token)) | |
3801 | { | |
3802 | if (dump_file) | |
3803 | fprintf (dump_file, "Target list is known to be useless\n\n"); | |
3804 | nmultiple++; | |
3805 | continue; | |
3806 | } | |
3807 | for (i = 0; i < targets.length (); i++) | |
3808 | if (likely_target_p (targets[i])) | |
3809 | { | |
3810 | if (likely_target) | |
3811 | { | |
3812 | if (i < (unsigned) speculative_targets) | |
3813 | { | |
3814 | likely_target = NULL; | |
3815 | if (dump_file) | |
3816 | fprintf (dump_file, "More than one likely target\n\n"); | |
3817 | nmultiple++; | |
3818 | } | |
3819 | break; | |
3820 | } | |
3821 | likely_target = targets[i]; | |
3822 | } | |
3823 | if (!likely_target) | |
3824 | { | |
3825 | bad_call_targets.add (cache_token); | |
3826 | continue; | |
3827 | } | |
3828 | /* This is reached only when dumping; check if we agree or disagree | |
3829 | with the speculation. */ | |
3830 | if (e->speculative) | |
3831 | { | |
3832 | struct cgraph_edge *e2; | |
3833 | struct ipa_ref *ref; | |
3834 | cgraph_speculative_call_info (e, e2, e, ref); | |
3835 | if (e2->callee->ultimate_alias_target () | |
3836 | == likely_target->ultimate_alias_target ()) | |
3837 | { | |
3838 | fprintf (dump_file, "We agree with speculation\n\n"); | |
3839 | nok++; | |
3840 | } | |
3841 | else | |
3842 | { | |
3843 | fprintf (dump_file, "We disagree with speculation\n\n"); | |
3844 | nwrong++; | |
3845 | } | |
3846 | continue; | |
3847 | } | |
3848 | if (!likely_target->definition) | |
3849 | { | |
3850 | if (dump_file) | |
3851 | fprintf (dump_file, "Target is not an definition\n\n"); | |
3852 | nnotdefined++; | |
3853 | continue; | |
3854 | } | |
3855 | /* Do not introduce new references to external symbols. While we | |
3856 | can handle these just well, it is common for programs to be built | |
3857 | incorrectly with headers defining methods they are linked | |
3858 | with. */ | |
3859 | if (DECL_EXTERNAL (likely_target->decl)) | |
3860 | { | |
3861 | if (dump_file) | |
3862 | fprintf (dump_file, "Target is external\n\n"); | |
3863 | nexternal++; | |
3864 | continue; | |
3865 | } | |
3866 | /* Don't use an implicitly-declared destructor (c++/58678). */ | |
3867 | struct cgraph_node *non_thunk_target | |
3868 | = likely_target->function_symbol (); | |
3869 | if (DECL_ARTIFICIAL (non_thunk_target->decl) | |
3870 | && DECL_COMDAT (non_thunk_target->decl)) | |
3871 | { | |
3872 | if (dump_file) | |
3873 | fprintf (dump_file, "Target is artificial\n\n"); | |
3874 | nartificial++; | |
3875 | continue; | |
3876 | } | |
3877 | if (likely_target->get_availability () <= AVAIL_INTERPOSABLE | |
3878 | && likely_target->can_be_discarded_p ()) | |
3879 | { | |
3880 | if (dump_file) | |
3881 | fprintf (dump_file, "Target is overwritable\n\n"); | |
3882 | noverwritable++; | |
3883 | continue; | |
3884 | } | |
3885 | else if (dbg_cnt (devirt)) | |
3886 | { | |
3887 | if (dump_enabled_p ()) | |
3888 | { | |
3889 | location_t locus = gimple_location_safe (e->call_stmt); | |
3890 | dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus, | |
3891 | "speculatively devirtualizing call in %s/%i to %s/%i\n", | |
3892 | n->name (), n->order, | |
3893 | likely_target->name (), | |
3894 | likely_target->order); | |
3895 | } | |
3896 | if (!likely_target->can_be_discarded_p ()) | |
3897 | { | |
3898 | cgraph_node *alias; | |
3899 | alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ()); | |
3900 | if (alias) | |
3901 | likely_target = alias; | |
3902 | } | |
3903 | nconverted++; | |
3904 | update = true; | |
3905 | cgraph_turn_edge_to_speculative | |
3906 | (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10); | |
3907 | } | |
3908 | } | |
3909 | if (update) | |
3910 | inline_update_overall_summary (n); | |
3911 | } | |
3912 | if (warn_suggest_final_methods || warn_suggest_final_types) | |
3913 | { | |
3914 | if (warn_suggest_final_types) | |
3915 | { | |
3916 | final_warning_records->type_warnings.qsort (type_warning_cmp); | |
3917 | for (unsigned int i = 0; | |
3918 | i < final_warning_records->type_warnings.length (); i++) | |
3919 | if (final_warning_records->type_warnings[i].count) | |
3920 | { | |
3921 | tree type = final_warning_records->type_warnings[i].type; | |
3922 | warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), | |
3923 | OPT_Wsuggest_final_types, | |
3924 | "Declaring type %qD final " | |
3925 | "would enable devirtualization of %i calls", | |
3926 | type, | |
3927 | final_warning_records->type_warnings[i].count); | |
3928 | } | |
3929 | } | |
3930 | ||
3931 | if (warn_suggest_final_methods) | |
3932 | { | |
3933 | vec<const decl_warn_count*> decl_warnings_vec = vNULL; | |
3934 | ||
3935 | final_warning_records->decl_warnings.traverse | |
3936 | <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec); | |
3937 | decl_warnings_vec.qsort (decl_warning_cmp); | |
3938 | for (unsigned int i = 0; i < decl_warnings_vec.length (); i++) | |
3939 | { | |
3940 | tree decl = decl_warnings_vec[i]->decl; | |
3941 | int count = decl_warnings_vec[i]->count; | |
3942 | ||
3943 | if (DECL_CXX_DESTRUCTOR_P (decl)) | |
3944 | warning_at (DECL_SOURCE_LOCATION (decl), | |
3945 | OPT_Wsuggest_final_methods, | |
3946 | "Declaring virtual destructor of %qD final " | |
3947 | "would enable devirtualization of %i calls", | |
3948 | DECL_CONTEXT (decl), count); | |
3949 | else | |
3950 | warning_at (DECL_SOURCE_LOCATION (decl), | |
3951 | OPT_Wsuggest_final_methods, | |
3952 | "Declaring method %qD final " | |
3953 | "would enable devirtualization of %i calls", | |
3954 | decl, count); | |
3955 | } | |
3956 | } | |
3957 | ||
3958 | delete (final_warning_records); | |
3959 | final_warning_records = 0; | |
3960 | } | |
3961 | ||
3962 | if (dump_file) | |
3963 | fprintf (dump_file, | |
3964 | "%i polymorphic calls, %i devirtualized," | |
3965 | " %i speculatively devirtualized, %i cold\n" | |
3966 | "%i have multiple targets, %i overwritable," | |
3967 | " %i already speculated (%i agree, %i disagree)," | |
3968 | " %i external, %i not defined, %i artificial\n", | |
3969 | npolymorphic, ndevirtualized, nconverted, ncold, | |
3970 | nmultiple, noverwritable, nspeculated, nok, nwrong, | |
3971 | nexternal, nnotdefined, nartificial); | |
3972 | return ndevirtualized ? TODO_remove_functions : 0; | |
3973 | } | |
3974 | ||
3975 | namespace { | |
3976 | ||
3977 | const pass_data pass_data_ipa_devirt = | |
3978 | { | |
3979 | IPA_PASS, /* type */ | |
3980 | "devirt", /* name */ | |
3981 | OPTGROUP_NONE, /* optinfo_flags */ | |
3982 | TV_IPA_DEVIRT, /* tv_id */ | |
3983 | 0, /* properties_required */ | |
3984 | 0, /* properties_provided */ | |
3985 | 0, /* properties_destroyed */ | |
3986 | 0, /* todo_flags_start */ | |
3987 | ( TODO_dump_symtab ), /* todo_flags_finish */ | |
3988 | }; | |
3989 | ||
3990 | class pass_ipa_devirt : public ipa_opt_pass_d | |
3991 | { | |
3992 | public: | |
3993 | pass_ipa_devirt (gcc::context *ctxt) | |
3994 | : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt, | |
3995 | NULL, /* generate_summary */ | |
3996 | NULL, /* write_summary */ | |
3997 | NULL, /* read_summary */ | |
3998 | NULL, /* write_optimization_summary */ | |
3999 | NULL, /* read_optimization_summary */ | |
4000 | NULL, /* stmt_fixup */ | |
4001 | 0, /* function_transform_todo_flags_start */ | |
4002 | NULL, /* function_transform */ | |
4003 | NULL) /* variable_transform */ | |
4004 | {} | |
4005 | ||
4006 | /* opt_pass methods: */ | |
4007 | virtual bool gate (function *) | |
4008 | { | |
4009 | return (flag_devirtualize | |
4010 | && (flag_devirtualize_speculatively | |
4011 | || (warn_suggest_final_methods | |
4012 | || warn_suggest_final_types)) | |
4013 | && optimize); | |
4014 | } | |
4015 | ||
4016 | virtual unsigned int execute (function *) { return ipa_devirt (); } | |
4017 | ||
4018 | }; // class pass_ipa_devirt | |
4019 | ||
4020 | } // anon namespace | |
4021 | ||
4022 | ipa_opt_pass_d * | |
4023 | make_pass_ipa_devirt (gcc::context *ctxt) | |
4024 | { | |
4025 | return new pass_ipa_devirt (ctxt); | |
4026 | } | |
4027 | ||
4028 | #include "gt-ipa-devirt.h" |