]>
Commit | Line | Data |
---|---|---|
3b22db66 | 1 | /* Interprocedural analyses. |
fbd26352 | 2 | Copyright (C) 2005-2019 Free Software Foundation, Inc. |
3b22db66 | 3 | |
4 | This file is part of GCC. | |
5 | ||
6 | GCC is free software; you can redistribute it and/or modify it under | |
7 | the terms of the GNU General Public License as published by the Free | |
8c4c00c1 | 8 | Software Foundation; either version 3, or (at your option) any later |
3b22db66 | 9 | version. |
10 | ||
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
14 | for more details. | |
15 | ||
16 | You should have received a copy of the GNU General Public License | |
8c4c00c1 | 17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
3b22db66 | 19 | |
20 | #include "config.h" | |
21 | #include "system.h" | |
22 | #include "coretypes.h" | |
9ef16211 | 23 | #include "backend.h" |
7c29e30e | 24 | #include "rtl.h" |
b20a8bb4 | 25 | #include "tree.h" |
9ef16211 | 26 | #include "gimple.h" |
7c29e30e | 27 | #include "alloc-pool.h" |
28 | #include "tree-pass.h" | |
9ef16211 | 29 | #include "ssa.h" |
7c29e30e | 30 | #include "tree-streamer.h" |
31 | #include "cgraph.h" | |
32 | #include "diagnostic.h" | |
b20a8bb4 | 33 | #include "fold-const.h" |
bc61cadb | 34 | #include "gimple-fold.h" |
35 | #include "tree-eh.h" | |
d53441c8 | 36 | #include "calls.h" |
9ed99284 | 37 | #include "stor-layout.h" |
38 | #include "print-tree.h" | |
a8783bee | 39 | #include "gimplify.h" |
dcf1a1ec | 40 | #include "gimple-iterator.h" |
e795d6e1 | 41 | #include "gimplify-me.h" |
dcf1a1ec | 42 | #include "gimple-walk.h" |
2cc80ac3 | 43 | #include "symbol-summary.h" |
3b22db66 | 44 | #include "ipa-prop.h" |
073c1fd5 | 45 | #include "tree-cfg.h" |
073c1fd5 | 46 | #include "tree-dfa.h" |
545eff8f | 47 | #include "tree-inline.h" |
b9a58fc5 | 48 | #include "ipa-fnsummary.h" |
ce084dfc | 49 | #include "gimple-pretty-print.h" |
699f00b5 | 50 | #include "params.h" |
10fba9c0 | 51 | #include "ipa-utils.h" |
ceb49bba | 52 | #include "dbgcnt.h" |
24430d08 | 53 | #include "domwalk.h" |
f7715905 | 54 | #include "builtins.h" |
9eafff7e | 55 | #include "tree-cfgcleanup.h" |
545eff8f | 56 | |
/* Function summary where the parameter infos are actually stored. */
ipa_node_params_t *ipa_node_params_sum = NULL;

/* Function summary of IPA-CP transformation data, one entry per node.  */
function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information. */
ipa_edge_args_sum_t *ipa_edge_args_sum;
545eff8f | 64 | |
/* Traits for a hash table for reusing already existing ipa_bits. */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;
  /* Hash both the known value and the mask of an ipa_bits entry.  */
  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }
  /* Two entries are interchangeable iff both value and mask are equal.  */
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
  {
    return a->value == b->value && a->mask == b->mask;
  }
  /* Empty slots are represented by NULL pointers.  */
  static void
  mark_empty (ipa_bits *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const ipa_bits *p)
  {
    return p == NULL;
  }
  /* Deleted slots are represented by the otherwise invalid pointer 1.  */
  static bool
  is_deleted (const ipa_bits *p)
  {
    return p == reinterpret_cast<const ipa_bits *> (1);
  }
  static void
  mark_deleted (ipa_bits *&p)
  {
    p = reinterpret_cast<ipa_bits *> (1);
  }
};

/* Hash table for avoid repeated allocations of equal ipa_bits. */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
106 | ||
/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL. */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range_base *>
{
  typedef value_range_base *value_type;
  typedef value_range_base *compare_type;
  /* Hash the range kind together with the min and max bound trees.  */
  static hashval_t
  hash (const value_range_base *p)
  {
    inchash::hash hstate (p->kind ());
    inchash::add_expr (p->min (), hstate);
    inchash::add_expr (p->max (), hstate);
    return hstate.end ();
  }
  static bool
  equal (const value_range_base *a, const value_range_base *b)
  {
    return a->equal_p (*b);
  }
  /* Empty slots are represented by NULL pointers.  */
  static void
  mark_empty (value_range_base *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const value_range_base *p)
  {
    return p == NULL;
  }
  /* Deleted slots are represented by the otherwise invalid pointer 1.  */
  static bool
  is_deleted (const value_range_base *p)
  {
    return p == reinterpret_cast<const value_range_base *> (1);
  }
  static void
  mark_deleted (value_range_base *&p)
  {
    p = reinterpret_cast<value_range_base *> (1);
  }
};

/* Hash table for avoid repeated allocations of equal value_ranges. */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
151 | ||
/* Holders of ipa cgraph hooks: hook run when a new function is inserted
   into the call graph.  */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
3b22db66 | 154 | |
/* Description of a reference to an IPA constant. */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference. */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned. */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control. */
  int refcount;
};

/* Allocation pool for reference descriptions. */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
096295f6 | 171 | |
6c0a4a25 | 172 | /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated |
173 | with NODE should prevent us from analyzing it for the purposes of IPA-CP. */ | |
174 | ||
175 | static bool | |
176 | ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node) | |
177 | { | |
02774f2d | 178 | tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl); |
6c0a4a25 | 179 | |
180 | if (!fs_opts) | |
181 | return false; | |
d1f68cd8 | 182 | return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp); |
6c0a4a25 | 183 | } |
184 | ||
/* Return index of the formal whose tree is PTREE in the vector of parameter
   DESCRIPTORS, or -1 if it is not present.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}
201 | ||
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO, or -1 if there is no such formal.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
210 | ||
211 | /* Populate the param_decl field in parameter DESCRIPTORS that correspond to | |
212 | NODE. */ | |
1917e945 | 213 | |
3f2ff969 | 214 | static void |
215 | ipa_populate_param_decls (struct cgraph_node *node, | |
7af25a10 | 216 | vec<ipa_param_descriptor, va_gc> &descriptors) |
3b22db66 | 217 | { |
218 | tree fndecl; | |
219 | tree fnargs; | |
220 | tree parm; | |
221 | int param_num; | |
f8daee9b | 222 | |
02774f2d | 223 | fndecl = node->decl; |
09ab6335 | 224 | gcc_assert (gimple_has_body_p (fndecl)); |
3b22db66 | 225 | fnargs = DECL_ARGUMENTS (fndecl); |
226 | param_num = 0; | |
1767a056 | 227 | for (parm = fnargs; parm; parm = DECL_CHAIN (parm)) |
3b22db66 | 228 | { |
a54071b2 | 229 | descriptors[param_num].decl_or_type = parm; |
96c7c99c | 230 | descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm), |
231 | true); | |
3b22db66 | 232 | param_num++; |
233 | } | |
234 | } | |
235 | ||
547f1802 | 236 | /* Return how many formal parameters FNDECL has. */ |
237 | ||
09809ecc | 238 | int |
821d0e0f | 239 | count_formal_params (tree fndecl) |
547f1802 | 240 | { |
241 | tree parm; | |
242 | int count = 0; | |
09ab6335 | 243 | gcc_assert (gimple_has_body_p (fndecl)); |
547f1802 | 244 | |
1767a056 | 245 | for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm)) |
547f1802 | 246 | count++; |
247 | ||
248 | return count; | |
249 | } | |
250 | ||
/* Dump the index and, when known, the declaration or type of the I-th formal
   parameter of the function described by INFO to FILE.  (The previous comment
   here was copy-pasted from an accessor and did not describe this dumper.)  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}
265 | ||
/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  /* Nothing to do for parameter-less functions or when descriptors already
     exist.  */
  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count);
      return true;
    }
  else
    return false;
}
282 | ||
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  /* Only populate the decls when this call actually allocated the vector;
     otherwise the work has been done before.  */
  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
296 | ||
7115ea05 | 297 | /* Print the jump functions associated with call graph edge CS to file F. */ |
298 | ||
299 | static void | |
300 | ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs) | |
301 | { | |
302 | int i, count; | |
303 | ||
304 | count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs)); | |
305 | for (i = 0; i < count; i++) | |
306 | { | |
307 | struct ipa_jump_func *jump_func; | |
308 | enum jump_func_type type; | |
309 | ||
310 | jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i); | |
311 | type = jump_func->type; | |
312 | ||
313 | fprintf (f, " param %d: ", i); | |
314 | if (type == IPA_JF_UNKNOWN) | |
315 | fprintf (f, "UNKNOWN\n"); | |
7115ea05 | 316 | else if (type == IPA_JF_CONST) |
317 | { | |
096295f6 | 318 | tree val = jump_func->value.constant.value; |
7115ea05 | 319 | fprintf (f, "CONST: "); |
1ffa4346 | 320 | print_generic_expr (f, val); |
7115ea05 | 321 | if (TREE_CODE (val) == ADDR_EXPR |
322 | && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL) | |
323 | { | |
324 | fprintf (f, " -> "); | |
1ffa4346 | 325 | print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0))); |
7115ea05 | 326 | } |
327 | fprintf (f, "\n"); | |
328 | } | |
7115ea05 | 329 | else if (type == IPA_JF_PASS_THROUGH) |
330 | { | |
331 | fprintf (f, "PASS THROUGH: "); | |
0d491188 | 332 | fprintf (f, "%d, op %s", |
7115ea05 | 333 | jump_func->value.pass_through.formal_id, |
f3d35d4d | 334 | get_tree_code_name(jump_func->value.pass_through.operation)); |
7115ea05 | 335 | if (jump_func->value.pass_through.operation != NOP_EXPR) |
0d491188 | 336 | { |
337 | fprintf (f, " "); | |
1ffa4346 | 338 | print_generic_expr (f, jump_func->value.pass_through.operand); |
0d491188 | 339 | } |
340 | if (jump_func->value.pass_through.agg_preserved) | |
341 | fprintf (f, ", agg_preserved"); | |
734eb2d7 | 342 | fprintf (f, "\n"); |
7115ea05 | 343 | } |
344 | else if (type == IPA_JF_ANCESTOR) | |
345 | { | |
346 | fprintf (f, "ANCESTOR: "); | |
f03df321 | 347 | fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC, |
7115ea05 | 348 | jump_func->value.ancestor.formal_id, |
349 | jump_func->value.ancestor.offset); | |
0d491188 | 350 | if (jump_func->value.ancestor.agg_preserved) |
351 | fprintf (f, ", agg_preserved"); | |
734eb2d7 | 352 | fprintf (f, "\n"); |
7115ea05 | 353 | } |
0d491188 | 354 | |
355 | if (jump_func->agg.items) | |
356 | { | |
357 | struct ipa_agg_jf_item *item; | |
358 | int j; | |
359 | ||
360 | fprintf (f, " Aggregate passed by %s:\n", | |
361 | jump_func->agg.by_ref ? "reference" : "value"); | |
f1f41a6c | 362 | FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item) |
0d491188 | 363 | { |
364 | fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ", | |
365 | item->offset); | |
366 | if (TYPE_P (item->value)) | |
367 | fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits", | |
e913b5cd | 368 | tree_to_uhwi (TYPE_SIZE (item->value))); |
0d491188 | 369 | else |
370 | { | |
371 | fprintf (f, "cst: "); | |
1ffa4346 | 372 | print_generic_expr (f, item->value); |
0d491188 | 373 | } |
374 | fprintf (f, "\n"); | |
375 | } | |
376 | } | |
245ab191 | 377 | |
378 | struct ipa_polymorphic_call_context *ctx | |
379 | = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i); | |
380 | if (ctx && !ctx->useless_p ()) | |
381 | { | |
382 | fprintf (f, " Context: "); | |
383 | ctx->dump (dump_file); | |
384 | } | |
ae7b7bc8 | 385 | |
97cb825b | 386 | if (jump_func->bits) |
a54071b2 | 387 | { |
97cb825b | 388 | fprintf (f, " value: "); |
389 | print_hex (jump_func->bits->value, f); | |
390 | fprintf (f, ", mask: "); | |
391 | print_hex (jump_func->bits->mask, f); | |
a54071b2 | 392 | fprintf (f, "\n"); |
393 | } | |
394 | else | |
395 | fprintf (f, " Unknown bits\n"); | |
25a8e007 | 396 | |
97cb825b | 397 | if (jump_func->m_vr) |
25a8e007 | 398 | { |
399 | fprintf (f, " VR "); | |
400 | fprintf (f, "%s[", | |
be44111e | 401 | (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : ""); |
402 | print_decs (wi::to_wide (jump_func->m_vr->min ()), f); | |
25a8e007 | 403 | fprintf (f, ", "); |
be44111e | 404 | print_decs (wi::to_wide (jump_func->m_vr->max ()), f); |
25a8e007 | 405 | fprintf (f, "]\n"); |
406 | } | |
407 | else | |
408 | fprintf (f, " Unknown VR\n"); | |
7115ea05 | 409 | } |
410 | } | |
411 | ||
412 | ||
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
  /* First dump all direct call sites.  */
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, " callsite %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  /* Then all indirect call sites, with a description of how the callee is
     determined.  */
  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	/* NOTE(review): "by_value" below is inconsistent with "by reference"
	   (underscore vs space); dump strings may be pattern-matched by
	   tests, so it is left as-is — confirm before changing.  */
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
464 | ||
465 | /* Print ipa_jump_func data structures of all nodes in the call graph to F. */ | |
1917e945 | 466 | |
f8daee9b | 467 | void |
468 | ipa_print_all_jump_functions (FILE *f) | |
469 | { | |
470 | struct cgraph_node *node; | |
471 | ||
11b73810 | 472 | fprintf (f, "\nJump functions:\n"); |
7c455d87 | 473 | FOR_EACH_FUNCTION (node) |
f8daee9b | 474 | { |
475 | ipa_print_node_jump_functions (f, node); | |
476 | } | |
477 | } | |
478 | ||
/* Set JFUNC to be a jump function recording no knowledge at all, clearing
   the known-bits and value-range information as well.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->bits = NULL;
  jfunc->m_vr = NULL;
}
488 | ||
/* Set JFUNC to be a copy of another jmp (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)

{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  /* Copying value.constant wholesale also copies the rdesc pointer, which
     is how the sharing mentioned above happens.  */
  dst->value.constant = src->value.constant;
}
501 | ||
/* Set JFUNC to be a constant jmp function.  CS is the edge at which the
   constant was discovered.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  /* Addresses of functions get a reference descriptor so the reference can
     be followed through edge duplicates (see ipa_cst_ref_desc).  */
  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
525 | ||
526 | /* Set JFUNC to be a simple pass-through jump function. */ | |
527 | static void | |
0d491188 | 528 | ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id, |
693010ae | 529 | bool agg_preserved) |
4fa83f96 | 530 | { |
531 | jfunc->type = IPA_JF_PASS_THROUGH; | |
532 | jfunc->value.pass_through.operand = NULL_TREE; | |
533 | jfunc->value.pass_through.formal_id = formal_id; | |
534 | jfunc->value.pass_through.operation = NOP_EXPR; | |
0d491188 | 535 | jfunc->value.pass_through.agg_preserved = agg_preserved; |
4fa83f96 | 536 | } |
537 | ||
/* Set JFUNC to be an unary pass through jump function: OPERATION is applied
   to formal FORMAL_ID; no extra operand is involved.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an arithmetic pass through jump function: OPERATION with
   constant OPERAND is applied to formal FORMAL_ID.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
562 | ||
563 | /* Set JFUNC to be an ancestor jump function. */ | |
564 | ||
565 | static void | |
566 | ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset, | |
693010ae | 567 | int formal_id, bool agg_preserved) |
4fa83f96 | 568 | { |
569 | jfunc->type = IPA_JF_ANCESTOR; | |
570 | jfunc->value.ancestor.formal_id = formal_id; | |
571 | jfunc->value.ancestor.offset = offset; | |
0d491188 | 572 | jfunc->value.ancestor.agg_preserved = agg_preserved; |
bee52153 | 573 | } |
574 | ||
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
584 | ||
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
599 | ||
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.
  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  /* Calls are handled by the caller (see the big comment above).  */
  if (is_gimple_call (stmt))
    return false;
  /* Clobbers end lifetimes, they never store a VMT pointer.  */
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  /* Conservatively assume anything else may store a VMT pointer.  */
  return true;
}
664 | ||
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  /* Returning true terminates the walk as soon as a possible VMT store is
     seen.  */
  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
684 | ||
/* See if ARG is PARAM_DECl describing instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   in between beginning of the function until CALL is invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also as a special cases, constructor and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within inlined constructor
     or destructor (ideally we would have way to check that the
     inline cdtor is actually working on ARG, but we don't have
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
22bf03ad | 738 | |
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
				       tree base, tree comp_type, gcall *call,
				       struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  /* Set up an alias reference describing the VMT-pointer-sized access at
     OFFSET within ARG.  */
  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  /* Walk the virtual defs before CALL; a negative result means the walk ran
     out of budget and we must assume a change.  */
  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
			  &tci, NULL, NULL, fbi->aa_walk_budget + 1);

  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}
794 | ||
1b613a0a | 795 | /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed. |
796 | If it is, return true and fill in the jump function JFUNC with relevant type | |
797 | information or set it to unknown. ARG is the object itself (not a pointer | |
798 | to it, unless dereferenced). BASE is the base of the memory access as | |
799 | returned by get_ref_base_and_extent, as is the offset. */ | |
800 | ||
801 | static bool | |
915df3d8 | 802 | detect_type_change (ipa_func_body_info *fbi, tree arg, tree base, |
803 | tree comp_type, gcall *call, struct ipa_jump_func *jfunc, | |
804 | HOST_WIDE_INT offset) | |
1b613a0a | 805 | { |
806 | if (!flag_devirtualize) | |
807 | return false; | |
808 | ||
809 | if (TREE_CODE (base) == MEM_REF | |
810 | && !param_type_may_change_p (current_function_decl, | |
811 | TREE_OPERAND (base, 0), | |
812 | call)) | |
813 | return false; | |
915df3d8 | 814 | return detect_type_change_from_memory_writes (fbi, arg, base, comp_type, |
1b613a0a | 815 | call, jfunc, offset); |
816 | } | |
817 | ||
7af23aa4 | 818 | /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer |
819 | SSA name (its dereference will become the base and the offset is assumed to | |
820 | be zero). */ | |
821 | ||
822 | static bool | |
915df3d8 | 823 | detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type, |
1a91d914 | 824 | gcall *call, struct ipa_jump_func *jfunc) |
7af23aa4 | 825 | { |
826 | gcc_checking_assert (TREE_CODE (arg) == SSA_NAME); | |
16358a63 | 827 | if (!flag_devirtualize |
185c1f3a | 828 | || !POINTER_TYPE_P (TREE_TYPE (arg))) |
7af23aa4 | 829 | return false; |
830 | ||
1b613a0a | 831 | if (!param_type_may_change_p (current_function_decl, arg, call)) |
832 | return false; | |
833 | ||
7af23aa4 | 834 | arg = build2 (MEM_REF, ptr_type_node, arg, |
22bf03ad | 835 | build_int_cst (ptr_type_node, 0)); |
7af23aa4 | 836 | |
915df3d8 | 837 | return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type, |
1b613a0a | 838 | call, jfunc, 0); |
7af23aa4 | 839 | } |
840 | ||
ad2ffc0d | 841 | /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the |
842 | boolean variable pointed to by DATA. */ | |
843 | ||
844 | static bool | |
845 | mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED, | |
846 | void *data) | |
847 | { | |
848 | bool *b = (bool *) data; | |
849 | *b = true; | |
850 | return true; | |
851 | } | |
852 | ||
24430d08 | 853 | /* Find the nearest valid aa status for parameter specified by INDEX that |
854 | dominates BB. */ | |
855 | ||
9ea91b78 | 856 | static struct ipa_param_aa_status * |
857 | find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb, | |
24430d08 | 858 | int index) |
859 | { | |
860 | while (true) | |
861 | { | |
862 | bb = get_immediate_dominator (CDI_DOMINATORS, bb); | |
863 | if (!bb) | |
864 | return NULL; | |
865 | struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb); | |
866 | if (!bi->param_aa_statuses.is_empty () | |
867 | && bi->param_aa_statuses[index].valid) | |
868 | return &bi->param_aa_statuses[index]; | |
869 | } | |
870 | } | |
871 | ||
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  /* Lazily allocate the per-BB status vector, one slot per parameter.  */
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      /* A freshly cleared slot must not carry any modification flags yet.  */
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      /* Inherit the status from the nearest dominating BB that has one;
	 otherwise start from a clean, valid state.  */
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
900 | ||
a04e8d62 | 901 | /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve |
0d491188 | 902 | a value known not to be modified in this function before reaching the |
24430d08 | 903 | statement STMT. FBI holds information about the function we have so far |
904 | gathered but do not survive the summary building stage. */ | |
ad2ffc0d | 905 | |
906 | static bool | |
9ea91b78 | 907 | parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index, |
42acab1c | 908 | gimple *stmt, tree parm_load) |
ad2ffc0d | 909 | { |
9ea91b78 | 910 | struct ipa_param_aa_status *paa; |
ad2ffc0d | 911 | bool modified = false; |
912 | ao_ref refd; | |
913 | ||
ab4891c2 | 914 | tree base = get_base_address (parm_load); |
915 | gcc_assert (TREE_CODE (base) == PARM_DECL); | |
916 | if (TREE_READONLY (base)) | |
917 | return true; | |
918 | ||
915df3d8 | 919 | gcc_checking_assert (fbi); |
920 | paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index); | |
921 | if (paa->parm_modified) | |
922 | return false; | |
ad2ffc0d | 923 | |
924 | gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE); | |
0d491188 | 925 | ao_ref_init (&refd, parm_load); |
24430d08 | 926 | int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, |
915df3d8 | 927 | &modified, NULL, NULL, |
928 | fbi->aa_walk_budget + 1); | |
929 | if (walked < 0) | |
930 | { | |
931 | modified = true; | |
932 | if (fbi) | |
933 | fbi->aa_walk_budget = 0; | |
934 | } | |
935 | else if (fbi) | |
936 | fbi->aa_walk_budget -= walked; | |
24430d08 | 937 | if (paa && modified) |
938 | paa->parm_modified = true; | |
0d491188 | 939 | return !modified; |
ad2ffc0d | 940 | } |
941 | ||
3657b81b | 942 | /* If STMT is an assignment that loads a value from an parameter declaration, |
943 | return the index of the parameter in ipa_node_params which has not been | |
944 | modified. Otherwise return -1. */ | |
945 | ||
946 | static int | |
947 | load_from_unmodified_param (struct ipa_func_body_info *fbi, | |
7af25a10 | 948 | vec<ipa_param_descriptor, va_gc> *descriptors, |
3657b81b | 949 | gimple *stmt) |
950 | { | |
f66b52f8 | 951 | int index; |
952 | tree op1; | |
953 | ||
3657b81b | 954 | if (!gimple_assign_single_p (stmt)) |
955 | return -1; | |
956 | ||
f66b52f8 | 957 | op1 = gimple_assign_rhs1 (stmt); |
958 | if (TREE_CODE (op1) != PARM_DECL) | |
3657b81b | 959 | return -1; |
960 | ||
f66b52f8 | 961 | index = ipa_get_param_decl_index_1 (descriptors, op1); |
962 | if (index < 0 | |
963 | || !parm_preserved_before_stmt_p (fbi, index, stmt, op1)) | |
3657b81b | 964 | return -1; |
965 | ||
f66b52f8 | 966 | return index; |
3657b81b | 967 | } |
968 | ||
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (fbi);
  /* Consult (or create) the cached per-BB status; once a modification has
     been discovered for this parameter it is remembered there.  */
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->ref_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      /* The walk was aborted because the budget was exhausted; be
	 conservative and stop doing alias walks in this function.  */
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  /* Cache the negative result for subsequent queries in dominated BBs.  */
  if (modified)
    paa->ref_modified = true;
  return !modified;
}
1002 | ||
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  /* NULL_TREE size means the whole object PARM points to is considered.  */
  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      /* Budget exhausted: assume modification and disable further walks.  */
      fbi->aa_walk_budget = 0;
      modified = true;
    }
  else
    fbi->aa_walk_budget -= walked;
  /* Cache the negative result for subsequent queries.  */
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
1042 | ||
665ff45b | 1043 | /* Return true if we can prove that OP is a memory reference loading |
1044 | data from an aggregate passed as a parameter. | |
1045 | ||
1046 | The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it return | |
1047 | false if it cannot prove that the value has not been modified before the | |
1048 | load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even | |
1049 | if it cannot prove the value has not been modified, in that case it will | |
1050 | store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there. | |
1051 | ||
0d491188 | 1052 | INFO and PARMS_AINFO describe parameters of the current function (but the |
1053 | latter can be NULL), STMT is the load statement. If function returns true, | |
1054 | *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset | |
1055 | within the aggregate and whether it is a load from a value passed by | |
1056 | reference respectively. */ | |
1057 | ||
1a673ff0 | 1058 | bool |
9ea91b78 | 1059 | ipa_load_from_parm_agg (struct ipa_func_body_info *fbi, |
7af25a10 | 1060 | vec<ipa_param_descriptor, va_gc> *descriptors, |
42acab1c | 1061 | gimple *stmt, tree op, int *index_p, |
1a673ff0 | 1062 | HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p, |
665ff45b | 1063 | bool *by_ref_p, bool *guaranteed_unmodified) |
0d491188 | 1064 | { |
1065 | int index; | |
f3c2a387 | 1066 | HOST_WIDE_INT size; |
292237f3 | 1067 | bool reverse; |
f3c2a387 | 1068 | tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse); |
0d491188 | 1069 | |
f3c2a387 | 1070 | if (!base) |
0d491188 | 1071 | return false; |
1072 | ||
1073 | if (DECL_P (base)) | |
1074 | { | |
7a4930e7 | 1075 | int index = ipa_get_param_decl_index_1 (descriptors, base); |
0d491188 | 1076 | if (index >= 0 |
24430d08 | 1077 | && parm_preserved_before_stmt_p (fbi, index, stmt, op)) |
0d491188 | 1078 | { |
1079 | *index_p = index; | |
1080 | *by_ref_p = false; | |
2a687ef9 | 1081 | if (size_p) |
1082 | *size_p = size; | |
665ff45b | 1083 | if (guaranteed_unmodified) |
1084 | *guaranteed_unmodified = true; | |
0d491188 | 1085 | return true; |
1086 | } | |
1087 | return false; | |
1088 | } | |
1089 | ||
1090 | if (TREE_CODE (base) != MEM_REF | |
1091 | || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME | |
1092 | || !integer_zerop (TREE_OPERAND (base, 1))) | |
1093 | return false; | |
1094 | ||
1095 | if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))) | |
1096 | { | |
1097 | tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0)); | |
7a4930e7 | 1098 | index = ipa_get_param_decl_index_1 (descriptors, parm); |
0d491188 | 1099 | } |
1100 | else | |
1101 | { | |
1102 | /* This branch catches situations where a pointer parameter is not a | |
1103 | gimple register, for example: | |
1104 | ||
1105 | void hip7(S*) (struct S * p) | |
1106 | { | |
1107 | void (*<T2e4>) (struct S *) D.1867; | |
1108 | struct S * p.1; | |
1109 | ||
1110 | <bb 2>: | |
1111 | p.1_1 = p; | |
1112 | D.1867_2 = p.1_1->f; | |
1113 | D.1867_2 (); | |
1114 | gdp = &p; | |
1115 | */ | |
1116 | ||
42acab1c | 1117 | gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0)); |
24430d08 | 1118 | index = load_from_unmodified_param (fbi, descriptors, def); |
0d491188 | 1119 | } |
1120 | ||
665ff45b | 1121 | if (index >= 0) |
0d491188 | 1122 | { |
665ff45b | 1123 | bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op); |
1124 | if (!data_preserved && !guaranteed_unmodified) | |
1125 | return false; | |
1126 | ||
0d491188 | 1127 | *index_p = index; |
1128 | *by_ref_p = true; | |
2a687ef9 | 1129 | if (size_p) |
1130 | *size_p = size; | |
665ff45b | 1131 | if (guaranteed_unmodified) |
1132 | *guaranteed_unmodified = data_preserved; | |
0d491188 | 1133 | return true; |
1134 | } | |
1135 | return false; | |
1136 | } | |
1137 | ||
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function. E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  /* First figure out which parameter, if any, the RHS value comes from.
     TC_SSA is the SSA name whose aggregate contents may pass through.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      /* Cases 1) and 2): a (possibly arithmetic) pass-through.  */
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    /* The second operand must be an IPA invariant and, except for
	       comparisons (whose result type legitimately differs), the
	       operation must not change the type.  */
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (is_gimple_assign (stmt)
	      && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
	      && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	  /* FALLTHRU -- the fall through into the empty default is
	     intentional; nothing more to do.  */
	default:;
	}
      return;
    }

  /* Case 3): taking the address of a component inside an object passed by
     reference -- an ancestor jump function.  */
  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
1280 | ||
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  If case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  /* Strip the ADDR_EXPR; OBJ keeps the full referenced object.  */
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  /* The base must be a MEM_REF with a compile-time constant offset.  */
  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  /* ...and that MEM_REF must dereference an unmodified parameter (a default
     definition SSA name of a PARM_DECL).  */
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  /* Fold the MEM_REF's own byte offset into the bit offset.  */
  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
1324 | ||
5215027d | 1325 | |
6378ffb3 | 1326 | /* Given that an actual argument is an SSA_NAME that is a result of a phi |
1327 | statement PHI, try to find out whether NAME is in fact a | |
1328 | multiple-inheritance typecast from a descendant into an ancestor of a formal | |
1329 | parameter and thus can be described by an ancestor jump function and if so, | |
1330 | write the appropriate function into JFUNC. | |
1331 | ||
1332 | Essentially we want to match the following pattern: | |
1333 | ||
1334 | if (obj_2(D) != 0B) | |
1335 | goto <bb 3>; | |
1336 | else | |
1337 | goto <bb 4>; | |
1338 | ||
1339 | <bb 3>: | |
1340 | iftmp.1_3 = &obj_2(D)->D.1762; | |
1341 | ||
1342 | <bb 4>: | |
1343 | # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)> | |
1344 | D.1879_6 = middleman_1 (iftmp.1_1, i_5(D)); | |
1345 | return D.1879_6; */ | |
1346 | ||
1347 | static void | |
9ea91b78 | 1348 | compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi, |
24430d08 | 1349 | struct ipa_node_params *info, |
6378ffb3 | 1350 | struct ipa_jump_func *jfunc, |
1a91d914 | 1351 | gcall *call, gphi *phi) |
6378ffb3 | 1352 | { |
09a2b4db | 1353 | HOST_WIDE_INT offset; |
42acab1c | 1354 | gimple *assign, *cond; |
6378ffb3 | 1355 | basic_block phi_bb, assign_bb, cond_bb; |
7af23aa4 | 1356 | tree tmp, parm, expr, obj; |
6378ffb3 | 1357 | int index, i; |
1358 | ||
df693a5d | 1359 | if (gimple_phi_num_args (phi) != 2) |
6378ffb3 | 1360 | return; |
1361 | ||
df693a5d | 1362 | if (integer_zerop (PHI_ARG_DEF (phi, 1))) |
1363 | tmp = PHI_ARG_DEF (phi, 0); | |
1364 | else if (integer_zerop (PHI_ARG_DEF (phi, 0))) | |
1365 | tmp = PHI_ARG_DEF (phi, 1); | |
1366 | else | |
1367 | return; | |
6378ffb3 | 1368 | if (TREE_CODE (tmp) != SSA_NAME |
1369 | || SSA_NAME_IS_DEFAULT_DEF (tmp) | |
1370 | || !POINTER_TYPE_P (TREE_TYPE (tmp)) | |
1371 | || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE) | |
1372 | return; | |
1373 | ||
1374 | assign = SSA_NAME_DEF_STMT (tmp); | |
1375 | assign_bb = gimple_bb (assign); | |
09a2b4db | 1376 | if (!single_pred_p (assign_bb)) |
6378ffb3 | 1377 | return; |
09a2b4db | 1378 | expr = get_ancestor_addr_info (assign, &obj, &offset); |
1379 | if (!expr) | |
6378ffb3 | 1380 | return; |
1381 | parm = TREE_OPERAND (expr, 0); | |
6378ffb3 | 1382 | index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm)); |
0e384caf | 1383 | if (index < 0) |
1384 | return; | |
6378ffb3 | 1385 | |
1386 | cond_bb = single_pred (assign_bb); | |
1387 | cond = last_stmt (cond_bb); | |
3d9dc642 | 1388 | if (!cond |
1389 | || gimple_code (cond) != GIMPLE_COND | |
6378ffb3 | 1390 | || gimple_cond_code (cond) != NE_EXPR |
1391 | || gimple_cond_lhs (cond) != parm | |
1392 | || !integer_zerop (gimple_cond_rhs (cond))) | |
1393 | return; | |
1394 | ||
6378ffb3 | 1395 | phi_bb = gimple_bb (phi); |
1396 | for (i = 0; i < 2; i++) | |
1397 | { | |
1398 | basic_block pred = EDGE_PRED (phi_bb, i)->src; | |
1399 | if (pred != assign_bb && pred != cond_bb) | |
1400 | return; | |
1401 | } | |
1402 | ||
693010ae | 1403 | ipa_set_ancestor_jf (jfunc, offset, index, |
1404 | parm_ref_data_pass_through_p (fbi, index, call, parm)); | |
6378ffb3 | 1405 | } |
1406 | ||
1917e945 | 1407 | /* Inspect the given TYPE and return true iff it has the same structure (the |
1408 | same number of fields of the same types) as a C++ member pointer. If | |
1409 | METHOD_PTR and DELTA are non-NULL, store the trees representing the | |
1410 | corresponding fields there. */ | |
1411 | ||
f8daee9b | 1412 | static bool |
1413 | type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta) | |
1414 | { | |
1415 | tree fld; | |
1416 | ||
1417 | if (TREE_CODE (type) != RECORD_TYPE) | |
1418 | return false; | |
1419 | ||
1420 | fld = TYPE_FIELDS (type); | |
1421 | if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld)) | |
0d491188 | 1422 | || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE |
e913b5cd | 1423 | || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld))) |
f8daee9b | 1424 | return false; |
1425 | ||
1426 | if (method_ptr) | |
1427 | *method_ptr = fld; | |
1428 | ||
1767a056 | 1429 | fld = DECL_CHAIN (fld); |
0d491188 | 1430 | if (!fld || INTEGRAL_TYPE_P (fld) |
e913b5cd | 1431 | || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld))) |
f8daee9b | 1432 | return false; |
1433 | if (delta) | |
1434 | *delta = fld; | |
1435 | ||
1767a056 | 1436 | if (DECL_CHAIN (fld)) |
f8daee9b | 1437 | return false; |
1438 | ||
1439 | return true; | |
1440 | } | |
1441 | ||
0a10fd82 | 1442 | /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement, |
0d491188 | 1443 | return the rhs of its defining statement. Otherwise return RHS as it |
1444 | is. */ | |
b39bfa08 | 1445 | |
1446 | static inline tree | |
1447 | get_ssa_def_if_simple_copy (tree rhs) | |
1448 | { | |
1449 | while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs)) | |
1450 | { | |
42acab1c | 1451 | gimple *def_stmt = SSA_NAME_DEF_STMT (rhs); |
b39bfa08 | 1452 | |
1453 | if (gimple_assign_single_p (def_stmt)) | |
1454 | rhs = gimple_assign_rhs1 (def_stmt); | |
4ecddf77 | 1455 | else |
1456 | break; | |
b39bfa08 | 1457 | } |
1458 | return rhs; | |
1459 | } | |
1460 | ||
/* Simple linked list, describing known contents of an aggregate before
   call.  The list is kept sorted ascendingly by offset (see
   add_to_agg_contents_list).  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  Offsets are in
     bits (they are asserted to be BITS_PER_UNIT-aligned when jump functions
     are built from the list).  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
f8daee9b | 1473 | |
3f075e56 | 1474 | /* Add a known content item into a linked list of ipa_known_agg_contents_list |
1475 | structure, in which all elements are sorted ascendingly by offset. */ | |
63380ec1 | 1476 | |
3f075e56 | 1477 | static inline void |
1478 | add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist, | |
1479 | struct ipa_known_agg_contents_list *item) | |
63380ec1 | 1480 | { |
3f075e56 | 1481 | struct ipa_known_agg_contents_list *list = *plist; |
1482 | ||
1483 | for (; list; list = list->next) | |
63380ec1 | 1484 | { |
3f075e56 | 1485 | if (list->offset >= item->offset) |
1486 | break; | |
1487 | ||
1488 | plist = &list->next; | |
63380ec1 | 1489 | } |
1490 | ||
3f075e56 | 1491 | item->next = list; |
1492 | *plist = item; | |
1493 | } | |
1494 | ||
1495 | /* Check whether a given known content is clobbered by certain element in | |
1496 | a linked list of ipa_known_agg_contents_list. */ | |
1497 | ||
1498 | static inline bool | |
1499 | clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list, | |
1500 | struct ipa_known_agg_contents_list *item) | |
1501 | { | |
1502 | for (; list; list = list->next) | |
63380ec1 | 1503 | { |
3f075e56 | 1504 | if (list->offset >= item->offset) |
1505 | return list->offset < item->offset + item->size; | |
1506 | ||
1507 | if (list->offset + list->size > item->offset) | |
1508 | return true; | |
63380ec1 | 1509 | } |
3f075e56 | 1510 | |
1511 | return false; | |
63380ec1 | 1512 | } |
1513 | ||
1514 | /* Build aggregate jump function from LIST, assuming there are exactly | |
3f075e56 | 1515 | CONST_COUNT constant entries there and that offset of the passed argument |
63380ec1 | 1516 | is ARG_OFFSET and store it into JFUNC. */ |
1517 | ||
1518 | static void | |
1519 | build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list, | |
1520 | int const_count, HOST_WIDE_INT arg_offset, | |
1521 | struct ipa_jump_func *jfunc) | |
1522 | { | |
1523 | vec_alloc (jfunc->agg.items, const_count); | |
1524 | while (list) | |
1525 | { | |
1526 | if (list->constant) | |
1527 | { | |
1528 | struct ipa_agg_jf_item item; | |
1529 | item.offset = list->offset - arg_offset; | |
1530 | gcc_assert ((item.offset % BITS_PER_UNIT) == 0); | |
1531 | item.value = unshare_expr_without_location (list->constant); | |
1532 | jfunc->agg.items->quick_push (item); | |
1533 | } | |
1534 | list = list->next; | |
1535 | } | |
1536 | } | |
1537 | ||
/* If STMT is a memory store to the object whose address is BASE, extract
   information (offset, size, and value) into CONTENT, and return true,
   otherwise we conservatively assume the whole object is modified with
   unknown content, and return false.  CHECK_REF means that access to object
   is expected to be in form of MEM_REF expression.  */

static bool
extract_mem_content (gimple *stmt, tree base, bool check_ref,
		     struct ipa_known_agg_contents_list *content)
{
  HOST_WIDE_INT lhs_offset, lhs_size;
  tree lhs, rhs, lhs_base;
  bool reverse;

  if (!gimple_assign_single_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  /* Only scalar (register-type) stores without bit-field accesses are
     tracked.  */
  if (!is_gimple_reg_type (TREE_TYPE (rhs))
      || TREE_CODE (lhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (lhs))
    return false;

  lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset,
					  &lhs_size, &reverse);
  if (!lhs_base)
    return false;

  /* The store must hit exactly the object described by BASE: either *BASE
     with zero offset (CHECK_REF) or BASE itself.  */
  if (check_ref)
    {
      if (TREE_CODE (lhs_base) != MEM_REF
	  || TREE_OPERAND (lhs_base, 0) != base
	  || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	return false;
    }
  else if (lhs_base != base)
    return false;

  /* Look through simple SSA copies to find the ultimate stored value.  */
  rhs = get_ssa_def_if_simple_copy (rhs);

  content->size = lhs_size;
  content->offset = lhs_offset;
  /* A NULL constant marks the span as written with unknown content.  */
  content->constant = is_gimple_ip_invariant (rhs) ? rhs : NULL_TREE;
  content->next = NULL;

  return true;
}
1587 | ||
0d491188 | 1588 | /* Traverse statements from CALL backwards, scanning whether an aggregate given |
1589 | in ARG is filled in with constant values. ARG can either be an aggregate | |
63380ec1 | 1590 | expression or a pointer to an aggregate. ARG_TYPE is the type of the |
1591 | aggregate. JFUNC is the jump function into which the constants are | |
3f075e56 | 1592 | subsequently stored. AA_WALK_BUDGET_P points to limit on number of |
1593 | statements we allow get_continuation_for_phi to examine. */ | |
1917e945 | 1594 | |
static void
determine_known_aggregate_parts (gcall *call, tree arg,
				 tree arg_type,
				 struct ipa_jump_func *jfunc,
				 unsigned *aa_walk_budget_p)
{
  struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
  bitmap visited = NULL;
  int item_count = 0, const_count = 0;
  int ipa_max_agg_items = PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS);
  HOST_WIDE_INT arg_offset, arg_size;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* Zero means aggregate jump function tracking is disabled.  */
  if (ipa_max_agg_items == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  /* The pointed-to type must have a known constant size, otherwise
	     we cannot bound the region the callee may read.  */
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
	      || !POINTER_TYPE_P (TREE_TYPE (arg)))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
						  &arg_size, &reverse);
	  if (!arg_base)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
					      &arg_size, &reverse);
      if (!arg_base)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage traverses virtual SSA web backwards starting from the call
     statement, only looks at individual dominating virtual operand (its
     definition dominates the call), as long as it is confident that content
     of the aggregate is affected by definition of the virtual operand, it
     builds a sorted linked list of ipa_agg_jf_list describing that.  */

  for (tree dom_vuse = gimple_vuse (call); dom_vuse;)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);

      if (gimple_code (stmt) == GIMPLE_PHI)
	{
	  /* Skip over PHIs whose arguments all lead to the same dominating
	     definition; give up when the AA walk budget is exhausted.  */
	  dom_vuse = get_continuation_for_phi (stmt, &r, *aa_walk_budget_p,
					       &visited, false, NULL, NULL);
	  continue;
	}

      if (stmt_may_clobber_ref_p_1 (stmt, &r))
	{
	  struct ipa_known_agg_contents_list *content
	    = XALLOCA (struct ipa_known_agg_contents_list);

	  /* A clobbering statement we cannot describe terminates the walk;
	     anything before it might have been overwritten by it.  */
	  if (!extract_mem_content (stmt, arg_base, check_ref, content))
	    break;

	  /* Now we get a dominating virtual operand, and need to check
	     whether its value is clobbered by any other dominating one.  */
	  if (content->constant
	      && !clobber_by_agg_contents_list_p (all_list, content))
	    {
	      struct ipa_known_agg_contents_list *copy
		= XALLOCA (struct ipa_known_agg_contents_list);

	      /* Add to the list consisting of only dominating virtual
		 operands, whose definitions can finally reach the call.  */
	      add_to_agg_contents_list (&list, (*copy = *content, copy));

	      if (++const_count == ipa_max_agg_items)
		break;
	    }

	  /* Add to the list consisting of all dominating virtual operands.  */
	  add_to_agg_contents_list (&all_list, content);

	  if (++item_count == 2 * ipa_max_agg_items)
	    break;
	}
      dom_vuse = gimple_vuse (stmt);
    }

  if (visited)
    BITMAP_FREE (visited);

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
1732 | ||
3f075e56 | 1733 | |
166f8178 | 1734 | /* Return the Ith param type of callee associated with call graph |
1735 | edge E. */ | |
1736 | ||
1737 | tree | |
185c1f3a | 1738 | ipa_get_callee_param_type (struct cgraph_edge *e, int i) |
1739 | { | |
1740 | int n; | |
1741 | tree type = (e->callee | |
02774f2d | 1742 | ? TREE_TYPE (e->callee->decl) |
185c1f3a | 1743 | : gimple_call_fntype (e->call_stmt)); |
1744 | tree t = TYPE_ARG_TYPES (type); | |
1745 | ||
1746 | for (n = 0; n < i; n++) | |
1747 | { | |
1748 | if (!t) | |
1749 | break; | |
1750 | t = TREE_CHAIN (t); | |
1751 | } | |
1752 | if (t) | |
1753 | return TREE_VALUE (t); | |
1754 | if (!e->callee) | |
1755 | return NULL; | |
02774f2d | 1756 | t = DECL_ARGUMENTS (e->callee->decl); |
185c1f3a | 1757 | for (n = 0; n < i; n++) |
1758 | { | |
1759 | if (!t) | |
1760 | return NULL; | |
1761 | t = TREE_CHAIN (t); | |
1762 | } | |
1763 | if (t) | |
1764 | return TREE_TYPE (t); | |
1765 | return NULL; | |
1766 | } | |
1767 | ||
97cb825b | 1768 | /* Return ipa_bits with VALUE and MASK values, which can be either a newly |
1769 | allocated structure or a previously existing one shared with other jump | |
1770 | functions and/or transformation summaries. */ | |
1771 | ||
1772 | ipa_bits * | |
1773 | ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask) | |
1774 | { | |
1775 | ipa_bits tmp; | |
1776 | tmp.value = value; | |
1777 | tmp.mask = mask; | |
1778 | ||
1779 | ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT); | |
1780 | if (*slot) | |
1781 | return *slot; | |
1782 | ||
1783 | ipa_bits *res = ggc_alloc<ipa_bits> (); | |
1784 | res->value = value; | |
1785 | res->mask = mask; | |
1786 | *slot = res; | |
1787 | ||
1788 | return res; | |
1789 | } | |
1790 | ||
1791 | /* Assign to JF a pointer to ipa_bits structure with VALUE and MASK. Use hash | |
1792 | table in order to avoid creating multiple same ipa_bits structures. */ | |
1793 | ||
/* Assign to JF a pointer to an ipa_bits structure with VALUE and MASK.  Uses
   the hash table in order to avoid creating multiple same ipa_bits
   structures.  */

static void
ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
		    const widest_int &mask)
{
  jf->bits = ipa_get_ipa_bits_for_value (value, mask);
}
1800 | ||
1801 | /* Return a pointer to a value_range just like *TMP, but either find it in | |
1802 | ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */ | |
1803 | ||
a1054504 | 1804 | static value_range_base * |
1805 | ipa_get_value_range (value_range_base *tmp) | |
97cb825b | 1806 | { |
a1054504 | 1807 | value_range_base **slot = ipa_vr_hash_table->find_slot (tmp, INSERT); |
97cb825b | 1808 | if (*slot) |
1809 | return *slot; | |
1810 | ||
a1054504 | 1811 | value_range_base *vr = ggc_alloc<value_range_base> (); |
97cb825b | 1812 | *vr = *tmp; |
1813 | *slot = vr; | |
1814 | ||
1815 | return vr; | |
1816 | } | |
1817 | ||
1818 | /* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty | |
1819 | equiv set. Use hash table in order to avoid creating multiple same copies of | |
1820 | value_ranges. */ | |
1821 | ||
a1054504 | 1822 | static value_range_base * |
be44111e | 1823 | ipa_get_value_range (enum value_range_kind type, tree min, tree max) |
97cb825b | 1824 | { |
a1054504 | 1825 | value_range_base tmp (type, min, max); |
97cb825b | 1826 | return ipa_get_value_range (&tmp); |
1827 | } | |
1828 | ||
1829 | /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and | |
1830 | a NULL equiv bitmap. Use hash table in order to avoid creating multiple | |
1831 | same value_range structures. */ | |
1832 | ||
/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX
   and a NULL equiv bitmap.  Uses the hash table in order to avoid creating
   multiple same value_range structures.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
		  tree min, tree max)
{
  jf->m_vr = ipa_get_value_range (type, min, max);
}
1839 | ||
/* Assign to JF a pointer to a value_range just like TMP but either fetch a
   copy from ipa_vr_hash_table or allocate a new one in GC memory.  */
1842 | ||
/* Assign to JF a pointer to a value_range equal to *TMP, sharing an existing
   hash-consed copy when available.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, value_range_base *tmp)
{
  jf->m_vr = ipa_get_value_range (tmp);
}
1848 | ||
f8daee9b | 1849 | /* Compute jump function for all arguments of callsite CS and insert the |
1850 | information in the jump_functions array in the ipa_edge_args corresponding | |
1851 | to this callsite. */ | |
1917e945 | 1852 | |
static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  /* Nothing to do for argument-less calls or if jump functions have already
     been computed for this edge.  */
  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);

      /* For pointer arguments, try to determine a polymorphic call
	 context (dynamic type information for devirtualization).  */
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
				    &fbi->aa_walk_budget);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      /* Record a value range for the argument: non-NULL-ness for pointers,
	 integer ranges for integral arguments.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  bool addr_nonzero = false;
	  bool strict_overflow = false;

	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_ptr_nonnull (arg))
	    addr_nonzero = true;
	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
	    addr_nonzero = true;

	  if (addr_nonzero)
	    {
	      /* Known non-NULL pointer is expressed as ~[0, 0].  */
	      tree z = build_int_cst (TREE_TYPE (arg), 0);
	      ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}
      else
	{
	  wide_int min, max;
	  value_range_kind type;
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && (type = get_range_info (arg, &min, &max))
	      && (type == VR_RANGE || type == VR_ANTI_RANGE))
	    {
	      value_range_base resvr;
	      value_range_base tmpvr (type,
				      wide_int_to_tree (TREE_TYPE (arg), min),
				      wide_int_to_tree (TREE_TYPE (arg), max));
	      /* Convert the range from the argument's type to the callee's
		 parameter type.  */
	      extract_range_from_unary_expr (&resvr, NOP_EXPR, param_type,
					     &tmpvr, TREE_TYPE (arg));
	      if (!resvr.undefined_p () && !resvr.varying_p ())
		ipa_set_jfunc_vr (jfunc, &resvr);
	      else
		gcc_assert (!jfunc->m_vr);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}

      /* Record known bits: nonzero-bit information for integral SSA names and
	 exact bits for integer constants; alignment info for pointers.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  if (TREE_CODE (arg) == SSA_NAME)
	    ipa_set_jfunc_bits (jfunc, 0,
				widest_int::from (get_nonzero_bits (arg),
						  TYPE_SIGN (TREE_TYPE (arg))));
	  else
	    ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
	}
      else if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align;

	  get_pointer_alignment_1 (arg, &align, &bitpos);
	  /* Bits known to be zero because of alignment are cleared in the
	     mask; the known misalignment goes in the value.  */
	  widest_int mask = wi::bit_and_not
	    (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
	     align / BITS_PER_UNIT - 1);
	  widest_int value = bitpos / BITS_PER_UNIT;
	  ipa_set_jfunc_bits (jfunc, value, mask);
	}
      else
	gcc_assert (!jfunc->bits);

      /* Classify the jump function itself: constant, pass-through of a
	 formal parameter, or a more complex derived value.  */
      if (is_gimple_ip_invariant (arg)
	  || (VAR_P (arg)
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >=0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is pointer, we cannot use its type to determine the type of aggregate
	 passed (because type conversions are ignored in gimple).  Usually we can
	 safely get type from function declaration, but in case of K&R prototypes or
	 variadic functions we can try our luck with type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we may better
	 work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_known_aggregate_parts (call, arg, param_type, jfunc,
					 &fbi->aa_walk_budget);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
2027 | ||
7115ea05 | 2028 | /* Compute jump functions for all edges - both direct and indirect - outgoing |
24430d08 | 2029 | from BB. */ |
7115ea05 | 2030 | |
8b68ef1b | 2031 | static void |
9ea91b78 | 2032 | ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb) |
7115ea05 | 2033 | { |
24430d08 | 2034 | struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb); |
2035 | int i; | |
7115ea05 | 2036 | struct cgraph_edge *cs; |
2037 | ||
24430d08 | 2038 | FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs) |
7115ea05 | 2039 | { |
24430d08 | 2040 | struct cgraph_node *callee = cs->callee; |
7115ea05 | 2041 | |
24430d08 | 2042 | if (callee) |
2043 | { | |
415d1b9a | 2044 | callee->ultimate_alias_target (); |
24430d08 | 2045 | /* We do not need to bother analyzing calls to unknown functions |
2046 | unless they may become known during lto/whopr. */ | |
2047 | if (!callee->definition && !flag_lto) | |
2048 | continue; | |
2049 | } | |
2050 | ipa_compute_jump_functions_for_edge (fbi, cs); | |
2051 | } | |
7115ea05 | 2052 | } |
2053 | ||
0d491188 | 2054 | /* If STMT looks like a statement loading a value from a member pointer formal |
2055 | parameter, return that parameter and store the offset of the field to | |
2056 | *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still | |
2057 | might be clobbered). If USE_DELTA, then we look for a use of the delta | |
2058 | field rather than the pfn. */ | |
1917e945 | 2059 | |
static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  /* Only plain loads can match the member-pointer access pattern.  */
  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  /* The load is either COMPONENT_REF of a MEM_REF (field access) or a plain
     MEM_REF with a byte offset selecting the field.  */
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  /* The base must be a PARM_DECL whose type looks like a member pointer
     record (a __pfn field and a __delta field).  */
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  /* Select which of the two member-pointer fields we expect to be read.  */
  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      /* COMPONENT_REF form: the MEM_REF offset must be zero and the field
	 must be the expected one.  */
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    /* MEM_REF form: the byte offset must equal the field's position.  */
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
2105 | ||
2106 | /* Returns true iff T is an SSA_NAME defined by a statement. */ | |
1917e945 | 2107 | |
f8daee9b | 2108 | static bool |
2109 | ipa_is_ssa_with_stmt_def (tree t) | |
2110 | { | |
2111 | if (TREE_CODE (t) == SSA_NAME | |
2112 | && !SSA_NAME_IS_DEFAULT_DEF (t)) | |
2113 | return true; | |
2114 | else | |
2115 | return false; | |
2116 | } | |
2117 | ||
09a2b4db | 2118 | /* Find the indirect call graph edge corresponding to STMT and mark it as a |
2119 | call to a parameter number PARAM_INDEX. NODE is the caller. Return the | |
2120 | indirect call graph edge. */ | |
1917e945 | 2121 | |
09a2b4db | 2122 | static struct cgraph_edge * |
1a91d914 | 2123 | ipa_note_param_call (struct cgraph_node *node, int param_index, |
2124 | gcall *stmt) | |
f8daee9b | 2125 | { |
799c8711 | 2126 | struct cgraph_edge *cs; |
f8daee9b | 2127 | |
415d1b9a | 2128 | cs = node->get_edge (stmt); |
6378ffb3 | 2129 | cs->indirect_info->param_index = param_index; |
0d491188 | 2130 | cs->indirect_info->agg_contents = 0; |
2f6c1cf4 | 2131 | cs->indirect_info->member_ptr = 0; |
665ff45b | 2132 | cs->indirect_info->guaranteed_unmodified = 0; |
09a2b4db | 2133 | return cs; |
f8daee9b | 2134 | } |
2135 | ||
799c8711 | 2136 | /* Analyze the CALL and examine uses of formal parameters of the caller NODE |
05d4e04f | 2137 | (described by INFO). PARMS_AINFO is a pointer to a vector containing |
8b68ef1b | 2138 | intermediate information about each formal parameter. Currently it checks |
2139 | whether the call calls a pointer that is a formal parameter and if so, the | |
2140 | parameter is marked with the called flag and an indirect call graph edge | |
2141 | describing the call is created. This is very simple for ordinary pointers | |
2142 | represented in SSA but not-so-nice when it comes to member pointers. The | |
2143 | ugly part of this function does nothing more than trying to match the | |
2144 | pattern of such a call. An example of such a pattern is the gimple dump | |
2145 | below, the call is on the last line: | |
f8daee9b | 2146 | |
74f602fc | 2147 | <bb 2>: |
2148 | f$__delta_5 = f.__delta; | |
2149 | f$__pfn_24 = f.__pfn; | |
2150 | ||
2151 | or | |
f8daee9b | 2152 | <bb 2>: |
c52cb439 | 2153 | f$__delta_5 = MEM[(struct *)&f]; |
2154 | f$__pfn_24 = MEM[(struct *)&f + 4B]; | |
af3e7bf6 | 2155 | |
74f602fc | 2156 | and a few lines below: |
af3e7bf6 | 2157 | |
2158 | <bb 5> | |
f8daee9b | 2159 | D.2496_3 = (int) f$__pfn_24; |
2160 | D.2497_4 = D.2496_3 & 1; | |
2161 | if (D.2497_4 != 0) | |
2162 | goto <bb 3>; | |
2163 | else | |
2164 | goto <bb 4>; | |
2165 | ||
af3e7bf6 | 2166 | <bb 6>: |
f8daee9b | 2167 | D.2500_7 = (unsigned int) f$__delta_5; |
2168 | D.2501_8 = &S + D.2500_7; | |
2169 | D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8; | |
2170 | D.2503_10 = *D.2502_9; | |
2171 | D.2504_12 = f$__pfn_24 + -1; | |
2172 | D.2505_13 = (unsigned int) D.2504_12; | |
2173 | D.2506_14 = D.2503_10 + D.2505_13; | |
2174 | D.2507_15 = *D.2506_14; | |
2175 | iftmp.11_16 = (String:: *) D.2507_15; | |
2176 | ||
af3e7bf6 | 2177 | <bb 7>: |
f8daee9b | 2178 | # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)> |
2179 | D.2500_19 = (unsigned int) f$__delta_5; | |
2180 | D.2508_20 = &S + D.2500_19; | |
2181 | D.2493_21 = iftmp.11_1 (D.2508_20, 4); | |
2182 | ||
2183 | Such patterns are results of simple calls to a member pointer: | |
2184 | ||
2185 | int doprinting (int (MyString::* f)(int) const) | |
2186 | { | |
2187 | MyString S ("somestring"); | |
2188 | ||
2189 | return (S.*f)(4); | |
2190 | } | |
0d491188 | 2191 | |
2192 | Moreover, the function also looks for called pointers loaded from aggregates | |
2193 | passed by value or reference. */ | |
f8daee9b | 2194 | |
static void
ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
				tree target)
{
  struct ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  /* Simplest case: the called pointer is a formal parameter itself.  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call);
      return;
    }

  /* Next case: the called pointer is loaded from an aggregate that is a
     formal parameter (passed by value or reference).  */
  int index;
  gimple *def = SSA_NAME_DEF_STMT (target);
  bool guaranteed_unmodified;
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg (fbi, info->descriptors, def,
				 gimple_assign_rhs1 (def), &index, &offset,
				 NULL, &by_ref, &guaranteed_unmodified))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  The called value is a two-argument PHI merging the pfn field
     with the result of a vtable lookup (see the big comment above).  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple *d1 = SSA_NAME_DEF_STMT (n1);
  gimple *d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  /* Identify which PHI argument is the direct pfn load (in BB) and which is
     computed in the virtual-call branch (VIRT_BB).  */
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple *branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  /* The condition must be (pfn & 1), possibly through a conversion.  */
  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  /* Look through an optional conversion of the pfn/delta value.  */
  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* The value tested must be loaded from the same member-pointer parameter;
     which field is tested depends on where the target puts the virtualness
     bit.  */
  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
      cs->indirect_info->guaranteed_unmodified = 1;
    }

  return;
}
2330 | ||
6378ffb3 | 2331 | /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the |
2332 | object referenced in the expression is a formal parameter of the caller | |
24430d08 | 2333 | FBI->node (described by FBI->info), create a call note for the |
2334 | statement. */ | |
6378ffb3 | 2335 | |
static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  struct ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      /* The object is the formal parameter itself.  */
      struct ipa_jump_func jfunc;
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      /* If the dynamic type of the object may have changed between function
	 entry and this call, the parameter-based note would be wrong.  */
      if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
				  call, &jfunc))
	return;
    }
  else
    {
      /* The object is an ancestor (a field at a known offset) of a formal
	 parameter.  */
      struct ipa_jump_func jfunc;
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
			      call, &jfunc, anc_offset))
	return;
    }

  /* Record the polymorphic call details on the indirect edge.  */
  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
2388 | ||
2389 | /* Analyze a call statement CALL whether and how it utilizes formal parameters | |
05d4e04f | 2390 | of the caller (described by INFO). PARMS_AINFO is a pointer to a vector |
8b68ef1b | 2391 | containing intermediate information about each formal parameter. */ |
6378ffb3 | 2392 | |
2393 | static void | |
9ea91b78 | 2394 | ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call) |
6378ffb3 | 2395 | { |
2396 | tree target = gimple_call_fn (call); | |
c41ae25b | 2397 | |
2398 | if (!target | |
2399 | || (TREE_CODE (target) != SSA_NAME | |
2400 | && !virtual_method_call_p (target))) | |
2401 | return; | |
6378ffb3 | 2402 | |
d8b5abdb | 2403 | struct cgraph_edge *cs = fbi->node->get_edge (call); |
c41ae25b | 2404 | /* If we previously turned the call into a direct call, there is |
2405 | no need to analyze. */ | |
c41ae25b | 2406 | if (cs && !cs->indirect_unknown_callee) |
fb049fba | 2407 | return; |
d8b5abdb | 2408 | |
4c4946db | 2409 | if (cs->indirect_info->polymorphic && flag_devirtualize) |
d8b5abdb | 2410 | { |
d8b5abdb | 2411 | tree instance; |
2412 | tree target = gimple_call_fn (call); | |
379f6698 | 2413 | ipa_polymorphic_call_context context (current_function_decl, |
2414 | target, call, &instance); | |
d8b5abdb | 2415 | |
e33892d7 | 2416 | gcc_checking_assert (cs->indirect_info->otr_type |
2417 | == obj_type_ref_class (target)); | |
2418 | gcc_checking_assert (cs->indirect_info->otr_token | |
2419 | == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target))); | |
d8b5abdb | 2420 | |
1986ca43 | 2421 | cs->indirect_info->vptr_changed |
2422 | = !context.get_dynamic_type (instance, | |
2423 | OBJ_TYPE_REF_OBJECT (target), | |
915df3d8 | 2424 | obj_type_ref_class (target), call, |
2425 | &fbi->aa_walk_budget); | |
43aac8cb | 2426 | cs->indirect_info->context = context; |
d8b5abdb | 2427 | } |
2428 | ||
6378ffb3 | 2429 | if (TREE_CODE (target) == SSA_NAME) |
24430d08 | 2430 | ipa_analyze_indirect_call_uses (fbi, call, target); |
f5e35fed | 2431 | else if (virtual_method_call_p (target)) |
24430d08 | 2432 | ipa_analyze_virtual_call_uses (fbi, call, target); |
6378ffb3 | 2433 | } |
2434 | ||
2435 | ||
799c8711 | 2436 | /* Analyze the call statement STMT with respect to formal parameters (described |
24430d08 | 2437 | in INFO) of caller given by FBI->NODE. Currently it only checks whether |
2438 | formal parameters are called. */ | |
1917e945 | 2439 | |
f8daee9b | 2440 | static void |
42acab1c | 2441 | ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt) |
f8daee9b | 2442 | { |
75a70cf9 | 2443 | if (is_gimple_call (stmt)) |
1a91d914 | 2444 | ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt)); |
8b68ef1b | 2445 | } |
2446 | ||
2447 | /* Callback of walk_stmt_load_store_addr_ops for the visit_load. | |
2448 | If OP is a parameter declaration, mark it as used in the info structure | |
2449 | passed in DATA. */ | |
2450 | ||
2451 | static bool | |
42acab1c | 2452 | visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data) |
8b68ef1b | 2453 | { |
2454 | struct ipa_node_params *info = (struct ipa_node_params *) data; | |
2455 | ||
2456 | op = get_base_address (op); | |
2457 | if (op | |
2458 | && TREE_CODE (op) == PARM_DECL) | |
2459 | { | |
2460 | int index = ipa_get_param_decl_index (info, op); | |
2461 | gcc_assert (index >= 0); | |
821d0e0f | 2462 | ipa_set_param_used (info, index, true); |
8b68ef1b | 2463 | } |
2464 | ||
2465 | return false; | |
f8daee9b | 2466 | } |
2467 | ||
24430d08 | 2468 | /* Scan the statements in BB and inspect the uses of formal parameters. Store |
2469 | the findings in various structures of the associated ipa_node_params | |
2470 | structure, such as parameter flags, notes etc. FBI holds various data about | |
2471 | the function being analyzed. */ | |
1917e945 | 2472 | |
8b68ef1b | 2473 | static void |
9ea91b78 | 2474 | ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb) |
f8daee9b | 2475 | { |
75a70cf9 | 2476 | gimple_stmt_iterator gsi; |
24430d08 | 2477 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
2478 | { | |
42acab1c | 2479 | gimple *stmt = gsi_stmt (gsi); |
f8daee9b | 2480 | |
24430d08 | 2481 | if (is_gimple_debug (stmt)) |
2482 | continue; | |
f8daee9b | 2483 | |
24430d08 | 2484 | ipa_analyze_stmt_uses (fbi, stmt); |
2485 | walk_stmt_load_store_addr_ops (stmt, fbi->info, | |
2486 | visit_ref_for_mod_analysis, | |
2487 | visit_ref_for_mod_analysis, | |
2488 | visit_ref_for_mod_analysis); | |
6c0a4a25 | 2489 | } |
24430d08 | 2490 | for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
2491 | walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info, | |
2492 | visit_ref_for_mod_analysis, | |
2493 | visit_ref_for_mod_analysis, | |
2494 | visit_ref_for_mod_analysis); | |
2495 | } | |
2496 | ||
/* Calculate controlled uses of parameters of NODE.  A use is "controlled"
   when it appears only as a call argument; any other use (except debug
   statements) makes the count IPA_UNDESCRIBED_USE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      /* Count call uses; a single non-call, non-debug use makes
		 the whole parameter undescribed.  */
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    if (!is_gimple_debug (USE_STMT (use_p)))
		      {
			controlled_uses = IPA_UNDESCRIBED_USE;
			break;
		      }
		  }
		else
		  controlled_uses++;
	    }
	  else
	    /* No uses at all: the parameter is trivially controlled.  */
	    controlled_uses = 0;
	}
      else
	/* Non-register (aggregate/addressable) parameters are not tracked
	   here.  */
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
8b68ef1b | 2541 | |
24430d08 | 2542 | /* Free stuff in BI. */ |
8b68ef1b | 2543 | |
24430d08 | 2544 | static void |
2545 | free_ipa_bb_info (struct ipa_bb_info *bi) | |
2546 | { | |
2547 | bi->cg_edges.release (); | |
2548 | bi->param_aa_statuses.release (); | |
f8daee9b | 2549 | } |
2550 | ||
/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct ipa_func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual edge before_dom_children (basic_block);

private:
  /* Per-function analysis state shared by all visited blocks.  */
  struct ipa_func_body_info *m_fbi;
};

/* Visit BB before its dominated children: record parameter uses and compute
   jump functions for call arguments in the block.  Returns NULL because the
   walk never skips any outgoing edge.  */

edge
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
  return NULL;
}
2572 | ||
73bd7d5f | 2573 | /* Release body info FBI. */ |
2574 | ||
2575 | void | |
2576 | ipa_release_body_info (struct ipa_func_body_info *fbi) | |
2577 | { | |
2578 | int i; | |
2579 | struct ipa_bb_info *bi; | |
2580 | ||
2581 | FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi) | |
2582 | free_ipa_bb_info (bi); | |
2583 | fbi->bb_infos.release (); | |
2584 | } | |
2585 | ||
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  struct ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);

  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      /* Analysis is forbidden for this node; conservatively mark every
	 parameter as used with undescribed uses.  */
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  /* Switch to NODE's function body; popped again at the end.  */
  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walk_budget = PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);

  /* Distribute direct call edges into the per-BB info records so the
     dominator walk can process them in CFG order.  */
  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  /* Likewise for indirect call edges.  */
  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  ipa_release_body_info (&fbi);
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
8b68ef1b | 2645 | |
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
      struct ipa_polymorphic_call_context *dst_ctx
	= ipa_get_ith_polymorhic_call_context (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;
	  int dst_fid = dst->value.ancestor.formal_id;
	  struct ipa_polymorphic_call_context *src_ctx
	    = ipa_get_ith_polymorhic_call_context (top, dst_fid);

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst_fid >= ipa_get_cs_argument_count (top))
	    {
	      ipa_set_jf_unknown (dst);
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst_fid);

	  /* Combine the polymorphic context of the outer edge argument with
	     DST's context, offsetting by the ancestor offset.  */
	  if (src_ctx && !src_ctx->useless_p ())
	    {
	      struct ipa_polymorphic_call_context ctx = *src_ctx;

	      /* TODO: Make type preserved safe WRT contexts.  */
	      if (!ipa_get_jf_ancestor_type_preserved (dst))
		ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
	      ctx.offset_by (dst->value.ancestor.offset);
	      if (!ctx.useless_p ())
		{
		  if (!dst_ctx)
		    {
		      vec_safe_grow_cleared (args->polymorphic_call_contexts,
					     count);
		      dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
		    }

		  dst_ctx->combine_with (ctx);
		}
	    }

	  /* Propagate aggregate items, re-basing their offsets to the
	     ancestor subobject.  */
	  if (src->agg.items
	      && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
	    {
	      struct ipa_agg_jf_item *item;
	      int j;

	      /* Currently we do not produce clobber aggregate jump functions,
		 replace with merging when we do.  */
	      gcc_assert (!dst->agg.items);

	      dst->agg.items = vec_safe_copy (src->agg.items);
	      dst->agg.by_ref = src->agg.by_ref;
	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
		item->offset -= dst->value.ancestor.offset;
	    }

	  /* Compose the outer jump function with the ancestor one.  */
	  if (src->type == IPA_JF_PASS_THROUGH
	      && src->value.pass_through.operation == NOP_EXPR)
	    {
	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved &=
		src->value.pass_through.agg_preserved;
	    }
	  else if (src->type == IPA_JF_PASS_THROUGH
		   && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
	    {
	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved = false;
	    }
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	      dst->value.ancestor.agg_preserved &=
		src->value.ancestor.agg_preserved;
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      int dst_fid = dst->value.pass_through.formal_id;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
	      struct ipa_polymorphic_call_context *src_ctx
		= ipa_get_ith_polymorhic_call_context (top, dst_fid);

	      /* Combine polymorphic contexts as above, but without an
		 ancestor offset.  */
	      if (src_ctx && !src_ctx->useless_p ())
		{
		  struct ipa_polymorphic_call_context ctx = *src_ctx;

		  /* TODO: Make type preserved safe WRT contexts.  */
		  if (!ipa_get_jf_pass_through_type_preserved (dst))
		    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
		  if (!ctx.useless_p ())
		    {
		      if (!dst_ctx)
			{
			  vec_safe_grow_cleared (args->polymorphic_call_contexts,
						 count);
			  dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
			}
		      dst_ctx->combine_with (ctx);
		    }
		}
	      /* A simple pass-through composes to a copy of the outer jump
		 function, with agg_preserved ANDed in where applicable.  */
	      switch (src->type)
		{
		case IPA_JF_UNKNOWN:
		  ipa_set_jf_unknown (dst);
		  break;
		case IPA_JF_CONST:
		  ipa_set_jf_cst_copy (dst, src);
		  break;

		case IPA_JF_PASS_THROUGH:
		  {
		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
		    enum tree_code operation;
		    operation = ipa_get_jf_pass_through_operation (src);

		    if (operation == NOP_EXPR)
		      {
			bool agg_p;
			agg_p = dst_agg_p
			  && ipa_get_jf_pass_through_agg_preserved (src);
			ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
		      }
		    else if (TREE_CODE_CLASS (operation) == tcc_unary)
		      ipa_set_jf_unary_pass_through (dst, formal_id, operation);
		    else
		      {
			tree operand = ipa_get_jf_pass_through_operand (src);
			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
						       operation);
		      }
		    break;
		  }
		case IPA_JF_ANCESTOR:
		  {
		    bool agg_p;
		    agg_p = dst_agg_p
		      && ipa_get_jf_ancestor_agg_preserved (src);
		    ipa_set_ancestor_jf (dst,
					 ipa_get_jf_ancestor_offset (src),
					 ipa_get_jf_ancestor_formal_id (src),
					 agg_p);
		    break;
		  }
		default:
		  gcc_unreachable ();
		}

	      if (src->agg.items
		  && (dst_agg_p || !src->agg.by_ref))
		{
		  /* Currently we do not produce clobber aggregate jump
		     functions, replace with merging when we do.  */
		  gcc_assert (!dst->agg.items);

		  dst->agg.by_ref = src->agg.by_ref;
		  dst->agg.items = vec_safe_copy (src->agg.items);
		}
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
      else
	ipa_set_jf_unknown (dst);
    }
}
2839 | ||
/* If TARGET is an addr_expr of a function declaration, make it the
   (SPECULATIVE) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
				bool speculative)
{
  struct cgraph_node *callee;
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  /* Member pointer call that goes through a VMT lookup.  */
	  if (ie->indirect_info->member_ptr
	      /* Or if target is not an invariant expression and we do not
		 know if it will evaluate to function at runtime.
		 This can happen when folding through &VAR, where &VAR
		 is IP invariant, but VAR itself is not.

		 TODO: Revisit this when GCC 5 is branched.  It seems that
		 member_ptr check is not needed and that we may try to fold
		 the expression and see if VAR is readonly.  */
	      || !is_gimple_ip_invariant (target))
	    {
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
				   "discovered direct call non-invariant %s\n",
				   ie->caller->dump_name ());
		}
	      return NULL;
	    }


	  /* A constant that is provably not a function: the call can never
	     be executed, redirect it to __builtin_unreachable.  */
	  if (dump_enabled_p ())
	    {
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
			       "discovered direct call to non-function in %s, "
			       "making it __builtin_unreachable\n",
			       ie->caller->dump_name ());
	    }

	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	  callee = cgraph_node::get_create (target);
	  unreachable = true;
	}
      else
	callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and vtable may be external,
     we may create the first reference to the object in the unit.  */
  if (!callee || callee->global.inlined_to)
    {

      /* We are better to ensure we can refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s -> %s) but cannot refer to it.  Giving up.\n",
		     ie->caller->dump_name (),
		     ie->callee->dump_name ());
	  return NULL;
	}
      callee = cgraph_node::get_create (target);
    }

  /* If the edge is already speculated.  */
  if (speculative && ie->speculative)
    {
      struct cgraph_edge *e2;
      struct ipa_ref *ref;
      ie->speculative_call_info (e2, ie, ref);
      if (e2->callee->ultimate_alias_target ()
	  != callee->ultimate_alias_target ())
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
		     "target (%s -> %s) but the call is already "
		     "speculated to %s.  Giving up.\n",
		     ie->caller->dump_name (), callee->dump_name (),
		     e2->callee->dump_name ());
	}
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
		     "(%s -> %s) this agree with previous speculation.\n",
		     ie->caller->dump_name (), callee->dump_name ());
	}
      return NULL;
    }

  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We cannot make edges to inline clones.  It is bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
	       "(%s -> %s), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       speculative ? "speculative" : "known",
	       ie->caller->dump_name (),
	       callee->dump_name ());
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
		       "converting indirect call in %s to direct call to %s\n",
		       ie->caller->name (), callee->name ());
    }
  if (!speculative)
    {
      struct cgraph_edge *orig = ie;
      ie = ie->make_direct (callee);
      /* If we resolved speculative edge the cost is already up to date
	 for direct call (adjusted by inline_edge_duplication_hook).  */
      if (ie == orig)
	{
	  ipa_call_summary *es = ipa_call_summaries->get (ie);
	  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
				 - eni_size_weights.call_cost);
	  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
				 - eni_time_weights.call_cost);
	}
    }
  else
    {
      /* Prefer a non-interposable alias so the speculation cannot be
	 invalidated by symbol interposition.  */
      if (!callee->can_be_discarded_p ())
	{
	  cgraph_node *alias;
	  alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
	  if (alias)
	    callee = alias;
	}
      /* make_speculative will update ie's cost to direct call cost.  */
      ie = ie->make_speculative
	     (callee, ie->count.apply_scale (8, 10));
    }

  return ie;
}
3005 | ||
/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
   CONSTRUCTOR and return it.  Return NULL if the search fails for some
   reason.  REQ_OFFSET is in bits.  Recurses into nested constructors.  */

static tree
find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
{
  tree type = TREE_TYPE (constructor);
  if (TREE_CODE (type) != ARRAY_TYPE
      && TREE_CODE (type) != RECORD_TYPE)
    return NULL;

  unsigned ix;
  tree index, val;
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
    {
      HOST_WIDE_INT elt_offset;
      if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  /* Compute the element's bit offset from its (possibly implicit)
	     array index.  */
	  offset_int off;
	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
	  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);

	  if (index)
	    {
	      if (TREE_CODE (index) == RANGE_EXPR)
		off = wi::to_offset (TREE_OPERAND (index, 0));
	      else
		off = wi::to_offset (index);
	      /* Adjust for a non-zero array lower bound.  */
	      if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
		{
		  tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
		  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
		  off = wi::sext (off - wi::to_offset (low_bound),
				  TYPE_PRECISION (TREE_TYPE (index)));
		}
	      off *= wi::to_offset (unit_size);
	      /* ??? Handle more than just the first index of a
		 RANGE_EXPR.  */
	    }
	  else
	    /* No explicit index: position follows from the element's
	       ordinal number.  */
	    off = wi::to_offset (unit_size) * ix;

	  off = wi::lshift (off, LOG2_BITS_PER_UNIT);
	  if (!wi::fits_shwi_p (off) || wi::neg_p (off))
	    continue;
	  elt_offset = off.to_shwi ();
	}
      else if (TREE_CODE (type) == RECORD_TYPE)
	{
	  gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
	  /* Bit-fields are not addressable at byte granularity; skip.  */
	  if (DECL_BIT_FIELD (index))
	    continue;
	  elt_offset = int_bit_position (index);
	}
      else
	gcc_unreachable ();

      /* Elements are in ascending offset order; past REQ_OFFSET means the
	 search has failed.  */
      if (elt_offset > req_offset)
	return NULL;

      if (TREE_CODE (val) == CONSTRUCTOR)
	return find_constructor_constant_at_offset (val,
						    req_offset - elt_offset);

      if (elt_offset == req_offset
	  && is_gimple_reg_type (TREE_TYPE (val))
	  && is_gimple_ip_invariant (val))
	return val;
    }
  return NULL;
}
3078 | ||
3079 | /* Check whether SCALAR could be used to look up an aggregate interprocedural | |
3080 | invariant from a static constructor and if so, return it. Otherwise return | |
3081 | NULL. */ | |
3082 | ||
3083 | static tree | |
3084 | ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref) | |
3085 | { | |
3086 | if (by_ref) | |
3087 | { | |
3088 | if (TREE_CODE (scalar) != ADDR_EXPR) | |
3089 | return NULL; | |
3090 | scalar = TREE_OPERAND (scalar, 0); | |
3091 | } | |
3092 | ||
53e9c5c4 | 3093 | if (!VAR_P (scalar) |
665ff45b | 3094 | || !is_global_var (scalar) |
3095 | || !TREE_READONLY (scalar) | |
3096 | || !DECL_INITIAL (scalar) | |
3097 | || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR) | |
3098 | return NULL; | |
3099 | ||
3100 | return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset); | |
3101 | } | |
3102 | ||
3103 | /* Retrieve value from aggregate jump function AGG or static initializer of | |
3104 | SCALAR (which can be NULL) for the given OFFSET or return NULL if there is | |
3105 | none. BY_REF specifies whether the value has to be passed by reference or | |
3106 | by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points | |
3107 | to is set to true if the value comes from an initializer of a constant. */ | |
0d491188 | 3108 | |
3109 | tree | |
665ff45b | 3110 | ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar, |
3111 | HOST_WIDE_INT offset, bool by_ref, | |
3112 | bool *from_global_constant) | |
0d491188 | 3113 | { |
3114 | struct ipa_agg_jf_item *item; | |
3115 | int i; | |
3116 | ||
665ff45b | 3117 | if (scalar) |
3118 | { | |
3119 | tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref); | |
3120 | if (res) | |
3121 | { | |
3122 | if (from_global_constant) | |
3123 | *from_global_constant = true; | |
3124 | return res; | |
3125 | } | |
3126 | } | |
3127 | ||
3128 | if (!agg | |
3129 | || by_ref != agg->by_ref) | |
0d491188 | 3130 | return NULL; |
3131 | ||
f1f41a6c | 3132 | FOR_EACH_VEC_SAFE_ELT (agg->items, i, item) |
803a7988 | 3133 | if (item->offset == offset) |
3134 | { | |
3135 | /* Currently we do not have clobber values, return NULL for them once | |
3136 | we do. */ | |
3137 | gcc_checking_assert (is_gimple_ip_invariant (item->value)); | |
665ff45b | 3138 | if (from_global_constant) |
3139 | *from_global_constant = false; | |
803a7988 | 3140 | return item->value; |
3141 | } | |
0d491188 | 3142 | return NULL; |
3143 | } | |
3144 | ||
096295f6 | 3145 | /* Remove a reference to SYMBOL from the list of references of a node given by |
061168c9 | 3146 | reference description RDESC. Return true if the reference has been |
3147 | successfully found and removed. */ | |
096295f6 | 3148 | |
061168c9 | 3149 | static bool |
452659af | 3150 | remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc) |
096295f6 | 3151 | { |
3152 | struct ipa_ref *to_del; | |
3153 | struct cgraph_edge *origin; | |
3154 | ||
3155 | origin = rdesc->cs; | |
8398c9b3 | 3156 | if (!origin) |
3157 | return false; | |
51ce5652 | 3158 | to_del = origin->caller->find_reference (symbol, origin->call_stmt, |
3159 | origin->lto_stmt_uid); | |
061168c9 | 3160 | if (!to_del) |
3161 | return false; | |
3162 | ||
51ce5652 | 3163 | to_del->remove_reference (); |
096295f6 | 3164 | if (dump_file) |
0e388735 | 3165 | fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n", |
3166 | origin->caller->dump_name (), xstrdup_for_dump (symbol->name ())); | |
061168c9 | 3167 | return true; |
096295f6 | 3168 | } |
3169 | ||
3170 | /* If JFUNC has a reference description with refcount different from | |
3171 | IPA_UNDESCRIBED_USE, return the reference description, otherwise return | |
3172 | NULL. JFUNC must be a constant jump function. */ | |
3173 | ||
3174 | static struct ipa_cst_ref_desc * | |
3175 | jfunc_rdesc_usable (struct ipa_jump_func *jfunc) | |
3176 | { | |
3177 | struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc); | |
3178 | if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE) | |
3179 | return rdesc; | |
3180 | else | |
3181 | return NULL; | |
3182 | } | |
3183 | ||
061168c9 | 3184 | /* If the value of constant jump function JFUNC is an address of a function |
3185 | declaration, return the associated call graph node. Otherwise return | |
3186 | NULL. */ | |
3187 | ||
3188 | static cgraph_node * | |
3189 | cgraph_node_for_jfunc (struct ipa_jump_func *jfunc) | |
3190 | { | |
3191 | gcc_checking_assert (jfunc->type == IPA_JF_CONST); | |
3192 | tree cst = ipa_get_jf_constant (jfunc); | |
3193 | if (TREE_CODE (cst) != ADDR_EXPR | |
3194 | || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL) | |
3195 | return NULL; | |
3196 | ||
415d1b9a | 3197 | return cgraph_node::get (TREE_OPERAND (cst, 0)); |
061168c9 | 3198 | } |
3199 | ||
3200 | ||
3201 | /* If JFUNC is a constant jump function with a usable rdesc, decrement its | |
3202 | refcount and if it hits zero, remove reference to SYMBOL from the caller of | |
3203 | the edge specified in the rdesc. Return false if either the symbol or the | |
3204 | reference could not be found, otherwise return true. */ | |
3205 | ||
3206 | static bool | |
3207 | try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc) | |
3208 | { | |
3209 | struct ipa_cst_ref_desc *rdesc; | |
3210 | if (jfunc->type == IPA_JF_CONST | |
3211 | && (rdesc = jfunc_rdesc_usable (jfunc)) | |
3212 | && --rdesc->refcount == 0) | |
3213 | { | |
452659af | 3214 | symtab_node *symbol = cgraph_node_for_jfunc (jfunc); |
061168c9 | 3215 | if (!symbol) |
3216 | return false; | |
3217 | ||
3218 | return remove_described_reference (symbol, rdesc); | |
3219 | } | |
3220 | return true; | |
3221 | } | |
3222 | ||
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  TARGET_TYPE is
   the type of the parameter to which the result of JFUNC is passed.  If it can
   be determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc, tree target_type,
				  struct ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;
  tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
  if (agg_contents)
    {
      bool from_global_constant;
      /* The callee is loaded from an aggregate; look the value up either in a
	 global constant initializer or in the aggregate jump function.  */
      target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
					   ie->indirect_info->offset,
					   ie->indirect_info->by_ref,
					   &from_global_constant);
      /* A value coming from the (possibly modified) aggregate is only
	 trustworthy if the memory is known not to change before the call.  */
      if (target
	  && !from_global_constant
	  && !ie->indirect_info->guaranteed_unmodified)
	return NULL;
    }
  else
    target = scalar;
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      /* For non-aggregate cases the new callee must agree with what the
	 constant jump function (if any) described; then release one counted
	 use of that constant.  */
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !cgraph_node_for_jfunc (jfunc)
			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
3271 | ||
49c3fb73 | 3272 | /* Return the target to be used in cases of impossible devirtualization. IE |
3273 | and target (the latter can be NULL) are dumped when dumping is enabled. */ | |
3274 | ||
3bc62a51 | 3275 | tree |
3276 | ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target) | |
49c3fb73 | 3277 | { |
3278 | if (dump_file) | |
3279 | { | |
3280 | if (target) | |
3281 | fprintf (dump_file, | |
0e388735 | 3282 | "Type inconsistent devirtualization: %s->%s\n", |
3283 | ie->caller->dump_name (), | |
49c3fb73 | 3284 | IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target))); |
3285 | else | |
3286 | fprintf (dump_file, | |
0e388735 | 3287 | "No devirtualization target in %s\n", |
3288 | ie->caller->dump_name ()); | |
49c3fb73 | 3289 | } |
3290 | tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE); | |
415d1b9a | 3291 | cgraph_node::get_create (new_target); |
49c3fb73 | 3292 | return new_target; |
3293 | } | |
3294 | ||
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   struct ipa_polymorphic_call_context ctx)
{
  tree target = NULL;
  /* Set when the vptr may have changed, so the result may only be used as a
     speculative (verified at runtime) direct call.  */
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
	: NULL;
      tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
					   ie->indirect_info->offset,
					   true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  bool can_refer;
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset, &can_refer);
	  if (can_refer)
	    {
	      /* Reject a missing method, the unreachable builtin, or a target
		 that is not among the possible polymorphic call targets.  */
	      if (!t
		  || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
		      && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		  else
		    target = NULL;
		}
	      else
		{
		  target = t;
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  /* Second approach: use the polymorphic context to enumerate the possible
     targets of the call.  */
  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      /* The target list is complete; a single entry is a sure target, an
	 empty list means the call is unreachable.  */
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      /* Fall back to a speculative guess for hot calls.  */
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	  (ie, cgraph_node::get_create (target)))
	{
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
3402 | ||
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info, *inlined_node_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
				? cs->caller->global.inlined_to
				: cs->caller);
  inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;
      cgraph_node *spec_target = NULL;

      /* Fetched up front because IE may be turned into a direct edge or
	 removed below.  */
      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      if (ie->speculative)
	{
	  struct cgraph_edge *de;
	  struct ipa_ref *ref;
	  ie->speculative_call_info (de, ie, ref);
	  spec_target = de->callee;
	}

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
	}
      else
	{
	  tree target_type = ipa_get_type (inlined_node_info, param_index);
	  new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							      target_type,
							      new_root_info);
	}

      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && new_direct_edge->callee == spec_target)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  /* Re-fetch TOP: making edges direct may have reallocated the
	     summary vector it points into.  */
	  top = IPA_EDGE_REF (cs);
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (
		  new_direct_edge->call_stmt,
		  new_direct_edge->callee->decl, false);
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  /* Re-fetch TOP for the same reason as above.  */
	  top = IPA_EDGE_REF (cs);
	  /* If speculative edge was introduced we still need to update
	     call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      /* No (non-speculative) direct edge was created; translate the edge's
	 parameter index through the jump function of the inlined call so it
	 stays meaningful relative to the new root.  */
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
3538 | ||
3539 | /* Recursively traverse subtree of NODE (including node) made of inlined | |
3540 | cgraph_edges when CS has been inlined and invoke | |
799c8711 | 3541 | update_indirect_edges_after_inlining on all nodes and |
f8daee9b | 3542 | update_jump_functions_after_inlining on all non-inlined edges that lead out |
3543 | of this subtree. Newly discovered indirect edges will be added to | |
3f2ff969 | 3544 | *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were |
3545 | created. */ | |
1917e945 | 3546 | |
3f2ff969 | 3547 | static bool |
f8daee9b | 3548 | propagate_info_to_inlined_callees (struct cgraph_edge *cs, |
3549 | struct cgraph_node *node, | |
415d1b9a | 3550 | vec<cgraph_edge *> *new_edges) |
f8daee9b | 3551 | { |
3552 | struct cgraph_edge *e; | |
3f2ff969 | 3553 | bool res; |
f8daee9b | 3554 | |
799c8711 | 3555 | res = update_indirect_edges_after_inlining (cs, node, new_edges); |
f8daee9b | 3556 | |
3557 | for (e = node->callees; e; e = e->next_callee) | |
3558 | if (!e->inline_failed) | |
3f2ff969 | 3559 | res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges); |
f8daee9b | 3560 | else |
3561 | update_jump_functions_after_inlining (cs, e); | |
a226c368 | 3562 | for (e = node->indirect_calls; e; e = e->next_callee) |
3563 | update_jump_functions_after_inlining (cs, e); | |
3f2ff969 | 3564 | |
3565 | return res; | |
f8daee9b | 3566 | } |
3567 | ||
096295f6 | 3568 | /* Combine two controlled uses counts as done during inlining. */ |
3569 | ||
3570 | static int | |
3571 | combine_controlled_uses_counters (int c, int d) | |
3572 | { | |
3573 | if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE) | |
3574 | return IPA_UNDESCRIBED_USE; | |
3575 | else | |
3576 | return c + d - 1; | |
3577 | } | |
3578 | ||
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  /* A parameter of the caller is passed straight through; fold the
	     callee's use count of argument I into the caller's count of the
	     corresponding formal SRC_IDX.  */
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      /* All controlled uses are gone; if the known constant of the
		 IPA-CP clone is a function address, the cloning-created
		 reference to that function can be removed.  */
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s to %s.\n",
			     new_root->dump_name (),
			     n->dump_name ());
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  /* A tracked constant is passed; fold the callee's use count into
	     the reference description's count.  */
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				   == FUNCTION_DECL);
	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  /* Also walk up the chain of IPA-CP-created inline clones
		     and drop the references they carry.  */
		  clone = cs->caller;
		  while (clone->global.inlined_to
			 && clone != rdesc->cs->caller
			 && IPA_NODE_REF (clone)->ipcp_orig_node)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s to %s.\n",
				     clone->dump_name (),
				     n->dump_name ());
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  /* Arguments beyond the callee's formal parameters (variadic calls) cannot
     be tracked; mark the associated counters as undescribed.  */
  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
3693 | ||
f8daee9b | 3694 | /* Update jump functions and call note functions on inlining the call site CS. |
3695 | CS is expected to lead to a node already cloned by | |
3696 | cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to | |
3f2ff969 | 3697 | *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were + |
3698 | created. */ | |
1917e945 | 3699 | |
3f2ff969 | 3700 | bool |
f8daee9b | 3701 | ipa_propagate_indirect_call_infos (struct cgraph_edge *cs, |
415d1b9a | 3702 | vec<cgraph_edge *> *new_edges) |
f8daee9b | 3703 | { |
a226c368 | 3704 | bool changed; |
3f2ff969 | 3705 | /* Do nothing if the preparation phase has not been carried out yet |
3706 | (i.e. during early inlining). */ | |
2cc80ac3 | 3707 | if (!ipa_node_params_sum) |
3f2ff969 | 3708 | return false; |
322dd010 | 3709 | gcc_assert (ipa_edge_args_sum); |
3f2ff969 | 3710 | |
096295f6 | 3711 | propagate_controlled_uses (cs); |
a226c368 | 3712 | changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges); |
3713 | ||
a226c368 | 3714 | return changed; |
3b22db66 | 3715 | } |
3716 | ||
97cb825b | 3717 | /* Ensure that array of edge arguments infos is big enough to accommodate a |
3718 | structure for all edges and reallocates it if not. Also, allocate | |
3719 | associated hash tables is they do not already exist. */ | |
3720 | ||
3721 | void | |
3722 | ipa_check_create_edge_args (void) | |
3723 | { | |
322dd010 | 3724 | if (!ipa_edge_args_sum) |
3725 | ipa_edge_args_sum | |
3726 | = (new (ggc_cleared_alloc <ipa_edge_args_sum_t> ()) | |
3727 | ipa_edge_args_sum_t (symtab, true)); | |
97cb825b | 3728 | if (!ipa_bits_hash_table) |
3729 | ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37); | |
3730 | if (!ipa_vr_hash_table) | |
3731 | ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37); | |
3732 | } | |
3733 | ||
545eff8f | 3734 | /* Free all ipa_edge structures. */ |
1917e945 | 3735 | |
3b22db66 | 3736 | void |
545eff8f | 3737 | ipa_free_all_edge_args (void) |
3b22db66 | 3738 | { |
322dd010 | 3739 | if (!ipa_edge_args_sum) |
f1f41a6c | 3740 | return; |
3741 | ||
322dd010 | 3742 | ipa_edge_args_sum->release (); |
3743 | ipa_edge_args_sum = NULL; | |
3b22db66 | 3744 | } |
3745 | ||
545eff8f | 3746 | /* Free all ipa_node_params structures. */ |
1917e945 | 3747 | |
3b22db66 | 3748 | void |
545eff8f | 3749 | ipa_free_all_node_params (void) |
3b22db66 | 3750 | { |
df5a72e6 | 3751 | ipa_node_params_sum->release (); |
2cc80ac3 | 3752 | ipa_node_params_sum = NULL; |
545eff8f | 3753 | } |
3754 | ||
00637f9c | 3755 | /* Initialize IPA CP transformation summary and also allocate any necessary hash |
97cb825b | 3756 | tables if they do not already exist. */ |
ae7b7bc8 | 3757 | |
3758 | void | |
00637f9c | 3759 | ipcp_transformation_initialize (void) |
ae7b7bc8 | 3760 | { |
97cb825b | 3761 | if (!ipa_bits_hash_table) |
3762 | ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37); | |
3763 | if (!ipa_vr_hash_table) | |
3764 | ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37); | |
00637f9c | 3765 | if (ipcp_transformation_sum == NULL) |
3766 | ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab); | |
ae7b7bc8 | 3767 | } |
3768 | ||
803a7988 | 3769 | /* Set the aggregate replacements of NODE to be AGGVALS. */ |
3770 | ||
3771 | void | |
3772 | ipa_set_node_agg_value_chain (struct cgraph_node *node, | |
3773 | struct ipa_agg_replacement_value *aggvals) | |
3774 | { | |
00637f9c | 3775 | ipcp_transformation_initialize (); |
3776 | ipcp_transformation *s = ipcp_transformation_sum->get_create (node); | |
3777 | s->agg_values = aggvals; | |
803a7988 | 3778 | } |
3779 | ||
/* Hook that is called by cgraph.c when an edge is removed.  Adjust reference
   count data structures accordingly.  */

void
ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
{
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  /* Release the use that this edge's jump function was holding.  */
	  try_decrement_rdesc_refcount (jf);
	  /* If the rdesc still points at the edge being removed, clear the
	     back-pointer so it is not dereferenced later (see the !origin
	     check in remove_described_reference).  */
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }
}
3801 | ||
/* Method invoked when an edge is duplicated.  Copy ipa_edge_args and adjust
   reference count data structures accordingly.  */

void
ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
				ipa_edge_args *old_args, ipa_edge_args *new_args)
{
  unsigned int i;

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      /* The vec_safe_copy above shared the items vector; give the new jump
	 function its own copy.  */
      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      /* Duplication within the same caller (e.g. splitting a
		 speculative edge): clone the underlying ipa_ref and give the
		 new edge an independent rdesc.  */
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = src->caller->find_reference (n, src->call_stmt,
						 src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      dst->caller->clone_reference (ref, ref->stmt);

	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
	      /* The rdesc belongs to the edge being duplicated: create a
		 duplicate rdesc and link it into the duplicate chain.  */
	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->global.inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->global.inlined_to
		    ? dst_rdesc->cs->caller->global.inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->global.inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  /* A new edge within the same caller passes the parameter once
	     more; bump the controlled uses counter if it is tracked.  */
	  struct cgraph_node *inline_root = dst->caller->global.inlined_to
	    ? dst->caller->global.inlined_to : dst->caller;
	  struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}
3895 | ||
2cc80ac3 | 3896 | /* Analyze newly added function into callgraph. */ |
1917e945 | 3897 | |
545eff8f | 3898 | static void |
2cc80ac3 | 3899 | ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED) |
545eff8f | 3900 | { |
2cc80ac3 | 3901 | if (node->has_gimple_body_p ()) |
3902 | ipa_analyze_node (node); | |
3903 | } | |
545eff8f | 3904 | |
/* Hook that is called by summary when a node is duplicated.  Copies the
   per-node IPA data of SRC (OLD_INFO) into DST's NEW_INFO, including any
   aggregate replacement values and the IPA-CP transformation summary.  */

void
ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
			     ipa_node_params *old_info,
			     ipa_node_params *new_info)
{
  ipa_agg_replacement_value *old_av, *new_av;

  new_info->descriptors = vec_safe_copy (old_info->descriptors);
  /* Lattices are propagation-time scratch data and are not copied.  */
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;
  new_info->known_csts = old_info->known_csts.copy ();
  new_info->known_contexts = old_info->known_contexts.copy ();

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
  new_info->versionable = old_info->versionable;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (old_av)
    {
      /* Deep-copy the chain of aggregate replacement values.  Note that
	 prepending each copied element leaves the new chain in reverse
	 order relative to the old one.  */
      new_av = NULL;
      while (old_av)
	{
	  struct ipa_agg_replacement_value *v;

	  v = ggc_alloc<ipa_agg_replacement_value> ();
	  memcpy (v, old_av, sizeof (*v));
	  v->next = new_av;
	  new_av = v;
	  old_av = old_av->next;
	}
      ipa_set_node_agg_value_chain (dst, new_av);
    }

  ipcp_transformation *src_trans = ipcp_get_transformation_summary (src);

  if (src_trans)
    {
      ipcp_transformation_initialize ();
      /* Re-fetch SRC's entry: get_create may have resized the summary.  */
      src_trans = ipcp_transformation_sum->get_create (src);
      ipcp_transformation *dst_trans
	= ipcp_transformation_sum->get_create (dst);

      dst_trans->bits = vec_safe_copy (src_trans->bits);

      const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
      vec<ipa_vr, va_gc> *&dst_vr
	= ipcp_get_transformation_summary (dst)->m_vr;
      if (vec_safe_length (src_trans->m_vr) > 0)
	{
	  vec_safe_reserve_exact (dst_vr, src_vr->length ());
	  for (unsigned i = 0; i < src_vr->length (); ++i)
	    dst_vr->quick_push ((*src_vr)[i]);
	}
    }
}
3963 | ||
/* Register our cgraph hooks if they are not already there.  Also makes sure
   the IPA node parameter and edge argument summaries exist before any hook
   can fire.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();

  /* Analyze any function inserted into the callgraph from now on.  */
  function_insertion_hook_holder =
      symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}
3b22db66 | 3975 | |
/* Unregister our cgraph hooks if they are not already there.  Clearing the
   holder makes a repeated call harmless.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
3984 | ||
3985 | /* Free all ipa_node_params and all ipa_edge_args structures if they are no | |
3986 | longer needed after ipa-cp. */ | |
1917e945 | 3987 | |
545eff8f | 3988 | void |
799c8711 | 3989 | ipa_free_all_structures_after_ipa_cp (void) |
f8daee9b | 3990 | { |
d1f68cd8 | 3991 | if (!optimize && !in_lto_p) |
f8daee9b | 3992 | { |
3993 | ipa_free_all_edge_args (); | |
3994 | ipa_free_all_node_params (); | |
8361d32f | 3995 | ipcp_sources_pool.release (); |
3996 | ipcp_cst_values_pool.release (); | |
3997 | ipcp_poly_ctx_values_pool.release (); | |
3998 | ipcp_agg_lattice_pool.release (); | |
f8daee9b | 3999 | ipa_unregister_cgraph_hooks (); |
b196706d | 4000 | ipa_refdesc_pool.release (); |
f8daee9b | 4001 | } |
4002 | } | |
4003 | ||
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  Also releases the IPA-CP object
   pools and removes our cgraph hooks.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}
4019 | ||
/* Print the parameter descriptors (usage and controlled-uses information)
   of function NODE to file F.  Nothing is printed for mere declarations.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  struct ipa_node_params *info;

  if (!node->definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, "  function  %s parameter descriptors:\n", node->dump_name ());
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, "    ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      c = ipa_get_controlled_uses (info, i);
      /* IPA_UNDESCRIBED_USE means the number of controlled uses is not
	 known for this parameter.  */
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, " controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}
3889f2e2 | 4050 | |
11b73810 | 4051 | /* Print ipa_tree_map data structures of all functions in the |
f8daee9b | 4052 | callgraph to F. */ |
1917e945 | 4053 | |
f8daee9b | 4054 | void |
11b73810 | 4055 | ipa_print_all_params (FILE * f) |
f8daee9b | 4056 | { |
4057 | struct cgraph_node *node; | |
4058 | ||
11b73810 | 4059 | fprintf (f, "\nFunction parameters:\n"); |
7c455d87 | 4060 | FOR_EACH_FUNCTION (node) |
11b73810 | 4061 | ipa_print_node_params (f, node); |
f8daee9b | 4062 | } |
547f1802 | 4063 | |
803a7988 | 4064 | /* Dump the AV linked list. */ |
4065 | ||
4066 | void | |
4067 | ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av) | |
4068 | { | |
4069 | bool comma = false; | |
4070 | fprintf (f, " Aggregate replacements:"); | |
4071 | for (; av; av = av->next) | |
4072 | { | |
4073 | fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "", | |
4074 | av->index, av->offset); | |
1ffa4346 | 4075 | print_generic_expr (f, av->value); |
803a7988 | 4076 | comma = true; |
4077 | } | |
4078 | fprintf (f, "\n"); | |
4079 | } | |
4080 | ||
/* Stream out jump function JUMP_FUNC to OB.  The format written here must
   stay in sync with ipa_read_jump_function.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;
  int flag = 0;

  /* ADDR_EXPRs are very common IP invariants; save some streamer data
     as well as WPA memory by handling them specially: the flag bit is
     folded into the type word and only the ADDR_EXPR operand is streamed,
     the reader rebuilds the ADDR_EXPR around it.  */
  if (jump_func->type == IPA_JF_CONST
      && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
    flag = 1;

  streamer_write_uhwi (ob, jump_func->type * 2 + flag);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob,
			 flag
			 ? TREE_OPERAND (jump_func->value.constant.value, 0)
			 : jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      /* The layout after the operation depends on its kind: simple copy
	 (NOP_EXPR), unary operation, or binary operation with a constant
	 operand.  */
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
	       == tcc_unary)
	streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
      else
	{
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  /* Aggregate jump function part: item count, by_ref flag (only when there
     are items), then the items themselves.  */
  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }

  /* Known-bits information, guarded by a presence bit.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, !!jump_func->bits, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->bits)
    {
      streamer_write_widest_int (ob, jump_func->bits->value);
      streamer_write_widest_int (ob, jump_func->bits->mask);
    }
  /* Value-range information, likewise guarded by a presence bit.  */
  bp_pack_value (&bp, !!jump_func->m_vr, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->m_vr)
    {
      streamer_write_enum (ob->main_stream, value_rang_type,
			   VR_LAST, jump_func->m_vr->kind ());
      stream_write_tree (ob, jump_func->m_vr->min (), true);
      stream_write_tree (ob, jump_func->m_vr->max (), true);
    }
}
4171 | ||
/* Read in jump function JUMP_FUNC from IB.  CS is the edge the function
   belongs to and DATA_IN the LTO input context.  PREVAILS says whether the
   function this jump function is part of is the prevailing definition; when
   it is not, the stream is still consumed but nothing is stored.  Must
   mirror the format produced by ipa_write_jump_function.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			struct data_in *data_in,
			bool prevails)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;
  /* The low bit of the first word is the writer's ADDR_EXPR compression
     flag; the rest encodes the jump function type.  */
  int val = streamer_read_uhwi (ib);
  bool flag = val & 1;

  jftype = (enum jump_func_type) (val / 2);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      {
	tree t = stream_read_tree (ib, data_in);
	/* The writer streamed only the operand of an ADDR_EXPR; rebuild
	   the ADDR_EXPR here.  */
	if (flag && prevails)
	  t = build_fold_addr_expr (t);
	ipa_set_jf_constant (jump_func, t, cs);
      }
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else if (TREE_CODE_CLASS (operation) == tcc_unary)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
	break;
      }
    default:
      /* Corrupted or incompatible LTO data.  */
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  /* Aggregate part: count, optional by_ref bit, then the items.  Items are
     always read to keep the stream position right, but only stored when
     this definition prevails.  */
  count = streamer_read_uhwi (ib);
  if (prevails)
    vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      if (prevails)
        jump_func->agg.items->quick_push (item);
    }

  /* Optional known-bits information.  */
  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool bits_known = bp_unpack_value (&bp, 1);
  if (bits_known)
    {
      widest_int value = streamer_read_widest_int (ib);
      widest_int mask = streamer_read_widest_int (ib);
      if (prevails)
        ipa_set_jfunc_bits (jump_func, value, mask);
    }
  else
    jump_func->bits = NULL;

  /* Optional value-range information.  */
  struct bitpack_d vr_bp = streamer_read_bitpack (ib);
  bool vr_known = bp_unpack_value (&vr_bp, 1);
  if (vr_known)
    {
      enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
						       VR_LAST);
      tree min = stream_read_tree (ib, data_in);
      tree max = stream_read_tree (ib, data_in);
      if (prevails)
	ipa_set_jfunc_vr (jump_func, type, min, max);
    }
  else
    jump_func->m_vr = NULL;
}
4279 | ||
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  Must stay in sync with
   ipa_read_indirect_edge_info.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  /* The offset is only meaningful (and only streamed) for aggregate or
     polymorphic calls; otherwise it must be zero.  */
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
4311 | ||
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  Mirrors the format written by
   ipa_write_indirect_edge_info.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
			     struct data_in *data_in,
			     struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  /* Unpack order must match the pack order on the writer side.  */
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
}
4342 | ||
/* Stream out NODE info to OB: parameter counts, move costs, usage bits,
   controlled uses and types, followed by the jump functions (and optional
   polymorphic call contexts) of all direct and indirect call edges.  Must
   stay in sync with ipa_read_node_info.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  /* Only fully analyzed, non-clone, non-enqueued nodes may be streamed.  */
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
      stream_write_tree (ob, ipa_get_type (info, j), true);
    }
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      /* The low bit of the count word records whether polymorphic call
	 contexts are present for this edge.  */
      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
      /* Indirect edges additionally carry indirect call info.  */
      ipa_write_indirect_edge_info (ob, e);
    }
}
4405 | ||
/* Stream in edge E from IB.  DATA_IN is the LTO input context.  PREVAILS
   says whether the function containing E prevails; if it does not, or if
   the call cannot happen in this translation unit, the jump functions are
   read into throw-away storage just to advance the stream.  */

static void
ipa_read_edge_info (struct lto_input_block *ib,
		    struct data_in *data_in,
		    struct cgraph_edge *e, bool prevails)
{
  /* The low bit of the count encodes whether polymorphic call contexts
     follow each jump function; the actual argument count is COUNT / 2.  */
  int count = streamer_read_uhwi (ib);
  bool contexts_computed = count & 1;

  count /= 2;
  if (!count)
    return;
  if (prevails && e->possibly_call_in_translation_unit_p ())
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
      for (int k = 0; k < count; k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in, prevails);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in
							     (ib, data_in);
	}
    }
  else
    {
      /* Consume the stream without storing anything.  */
      for (int k = 0; k < count; k++)
	{
	  struct ipa_jump_func dummy;
	  ipa_read_jump_function (ib, &dummy, e,
				  data_in, prevails);
	  if (contexts_computed)
	    {
	      struct ipa_polymorphic_call_context ctx;
	      ctx.stream_in (ib, data_in);
	    }
	}
    }
}
4449 | ||
/* Stream in NODE info from IB.  DATA_IN is the LTO input context.  Reads
   everything ipa_write_node_info wrote; when NODE is not the prevailing
   definition the data is consumed but discarded.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;
  bool prevails = node->prevailing_p ();
  struct ipa_node_params *info = prevails ? IPA_NODE_REF (node) : NULL;

  int param_count = streamer_read_uhwi (ib);
  if (prevails)
    {
      ipa_alloc_node_params (node, param_count);
      for (k = 0; k < param_count; k++)
	(*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
      if (ipa_get_param_count (info) != 0)
	info->analysis_done = true;
      info->node_enqueued = false;
    }
  else
    /* Skip over the move costs.  */
    for (k = 0; k < param_count; k++)
      streamer_read_uhwi (ib);

  /* Per-parameter "used" bits.  */
  bp = streamer_read_bitpack (ib);
  for (k = 0; k < param_count; k++)
    {
      bool used = bp_unpack_value (&bp, 1);

      if (prevails)
	ipa_set_param_used (info, k, used);
    }
  /* Per-parameter controlled-uses counts and types.  */
  for (k = 0; k < param_count; k++)
    {
      int nuses = streamer_read_hwi (ib);
      tree type = stream_read_tree (ib, data_in);

      if (prevails)
	{
	  ipa_set_controlled_uses (info, k, nuses);
	  (*info->descriptors)[k].decl_or_type = type;
	}
    }
  for (e = node->callees; e; e = e->next_callee)
    ipa_read_edge_info (ib, data_in, e, prevails);
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      ipa_read_edge_info (ib, data_in, e, prevails);
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
4503 | ||
/* Write jump functions for nodes in SET.  Emits an LTO_section_jump_functions
   section containing the node count followed by ipa_write_node_info data for
   every function in the partition that has a gimple body and IPA node
   information.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  /* Nothing to stream if the summaries were never created.  */
  if (!ipa_node_params_sum || !ipa_edge_args_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  /* First pass: count the nodes that will be streamed, so the reader knows
     how many records follow.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  The filter here must match the counting
     loop above exactly.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
4545 | ||
/* Read section in file FILE_DATA of length LEN with data DATA.  Parses one
   LTO_section_jump_functions section: a node count followed by per-node
   records, each starting with the encoder index of its cgraph node.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  /* Section layout: header, CFG stream, main stream, string table.  */
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
4586 | ||
4587 | /* Read ipcp jump functions. */ | |
4588 | ||
4589 | void | |
4590 | ipa_prop_read_jump_functions (void) | |
4591 | { | |
4592 | struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data (); | |
4593 | struct lto_file_decl_data *file_data; | |
4594 | unsigned int j = 0; | |
4595 | ||
4596 | ipa_check_create_node_params (); | |
4597 | ipa_check_create_edge_args (); | |
4598 | ipa_register_cgraph_hooks (); | |
4599 | ||
4600 | while ((file_data = file_data_vec[j++])) | |
4601 | { | |
4602 | size_t len; | |
4603 | const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len); | |
4604 | ||
4605 | if (data) | |
4606 | ipa_prop_read_section (file_data, data, len); | |
4607 | } | |
4608 | } | |
4609 | ||
/* Stream out the IPA-CP transformation summary for NODE to OB: the chain of
   aggregate replacement values, then the per-parameter value ranges, then
   the per-parameter known bits.  Must stay in sync with
   read_ipcp_transformation_info.  */

void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  /* Length-prefix the aggregate replacement chain.  */
  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  /* Value ranges; a count of zero is written when there are none.  */
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->m_vr) > 0)
    {
      count = ts->m_vr->length ();
      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	{
	  struct bitpack_d bp;
	  ipa_vr *parm_vr = &(*ts->m_vr)[i];
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_vr->known, 1);
	  streamer_write_bitpack (&bp);
	  if (parm_vr->known)
	    {
	      streamer_write_enum (ob->main_stream, value_rang_type,
				   VR_LAST, parm_vr->type);
	      streamer_write_wide_int (ob, parm_vr->min);
	      streamer_write_wide_int (ob, parm_vr->max);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);

  /* Known bits; again zero means none.  */
  if (ts && vec_safe_length (ts->bits) > 0)
    {
      count = ts->bits->length ();
      streamer_write_uhwi (ob, count);

      for (unsigned i = 0; i < count; ++i)
	{
	  const ipa_bits *bits_jfunc = (*ts->bits)[i];
	  struct bitpack_d bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, !!bits_jfunc, 1);
	  streamer_write_bitpack (&bp);
	  if (bits_jfunc)
	    {
	      streamer_write_widest_int (ob, bits_jfunc->value);
	      streamer_write_widest_int (ob, bits_jfunc->mask);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);
}
4685 | ||
/* Stream in the aggregate value replacement chain for NODE from IB, followed
   by the per-parameter value ranges and known bits.  DATA_IN is the LTO
   input context.  Mirrors write_ipcp_transformation_info.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i <count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      /* Prepending reverses the order relative to the written chain.  */
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  /* Value ranges; a zero count means none were streamed.  */
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->m_vr, count);
      for (i = 0; i < count; i++)
	{
	  ipa_vr *parm_vr;
	  parm_vr = &(*ts->m_vr)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_vr->known = bp_unpack_value (&bp, 1);
	  if (parm_vr->known)
	    {
	      parm_vr->type = streamer_read_enum (ib, value_range_kind,
						  VR_LAST);
	      parm_vr->min = streamer_read_wide_int (ib);
	      parm_vr->max = streamer_read_wide_int (ib);
	    }
	}
    }
  /* Known bits; entries without the presence bit stay NULL (the vector was
     grown cleared).  */
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->bits, count);

      for (i = 0; i < count; i++)
	{
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool known = bp_unpack_value (&bp, 1);
	  if (known)
	    {
	      ipa_bits *bits
		= ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
					      streamer_read_widest_int (ib));
	      (*ts->bits)[i] = bits;
	    }
	}
    }
}
4755 | ||
4756 | /* Write all aggregate replacement for nodes in set. */ | |
4757 | ||
4758 | void | |
ae7b7bc8 | 4759 | ipcp_write_transformation_summaries (void) |
803a7988 | 4760 | { |
4761 | struct cgraph_node *node; | |
4762 | struct output_block *ob; | |
4763 | unsigned int count = 0; | |
4764 | lto_symtab_encoder_iterator lsei; | |
4765 | lto_symtab_encoder_t encoder; | |
4766 | ||
803a7988 | 4767 | ob = create_output_block (LTO_section_ipcp_transform); |
4768 | encoder = ob->decl_state->symtab_node_encoder; | |
afb0d513 | 4769 | ob->symbol = NULL; |
803a7988 | 4770 | for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei); |
4771 | lsei_next_function_in_partition (&lsei)) | |
4772 | { | |
4773 | node = lsei_cgraph_node (lsei); | |
ae7b7bc8 | 4774 | if (node->has_gimple_body_p ()) |
803a7988 | 4775 | count++; |
4776 | } | |
4777 | ||
4778 | streamer_write_uhwi (ob, count); | |
4779 | ||
4780 | for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei); | |
4781 | lsei_next_function_in_partition (&lsei)) | |
4782 | { | |
4783 | node = lsei_cgraph_node (lsei); | |
ae7b7bc8 | 4784 | if (node->has_gimple_body_p ()) |
4785 | write_ipcp_transformation_info (ob, node); | |
803a7988 | 4786 | } |
4787 | streamer_write_char_stream (ob->main_stream, 0); | |
4788 | produce_asm (ob, NULL); | |
4789 | destroy_output_block (ob); | |
4790 | } | |
4791 | ||
4792 | /* Read replacements section in file FILE_DATA of length LEN with data | |
4793 | DATA. */ | |
4794 | ||
4795 | static void | |
4796 | read_replacements_section (struct lto_file_decl_data *file_data, | |
4797 | const char *data, | |
4798 | size_t len) | |
4799 | { | |
4800 | const struct lto_function_header *header = | |
4801 | (const struct lto_function_header *) data; | |
4802 | const int cfg_offset = sizeof (struct lto_function_header); | |
4803 | const int main_offset = cfg_offset + header->cfg_size; | |
4804 | const int string_offset = main_offset + header->main_size; | |
4805 | struct data_in *data_in; | |
803a7988 | 4806 | unsigned int i; |
4807 | unsigned int count; | |
4808 | ||
472ca566 | 4809 | lto_input_block ib_main ((const char *) data + main_offset, |
2e971afd | 4810 | header->main_size, file_data->mode_table); |
803a7988 | 4811 | |
4812 | data_in = lto_data_in_create (file_data, (const char *) data + string_offset, | |
1e094109 | 4813 | header->string_size, vNULL); |
803a7988 | 4814 | count = streamer_read_uhwi (&ib_main); |
4815 | ||
4816 | for (i = 0; i < count; i++) | |
4817 | { | |
4818 | unsigned int index; | |
4819 | struct cgraph_node *node; | |
4820 | lto_symtab_encoder_t encoder; | |
4821 | ||
4822 | index = streamer_read_uhwi (&ib_main); | |
4823 | encoder = file_data->symtab_node_encoder; | |
415d1b9a | 4824 | node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder, |
4825 | index)); | |
02774f2d | 4826 | gcc_assert (node->definition); |
ae7b7bc8 | 4827 | read_ipcp_transformation_info (&ib_main, node, data_in); |
803a7988 | 4828 | } |
4829 | lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data, | |
4830 | len); | |
4831 | lto_data_in_delete (data_in); | |
4832 | } | |
4833 | ||
4834 | /* Read IPA-CP aggregate replacements. */ | |
4835 | ||
4836 | void | |
ae7b7bc8 | 4837 | ipcp_read_transformation_summaries (void) |
803a7988 | 4838 | { |
4839 | struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data (); | |
4840 | struct lto_file_decl_data *file_data; | |
4841 | unsigned int j = 0; | |
4842 | ||
4843 | while ((file_data = file_data_vec[j++])) | |
4844 | { | |
4845 | size_t len; | |
4846 | const char *data = lto_get_section_data (file_data, | |
4847 | LTO_section_ipcp_transform, | |
4848 | NULL, &len); | |
4849 | if (data) | |
4850 | read_replacements_section (file_data, data, len); | |
4851 | } | |
4852 | } | |
4853 | ||
4854 | /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in | |
4855 | NODE. */ | |
4856 | ||
4857 | static void | |
4858 | adjust_agg_replacement_values (struct cgraph_node *node, | |
4859 | struct ipa_agg_replacement_value *aggval) | |
4860 | { | |
4861 | struct ipa_agg_replacement_value *v; | |
4862 | int i, c = 0, d = 0, *adj; | |
4863 | ||
4864 | if (!node->clone.combined_args_to_skip) | |
4865 | return; | |
4866 | ||
4867 | for (v = aggval; v; v = v->next) | |
4868 | { | |
4869 | gcc_assert (v->index >= 0); | |
4870 | if (c < v->index) | |
4871 | c = v->index; | |
4872 | } | |
4873 | c++; | |
4874 | ||
4875 | adj = XALLOCAVEC (int, c); | |
4876 | for (i = 0; i < c; i++) | |
4877 | if (bitmap_bit_p (node->clone.combined_args_to_skip, i)) | |
4878 | { | |
4879 | adj[i] = -1; | |
4880 | d++; | |
4881 | } | |
4882 | else | |
4883 | adj[i] = i - d; | |
4884 | ||
4885 | for (v = aggval; v; v = v->next) | |
4886 | v->index = adj[v->index]; | |
4887 | } | |
4888 | ||
/* Dominator walker driving the ipcp modification phase.  Visits every
   basic block in dominator order and rewrites loads from aggregates
   known to hold constant values.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  /* FBI describes the function body being transformed, DESCS are its
     parameter descriptors, AV is the chain of known aggregate constants,
     and SC/CC are out-flags recording whether any statement, respectively
     the CFG, was changed.  */
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor, va_gc> *descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual edge before_dom_children (basic_block);

private:
  /* Function body analysis context used when matching loads to
     parameter aggregates.  */
  struct ipa_func_body_info *m_fbi;
  /* Descriptors of the function's formal parameters.  */
  vec<ipa_param_descriptor, va_gc> *m_descriptors;
  /* Head of the chain of aggregate replacement values to apply.  */
  struct ipa_agg_replacement_value *m_aggval;
  /* Out-flags: a statement was modified / the CFG was modified.  */
  bool *m_something_changed, *m_cfg_changed;
};
4909 | ||
96752458 | 4910 | edge |
24430d08 | 4911 | ipcp_modif_dom_walker::before_dom_children (basic_block bb) |
4912 | { | |
4913 | gimple_stmt_iterator gsi; | |
4914 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
4915 | { | |
4916 | struct ipa_agg_replacement_value *v; | |
42acab1c | 4917 | gimple *stmt = gsi_stmt (gsi); |
24430d08 | 4918 | tree rhs, val, t; |
4919 | HOST_WIDE_INT offset, size; | |
4920 | int index; | |
4921 | bool by_ref, vce; | |
4922 | ||
4923 | if (!gimple_assign_load_p (stmt)) | |
4924 | continue; | |
4925 | rhs = gimple_assign_rhs1 (stmt); | |
4926 | if (!is_gimple_reg_type (TREE_TYPE (rhs))) | |
4927 | continue; | |
803a7988 | 4928 | |
24430d08 | 4929 | vce = false; |
4930 | t = rhs; | |
4931 | while (handled_component_p (t)) | |
4932 | { | |
4933 | /* V_C_E can do things like convert an array of integers to one | |
4934 | bigger integer and similar things we do not handle below. */ | |
4935 | if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR) | |
4936 | { | |
4937 | vce = true; | |
4938 | break; | |
4939 | } | |
4940 | t = TREE_OPERAND (t, 0); | |
4941 | } | |
4942 | if (vce) | |
4943 | continue; | |
4944 | ||
1a673ff0 | 4945 | if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index, |
4946 | &offset, &size, &by_ref)) | |
24430d08 | 4947 | continue; |
4948 | for (v = m_aggval; v; v = v->next) | |
4949 | if (v->index == index | |
4950 | && v->offset == offset) | |
4951 | break; | |
4952 | if (!v | |
4953 | || v->by_ref != by_ref | |
4954 | || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size) | |
4955 | continue; | |
4956 | ||
4957 | gcc_checking_assert (is_gimple_ip_invariant (v->value)); | |
4958 | if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value))) | |
4959 | { | |
4960 | if (fold_convertible_p (TREE_TYPE (rhs), v->value)) | |
4961 | val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value); | |
4962 | else if (TYPE_SIZE (TREE_TYPE (rhs)) | |
4963 | == TYPE_SIZE (TREE_TYPE (v->value))) | |
4964 | val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value); | |
4965 | else | |
4966 | { | |
4967 | if (dump_file) | |
4968 | { | |
4969 | fprintf (dump_file, " const "); | |
1ffa4346 | 4970 | print_generic_expr (dump_file, v->value); |
24430d08 | 4971 | fprintf (dump_file, " can't be converted to type of "); |
1ffa4346 | 4972 | print_generic_expr (dump_file, rhs); |
24430d08 | 4973 | fprintf (dump_file, "\n"); |
4974 | } | |
4975 | continue; | |
4976 | } | |
4977 | } | |
4978 | else | |
4979 | val = v->value; | |
4980 | ||
4981 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
4982 | { | |
4983 | fprintf (dump_file, "Modifying stmt:\n "); | |
1ffa4346 | 4984 | print_gimple_stmt (dump_file, stmt, 0); |
24430d08 | 4985 | } |
4986 | gimple_assign_set_rhs_from_tree (&gsi, val); | |
4987 | update_stmt (stmt); | |
4988 | ||
4989 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
4990 | { | |
4991 | fprintf (dump_file, "into:\n "); | |
1ffa4346 | 4992 | print_gimple_stmt (dump_file, stmt, 0); |
24430d08 | 4993 | fprintf (dump_file, "\n"); |
4994 | } | |
4995 | ||
4996 | *m_something_changed = true; | |
4997 | if (maybe_clean_eh_stmt (stmt) | |
4998 | && gimple_purge_dead_eh_edges (gimple_bb (stmt))) | |
4999 | *m_cfg_changed = true; | |
5000 | } | |
96752458 | 5001 | return NULL; |
24430d08 | 5002 | } |
5003 | ||
/* Update bits info of formal parameters as described in
   ipcp_transformation.  For integral parameters this sets the nonzero-bits
   mask of the default SSA definition; for pointer parameters it derives
   alignment information instead.  */

static void
ipcp_update_bits (struct cgraph_node *node)
{
  tree parm = DECL_ARGUMENTS (node->decl);
  tree next_parm = parm;
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);

  if (!ts || vec_safe_length (ts->bits) == 0)
    return;

  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      /* Summary indices refer to the original parameter list; skip
	 entries for parameters removed in this clone without consuming
	 a PARM_DECL.  */
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;

      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      /* Only integral and pointer SSA parameters with known bits can be
	 annotated.  */
      if (!bits[i]
	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
	       || POINTER_TYPE_P (TREE_TYPE (parm)))
	  || !is_gimple_reg (parm))
	continue;

      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
	  print_hex (bits[i]->mask, dump_file);
	  fprintf (dump_file, "\n");
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	{
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

	  /* A bit may be nonzero either where the mask says it is
	     unknown or where the known value has it set.  */
	  wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
				  | wide_int::from (bits[i]->value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
	  /* Pointer case: the lowest set bit of the mask bounds the
	     guaranteed alignment; the known value bits below it give the
	     misalignment.  */
	  unsigned tem = bits[i]->mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);

	      /* Never weaken alignment information that is already
		 stronger than what the summary provides.  */
	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n", old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
				 old_misalign, misalign);
		    }
		  continue;
		}

	      /* Diagnose (in dumps only) inconsistent misalignment
		 between the old info and the summary before overwriting.  */
	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}
5095 | ||
/* Update value range of formal parameters as described in
   ipcp_transformation.  Integral parameters get range info on their
   default SSA definition; pointer parameters whose range is ~[0, 0]
   (i.e. provably non-NULL) are marked as such.  */

static void
ipcp_update_vr (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      /* Summary indices refer to the original parameter list; skip
	 entries for parameters removed in this clone without consuming
	 a PARM_DECL.  */
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
	continue;

      /* Only proper (anti-)ranges are usable; VR_UNDEFINED/VR_VARYING
	 carry no information.  */
      if (vr[i].known
	  && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
	{
	  tree type = TREE_TYPE (ddef);
	  unsigned prec = TYPE_PRECISION (type);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Setting value range of param %u ", i);
		  fprintf (dump_file, "%s[",
			   (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
		  print_decs (vr[i].min, dump_file);
		  fprintf (dump_file, ", ");
		  print_decs (vr[i].max, dump_file);
		  fprintf (dump_file, "]\n");
		}
	      /* Bounds were streamed as wide ints; re-interpret them in
		 the precision and signedness of the parameter's type.  */
	      set_range_info (ddef, vr[i].type,
			      wide_int_storage::from (vr[i].min, prec,
						      TYPE_SIGN (type)),
			      wide_int_storage::from (vr[i].max, prec,
						      TYPE_SIGN (type)));
	    }
	  else if (POINTER_TYPE_P (TREE_TYPE (ddef))
		   && vr[i].type == VR_ANTI_RANGE
		   && wi::eq_p (vr[i].min, 0)
		   && wi::eq_p (vr[i].max, 0))
	    {
	      /* ~[0, 0] on a pointer means it can never be NULL.  */
	      if (dump_file)
		fprintf (dump_file, "Setting nonnull for %u\n", i);
	      set_ptr_nonnull (ddef);
	    }
	}
    }
}
5158 | ||
/* IPCP transformation phase doing propagation of aggregate values.
   Applies the whole transformation summary to NODE (whose body must be
   set up in cfun): bits, value ranges, and aggregate constant
   replacements.  Returns TODO flags for the pass manager, or 0 when
   nothing was changed.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
	     node->dump_name ());

  /* Bits and value ranges are applied unconditionally; the aggregate
     replacement walk below is only needed when replacements exist.  */
  ipcp_update_bits (node);
  ipcp_update_vr (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
      return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  /* Remap replacement indices to account for parameters this clone
     skipped.  */
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  /* Set up a minimal function body info: no IPA info, one (cleared)
     entry per basic block, and an alias-analysis walk budget.  */
  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walk_budget = PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);

  vec_safe_grow_cleared (descriptors, param_count);
  ipa_populate_param_decls (node, *descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  /* Walk the whole body replacing aggregate loads with constants.  */
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);

  /* The summary has been consumed; clear it so it is not applied
     again.  */
  ipcp_transformation *s = ipcp_transformation_sum->get (node);
  s->agg_values = NULL;
  s->bits = NULL;
  s->m_vr = NULL;

  vec_free (descriptors);

  if (!something_changed)
    return 0;

  /* Constant substitution may have made EH edges (and thus blocks)
     unreachable.  */
  if (cfg_changed)
    delete_unreachable_blocks_update_callgraph (node, false);

  return TODO_update_ssa_only_virtuals;
}
97cb825b | 5224 | |
5225 | #include "gt-ipa-prop.h" |