]>
Commit | Line | Data |
---|---|---|
3b22db66 | 1 | /* Interprocedural analyses. |
d353bf18 | 2 | Copyright (C) 2005-2015 Free Software Foundation, Inc. |
3b22db66 | 3 | |
4 | This file is part of GCC. | |
5 | ||
6 | GCC is free software; you can redistribute it and/or modify it under | |
7 | the terms of the GNU General Public License as published by the Free | |
8c4c00c1 | 8 | Software Foundation; either version 3, or (at your option) any later |
3b22db66 | 9 | version. |
10 | ||
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
14 | for more details. | |
15 | ||
16 | You should have received a copy of the GNU General Public License | |
8c4c00c1 | 17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
3b22db66 | 19 | |
20 | #include "config.h" | |
21 | #include "system.h" | |
22 | #include "coretypes.h" | |
b20a8bb4 | 23 | #include "alias.h" |
24 | #include "symtab.h" | |
25 | #include "options.h" | |
b20a8bb4 | 26 | #include "tree.h" |
27 | #include "fold-const.h" | |
28 | #include "predict.h" | |
94ea8568 | 29 | #include "tm.h" |
30 | #include "hard-reg-set.h" | |
94ea8568 | 31 | #include "function.h" |
32 | #include "dominance.h" | |
33 | #include "cfg.h" | |
bc61cadb | 34 | #include "basic-block.h" |
35 | #include "tree-ssa-alias.h" | |
36 | #include "internal-fn.h" | |
37 | #include "gimple-fold.h" | |
38 | #include "tree-eh.h" | |
39 | #include "gimple-expr.h" | |
b23fb4cb | 40 | #include "gimple.h" |
d53441c8 | 41 | #include "rtl.h" |
42 | #include "flags.h" | |
d53441c8 | 43 | #include "insn-config.h" |
44 | #include "expmed.h" | |
45 | #include "dojump.h" | |
46 | #include "explow.h" | |
47 | #include "calls.h" | |
48 | #include "emit-rtl.h" | |
49 | #include "varasm.h" | |
50 | #include "stmt.h" | |
9ed99284 | 51 | #include "expr.h" |
52 | #include "stor-layout.h" | |
53 | #include "print-tree.h" | |
a8783bee | 54 | #include "gimplify.h" |
dcf1a1ec | 55 | #include "gimple-iterator.h" |
e795d6e1 | 56 | #include "gimplify-me.h" |
dcf1a1ec | 57 | #include "gimple-walk.h" |
3b22db66 | 58 | #include "langhooks.h" |
3b22db66 | 59 | #include "target.h" |
1140c305 | 60 | #include "cgraph.h" |
61 | #include "alloc-pool.h" | |
2cc80ac3 | 62 | #include "symbol-summary.h" |
3b22db66 | 63 | #include "ipa-prop.h" |
073c1fd5 | 64 | #include "bitmap.h" |
65 | #include "gimple-ssa.h" | |
66 | #include "tree-cfg.h" | |
67 | #include "tree-phinodes.h" | |
68 | #include "ssa-iterators.h" | |
69 | #include "tree-into-ssa.h" | |
70 | #include "tree-dfa.h" | |
3b22db66 | 71 | #include "tree-pass.h" |
545eff8f | 72 | #include "tree-inline.h" |
18b64b34 | 73 | #include "ipa-inline.h" |
f8daee9b | 74 | #include "diagnostic.h" |
ce084dfc | 75 | #include "gimple-pretty-print.h" |
8867b500 | 76 | #include "lto-streamer.h" |
2541503d | 77 | #include "data-streamer.h" |
78 | #include "tree-streamer.h" | |
699f00b5 | 79 | #include "params.h" |
10fba9c0 | 80 | #include "ipa-utils.h" |
4563499d | 81 | #include "stringpool.h" |
82 | #include "tree-ssanames.h" | |
ceb49bba | 83 | #include "dbgcnt.h" |
24430d08 | 84 | #include "domwalk.h" |
f7715905 | 85 | #include "builtins.h" |
545eff8f | 86 | |
/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle again.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};
105 | ||
/* Information related to a given BB that is used only when looking at a
   function body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge *> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};
116 | ||
/* Structure with global information that is only used when looking at a
   function body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs, indexed by BB index.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements already walked by when analyzing this function.
     Used to cap the amount of alias analysis work (see aa_overwalked).  */
  unsigned int aa_walked;
};
137 | ||
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the edge argument information (jump functions) is actually
   stored, indexed by call graph edge uid.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
3b22db66 | 149 | |
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static pool_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions", 32);
096295f6 | 166 | |
6c0a4a25 | 167 | /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated |
168 | with NODE should prevent us from analyzing it for the purposes of IPA-CP. */ | |
169 | ||
170 | static bool | |
171 | ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node) | |
172 | { | |
02774f2d | 173 | tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl); |
6c0a4a25 | 174 | |
175 | if (!fs_opts) | |
176 | return false; | |
d1f68cd8 | 177 | return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp); |
6c0a4a25 | 178 | } |
179 | ||
1917e945 | 180 | /* Return index of the formal whose tree is PTREE in function which corresponds |
181 | to INFO. */ | |
182 | ||
7a4930e7 | 183 | static int |
b3e7c666 | 184 | ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree) |
3b22db66 | 185 | { |
186 | int i, count; | |
187 | ||
f1f41a6c | 188 | count = descriptors.length (); |
3b22db66 | 189 | for (i = 0; i < count; i++) |
f1f41a6c | 190 | if (descriptors[i].decl == ptree) |
3b22db66 | 191 | return i; |
192 | ||
193 | return -1; | |
194 | } | |
195 | ||
7a4930e7 | 196 | /* Return index of the formal whose tree is PTREE in function which corresponds |
197 | to INFO. */ | |
198 | ||
199 | int | |
200 | ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree) | |
201 | { | |
202 | return ipa_get_param_decl_index_1 (info->descriptors, ptree); | |
203 | } | |
204 | ||
205 | /* Populate the param_decl field in parameter DESCRIPTORS that correspond to | |
206 | NODE. */ | |
1917e945 | 207 | |
3f2ff969 | 208 | static void |
209 | ipa_populate_param_decls (struct cgraph_node *node, | |
b3e7c666 | 210 | vec<ipa_param_descriptor> &descriptors) |
3b22db66 | 211 | { |
212 | tree fndecl; | |
213 | tree fnargs; | |
214 | tree parm; | |
215 | int param_num; | |
f8daee9b | 216 | |
02774f2d | 217 | fndecl = node->decl; |
09ab6335 | 218 | gcc_assert (gimple_has_body_p (fndecl)); |
3b22db66 | 219 | fnargs = DECL_ARGUMENTS (fndecl); |
220 | param_num = 0; | |
1767a056 | 221 | for (parm = fnargs; parm; parm = DECL_CHAIN (parm)) |
3b22db66 | 222 | { |
f1f41a6c | 223 | descriptors[param_num].decl = parm; |
96c7c99c | 224 | descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm), |
225 | true); | |
3b22db66 | 226 | param_num++; |
227 | } | |
228 | } | |
229 | ||
547f1802 | 230 | /* Return how many formal parameters FNDECL has. */ |
231 | ||
09809ecc | 232 | int |
821d0e0f | 233 | count_formal_params (tree fndecl) |
547f1802 | 234 | { |
235 | tree parm; | |
236 | int count = 0; | |
09ab6335 | 237 | gcc_assert (gimple_has_body_p (fndecl)); |
547f1802 | 238 | |
1767a056 | 239 | for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm)) |
547f1802 | 240 | count++; |
241 | ||
242 | return count; | |
243 | } | |
244 | ||
/* Dump a human readable representation of the Ith formal parameter of the
   function corresponding to INFO into FILE.  The index is always printed;
   the declaration is printed after it when it is known.  (The previous
   comment here described a getter, not this dumping routine.)  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
259 | ||
260 | /* Initialize the ipa_node_params structure associated with NODE | |
261 | to hold PARAM_COUNT parameters. */ | |
262 | ||
263 | void | |
264 | ipa_alloc_node_params (struct cgraph_node *node, int param_count) | |
265 | { | |
266 | struct ipa_node_params *info = IPA_NODE_REF (node); | |
267 | ||
268 | if (!info->descriptors.exists () && param_count) | |
269 | info->descriptors.safe_grow_cleared (param_count); | |
270 | } | |
271 | ||
3f2ff969 | 272 | /* Initialize the ipa_node_params structure associated with NODE by counting |
273 | the function parameters, creating the descriptors and populating their | |
274 | param_decls. */ | |
1917e945 | 275 | |
3f2ff969 | 276 | void |
277 | ipa_initialize_node_params (struct cgraph_node *node) | |
278 | { | |
279 | struct ipa_node_params *info = IPA_NODE_REF (node); | |
280 | ||
f1f41a6c | 281 | if (!info->descriptors.exists ()) |
3f2ff969 | 282 | { |
02774f2d | 283 | ipa_alloc_node_params (node, count_formal_params (node->decl)); |
09ab6335 | 284 | ipa_populate_param_decls (node, info->descriptors); |
3f2ff969 | 285 | } |
3b22db66 | 286 | } |
287 | ||
7115ea05 | 288 | /* Print the jump functions associated with call graph edge CS to file F. */ |
289 | ||
290 | static void | |
291 | ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs) | |
292 | { | |
293 | int i, count; | |
294 | ||
295 | count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs)); | |
296 | for (i = 0; i < count; i++) | |
297 | { | |
298 | struct ipa_jump_func *jump_func; | |
299 | enum jump_func_type type; | |
300 | ||
301 | jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i); | |
302 | type = jump_func->type; | |
303 | ||
304 | fprintf (f, " param %d: ", i); | |
305 | if (type == IPA_JF_UNKNOWN) | |
306 | fprintf (f, "UNKNOWN\n"); | |
7115ea05 | 307 | else if (type == IPA_JF_CONST) |
308 | { | |
096295f6 | 309 | tree val = jump_func->value.constant.value; |
7115ea05 | 310 | fprintf (f, "CONST: "); |
311 | print_generic_expr (f, val, 0); | |
312 | if (TREE_CODE (val) == ADDR_EXPR | |
313 | && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL) | |
314 | { | |
315 | fprintf (f, " -> "); | |
316 | print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)), | |
317 | 0); | |
318 | } | |
319 | fprintf (f, "\n"); | |
320 | } | |
7115ea05 | 321 | else if (type == IPA_JF_PASS_THROUGH) |
322 | { | |
323 | fprintf (f, "PASS THROUGH: "); | |
0d491188 | 324 | fprintf (f, "%d, op %s", |
7115ea05 | 325 | jump_func->value.pass_through.formal_id, |
f3d35d4d | 326 | get_tree_code_name(jump_func->value.pass_through.operation)); |
7115ea05 | 327 | if (jump_func->value.pass_through.operation != NOP_EXPR) |
0d491188 | 328 | { |
329 | fprintf (f, " "); | |
330 | print_generic_expr (f, | |
331 | jump_func->value.pass_through.operand, 0); | |
332 | } | |
333 | if (jump_func->value.pass_through.agg_preserved) | |
334 | fprintf (f, ", agg_preserved"); | |
734eb2d7 | 335 | fprintf (f, "\n"); |
7115ea05 | 336 | } |
337 | else if (type == IPA_JF_ANCESTOR) | |
338 | { | |
339 | fprintf (f, "ANCESTOR: "); | |
f03df321 | 340 | fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC, |
7115ea05 | 341 | jump_func->value.ancestor.formal_id, |
342 | jump_func->value.ancestor.offset); | |
0d491188 | 343 | if (jump_func->value.ancestor.agg_preserved) |
344 | fprintf (f, ", agg_preserved"); | |
734eb2d7 | 345 | fprintf (f, "\n"); |
7115ea05 | 346 | } |
0d491188 | 347 | |
348 | if (jump_func->agg.items) | |
349 | { | |
350 | struct ipa_agg_jf_item *item; | |
351 | int j; | |
352 | ||
353 | fprintf (f, " Aggregate passed by %s:\n", | |
354 | jump_func->agg.by_ref ? "reference" : "value"); | |
f1f41a6c | 355 | FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item) |
0d491188 | 356 | { |
357 | fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ", | |
358 | item->offset); | |
359 | if (TYPE_P (item->value)) | |
360 | fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits", | |
e913b5cd | 361 | tree_to_uhwi (TYPE_SIZE (item->value))); |
0d491188 | 362 | else |
363 | { | |
364 | fprintf (f, "cst: "); | |
365 | print_generic_expr (f, item->value, 0); | |
366 | } | |
367 | fprintf (f, "\n"); | |
368 | } | |
369 | } | |
245ab191 | 370 | |
371 | struct ipa_polymorphic_call_context *ctx | |
372 | = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i); | |
373 | if (ctx && !ctx->useless_p ()) | |
374 | { | |
375 | fprintf (f, " Context: "); | |
376 | ctx->dump (dump_file); | |
377 | } | |
ae7b7bc8 | 378 | |
379 | if (jump_func->alignment.known) | |
380 | { | |
381 | fprintf (f, " Alignment: %u, misalignment: %u\n", | |
382 | jump_func->alignment.align, | |
383 | jump_func->alignment.misalign); | |
384 | } | |
385 | else | |
386 | fprintf (f, " Unknown alignment\n"); | |
7115ea05 | 387 | } |
388 | } | |
389 | ||
390 | ||
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  Direct callees are printed first, then indirect calls
   together with the indirect call information that describes them.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      /* Calls made through an aggregate (including member pointers) are
	 described by where in the aggregate the target is loaded from.  */
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
444 | ||
445 | /* Print ipa_jump_func data structures of all nodes in the call graph to F. */ | |
1917e945 | 446 | |
f8daee9b | 447 | void |
448 | ipa_print_all_jump_functions (FILE *f) | |
449 | { | |
450 | struct cgraph_node *node; | |
451 | ||
11b73810 | 452 | fprintf (f, "\nJump functions:\n"); |
7c455d87 | 453 | FOR_EACH_FUNCTION (node) |
f8daee9b | 454 | { |
455 | ipa_print_node_jump_functions (f, node); | |
456 | } | |
457 | } | |
458 | ||
ae7b7bc8 | 459 | /* Set jfunc to be a know-really nothing jump function. */ |
460 | ||
461 | static void | |
462 | ipa_set_jf_unknown (struct ipa_jump_func *jfunc) | |
463 | { | |
464 | jfunc->type = IPA_JF_UNKNOWN; | |
465 | jfunc->alignment.known = false; | |
466 | } | |
467 | ||
ad4a8b28 | 468 | /* Set JFUNC to be a copy of another jmp (to be used by jump function |
469 | combination code). The two functions will share their rdesc. */ | |
470 | ||
471 | static void | |
472 | ipa_set_jf_cst_copy (struct ipa_jump_func *dst, | |
473 | struct ipa_jump_func *src) | |
474 | ||
475 | { | |
476 | gcc_checking_assert (src->type == IPA_JF_CONST); | |
477 | dst->type = IPA_JF_CONST; | |
478 | dst->value.constant = src->value.constant; | |
479 | } | |
480 | ||
/* Set JFUNC to be a constant jump function passing CONSTANT, taken at call
   graph edge CS.  The expression is unshared and stripped of location
   information.  When the constant is the address of a function, a reference
   description with refcount one is allocated so that the reference can be
   tracked (and eventually removed) as edges are cloned and die.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
507 | ||
508 | /* Set JFUNC to be a simple pass-through jump function. */ | |
509 | static void | |
0d491188 | 510 | ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id, |
693010ae | 511 | bool agg_preserved) |
4fa83f96 | 512 | { |
513 | jfunc->type = IPA_JF_PASS_THROUGH; | |
514 | jfunc->value.pass_through.operand = NULL_TREE; | |
515 | jfunc->value.pass_through.formal_id = formal_id; | |
516 | jfunc->value.pass_through.operation = NOP_EXPR; | |
0d491188 | 517 | jfunc->value.pass_through.agg_preserved = agg_preserved; |
4fa83f96 | 518 | } |
519 | ||
520 | /* Set JFUNC to be an arithmetic pass through jump function. */ | |
521 | ||
522 | static void | |
523 | ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id, | |
524 | tree operand, enum tree_code operation) | |
525 | { | |
526 | jfunc->type = IPA_JF_PASS_THROUGH; | |
827e392b | 527 | jfunc->value.pass_through.operand = unshare_expr_without_location (operand); |
4fa83f96 | 528 | jfunc->value.pass_through.formal_id = formal_id; |
529 | jfunc->value.pass_through.operation = operation; | |
0d491188 | 530 | jfunc->value.pass_through.agg_preserved = false; |
4fa83f96 | 531 | } |
532 | ||
533 | /* Set JFUNC to be an ancestor jump function. */ | |
534 | ||
535 | static void | |
536 | ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset, | |
693010ae | 537 | int formal_id, bool agg_preserved) |
4fa83f96 | 538 | { |
539 | jfunc->type = IPA_JF_ANCESTOR; | |
540 | jfunc->value.ancestor.formal_id = formal_id; | |
541 | jfunc->value.ancestor.offset = offset; | |
0d491188 | 542 | jfunc->value.ancestor.agg_preserved = agg_preserved; |
bee52153 | 543 | } |
544 | ||
24430d08 | 545 | /* Get IPA BB information about the given BB. FBI is the context of analyzis |
546 | of this function body. */ | |
547 | ||
548 | static struct ipa_bb_info * | |
549 | ipa_get_bb_info (struct func_body_info *fbi, basic_block bb) | |
550 | { | |
551 | gcc_checking_assert (fbi); | |
552 | return &fbi->bb_infos[bb->index]; | |
553 | } | |
554 | ||
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if a dynamic type change has been detected.  */
  bool type_maybe_changed;
};
569 | ||
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors is set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.
  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  /* Per the comment above, calls can be ignored.  */
  if (is_gimple_call (stmt))
    return false;
  /* Clobbers only mark end of life, they do not store a VMT pointer.  */
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      /* Aggregate stores may copy a VMT pointer wholesale; only scalar
	 stores can be filtered out here.  */
      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  /* With strict aliasing, a VMT pointer store must have pointer
	     type.  */
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  /* A store to a non-virtual field cannot change the VMT pointer.  */
	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  /* Conservatively assume anything else may store a VMT pointer.  */
  return true;
}
634 | ||
693010ae | 635 | /* Callback of walk_aliased_vdefs and a helper function for detect_type_change |
636 | to check whether a particular statement may modify the virtual table | |
637 | pointerIt stores its result into DATA, which points to a | |
9908fe4d | 638 | prop_type_change_info structure. */ |
7af23aa4 | 639 | |
640 | static bool | |
641 | check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data) | |
642 | { | |
643 | gimple stmt = SSA_NAME_DEF_STMT (vdef); | |
9908fe4d | 644 | struct prop_type_change_info *tci = (struct prop_type_change_info *) data; |
7af23aa4 | 645 | |
646 | if (stmt_may_be_vtbl_ptr_store (stmt)) | |
647 | { | |
648 | tci->type_maybe_changed = true; | |
649 | return true; | |
650 | } | |
651 | else | |
652 | return false; | |
653 | } | |
654 | ||
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type cannot change
   between the beginning of the function and the point CALL is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they call destructors.  We assume that methods can not destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions can not do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have an
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
22bf03ad | 708 | |
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  /* Build a reference describing the VMT-pointer-sized access at OFFSET
     within the object.  */
  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  /* Walk virtual definitions backwards from CALL looking for a statement
     that may store to the VMT pointer.  */
  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}
762 | ||
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  /* Without devirtualization, dynamic type changes are irrelevant.  */
  if (!flag_devirtualize)
    return false;

  /* If the argument comes in through a pointer parameter whose pointed-to
     type is known never to change in this function, we can skip the
     expensive memory-write walk entirely.  */
  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}
784 | ||
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  /* Only pointer arguments can carry a polymorphic object; anything else
     cannot undergo a dynamic type change.  */
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  /* Build a zero-offset dereference of ARG so that the generic machinery in
     detect_type_change_from_memory_writes can treat it as a memory access.  */
  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
807 | ||
ad2ffc0d | 808 | /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the |
809 | boolean variable pointed to by DATA. */ | |
810 | ||
811 | static bool | |
812 | mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED, | |
813 | void *data) | |
814 | { | |
815 | bool *b = (bool *) data; | |
816 | *b = true; | |
817 | return true; | |
818 | } | |
819 | ||
/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  The threshold is the user-adjustable
   --param ipa-max-aa-steps.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
829 | ||
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  Returns NULL if no dominating block has a valid status
   recorded for that parameter.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
			   int index)
{
  /* Walk up the immediate-dominator chain; get_immediate_dominator returns
     NULL once we step past the entry block.  */
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
848 | ||
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  /* Lazily allocate the per-BB status vector, one slot per parameter.  */
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      /* A freshly cleared slot must not carry stale modification flags.  */
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      /* Seed the status from the nearest dominating block that has one, so
	 facts already proven there are inherited here.  */
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
877 | ||
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but do not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
			      gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      /* Respect the global alias-analysis budget and any cached negative
	 result for this parameter at this BB.  */
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* Walk virtual definitions reaching STMT; mark_modified sets MODIFIED if
     any of them may clobber the parameter.  */
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  /* Cache a negative result so subsequent queries in this BB are cheap.  */
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
917 | ||
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple stmt)
{
  int index;
  tree op1;

  /* Only plain single-rhs assignments can be a direct PARM_DECL load.  */
  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  /* The parameter must be tracked and provably unmodified up to STMT.  */
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
7af23aa4 | 944 | |
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
			   int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      /* Respect the walking budget and any cached negative result.  */
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  /* Scan the virtual definitions reaching STMT for a potential clobber of
     the referenced memory.  */
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  /* Cache the negative result for later queries in this BB.  */
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
983 | ||
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
			      gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
							  index);
  /* A cached negative result makes the walk unnecessary.  */
  if (paa->pt_modified)
    return false;

  /* Track the whole pointed-to area (unknown size, hence NULL_TREE).  */
  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
1016 | ||
1017 | /* Return true if we can prove that OP is a memory reference loading unmodified | |
1018 | data from an aggregate passed as a parameter and if the aggregate is passed | |
1019 | by reference, that the alias type of the load corresponds to the type of the | |
1020 | formal parameter (so that we can rely on this type for TBAA in callers). | |
1021 | INFO and PARMS_AINFO describe parameters of the current function (but the | |
1022 | latter can be NULL), STMT is the load statement. If function returns true, | |
1023 | *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset | |
1024 | within the aggregate and whether it is a load from a value passed by | |
1025 | reference respectively. */ | |
1026 | ||
1027 | static bool | |
24430d08 | 1028 | ipa_load_from_parm_agg_1 (struct func_body_info *fbi, |
1029 | vec<ipa_param_descriptor> descriptors, | |
1030 | gimple stmt, tree op, int *index_p, | |
1031 | HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p, | |
1032 | bool *by_ref_p) | |
0d491188 | 1033 | { |
1034 | int index; | |
1035 | HOST_WIDE_INT size, max_size; | |
1036 | tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size); | |
1037 | ||
1038 | if (max_size == -1 || max_size != size || *offset_p < 0) | |
1039 | return false; | |
1040 | ||
1041 | if (DECL_P (base)) | |
1042 | { | |
7a4930e7 | 1043 | int index = ipa_get_param_decl_index_1 (descriptors, base); |
0d491188 | 1044 | if (index >= 0 |
24430d08 | 1045 | && parm_preserved_before_stmt_p (fbi, index, stmt, op)) |
0d491188 | 1046 | { |
1047 | *index_p = index; | |
1048 | *by_ref_p = false; | |
2a687ef9 | 1049 | if (size_p) |
1050 | *size_p = size; | |
0d491188 | 1051 | return true; |
1052 | } | |
1053 | return false; | |
1054 | } | |
1055 | ||
1056 | if (TREE_CODE (base) != MEM_REF | |
1057 | || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME | |
1058 | || !integer_zerop (TREE_OPERAND (base, 1))) | |
1059 | return false; | |
1060 | ||
1061 | if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))) | |
1062 | { | |
1063 | tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0)); | |
7a4930e7 | 1064 | index = ipa_get_param_decl_index_1 (descriptors, parm); |
0d491188 | 1065 | } |
1066 | else | |
1067 | { | |
1068 | /* This branch catches situations where a pointer parameter is not a | |
1069 | gimple register, for example: | |
1070 | ||
1071 | void hip7(S*) (struct S * p) | |
1072 | { | |
1073 | void (*<T2e4>) (struct S *) D.1867; | |
1074 | struct S * p.1; | |
1075 | ||
1076 | <bb 2>: | |
1077 | p.1_1 = p; | |
1078 | D.1867_2 = p.1_1->f; | |
1079 | D.1867_2 (); | |
1080 | gdp = &p; | |
1081 | */ | |
1082 | ||
1083 | gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0)); | |
24430d08 | 1084 | index = load_from_unmodified_param (fbi, descriptors, def); |
0d491188 | 1085 | } |
1086 | ||
1087 | if (index >= 0 | |
24430d08 | 1088 | && parm_ref_data_preserved_p (fbi, index, stmt, op)) |
0d491188 | 1089 | { |
1090 | *index_p = index; | |
1091 | *by_ref_p = true; | |
2a687ef9 | 1092 | if (size_p) |
1093 | *size_p = size; | |
0d491188 | 1094 | return true; |
1095 | } | |
1096 | return false; | |
1097 | } | |
1098 | ||
/* Just like the previous function, just without the func_body_info pointer
   (passed as NULL), for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
			tree op, int *index_p, HOST_WIDE_INT *offset_p,
			bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
				   offset_p, NULL, by_ref_p);
}
1110 | ||
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function. E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters across different
   stages of IPA optimizations.  FBI holds per-function-body analysis data
   that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  /* First try to find the parameter the value ultimately comes from, either
     directly (a default-def SSA name) or via a load from an unmodified
     PARM_DECL.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      /* Cases 1 and 2: plain or arithmetic pass-through.  */
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  /* Comparisons are allowed to change type; everything else must be
	     an IPA-invariant combined with a type-compatible operand.  */
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	}
      return;
    }

  /* Case 3: taking the address of a component of a parameter (ancestor jump
     function).  */
  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
1239 | ||
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  /* The pointer must be the incoming, unmodified value of a parameter.  */
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  /* Fold the MEM_REF's own constant offset into the component offset.  */
  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
1283 | ||
5215027d | 1284 | |
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  /* One PHI argument must be the NULL constant, the other the adjusted
     pointer.  */
  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  /* The block computing the address must be guarded by "if (parm != 0)".  */
  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  /* Both PHI predecessors must belong to the matched diamond.  */
  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
1365 | ||
1917e945 | 1366 | /* Inspect the given TYPE and return true iff it has the same structure (the |
1367 | same number of fields of the same types) as a C++ member pointer. If | |
1368 | METHOD_PTR and DELTA are non-NULL, store the trees representing the | |
1369 | corresponding fields there. */ | |
1370 | ||
f8daee9b | 1371 | static bool |
1372 | type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta) | |
1373 | { | |
1374 | tree fld; | |
1375 | ||
1376 | if (TREE_CODE (type) != RECORD_TYPE) | |
1377 | return false; | |
1378 | ||
1379 | fld = TYPE_FIELDS (type); | |
1380 | if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld)) | |
0d491188 | 1381 | || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE |
e913b5cd | 1382 | || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld))) |
f8daee9b | 1383 | return false; |
1384 | ||
1385 | if (method_ptr) | |
1386 | *method_ptr = fld; | |
1387 | ||
1767a056 | 1388 | fld = DECL_CHAIN (fld); |
0d491188 | 1389 | if (!fld || INTEGRAL_TYPE_P (fld) |
e913b5cd | 1390 | || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld))) |
f8daee9b | 1391 | return false; |
1392 | if (delta) | |
1393 | *delta = fld; | |
1394 | ||
1767a056 | 1395 | if (DECL_CHAIN (fld)) |
f8daee9b | 1396 | return false; |
1397 | ||
1398 | return true; | |
1399 | } | |
1400 | ||
0a10fd82 | 1401 | /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement, |
0d491188 | 1402 | return the rhs of its defining statement. Otherwise return RHS as it |
1403 | is. */ | |
b39bfa08 | 1404 | |
1405 | static inline tree | |
1406 | get_ssa_def_if_simple_copy (tree rhs) | |
1407 | { | |
1408 | while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs)) | |
1409 | { | |
1410 | gimple def_stmt = SSA_NAME_DEF_STMT (rhs); | |
1411 | ||
1412 | if (gimple_assign_single_p (def_stmt)) | |
1413 | rhs = gimple_assign_rhs1 (def_stmt); | |
4ecddf77 | 1414 | else |
1415 | break; | |
b39bfa08 | 1416 | } |
1417 | return rhs; | |
1418 | } | |
1419 | ||
/* Simple linked list, describing known contents of an aggregate before a
   call.  Kept sorted by OFFSET.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
f8daee9b | 1432 | |
63380ec1 | 1433 | /* Find the proper place in linked list of ipa_known_agg_contents_list |
1434 | structures where to put a new one with the given LHS_OFFSET and LHS_SIZE, | |
1435 | unless there is a partial overlap, in which case return NULL, or such | |
1436 | element is already there, in which case set *ALREADY_THERE to true. */ | |
1437 | ||
1438 | static struct ipa_known_agg_contents_list ** | |
1439 | get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list, | |
1440 | HOST_WIDE_INT lhs_offset, | |
1441 | HOST_WIDE_INT lhs_size, | |
1442 | bool *already_there) | |
1443 | { | |
1444 | struct ipa_known_agg_contents_list **p = list; | |
1445 | while (*p && (*p)->offset < lhs_offset) | |
1446 | { | |
1447 | if ((*p)->offset + (*p)->size > lhs_offset) | |
1448 | return NULL; | |
1449 | p = &(*p)->next; | |
1450 | } | |
1451 | ||
1452 | if (*p && (*p)->offset < lhs_offset + lhs_size) | |
1453 | { | |
1454 | if ((*p)->offset == lhs_offset && (*p)->size == lhs_size) | |
1455 | /* We already know this value is subsequently overwritten with | |
1456 | something else. */ | |
1457 | *already_there = true; | |
1458 | else | |
1459 | /* Otherwise this is a partial overlap which we cannot | |
1460 | represent. */ | |
1461 | return NULL; | |
1462 | } | |
1463 | return p; | |
1464 | } | |
1465 | ||
1466 | /* Build aggregate jump function from LIST, assuming there are exactly | |
1467 | CONST_COUNT constant entries there and that th offset of the passed argument | |
1468 | is ARG_OFFSET and store it into JFUNC. */ | |
1469 | ||
1470 | static void | |
1471 | build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list, | |
1472 | int const_count, HOST_WIDE_INT arg_offset, | |
1473 | struct ipa_jump_func *jfunc) | |
1474 | { | |
1475 | vec_alloc (jfunc->agg.items, const_count); | |
1476 | while (list) | |
1477 | { | |
1478 | if (list->constant) | |
1479 | { | |
1480 | struct ipa_agg_jf_item item; | |
1481 | item.offset = list->offset - arg_offset; | |
1482 | gcc_assert ((item.offset % BITS_PER_UNIT) == 0); | |
1483 | item.value = unshare_expr_without_location (list->constant); | |
1484 | jfunc->agg.items->quick_push (item); | |
1485 | } | |
1486 | list = list->next; | |
1487 | } | |
1488 | } | |
1489 | ||
0d491188 | 1490 | /* Traverse statements from CALL backwards, scanning whether an aggregate given |
1491 | in ARG is filled in with constant values. ARG can either be an aggregate | |
63380ec1 | 1492 | expression or a pointer to an aggregate. ARG_TYPE is the type of the |
1493 | aggregate. JFUNC is the jump function into which the constants are | |
1494 | subsequently stored. */ | |
1917e945 | 1495 | |
f8daee9b | 1496 | static void |
1a91d914 | 1497 | determine_locally_known_aggregate_parts (gcall *call, tree arg, |
1498 | tree arg_type, | |
63380ec1 | 1499 | struct ipa_jump_func *jfunc) |
f8daee9b | 1500 | { |
0d491188 | 1501 | struct ipa_known_agg_contents_list *list = NULL; |
1502 | int item_count = 0, const_count = 0; | |
1503 | HOST_WIDE_INT arg_offset, arg_size; | |
75a70cf9 | 1504 | gimple_stmt_iterator gsi; |
0d491188 | 1505 | tree arg_base; |
1506 | bool check_ref, by_ref; | |
1507 | ao_ref r; | |
f8daee9b | 1508 | |
0d491188 | 1509 | /* The function operates in three stages. First, we prepare check_ref, r, |
1510 | arg_base and arg_offset based on what is actually passed as an actual | |
1511 | argument. */ | |
f8daee9b | 1512 | |
02636da3 | 1513 | if (POINTER_TYPE_P (arg_type)) |
0d491188 | 1514 | { |
1515 | by_ref = true; | |
1516 | if (TREE_CODE (arg) == SSA_NAME) | |
1517 | { | |
1518 | tree type_size; | |
02636da3 | 1519 | if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))) |
0d491188 | 1520 | return; |
1521 | check_ref = true; | |
1522 | arg_base = arg; | |
1523 | arg_offset = 0; | |
02636da3 | 1524 | type_size = TYPE_SIZE (TREE_TYPE (arg_type)); |
e913b5cd | 1525 | arg_size = tree_to_uhwi (type_size); |
0d491188 | 1526 | ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE); |
1527 | } | |
1528 | else if (TREE_CODE (arg) == ADDR_EXPR) | |
1529 | { | |
1530 | HOST_WIDE_INT arg_max_size; | |
1531 | ||
1532 | arg = TREE_OPERAND (arg, 0); | |
1533 | arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size, | |
1534 | &arg_max_size); | |
1535 | if (arg_max_size == -1 | |
1536 | || arg_max_size != arg_size | |
1537 | || arg_offset < 0) | |
1538 | return; | |
1539 | if (DECL_P (arg_base)) | |
1540 | { | |
0d491188 | 1541 | check_ref = false; |
63380ec1 | 1542 | ao_ref_init (&r, arg_base); |
0d491188 | 1543 | } |
1544 | else | |
1545 | return; | |
1546 | } | |
1547 | else | |
1548 | return; | |
1549 | } | |
1550 | else | |
1551 | { | |
1552 | HOST_WIDE_INT arg_max_size; | |
1553 | ||
1554 | gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg))); | |
1555 | ||
1556 | by_ref = false; | |
1557 | check_ref = false; | |
1558 | arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size, | |
1559 | &arg_max_size); | |
1560 | if (arg_max_size == -1 | |
1561 | || arg_max_size != arg_size | |
1562 | || arg_offset < 0) | |
1563 | return; | |
1564 | ||
1565 | ao_ref_init (&r, arg); | |
1566 | } | |
1567 | ||
1568 | /* Second stage walks back the BB, looks at individual statements and as long | |
1569 | as it is confident of how the statements affect contents of the | |
1570 | aggregates, it builds a sorted linked list of ipa_agg_jf_list structures | |
1571 | describing it. */ | |
1572 | gsi = gsi_for_stmt (call); | |
75a70cf9 | 1573 | gsi_prev (&gsi); |
1574 | for (; !gsi_end_p (gsi); gsi_prev (&gsi)) | |
f8daee9b | 1575 | { |
0d491188 | 1576 | struct ipa_known_agg_contents_list *n, **p; |
75a70cf9 | 1577 | gimple stmt = gsi_stmt (gsi); |
0d491188 | 1578 | HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size; |
1579 | tree lhs, rhs, lhs_base; | |
f8daee9b | 1580 | |
0d491188 | 1581 | if (!stmt_may_clobber_ref_p_1 (stmt, &r)) |
af3e7bf6 | 1582 | continue; |
a3808114 | 1583 | if (!gimple_assign_single_p (stmt)) |
0d491188 | 1584 | break; |
f8daee9b | 1585 | |
75a70cf9 | 1586 | lhs = gimple_assign_lhs (stmt); |
1587 | rhs = gimple_assign_rhs1 (stmt); | |
b7b667b4 | 1588 | if (!is_gimple_reg_type (TREE_TYPE (rhs)) |
fb8b391e | 1589 | || TREE_CODE (lhs) == BIT_FIELD_REF |
1590 | || contains_bitfld_component_ref_p (lhs)) | |
0d491188 | 1591 | break; |
f8daee9b | 1592 | |
0d491188 | 1593 | lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size, |
1594 | &lhs_max_size); | |
1595 | if (lhs_max_size == -1 | |
63380ec1 | 1596 | || lhs_max_size != lhs_size) |
0d491188 | 1597 | break; |
f8daee9b | 1598 | |
0d491188 | 1599 | if (check_ref) |
3b22db66 | 1600 | { |
0d491188 | 1601 | if (TREE_CODE (lhs_base) != MEM_REF |
1602 | || TREE_OPERAND (lhs_base, 0) != arg_base | |
1603 | || !integer_zerop (TREE_OPERAND (lhs_base, 1))) | |
1604 | break; | |
f8daee9b | 1605 | } |
0d491188 | 1606 | else if (lhs_base != arg_base) |
85bab85f | 1607 | { |
1608 | if (DECL_P (lhs_base)) | |
1609 | continue; | |
1610 | else | |
1611 | break; | |
1612 | } | |
f8daee9b | 1613 | |
63380ec1 | 1614 | bool already_there = false; |
1615 | p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size, | |
1616 | &already_there); | |
1617 | if (!p) | |
0d491188 | 1618 | break; |
63380ec1 | 1619 | if (already_there) |
1620 | continue; | |
f8daee9b | 1621 | |
0d491188 | 1622 | rhs = get_ssa_def_if_simple_copy (rhs); |
1623 | n = XALLOCA (struct ipa_known_agg_contents_list); | |
1624 | n->size = lhs_size; | |
1625 | n->offset = lhs_offset; | |
1626 | if (is_gimple_ip_invariant (rhs)) | |
1627 | { | |
1628 | n->constant = rhs; | |
1629 | const_count++; | |
1630 | } | |
1631 | else | |
1632 | n->constant = NULL_TREE; | |
1633 | n->next = *p; | |
1634 | *p = n; | |
f8daee9b | 1635 | |
0d491188 | 1636 | item_count++; |
699f00b5 | 1637 | if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) |
1638 | || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)) | |
0d491188 | 1639 | break; |
1640 | } | |
1917e945 | 1641 | |
0d491188 | 1642 | /* Third stage just goes over the list and creates an appropriate vector of |
1643 | ipa_agg_jf_item structures out of it, of sourse only if there are | |
1644 | any known constants to begin with. */ | |
f8daee9b | 1645 | |
0d491188 | 1646 | if (const_count) |
f8daee9b | 1647 | { |
0d491188 | 1648 | jfunc->agg.by_ref = by_ref; |
63380ec1 | 1649 | build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc); |
f8daee9b | 1650 | } |
1651 | } | |
1652 | ||
185c1f3a | 1653 | static tree |
1654 | ipa_get_callee_param_type (struct cgraph_edge *e, int i) | |
1655 | { | |
1656 | int n; | |
1657 | tree type = (e->callee | |
02774f2d | 1658 | ? TREE_TYPE (e->callee->decl) |
185c1f3a | 1659 | : gimple_call_fntype (e->call_stmt)); |
1660 | tree t = TYPE_ARG_TYPES (type); | |
1661 | ||
1662 | for (n = 0; n < i; n++) | |
1663 | { | |
1664 | if (!t) | |
1665 | break; | |
1666 | t = TREE_CHAIN (t); | |
1667 | } | |
1668 | if (t) | |
1669 | return TREE_VALUE (t); | |
1670 | if (!e->callee) | |
1671 | return NULL; | |
02774f2d | 1672 | t = DECL_ARGUMENTS (e->callee->decl); |
185c1f3a | 1673 | for (n = 0; n < i; n++) |
1674 | { | |
1675 | if (!t) | |
1676 | return NULL; | |
1677 | t = TREE_CHAIN (t); | |
1678 | } | |
1679 | if (t) | |
1680 | return TREE_TYPE (t); | |
1681 | return NULL; | |
1682 | } | |
1683 | ||
f8daee9b | 1684 | /* Compute jump function for all arguments of callsite CS and insert the |
1685 | information in the jump_functions array in the ipa_edge_args corresponding | |
1686 | to this callsite. */ | |
1917e945 | 1687 | |
static void
ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  /* Nothing to do for argument-less calls or when the jump functions have
     already been computed for this edge.  */
  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);

      /* For pointer arguments, record the polymorphic call context so that
	 later devirtualization passes can use it.  */
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      /* Record known pointer alignment, converted from bits to bytes.  */
      if (POINTER_TYPE_P (TREE_TYPE(arg)))
	{
	  unsigned HOST_WIDE_INT hwi_bitpos;
	  unsigned align;

	  if (get_pointer_alignment_1 (arg, &align, &hwi_bitpos)
	      && align % BITS_PER_UNIT == 0
	      && hwi_bitpos % BITS_PER_UNIT == 0)
	    {
	      jfunc->alignment.known = true;
	      jfunc->alignment.align = align / BITS_PER_UNIT;
	      jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
	    }
	  else
	    gcc_assert (!jfunc->alignment.known);
	}
      else
	gcc_assert (!jfunc->alignment.known);

      /* Classify the argument: IPA constant, aggregate passed by value,
	 or an SSA name that is either a pass-through of a parameter or
	 derived from one by an assignment or a PHI node.  */
      if (is_gimple_ip_invariant (arg))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >=0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      /* Unmodified parameter: a simple pass-through jump function,
		 with agg_p set when the pointed-to data is preserved too.  */
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is pointer, we can not use its type to determine the type of aggregate
	 passed (because type conversions are ignored in gimple).  Usually we can
	 safely get type from function declaration, but in case of K&R prototypes or
	 variadic functions we can try our luck with type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we may better
	 work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      /* Unless the aggregate is already known to be preserved by a
	 pass-through or ancestor jump function, try to describe constants
	 stored into it just before the call.  */
      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  /* Drop the context vector again if no argument had a useful one.  */
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
1807 | ||
7115ea05 | 1808 | /* Compute jump functions for all edges - both direct and indirect - outgoing |
24430d08 | 1809 | from BB. */ |
7115ea05 | 1810 | |
static void
ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  /* Iterate over the call graph edges previously collected for this BB.  */
  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  /* Called for its effect only; the return value is unused.
	     NOTE(review): presumably this resolves aliases as a side
	     effect -- confirm it is still needed here.  */
	  callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr. */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
1833 | ||
0d491188 | 1834 | /* If STMT looks like a statement loading a value from a member pointer formal |
1835 | parameter, return that parameter and store the offset of the field to | |
1836 | *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still | |
1837 | might be clobbered). If USE_DELTA, then we look for a use of the delta | |
1838 | field rather than the pfn. */ | |
1917e945 | 1839 | |
static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  /* Only plain single-rhs assignments can be the load we look for.  */
  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  /* The RHS is either COMPONENT_REF (field of the member pointer) or a
     bare MEM_REF with an explicit byte offset selecting the field.  */
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  /* The base must be a PARM_DECL whose type looks like a member pointer
     record; this also yields its pfn and delta FIELD_DECLs.  */
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  /* Finally verify that the accessed field is the requested one, either
     by FIELD_DECL identity (COMPONENT_REF form, which then requires a
     zero MEM_REF offset) or by byte position (bare MEM_REF form).  */
  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
1885 | ||
1886 | /* Returns true iff T is an SSA_NAME defined by a statement. */ | |
1917e945 | 1887 | |
f8daee9b | 1888 | static bool |
1889 | ipa_is_ssa_with_stmt_def (tree t) | |
1890 | { | |
1891 | if (TREE_CODE (t) == SSA_NAME | |
1892 | && !SSA_NAME_IS_DEFAULT_DEF (t)) | |
1893 | return true; | |
1894 | else | |
1895 | return false; | |
1896 | } | |
1897 | ||
09a2b4db | 1898 | /* Find the indirect call graph edge corresponding to STMT and mark it as a |
1899 | call to a parameter number PARAM_INDEX. NODE is the caller. Return the | |
1900 | indirect call graph edge. */ | |
1917e945 | 1901 | |
static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt)
{
  struct cgraph_edge *cs;

  /* The indirect edge for STMT already exists in the call graph; fetch it
     rather than creating a new one.  */
  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  /* Clear flags that the callers of this function may subsequently set.  */
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  return cs;
}
1914 | ||
/* Analyze the CALL and examine uses of formal parameters of the caller
   FBI->node (described by FBI->info).  Currently it checks
1918 | whether the call calls a pointer that is a formal parameter and if so, the | |
1919 | parameter is marked with the called flag and an indirect call graph edge | |
1920 | describing the call is created. This is very simple for ordinary pointers | |
1921 | represented in SSA but not-so-nice when it comes to member pointers. The | |
1922 | ugly part of this function does nothing more than trying to match the | |
1923 | pattern of such a call. An example of such a pattern is the gimple dump | |
1924 | below, the call is on the last line: | |
f8daee9b | 1925 | |
74f602fc | 1926 | <bb 2>: |
1927 | f$__delta_5 = f.__delta; | |
1928 | f$__pfn_24 = f.__pfn; | |
1929 | ||
1930 | or | |
f8daee9b | 1931 | <bb 2>: |
c52cb439 | 1932 | f$__delta_5 = MEM[(struct *)&f]; |
1933 | f$__pfn_24 = MEM[(struct *)&f + 4B]; | |
af3e7bf6 | 1934 | |
74f602fc | 1935 | and a few lines below: |
af3e7bf6 | 1936 | |
1937 | <bb 5> | |
f8daee9b | 1938 | D.2496_3 = (int) f$__pfn_24; |
1939 | D.2497_4 = D.2496_3 & 1; | |
1940 | if (D.2497_4 != 0) | |
1941 | goto <bb 3>; | |
1942 | else | |
1943 | goto <bb 4>; | |
1944 | ||
af3e7bf6 | 1945 | <bb 6>: |
f8daee9b | 1946 | D.2500_7 = (unsigned int) f$__delta_5; |
1947 | D.2501_8 = &S + D.2500_7; | |
1948 | D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8; | |
1949 | D.2503_10 = *D.2502_9; | |
1950 | D.2504_12 = f$__pfn_24 + -1; | |
1951 | D.2505_13 = (unsigned int) D.2504_12; | |
1952 | D.2506_14 = D.2503_10 + D.2505_13; | |
1953 | D.2507_15 = *D.2506_14; | |
1954 | iftmp.11_16 = (String:: *) D.2507_15; | |
1955 | ||
af3e7bf6 | 1956 | <bb 7>: |
f8daee9b | 1957 | # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)> |
1958 | D.2500_19 = (unsigned int) f$__delta_5; | |
1959 | D.2508_20 = &S + D.2500_19; | |
1960 | D.2493_21 = iftmp.11_1 (D.2508_20, 4); | |
1961 | ||
1962 | Such patterns are results of simple calls to a member pointer: | |
1963 | ||
1964 | int doprinting (int (MyString::* f)(int) const) | |
1965 | { | |
1966 | MyString S ("somestring"); | |
1967 | ||
1968 | return (S.*f)(4); | |
1969 | } | |
0d491188 | 1970 | |
1971 | Moreover, the function also looks for called pointers loaded from aggregates | |
1972 | passed by value or reference. */ | |
f8daee9b | 1973 | |
static void
ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gcall *call,
				tree target)
{
  struct ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  /* Simple case: the called pointer is an unmodified formal parameter.  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call);
      return;
    }

  /* Next case: the called pointer is loaded from an aggregate that is a
     formal parameter (passed by value or reference).  */
  int index;
  gimple def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
				   gimple_assign_rhs1 (def), &index, &offset,
				   NULL, &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  The called value is a two-argument PHI merging the virtual
     and non-virtual cases (see the example in the comment above).  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function. */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple d1 = SSA_NAME_DEF_STMT (n1);
  gimple d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  /* Exactly one PHI argument may be the direct pfn load; the other comes
     from the vtable-lookup block (VIRT_BB).  */
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern. */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn. */

  gimple branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  /* The condition must be (pfn & 1), possibly behind a conversion.  */
  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  /* Skip an optional conversion of the loaded value.  */
  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* The tested value must be loaded from the same member pointer parameter;
     which field carries the virtual bit is target-dependent.  */
  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  /* Record the indirect call only if the parameter is not modified before
     the call statement.  */
  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
    }

  return;
}
2106 | ||
6378ffb3 | 2107 | /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the |
2108 | object referenced in the expression is a formal parameter of the caller | |
24430d08 | 2109 | FBI->node (described by FBI->info), create a call note for the |
2110 | statement. */ | |
6378ffb3 | 2111 | |
static void
ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  struct ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      /* The object is an unmodified formal parameter; bail out if its
	 dynamic type may have changed before the call.  */
      struct ipa_jump_func jfunc;
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (obj, obj_type_ref_class (target),
				  call, &jfunc))
	return;
    }
  else
    {
      /* Otherwise the object must be an ancestor (base sub-object at
	 ANC_OFFSET) of a formal parameter.  */
      struct ipa_jump_func jfunc;
      gimple stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, obj_type_ref_class (target),
			      call, &jfunc, anc_offset))
	return;
    }

  /* Record a polymorphic indirect call through parameter INDEX.  */
  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
2164 | ||
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller FBI->node (described by FBI->info).  */
6378ffb3 | 2168 | |
static void
ipa_analyze_call_uses (struct func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  /* Only indirect calls through an SSA name or virtual method calls are
     interesting here.  */
  if (!target
      || (TREE_CODE (target) != SSA_NAME
	  && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  /* NOTE(review): the check above tolerates CS being NULL, yet the
     dereference below does not -- confirm get_edge cannot return NULL for
     an indirect call statement here.  */
  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      /* Refresh the polymorphic context of the call, recording whether the
	 vptr may have changed since the object was constructed.  */
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
					    target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
			   == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      cs->indirect_info->vptr_changed
	= !context.get_dynamic_type (instance,
				     OBJ_TYPE_REF_OBJECT (target),
				     obj_type_ref_class (target), call);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
2209 | ||
2210 | ||
799c8711 | 2211 | /* Analyze the call statement STMT with respect to formal parameters (described |
24430d08 | 2212 | in INFO) of caller given by FBI->NODE. Currently it only checks whether |
2213 | formal parameters are called. */ | |
1917e945 | 2214 | |
static void
ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
{
  /* Only call statements are analyzed here; other parameter uses are
     picked up by the load/store/address walkers in the caller.  */
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
}
2221 | ||
2222 | /* Callback of walk_stmt_load_store_addr_ops for the visit_load. | |
2223 | If OP is a parameter declaration, mark it as used in the info structure | |
2224 | passed in DATA. */ | |
2225 | ||
2226 | static bool | |
5b26a9e3 | 2227 | visit_ref_for_mod_analysis (gimple, tree op, tree, void *data) |
8b68ef1b | 2228 | { |
2229 | struct ipa_node_params *info = (struct ipa_node_params *) data; | |
2230 | ||
2231 | op = get_base_address (op); | |
2232 | if (op | |
2233 | && TREE_CODE (op) == PARM_DECL) | |
2234 | { | |
2235 | int index = ipa_get_param_decl_index (info, op); | |
2236 | gcc_assert (index >= 0); | |
821d0e0f | 2237 | ipa_set_param_used (info, index, true); |
8b68ef1b | 2238 | } |
2239 | ||
2240 | return false; | |
f8daee9b | 2241 | } |
2242 | ||
24430d08 | 2243 | /* Scan the statements in BB and inspect the uses of formal parameters. Store |
2244 | the findings in various structures of the associated ipa_node_params | |
2245 | structure, such as parameter flags, notes etc. FBI holds various data about | |
2246 | the function being analyzed. */ | |
1917e945 | 2247 | |
static void
ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      /* Debug statements must not influence the analysis.  */
      if (is_gimple_debug (stmt))
	continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      /* The same callback marks parameters as used whether they appear in
	 a load, a store or an address operand.  */
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis);
    }
  /* PHI nodes can also reference parameters.  */
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis);
}
2271 | ||
2272 | /* Calculate controlled uses of parameters of NODE. */ | |
2273 | ||
static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      /* Count call-statement uses; any other non-debug use means
		 the uses cannot be fully described, which is recorded as
		 IPA_UNDESCRIBED_USE.  */
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    if (!is_gimple_debug (USE_STMT (use_p)))
		      {
			controlled_uses = IPA_UNDESCRIBED_USE;
			break;
		      }
		  }
		else
		  controlled_uses++;
	    }
	  else
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
8b68ef1b | 2316 | |
24430d08 | 2317 | /* Free stuff in BI. */ |
8b68ef1b | 2318 | |
static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  /* Release the vectors themselves; the elements they point to are not
     freed here.  */
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}
2325 | ||
24430d08 | 2326 | /* Dominator walker driving the analysis. */ |
803a7988 | 2327 | |
class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual void before_dom_children (basic_block);

private:
  /* Shared per-function analysis state handed to the per-BB workers.  */
  struct func_body_info *m_fbi;
};

/* Visit BB in dominator order: analyze parameter uses and compute jump
   functions for the calls collected in it.  */

void
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
}
2346 | ||
/* Initialize the array describing properties of formal parameters
2348 | of NODE, analyze their uses and compute jump functions associated | |
2349 | with actual arguments of calls from within NODE. */ | |
8b68ef1b | 2350 | |
void
ipa_analyze_node (struct cgraph_node *node)
{
  struct func_body_info fbi;
  struct ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);

  /* The analysis is idempotent per node.  */
  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  /* When the function's options forbid the analysis, conservatively mark
     every parameter as used with undescribed uses.  */
  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walked = 0;

  /* Bucket all outgoing edges (direct and indirect) by the basic block of
     their call statement so the dominator walk can process them per BB.  */
  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Release per-BB scratch data and restore the previous function
     context.  */
  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
8b68ef1b | 2410 | |
1917e945 | 2411 | /* Update the jump functions associated with call graph edge E when the call |
f8daee9b | 2412 | graph edge CS is being inlined, assuming that E->caller is already (possibly |
6378ffb3 | 2413 | indirectly) inlined into CS->callee and that E has not been inlined. */ |
1917e945 | 2414 | |
f8daee9b | 2415 | static void |
2416 | update_jump_functions_after_inlining (struct cgraph_edge *cs, | |
2417 | struct cgraph_edge *e) | |
2418 | { | |
2419 | struct ipa_edge_args *top = IPA_EDGE_REF (cs); | |
2420 | struct ipa_edge_args *args = IPA_EDGE_REF (e); | |
2421 | int count = ipa_get_cs_argument_count (args); | |
2422 | int i; | |
2423 | ||
2424 | for (i = 0; i < count; i++) | |
2425 | { | |
6378ffb3 | 2426 | struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i); |
072ec6eb | 2427 | struct ipa_polymorphic_call_context *dst_ctx |
2428 | = ipa_get_ith_polymorhic_call_context (args, i); | |
f8daee9b | 2429 | |
5215027d | 2430 | if (dst->type == IPA_JF_ANCESTOR) |
2431 | { | |
6378ffb3 | 2432 | struct ipa_jump_func *src; |
0d491188 | 2433 | int dst_fid = dst->value.ancestor.formal_id; |
072ec6eb | 2434 | struct ipa_polymorphic_call_context *src_ctx |
2435 | = ipa_get_ith_polymorhic_call_context (top, dst_fid); | |
5215027d | 2436 | |
6378ffb3 | 2437 | /* Variable number of arguments can cause havoc if we try to access |
2438 | one that does not exist in the inlined edge. So make sure we | |
2439 | don't. */ | |
0d491188 | 2440 | if (dst_fid >= ipa_get_cs_argument_count (top)) |
6378ffb3 | 2441 | { |
ae7b7bc8 | 2442 | ipa_set_jf_unknown (dst); |
6378ffb3 | 2443 | continue; |
2444 | } | |
2445 | ||
0d491188 | 2446 | src = ipa_get_ith_jump_func (top, dst_fid); |
2447 | ||
072ec6eb | 2448 | if (src_ctx && !src_ctx->useless_p ()) |
2449 | { | |
2450 | struct ipa_polymorphic_call_context ctx = *src_ctx; | |
2451 | ||
2452 | /* TODO: Make type preserved safe WRT contexts. */ | |
245ab191 | 2453 | if (!ipa_get_jf_ancestor_type_preserved (dst)) |
007a6c27 | 2454 | ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor); |
072ec6eb | 2455 | ctx.offset_by (dst->value.ancestor.offset); |
2456 | if (!ctx.useless_p ()) | |
2457 | { | |
2458 | vec_safe_grow_cleared (args->polymorphic_call_contexts, | |
2459 | count); | |
2460 | dst_ctx = ipa_get_ith_polymorhic_call_context (args, i); | |
2461 | } | |
245ab191 | 2462 | dst_ctx->combine_with (ctx); |
072ec6eb | 2463 | } |
2464 | ||
0d491188 | 2465 | if (src->agg.items |
2466 | && (dst->value.ancestor.agg_preserved || !src->agg.by_ref)) | |
2467 | { | |
2468 | struct ipa_agg_jf_item *item; | |
2469 | int j; | |
2470 | ||
2471 | /* Currently we do not produce clobber aggregate jump functions, | |
2472 | replace with merging when we do. */ | |
2473 | gcc_assert (!dst->agg.items); | |
2474 | ||
f1f41a6c | 2475 | dst->agg.items = vec_safe_copy (src->agg.items); |
0d491188 | 2476 | dst->agg.by_ref = src->agg.by_ref; |
f1f41a6c | 2477 | FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item) |
0d491188 | 2478 | item->offset -= dst->value.ancestor.offset; |
2479 | } | |
2480 | ||
693010ae | 2481 | if (src->type == IPA_JF_PASS_THROUGH |
2482 | && src->value.pass_through.operation == NOP_EXPR) | |
0d491188 | 2483 | { |
2484 | dst->value.ancestor.formal_id = src->value.pass_through.formal_id; | |
2485 | dst->value.ancestor.agg_preserved &= | |
2486 | src->value.pass_through.agg_preserved; | |
2487 | } | |
6378ffb3 | 2488 | else if (src->type == IPA_JF_ANCESTOR) |
2489 | { | |
2490 | dst->value.ancestor.formal_id = src->value.ancestor.formal_id; | |
2491 | dst->value.ancestor.offset += src->value.ancestor.offset; | |
0d491188 | 2492 | dst->value.ancestor.agg_preserved &= |
2493 | src->value.ancestor.agg_preserved; | |
6378ffb3 | 2494 | } |
2495 | else | |
ae7b7bc8 | 2496 | ipa_set_jf_unknown (dst); |
6378ffb3 | 2497 | } |
2498 | else if (dst->type == IPA_JF_PASS_THROUGH) | |
f8daee9b | 2499 | { |
6378ffb3 | 2500 | struct ipa_jump_func *src; |
2501 | /* We must check range due to calls with variable number of arguments | |
2502 | and we cannot combine jump functions with operations. */ | |
2503 | if (dst->value.pass_through.operation == NOP_EXPR | |
2504 | && (dst->value.pass_through.formal_id | |
2505 | < ipa_get_cs_argument_count (top))) | |
2506 | { | |
0d491188 | 2507 | int dst_fid = dst->value.pass_through.formal_id; |
2508 | src = ipa_get_ith_jump_func (top, dst_fid); | |
ad4a8b28 | 2509 | bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst); |
072ec6eb | 2510 | struct ipa_polymorphic_call_context *src_ctx |
2511 | = ipa_get_ith_polymorhic_call_context (top, dst_fid); | |
0d491188 | 2512 | |
072ec6eb | 2513 | if (src_ctx && !src_ctx->useless_p ()) |
2514 | { | |
2515 | struct ipa_polymorphic_call_context ctx = *src_ctx; | |
2516 | ||
2517 | /* TODO: Make type preserved safe WRT contexts. */ | |
245ab191 | 2518 | if (!ipa_get_jf_pass_through_type_preserved (dst)) |
007a6c27 | 2519 | ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor); |
072ec6eb | 2520 | if (!ctx.useless_p ()) |
2521 | { | |
2522 | if (!dst_ctx) | |
2523 | { | |
2524 | vec_safe_grow_cleared (args->polymorphic_call_contexts, | |
2525 | count); | |
2526 | dst_ctx = ipa_get_ith_polymorhic_call_context (args, i); | |
2527 | } | |
2528 | dst_ctx->combine_with (ctx); | |
2529 | } | |
2530 | } | |
ad4a8b28 | 2531 | switch (src->type) |
2532 | { | |
2533 | case IPA_JF_UNKNOWN: | |
ae7b7bc8 | 2534 | ipa_set_jf_unknown (dst); |
ad4a8b28 | 2535 | break; |
ad4a8b28 | 2536 | case IPA_JF_CONST: |
2537 | ipa_set_jf_cst_copy (dst, src); | |
2538 | break; | |
2539 | ||
2540 | case IPA_JF_PASS_THROUGH: | |
2541 | { | |
2542 | int formal_id = ipa_get_jf_pass_through_formal_id (src); | |
2543 | enum tree_code operation; | |
2544 | operation = ipa_get_jf_pass_through_operation (src); | |
2545 | ||
2546 | if (operation == NOP_EXPR) | |
2547 | { | |
693010ae | 2548 | bool agg_p; |
ad4a8b28 | 2549 | agg_p = dst_agg_p |
2550 | && ipa_get_jf_pass_through_agg_preserved (src); | |
693010ae | 2551 | ipa_set_jf_simple_pass_through (dst, formal_id, agg_p); |
ad4a8b28 | 2552 | } |
2553 | else | |
2554 | { | |
2555 | tree operand = ipa_get_jf_pass_through_operand (src); | |
2556 | ipa_set_jf_arith_pass_through (dst, formal_id, operand, | |
2557 | operation); | |
2558 | } | |
2559 | break; | |
2560 | } | |
2561 | case IPA_JF_ANCESTOR: | |
2562 | { | |
693010ae | 2563 | bool agg_p; |
ad4a8b28 | 2564 | agg_p = dst_agg_p |
2565 | && ipa_get_jf_ancestor_agg_preserved (src); | |
ad4a8b28 | 2566 | ipa_set_ancestor_jf (dst, |
2567 | ipa_get_jf_ancestor_offset (src), | |
ad4a8b28 | 2568 | ipa_get_jf_ancestor_formal_id (src), |
693010ae | 2569 | agg_p); |
ad4a8b28 | 2570 | break; |
2571 | } | |
2572 | default: | |
2573 | gcc_unreachable (); | |
2574 | } | |
0d491188 | 2575 | |
2576 | if (src->agg.items | |
ad4a8b28 | 2577 | && (dst_agg_p || !src->agg.by_ref)) |
0d491188 | 2578 | { |
2579 | /* Currently we do not produce clobber aggregate jump | |
2580 | functions, replace with merging when we do. */ | |
2581 | gcc_assert (!dst->agg.items); | |
2582 | ||
2583 | dst->agg.by_ref = src->agg.by_ref; | |
f1f41a6c | 2584 | dst->agg.items = vec_safe_copy (src->agg.items); |
0d491188 | 2585 | } |
6378ffb3 | 2586 | } |
2587 | else | |
ae7b7bc8 | 2588 | ipa_set_jf_unknown (dst); |
f8daee9b | 2589 | } |
6378ffb3 | 2590 | } |
2591 | } | |
2592 | ||
/* If TARGET is an addr_expr of a function declaration, make it the
   (SPECULATIVE) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
				bool speculative)
{
  struct cgraph_node *callee;
  struct inline_edge_summary *es = inline_edge_summary (ie);
  bool unreachable = false;

  /* Strip the address-of so TARGET is the function decl itself.  */
  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  /* Member pointer call that goes through a VMT lookup.  */
	  if (ie->indirect_info->member_ptr
	      /* Or if target is not an invariant expression and we do not
		 know if it will evaluate to function at runtime.
		 This can happen when folding through &VAR, where &VAR
		 is IP invariant, but VAR itself is not.

		 TODO: Revisit this when GCC 5 is branched.  It seems that
		 member_ptr check is not needed and that we may try to fold
		 the expression and see if VAR is readonly.  */
	      || !is_gimple_ip_invariant (target))
	    {
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (ie->call_stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "discovered direct call non-invariant "
				   "%s/%i\n",
				   ie->caller->name (), ie->caller->order);
		}
	      return NULL;
	    }


	  if (dump_enabled_p ())
	    {
	      location_t loc = gimple_location_safe (ie->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
			       "discovered direct call to non-function in %s/%i, "
			       "making it __builtin_unreachable\n",
			       ie->caller->name (), ie->caller->order);
	    }

	  /* A constant that is provably not a function: the call can never
	     happen, so redirect it to __builtin_unreachable.  */
	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	  callee = cgraph_node::get_create (target);
	  unreachable = true;
	}
      else
	callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and vtable may be external,
     we may create the first reference to the object in the unit.  */
  if (!callee || callee->global.inlined_to)
    {

      /* We are better to ensure we can refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (ie->callee->name ()),
		     ie->callee->order);
	  return NULL;
	}
      callee = cgraph_node::get_create (target);
    }

  /* If the edge is already speculated.  */
  if (speculative && ie->speculative)
    {
      struct cgraph_edge *e2;
      struct ipa_ref *ref;
      ie->speculative_call_info (e2, ie, ref);
      /* Agreeing or disagreeing with an existing speculation only affects
	 dumping; in both cases we keep the previous speculation.  */
      if (e2->callee->ultimate_alias_target ()
	  != callee->ultimate_alias_target ())
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
		     "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (callee->name ()),
		     callee->order,
		     xstrdup_for_dump (e2->callee->name ()),
		     e2->callee->order);
	}
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
		     "(%s/%i -> %s/%i) this agree with previous speculation.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (callee->name ()),
		     callee->order);
	}
      return NULL;
    }

  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We can not make edges to inline clones.  It is bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
	       "(%s/%i -> %s/%i), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       speculative ? "speculative" : "known",
	       xstrdup_for_dump (ie->caller->name ()),
	       ie->caller->order,
	       xstrdup_for_dump (callee->name ()),
	       callee->order);
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      location_t loc = gimple_location_safe (ie->call_stmt);

      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
		       "converting indirect call in %s to direct call to %s\n",
		       ie->caller->name (), callee->name ());
    }
  if (!speculative)
    {
      struct cgraph_edge *orig = ie;
      ie = ie->make_direct (callee);
      /* If we resolved speculative edge the cost is already up to date
	 for direct call (adjusted by inline_edge_duplication_hook).  */
      if (ie == orig)
	{
	  es = inline_edge_summary (ie);
	  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
				 - eni_size_weights.call_cost);
	  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
				 - eni_time_weights.call_cost);
	}
    }
  else
    {
      /* Speculate to a non-interposable alias if possible so the
	 speculation survives even if CALLEE itself may be overridden.  */
      if (!callee->can_be_discarded_p ())
	{
	  cgraph_node *alias;
	  alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
	  if (alias)
	    callee = alias;
	}
      /* make_speculative will update ie's cost to direct call cost.  */
      ie = ie->make_speculative
	 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
    }

  return ie;
}
2774 | ||
0d491188 | 2775 | /* Retrieve value from aggregate jump function AGG for the given OFFSET or |
2776 | return NULL if there is not any. BY_REF specifies whether the value has to | |
2777 | be passed by reference or by value. */ | |
2778 | ||
2779 | tree | |
2780 | ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, | |
2781 | HOST_WIDE_INT offset, bool by_ref) | |
2782 | { | |
2783 | struct ipa_agg_jf_item *item; | |
2784 | int i; | |
2785 | ||
2786 | if (by_ref != agg->by_ref) | |
2787 | return NULL; | |
2788 | ||
f1f41a6c | 2789 | FOR_EACH_VEC_SAFE_ELT (agg->items, i, item) |
803a7988 | 2790 | if (item->offset == offset) |
2791 | { | |
2792 | /* Currently we do not have clobber values, return NULL for them once | |
2793 | we do. */ | |
2794 | gcc_checking_assert (is_gimple_ip_invariant (item->value)); | |
2795 | return item->value; | |
2796 | } | |
0d491188 | 2797 | return NULL; |
2798 | } | |
2799 | ||
096295f6 | 2800 | /* Remove a reference to SYMBOL from the list of references of a node given by |
061168c9 | 2801 | reference description RDESC. Return true if the reference has been |
2802 | successfully found and removed. */ | |
096295f6 | 2803 | |
061168c9 | 2804 | static bool |
452659af | 2805 | remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc) |
096295f6 | 2806 | { |
2807 | struct ipa_ref *to_del; | |
2808 | struct cgraph_edge *origin; | |
2809 | ||
2810 | origin = rdesc->cs; | |
8398c9b3 | 2811 | if (!origin) |
2812 | return false; | |
51ce5652 | 2813 | to_del = origin->caller->find_reference (symbol, origin->call_stmt, |
2814 | origin->lto_stmt_uid); | |
061168c9 | 2815 | if (!to_del) |
2816 | return false; | |
2817 | ||
51ce5652 | 2818 | to_del->remove_reference (); |
096295f6 | 2819 | if (dump_file) |
2820 | fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n", | |
5ae49d3e | 2821 | xstrdup_for_dump (origin->caller->name ()), |
2822 | origin->caller->order, xstrdup_for_dump (symbol->name ())); | |
061168c9 | 2823 | return true; |
096295f6 | 2824 | } |
2825 | ||
2826 | /* If JFUNC has a reference description with refcount different from | |
2827 | IPA_UNDESCRIBED_USE, return the reference description, otherwise return | |
2828 | NULL. JFUNC must be a constant jump function. */ | |
2829 | ||
2830 | static struct ipa_cst_ref_desc * | |
2831 | jfunc_rdesc_usable (struct ipa_jump_func *jfunc) | |
2832 | { | |
2833 | struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc); | |
2834 | if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE) | |
2835 | return rdesc; | |
2836 | else | |
2837 | return NULL; | |
2838 | } | |
2839 | ||
061168c9 | 2840 | /* If the value of constant jump function JFUNC is an address of a function |
2841 | declaration, return the associated call graph node. Otherwise return | |
2842 | NULL. */ | |
2843 | ||
2844 | static cgraph_node * | |
2845 | cgraph_node_for_jfunc (struct ipa_jump_func *jfunc) | |
2846 | { | |
2847 | gcc_checking_assert (jfunc->type == IPA_JF_CONST); | |
2848 | tree cst = ipa_get_jf_constant (jfunc); | |
2849 | if (TREE_CODE (cst) != ADDR_EXPR | |
2850 | || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL) | |
2851 | return NULL; | |
2852 | ||
415d1b9a | 2853 | return cgraph_node::get (TREE_OPERAND (cst, 0)); |
061168c9 | 2854 | } |
2855 | ||
2856 | ||
2857 | /* If JFUNC is a constant jump function with a usable rdesc, decrement its | |
2858 | refcount and if it hits zero, remove reference to SYMBOL from the caller of | |
2859 | the edge specified in the rdesc. Return false if either the symbol or the | |
2860 | reference could not be found, otherwise return true. */ | |
2861 | ||
2862 | static bool | |
2863 | try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc) | |
2864 | { | |
2865 | struct ipa_cst_ref_desc *rdesc; | |
2866 | if (jfunc->type == IPA_JF_CONST | |
2867 | && (rdesc = jfunc_rdesc_usable (jfunc)) | |
2868 | && --rdesc->refcount == 0) | |
2869 | { | |
452659af | 2870 | symtab_node *symbol = cgraph_node_for_jfunc (jfunc); |
061168c9 | 2871 | if (!symbol) |
2872 | return false; | |
2873 | ||
2874 | return remove_described_reference (symbol, rdesc); | |
2875 | } | |
2876 | return true; | |
2877 | } | |
2878 | ||
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  If it can be
   determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc,
				  struct ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  /* Remember whether the target came from aggregate contents; IE may be
     mutated by ipa_make_edge_direct_to_target below, so sample it now.  */
  bool agg_contents = ie->indirect_info->agg_contents;

  /* The callee is either stored inside an aggregate passed in the
     parameter, or it is the parameter value itself.  */
  if (ie->indirect_info->agg_contents)
    target = ipa_find_agg_cst_for_param (&jfunc->agg,
					 ie->indirect_info->offset,
					 ie->indirect_info->by_ref);
  else
    target = ipa_value_from_jfunc (new_root_info, jfunc);
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      /* The new direct callee must match what the constant jump function
	 describes (unless the edge was replaced or has no rdesc).  */
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !cgraph_node_for_jfunc (jfunc)
			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
      /* The jump function's reference was consumed by the direct edge;
	 drop the bookkeeping reference.  */
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
2918 | ||
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.
   The returned decl is __builtin_unreachable; a cgraph node is created for
   it so callers may redirect edges to it.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
	fprintf (dump_file,
		 "Type inconsistent devirtualization: %s/%i->%s\n",
		 ie->caller->name (), ie->caller->order,
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
	fprintf (dump_file,
		 "No devirtualization target in %s/%i\n",
		 ie->caller->name (), ie->caller->order);
    }
  tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  /* Make sure the builtin has a call graph node before edges are pointed
     at it.  */
  cgraph_node::get_create (new_target);
  return new_target;
}
2941 | ||
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   struct ipa_polymorphic_call_context ctx)
{
  tree target = NULL;
  /* Set when TARGET is only a guess that must be verified at runtime.  */
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
					   ie->indirect_info->offset,
					   true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset);
	  if (t)
	    {
	      if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
		   && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		}
	      else
		{
		  target = t;
		  /* The vtable lookup is exact only if the vptr could not
		     have changed; otherwise it is merely a speculation.  */
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  /* Second attempt: use the polymorphic context to enumerate possible
     call targets.  */
  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      /* A complete list with at most one entry is an exact answer and
	 overrides any earlier speculative guess.  */
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      /* No exact answer; for hot calls try a profile/type-based guess.  */
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	  (ie, cgraph_node::get_create (target)))
	{
	  /* A speculative target that analysis rules out is simply
	     dropped; a "sure" one is replaced by unreachable.  */
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
3043 | ||
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  /* Jump function lattices are relative to the root of the inline tree.  */
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
				? cs->caller->global.inlined_to
				: cs->caller);

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;
      cgraph_node *spec_target = NULL;

      /* IE may be removed/replaced below; fetch the successor first.  */
      next_ie = ie->next_callee;

      /* -1 means the edge is not based on a formal parameter at all.  */
      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      if (ie->speculative)
	{
	  struct cgraph_edge *de;
	  struct ipa_ref *ref;
	  ie->speculative_call_info (de, ie, ref);
	  spec_target = de->callee;
	}

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
	}
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							    new_root_info);
      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && new_direct_edge->callee == spec_target)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  /* Edge manipulation may have reallocated the argument summary;
	     refresh TOP.  */
	  top = IPA_EDGE_REF (cs);
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (
		  new_direct_edge->call_stmt,
		  new_direct_edge->callee->decl, false);
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  top = IPA_EDGE_REF (cs);
	  /* If speculative edge was introduced we still need to update
	     call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      /* The edge stays indirect (or speculative); retarget its parameter
	 index through the jump function of the inlined edge.  */
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
3173 | ||
3174 | /* Recursively traverse subtree of NODE (including node) made of inlined | |
3175 | cgraph_edges when CS has been inlined and invoke | |
799c8711 | 3176 | update_indirect_edges_after_inlining on all nodes and |
f8daee9b | 3177 | update_jump_functions_after_inlining on all non-inlined edges that lead out |
3178 | of this subtree. Newly discovered indirect edges will be added to | |
3f2ff969 | 3179 | *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were |
3180 | created. */ | |
1917e945 | 3181 | |
3f2ff969 | 3182 | static bool |
f8daee9b | 3183 | propagate_info_to_inlined_callees (struct cgraph_edge *cs, |
3184 | struct cgraph_node *node, | |
415d1b9a | 3185 | vec<cgraph_edge *> *new_edges) |
f8daee9b | 3186 | { |
3187 | struct cgraph_edge *e; | |
3f2ff969 | 3188 | bool res; |
f8daee9b | 3189 | |
799c8711 | 3190 | res = update_indirect_edges_after_inlining (cs, node, new_edges); |
f8daee9b | 3191 | |
3192 | for (e = node->callees; e; e = e->next_callee) | |
3193 | if (!e->inline_failed) | |
3f2ff969 | 3194 | res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges); |
f8daee9b | 3195 | else |
3196 | update_jump_functions_after_inlining (cs, e); | |
a226c368 | 3197 | for (e = node->indirect_calls; e; e = e->next_callee) |
3198 | update_jump_functions_after_inlining (cs, e); | |
3f2ff969 | 3199 | |
3200 | return res; | |
f8daee9b | 3201 | } |
3202 | ||
096295f6 | 3203 | /* Combine two controlled uses counts as done during inlining. */ |
3204 | ||
3205 | static int | |
3206 | combine_controlled_uses_counters (int c, int d) | |
3207 | { | |
3208 | if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE) | |
3209 | return IPA_UNDESCRIBED_USE; | |
3210 | else | |
3211 | return c + d - 1; | |
3212 | } | |
3213 | ||
3214 | /* Propagate number of controlled users from CS->caleee to the new root of the | |
3215 | tree of inlined nodes. */ | |
3216 | ||
3217 | static void | |
3218 | propagate_controlled_uses (struct cgraph_edge *cs) | |
3219 | { | |
3220 | struct ipa_edge_args *args = IPA_EDGE_REF (cs); | |
3221 | struct cgraph_node *new_root = cs->caller->global.inlined_to | |
3222 | ? cs->caller->global.inlined_to : cs->caller; | |
3223 | struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root); | |
3224 | struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee); | |
3225 | int count, i; | |
3226 | ||
3227 | count = MIN (ipa_get_cs_argument_count (args), | |
3228 | ipa_get_param_count (old_root_info)); | |
3229 | for (i = 0; i < count; i++) | |
3230 | { | |
3231 | struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i); | |
3232 | struct ipa_cst_ref_desc *rdesc; | |
3233 | ||
3234 | if (jf->type == IPA_JF_PASS_THROUGH) | |
3235 | { | |
3236 | int src_idx, c, d; | |
3237 | src_idx = ipa_get_jf_pass_through_formal_id (jf); | |
3238 | c = ipa_get_controlled_uses (new_root_info, src_idx); | |
3239 | d = ipa_get_controlled_uses (old_root_info, i); | |
3240 | ||
3241 | gcc_checking_assert (ipa_get_jf_pass_through_operation (jf) | |
3242 | == NOP_EXPR || c == IPA_UNDESCRIBED_USE); | |
3243 | c = combine_controlled_uses_counters (c, d); | |
3244 | ipa_set_controlled_uses (new_root_info, src_idx, c); | |
3245 | if (c == 0 && new_root_info->ipcp_orig_node) | |
3246 | { | |
3247 | struct cgraph_node *n; | |
3248 | struct ipa_ref *ref; | |
245ab191 | 3249 | tree t = new_root_info->known_csts[src_idx]; |
096295f6 | 3250 | |
3251 | if (t && TREE_CODE (t) == ADDR_EXPR | |
3252 | && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL | |
415d1b9a | 3253 | && (n = cgraph_node::get (TREE_OPERAND (t, 0))) |
51ce5652 | 3254 | && (ref = new_root->find_reference (n, NULL, 0))) |
096295f6 | 3255 | { |
3256 | if (dump_file) | |
3257 | fprintf (dump_file, "ipa-prop: Removing cloning-created " | |
3258 | "reference from %s/%i to %s/%i.\n", | |
5ae49d3e | 3259 | xstrdup_for_dump (new_root->name ()), |
02774f2d | 3260 | new_root->order, |
5ae49d3e | 3261 | xstrdup_for_dump (n->name ()), n->order); |
51ce5652 | 3262 | ref->remove_reference (); |
096295f6 | 3263 | } |
3264 | } | |
3265 | } | |
3266 | else if (jf->type == IPA_JF_CONST | |
3267 | && (rdesc = jfunc_rdesc_usable (jf))) | |
3268 | { | |
3269 | int d = ipa_get_controlled_uses (old_root_info, i); | |
3270 | int c = rdesc->refcount; | |
3271 | rdesc->refcount = combine_controlled_uses_counters (c, d); | |
3272 | if (rdesc->refcount == 0) | |
3273 | { | |
3274 | tree cst = ipa_get_jf_constant (jf); | |
3275 | struct cgraph_node *n; | |
3276 | gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR | |
3277 | && TREE_CODE (TREE_OPERAND (cst, 0)) | |
3278 | == FUNCTION_DECL); | |
415d1b9a | 3279 | n = cgraph_node::get (TREE_OPERAND (cst, 0)); |
096295f6 | 3280 | if (n) |
3281 | { | |
3282 | struct cgraph_node *clone; | |
061168c9 | 3283 | bool ok; |
02774f2d | 3284 | ok = remove_described_reference (n, rdesc); |
061168c9 | 3285 | gcc_checking_assert (ok); |
096295f6 | 3286 | |
3287 | clone = cs->caller; | |
3288 | while (clone->global.inlined_to | |
3289 | && clone != rdesc->cs->caller | |
3290 | && IPA_NODE_REF (clone)->ipcp_orig_node) | |
3291 | { | |
3292 | struct ipa_ref *ref; | |
51ce5652 | 3293 | ref = clone->find_reference (n, NULL, 0); |
096295f6 | 3294 | if (ref) |
3295 | { | |
3296 | if (dump_file) | |
3297 | fprintf (dump_file, "ipa-prop: Removing " | |
3298 | "cloning-created reference " | |
3299 | "from %s/%i to %s/%i.\n", | |
5ae49d3e | 3300 | xstrdup_for_dump (clone->name ()), |
02774f2d | 3301 | clone->order, |
5ae49d3e | 3302 | xstrdup_for_dump (n->name ()), |
02774f2d | 3303 | n->order); |
51ce5652 | 3304 | ref->remove_reference (); |
096295f6 | 3305 | } |
3306 | clone = clone->callers->caller; | |
3307 | } | |
3308 | } | |
3309 | } | |
3310 | } | |
3311 | } | |
3312 | ||
3313 | for (i = ipa_get_param_count (old_root_info); | |
3314 | i < ipa_get_cs_argument_count (args); | |
3315 | i++) | |
3316 | { | |
3317 | struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i); | |
3318 | ||
3319 | if (jf->type == IPA_JF_CONST) | |
3320 | { | |
3321 | struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf); | |
3322 | if (rdesc) | |
3323 | rdesc->refcount = IPA_UNDESCRIBED_USE; | |
3324 | } | |
3325 | else if (jf->type == IPA_JF_PASS_THROUGH) | |
3326 | ipa_set_controlled_uses (new_root_info, | |
3327 | jf->value.pass_through.formal_id, | |
3328 | IPA_UNDESCRIBED_USE); | |
3329 | } | |
3330 | } | |
3331 | ||
f8daee9b | 3332 | /* Update jump functions and call note functions on inlining the call site CS. |
3333 | CS is expected to lead to a node already cloned by | |
3334 | cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to | |
3f2ff969 | 3335 | *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were + |
3336 | created. */ | |
1917e945 | 3337 | |
3f2ff969 | 3338 | bool |
f8daee9b | 3339 | ipa_propagate_indirect_call_infos (struct cgraph_edge *cs, |
415d1b9a | 3340 | vec<cgraph_edge *> *new_edges) |
f8daee9b | 3341 | { |
a226c368 | 3342 | bool changed; |
3f2ff969 | 3343 | /* Do nothing if the preparation phase has not been carried out yet |
3344 | (i.e. during early inlining). */ | |
2cc80ac3 | 3345 | if (!ipa_node_params_sum) |
3f2ff969 | 3346 | return false; |
3347 | gcc_assert (ipa_edge_args_vector); | |
3348 | ||
096295f6 | 3349 | propagate_controlled_uses (cs); |
a226c368 | 3350 | changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges); |
3351 | ||
a226c368 | 3352 | return changed; |
3b22db66 | 3353 | } |
3354 | ||
545eff8f | 3355 | /* Frees all dynamically allocated structures that the argument info points |
3356 | to. */ | |
1917e945 | 3357 | |
3b22db66 | 3358 | void |
545eff8f | 3359 | ipa_free_edge_args_substructures (struct ipa_edge_args *args) |
3b22db66 | 3360 | { |
f1f41a6c | 3361 | vec_free (args->jump_functions); |
545eff8f | 3362 | memset (args, 0, sizeof (*args)); |
3b22db66 | 3363 | } |
3364 | ||
545eff8f | 3365 | /* Free all ipa_edge structures. */ |
1917e945 | 3366 | |
3b22db66 | 3367 | void |
545eff8f | 3368 | ipa_free_all_edge_args (void) |
3b22db66 | 3369 | { |
545eff8f | 3370 | int i; |
3371 | struct ipa_edge_args *args; | |
3b22db66 | 3372 | |
f1f41a6c | 3373 | if (!ipa_edge_args_vector) |
3374 | return; | |
3375 | ||
3376 | FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args) | |
545eff8f | 3377 | ipa_free_edge_args_substructures (args); |
3378 | ||
f1f41a6c | 3379 | vec_free (ipa_edge_args_vector); |
3b22db66 | 3380 | } |
3381 | ||
545eff8f | 3382 | /* Frees all dynamically allocated structures that the param info points |
3383 | to. */ | |
1917e945 | 3384 | |
2cc80ac3 | 3385 | ipa_node_params::~ipa_node_params () |
3b22db66 | 3386 | { |
2cc80ac3 | 3387 | descriptors.release (); |
3388 | free (lattices); | |
821d0e0f | 3389 | /* Lattice values and their sources are deallocated with their alocation |
3390 | pool. */ | |
2cc80ac3 | 3391 | known_contexts.release (); |
3392 | ||
3393 | lattices = NULL; | |
3394 | ipcp_orig_node = NULL; | |
3395 | analysis_done = 0; | |
3396 | node_enqueued = 0; | |
3397 | do_clone_for_all_contexts = 0; | |
3398 | is_all_contexts_clone = 0; | |
3399 | node_dead = 0; | |
3b22db66 | 3400 | } |
3401 | ||
545eff8f | 3402 | /* Free all ipa_node_params structures. */ |
1917e945 | 3403 | |
3b22db66 | 3404 | void |
545eff8f | 3405 | ipa_free_all_node_params (void) |
3b22db66 | 3406 | { |
2cc80ac3 | 3407 | delete ipa_node_params_sum; |
3408 | ipa_node_params_sum = NULL; | |
545eff8f | 3409 | } |
3410 | ||
ae7b7bc8 | 3411 | /* Grow ipcp_transformations if necessary. */ |
3412 | ||
3413 | void | |
3414 | ipcp_grow_transformations_if_necessary (void) | |
3415 | { | |
3416 | if (vec_safe_length (ipcp_transformations) | |
3417 | <= (unsigned) symtab->cgraph_max_uid) | |
3418 | vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1); | |
3419 | } | |
3420 | ||
803a7988 | 3421 | /* Set the aggregate replacements of NODE to be AGGVALS. */ |
3422 | ||
3423 | void | |
3424 | ipa_set_node_agg_value_chain (struct cgraph_node *node, | |
3425 | struct ipa_agg_replacement_value *aggvals) | |
3426 | { | |
ae7b7bc8 | 3427 | ipcp_grow_transformations_if_necessary (); |
3428 | (*ipcp_transformations)[node->uid].agg_values = aggvals; | |
803a7988 | 3429 | } |
3430 | ||
545eff8f | 3431 | /* Hook that is called by cgraph.c when an edge is removed. */ |
1917e945 | 3432 | |
545eff8f | 3433 | static void |
74140efd | 3434 | ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED) |
545eff8f | 3435 | { |
061168c9 | 3436 | struct ipa_edge_args *args; |
3437 | ||
3438 | /* During IPA-CP updating we can be called on not-yet analyzed clones. */ | |
f1f41a6c | 3439 | if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid) |
5afe38fe | 3440 | return; |
061168c9 | 3441 | |
3442 | args = IPA_EDGE_REF (cs); | |
3443 | if (args->jump_functions) | |
3444 | { | |
3445 | struct ipa_jump_func *jf; | |
3446 | int i; | |
3447 | FOR_EACH_VEC_ELT (*args->jump_functions, i, jf) | |
8398c9b3 | 3448 | { |
3449 | struct ipa_cst_ref_desc *rdesc; | |
3450 | try_decrement_rdesc_refcount (jf); | |
3451 | if (jf->type == IPA_JF_CONST | |
3452 | && (rdesc = ipa_get_jf_constant_rdesc (jf)) | |
3453 | && rdesc->cs == cs) | |
3454 | rdesc->cs = NULL; | |
3455 | } | |
061168c9 | 3456 | } |
3457 | ||
545eff8f | 3458 | ipa_free_edge_args_substructures (IPA_EDGE_REF (cs)); |
3b22db66 | 3459 | } |
3460 | ||
0d491188 | 3461 | /* Hook that is called by cgraph.c when an edge is duplicated. */ |
1917e945 | 3462 | |
545eff8f | 3463 | static void |
3464 | ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst, | |
2cc80ac3 | 3465 | void *) |
545eff8f | 3466 | { |
3467 | struct ipa_edge_args *old_args, *new_args; | |
0d491188 | 3468 | unsigned int i; |
545eff8f | 3469 | |
3470 | ipa_check_create_edge_args (); | |
3471 | ||
3472 | old_args = IPA_EDGE_REF (src); | |
3473 | new_args = IPA_EDGE_REF (dst); | |
3474 | ||
f1f41a6c | 3475 | new_args->jump_functions = vec_safe_copy (old_args->jump_functions); |
072ec6eb | 3476 | if (old_args->polymorphic_call_contexts) |
3477 | new_args->polymorphic_call_contexts | |
3478 | = vec_safe_copy (old_args->polymorphic_call_contexts); | |
0d491188 | 3479 | |
f1f41a6c | 3480 | for (i = 0; i < vec_safe_length (old_args->jump_functions); i++) |
096295f6 | 3481 | { |
3482 | struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i); | |
3483 | struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i); | |
3484 | ||
3485 | dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items); | |
3486 | ||
3487 | if (src_jf->type == IPA_JF_CONST) | |
3488 | { | |
3489 | struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf); | |
3490 | ||
3491 | if (!src_rdesc) | |
3492 | dst_jf->value.constant.rdesc = NULL; | |
061168c9 | 3493 | else if (src->caller == dst->caller) |
3494 | { | |
3495 | struct ipa_ref *ref; | |
452659af | 3496 | symtab_node *n = cgraph_node_for_jfunc (src_jf); |
061168c9 | 3497 | gcc_checking_assert (n); |
51ce5652 | 3498 | ref = src->caller->find_reference (n, src->call_stmt, |
3499 | src->lto_stmt_uid); | |
061168c9 | 3500 | gcc_checking_assert (ref); |
51ce5652 | 3501 | dst->caller->clone_reference (ref, ref->stmt); |
061168c9 | 3502 | |
b196706d | 3503 | struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate (); |
061168c9 | 3504 | dst_rdesc->cs = dst; |
3505 | dst_rdesc->refcount = src_rdesc->refcount; | |
3506 | dst_rdesc->next_duplicate = NULL; | |
3507 | dst_jf->value.constant.rdesc = dst_rdesc; | |
3508 | } | |
096295f6 | 3509 | else if (src_rdesc->cs == src) |
3510 | { | |
b196706d | 3511 | struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate (); |
096295f6 | 3512 | dst_rdesc->cs = dst; |
096295f6 | 3513 | dst_rdesc->refcount = src_rdesc->refcount; |
c55448e7 | 3514 | dst_rdesc->next_duplicate = src_rdesc->next_duplicate; |
3515 | src_rdesc->next_duplicate = dst_rdesc; | |
096295f6 | 3516 | dst_jf->value.constant.rdesc = dst_rdesc; |
3517 | } | |
3518 | else | |
3519 | { | |
3520 | struct ipa_cst_ref_desc *dst_rdesc; | |
3521 | /* This can happen during inlining, when a JFUNC can refer to a | |
3522 | reference taken in a function up in the tree of inline clones. | |
3523 | We need to find the duplicate that refers to our tree of | |
3524 | inline clones. */ | |
3525 | ||
3526 | gcc_assert (dst->caller->global.inlined_to); | |
3527 | for (dst_rdesc = src_rdesc->next_duplicate; | |
3528 | dst_rdesc; | |
3529 | dst_rdesc = dst_rdesc->next_duplicate) | |
c55448e7 | 3530 | { |
3531 | struct cgraph_node *top; | |
3532 | top = dst_rdesc->cs->caller->global.inlined_to | |
3533 | ? dst_rdesc->cs->caller->global.inlined_to | |
3534 | : dst_rdesc->cs->caller; | |
3535 | if (dst->caller->global.inlined_to == top) | |
3536 | break; | |
3537 | } | |
48f42a9a | 3538 | gcc_assert (dst_rdesc); |
096295f6 | 3539 | dst_jf->value.constant.rdesc = dst_rdesc; |
3540 | } | |
3541 | } | |
270bb323 | 3542 | else if (dst_jf->type == IPA_JF_PASS_THROUGH |
3543 | && src->caller == dst->caller) | |
3544 | { | |
3545 | struct cgraph_node *inline_root = dst->caller->global.inlined_to | |
3546 | ? dst->caller->global.inlined_to : dst->caller; | |
3547 | struct ipa_node_params *root_info = IPA_NODE_REF (inline_root); | |
3548 | int idx = ipa_get_jf_pass_through_formal_id (dst_jf); | |
3549 | ||
3550 | int c = ipa_get_controlled_uses (root_info, idx); | |
3551 | if (c != IPA_UNDESCRIBED_USE) | |
3552 | { | |
3553 | c++; | |
3554 | ipa_set_controlled_uses (root_info, idx, c); | |
3555 | } | |
3556 | } | |
096295f6 | 3557 | } |
545eff8f | 3558 | } |
3559 | ||
2cc80ac3 | 3560 | /* Analyze newly added function into callgraph. */ |
1917e945 | 3561 | |
545eff8f | 3562 | static void |
2cc80ac3 | 3563 | ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED) |
545eff8f | 3564 | { |
2cc80ac3 | 3565 | if (node->has_gimple_body_p ()) |
3566 | ipa_analyze_node (node); | |
3567 | } | |
545eff8f | 3568 | |
2cc80ac3 | 3569 | /* Hook that is called by summary when a node is duplicated. */ |
3570 | ||
3571 | void | |
3572 | ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst, | |
3573 | ipa_node_params *old_info, | |
3574 | ipa_node_params *new_info) | |
3575 | { | |
3576 | ipa_agg_replacement_value *old_av, *new_av; | |
545eff8f | 3577 | |
f1f41a6c | 3578 | new_info->descriptors = old_info->descriptors.copy (); |
821d0e0f | 3579 | new_info->lattices = NULL; |
545eff8f | 3580 | new_info->ipcp_orig_node = old_info->ipcp_orig_node; |
1caef38b | 3581 | |
24430d08 | 3582 | new_info->analysis_done = old_info->analysis_done; |
1caef38b | 3583 | new_info->node_enqueued = old_info->node_enqueued; |
803a7988 | 3584 | |
3585 | old_av = ipa_get_agg_replacements_for_node (src); | |
ae7b7bc8 | 3586 | if (old_av) |
803a7988 | 3587 | { |
ae7b7bc8 | 3588 | new_av = NULL; |
3589 | while (old_av) | |
3590 | { | |
3591 | struct ipa_agg_replacement_value *v; | |
803a7988 | 3592 | |
ae7b7bc8 | 3593 | v = ggc_alloc<ipa_agg_replacement_value> (); |
3594 | memcpy (v, old_av, sizeof (*v)); | |
3595 | v->next = new_av; | |
3596 | new_av = v; | |
3597 | old_av = old_av->next; | |
3598 | } | |
3599 | ipa_set_node_agg_value_chain (dst, new_av); | |
3600 | } | |
3601 | ||
3602 | ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src); | |
3603 | ||
3604 | if (src_trans && vec_safe_length (src_trans->alignments) > 0) | |
3605 | { | |
3606 | ipcp_grow_transformations_if_necessary (); | |
3607 | src_trans = ipcp_get_transformation_summary (src); | |
3608 | const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments; | |
3609 | vec<ipa_alignment, va_gc> *&dst_alignments | |
3610 | = ipcp_get_transformation_summary (dst)->alignments; | |
3611 | vec_safe_reserve_exact (dst_alignments, src_alignments->length ()); | |
3612 | for (unsigned i = 0; i < src_alignments->length (); ++i) | |
3613 | dst_alignments->quick_push ((*src_alignments)[i]); | |
803a7988 | 3614 | } |
545eff8f | 3615 | } |
3616 | ||
3617 | /* Register our cgraph hooks if they are not already there. */ | |
1917e945 | 3618 | |
3b22db66 | 3619 | void |
545eff8f | 3620 | ipa_register_cgraph_hooks (void) |
3b22db66 | 3621 | { |
2cc80ac3 | 3622 | ipa_check_create_node_params (); |
3623 | ||
545eff8f | 3624 | if (!edge_removal_hook_holder) |
3625 | edge_removal_hook_holder = | |
35ee1c66 | 3626 | symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL); |
545eff8f | 3627 | if (!edge_duplication_hook_holder) |
3628 | edge_duplication_hook_holder = | |
35ee1c66 | 3629 | symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL); |
2cc80ac3 | 3630 | function_insertion_hook_holder = |
35ee1c66 | 3631 | symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL); |
545eff8f | 3632 | } |
3b22db66 | 3633 | |
545eff8f | 3634 | /* Unregister our cgraph hooks if they are not already there. */ |
1917e945 | 3635 | |
545eff8f | 3636 | static void |
3637 | ipa_unregister_cgraph_hooks (void) | |
3638 | { | |
35ee1c66 | 3639 | symtab->remove_edge_removal_hook (edge_removal_hook_holder); |
545eff8f | 3640 | edge_removal_hook_holder = NULL; |
35ee1c66 | 3641 | symtab->remove_edge_duplication_hook (edge_duplication_hook_holder); |
545eff8f | 3642 | edge_duplication_hook_holder = NULL; |
35ee1c66 | 3643 | symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder); |
b1471ee0 | 3644 | function_insertion_hook_holder = NULL; |
545eff8f | 3645 | } |
3646 | ||
3647 | /* Free all ipa_node_params and all ipa_edge_args structures if they are no | |
3648 | longer needed after ipa-cp. */ | |
1917e945 | 3649 | |
545eff8f | 3650 | void |
799c8711 | 3651 | ipa_free_all_structures_after_ipa_cp (void) |
f8daee9b | 3652 | { |
d1f68cd8 | 3653 | if (!optimize && !in_lto_p) |
f8daee9b | 3654 | { |
3655 | ipa_free_all_edge_args (); | |
3656 | ipa_free_all_node_params (); | |
8361d32f | 3657 | ipcp_sources_pool.release (); |
3658 | ipcp_cst_values_pool.release (); | |
3659 | ipcp_poly_ctx_values_pool.release (); | |
3660 | ipcp_agg_lattice_pool.release (); | |
f8daee9b | 3661 | ipa_unregister_cgraph_hooks (); |
b196706d | 3662 | ipa_refdesc_pool.release (); |
f8daee9b | 3663 | } |
3664 | } | |
3665 | ||
3666 | /* Free all ipa_node_params and all ipa_edge_args structures if they are no | |
3667 | longer needed after indirect inlining. */ | |
1917e945 | 3668 | |
f8daee9b | 3669 | void |
799c8711 | 3670 | ipa_free_all_structures_after_iinln (void) |
545eff8f | 3671 | { |
3672 | ipa_free_all_edge_args (); | |
3673 | ipa_free_all_node_params (); | |
3674 | ipa_unregister_cgraph_hooks (); | |
8361d32f | 3675 | ipcp_sources_pool.release (); |
3676 | ipcp_cst_values_pool.release (); | |
3677 | ipcp_poly_ctx_values_pool.release (); | |
3678 | ipcp_agg_lattice_pool.release (); | |
b196706d | 3679 | ipa_refdesc_pool.release (); |
3b22db66 | 3680 | } |
3681 | ||
3889f2e2 | 3682 | /* Print ipa_tree_map data structures of all functions in the |
3b22db66 | 3683 | callgraph to F. */ |
1917e945 | 3684 | |
3b22db66 | 3685 | void |
803a7988 | 3686 | ipa_print_node_params (FILE *f, struct cgraph_node *node) |
3b22db66 | 3687 | { |
3688 | int i, count; | |
f8daee9b | 3689 | struct ipa_node_params *info; |
3b22db66 | 3690 | |
02774f2d | 3691 | if (!node->definition) |
f8daee9b | 3692 | return; |
3693 | info = IPA_NODE_REF (node); | |
15c999e3 | 3694 | fprintf (f, " function %s/%i parameter descriptors:\n", |
f1c8b4d7 | 3695 | node->name (), node->order); |
f8daee9b | 3696 | count = ipa_get_param_count (info); |
3697 | for (i = 0; i < count; i++) | |
3b22db66 | 3698 | { |
096295f6 | 3699 | int c; |
3700 | ||
436b29f7 | 3701 | fprintf (f, " "); |
cb4d77e1 | 3702 | ipa_dump_param (f, info, i); |
bc1b408a | 3703 | if (ipa_is_param_used (info, i)) |
3704 | fprintf (f, " used"); | |
096295f6 | 3705 | c = ipa_get_controlled_uses (info, i); |
3706 | if (c == IPA_UNDESCRIBED_USE) | |
3707 | fprintf (f, " undescribed_use"); | |
3708 | else | |
3709 | fprintf (f, " controlled_uses=%i", c); | |
f8daee9b | 3710 | fprintf (f, "\n"); |
3b22db66 | 3711 | } |
3712 | } | |
3889f2e2 | 3713 | |
11b73810 | 3714 | /* Print ipa_tree_map data structures of all functions in the |
f8daee9b | 3715 | callgraph to F. */ |
1917e945 | 3716 | |
f8daee9b | 3717 | void |
11b73810 | 3718 | ipa_print_all_params (FILE * f) |
f8daee9b | 3719 | { |
3720 | struct cgraph_node *node; | |
3721 | ||
11b73810 | 3722 | fprintf (f, "\nFunction parameters:\n"); |
7c455d87 | 3723 | FOR_EACH_FUNCTION (node) |
11b73810 | 3724 | ipa_print_node_params (f, node); |
f8daee9b | 3725 | } |
547f1802 | 3726 | |
3727 | /* Return a heap allocated vector containing formal parameters of FNDECL. */ | |
3728 | ||
f1f41a6c | 3729 | vec<tree> |
547f1802 | 3730 | ipa_get_vector_of_formal_parms (tree fndecl) |
3731 | { | |
f1f41a6c | 3732 | vec<tree> args; |
547f1802 | 3733 | int count; |
3734 | tree parm; | |
3735 | ||
09ab6335 | 3736 | gcc_assert (!flag_wpa); |
821d0e0f | 3737 | count = count_formal_params (fndecl); |
f1f41a6c | 3738 | args.create (count); |
1767a056 | 3739 | for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm)) |
f1f41a6c | 3740 | args.quick_push (parm); |
547f1802 | 3741 | |
3742 | return args; | |
3743 | } | |
3744 | ||
3745 | /* Return a heap allocated vector containing types of formal parameters of | |
3746 | function type FNTYPE. */ | |
3747 | ||
6bcfabf2 | 3748 | vec<tree> |
3749 | ipa_get_vector_of_formal_parm_types (tree fntype) | |
547f1802 | 3750 | { |
f1f41a6c | 3751 | vec<tree> types; |
547f1802 | 3752 | int count = 0; |
3753 | tree t; | |
3754 | ||
3755 | for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t)) | |
3756 | count++; | |
3757 | ||
f1f41a6c | 3758 | types.create (count); |
547f1802 | 3759 | for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t)) |
f1f41a6c | 3760 | types.quick_push (TREE_VALUE (t)); |
547f1802 | 3761 | |
3762 | return types; | |
3763 | } | |
3764 | ||
3765 | /* Modify the function declaration FNDECL and its type according to the plan in | |
3766 | ADJUSTMENTS. It also sets base fields of individual adjustments structures | |
3767 | to reflect the actual parameters being modified which are determined by the | |
3768 | base_index field. */ | |
3769 | ||
3770 | void | |
6bcfabf2 | 3771 | ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments) |
547f1802 | 3772 | { |
6bcfabf2 | 3773 | vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl); |
3774 | tree orig_type = TREE_TYPE (fndecl); | |
3775 | tree old_arg_types = TYPE_ARG_TYPES (orig_type); | |
547f1802 | 3776 | |
3777 | /* The following test is an ugly hack, some functions simply don't have any | |
3778 | arguments in their type. This is probably a bug but well... */ | |
6bcfabf2 | 3779 | bool care_for_types = (old_arg_types != NULL_TREE); |
3780 | bool last_parm_void; | |
3781 | vec<tree> otypes; | |
547f1802 | 3782 | if (care_for_types) |
3783 | { | |
3784 | last_parm_void = (TREE_VALUE (tree_last (old_arg_types)) | |
3785 | == void_type_node); | |
6bcfabf2 | 3786 | otypes = ipa_get_vector_of_formal_parm_types (orig_type); |
547f1802 | 3787 | if (last_parm_void) |
f1f41a6c | 3788 | gcc_assert (oparms.length () + 1 == otypes.length ()); |
547f1802 | 3789 | else |
f1f41a6c | 3790 | gcc_assert (oparms.length () == otypes.length ()); |
547f1802 | 3791 | } |
3792 | else | |
3793 | { | |
3794 | last_parm_void = false; | |
f1f41a6c | 3795 | otypes.create (0); |
547f1802 | 3796 | } |
3797 | ||
6bcfabf2 | 3798 | int len = adjustments.length (); |
3799 | tree *link = &DECL_ARGUMENTS (fndecl); | |
3800 | tree new_arg_types = NULL; | |
3801 | for (int i = 0; i < len; i++) | |
547f1802 | 3802 | { |
3803 | struct ipa_parm_adjustment *adj; | |
3804 | gcc_assert (link); | |
3805 | ||
f1f41a6c | 3806 | adj = &adjustments[i]; |
6bcfabf2 | 3807 | tree parm; |
3808 | if (adj->op == IPA_PARM_OP_NEW) | |
3809 | parm = NULL; | |
3810 | else | |
3811 | parm = oparms[adj->base_index]; | |
547f1802 | 3812 | adj->base = parm; |
3813 | ||
6bcfabf2 | 3814 | if (adj->op == IPA_PARM_OP_COPY) |
547f1802 | 3815 | { |
3816 | if (care_for_types) | |
f1f41a6c | 3817 | new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index], |
547f1802 | 3818 | new_arg_types); |
3819 | *link = parm; | |
1767a056 | 3820 | link = &DECL_CHAIN (parm); |
547f1802 | 3821 | } |
6bcfabf2 | 3822 | else if (adj->op != IPA_PARM_OP_REMOVE) |
547f1802 | 3823 | { |
3824 | tree new_parm; | |
3825 | tree ptype; | |
3826 | ||
3827 | if (adj->by_ref) | |
3828 | ptype = build_pointer_type (adj->type); | |
3829 | else | |
178e1d99 | 3830 | { |
3831 | ptype = adj->type; | |
3832 | if (is_gimple_reg_type (ptype)) | |
3833 | { | |
3834 | unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype)); | |
3835 | if (TYPE_ALIGN (ptype) < malign) | |
3836 | ptype = build_aligned_type (ptype, malign); | |
3837 | } | |
3838 | } | |
547f1802 | 3839 | |
3840 | if (care_for_types) | |
3841 | new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types); | |
3842 | ||
3843 | new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE, | |
3844 | ptype); | |
6bcfabf2 | 3845 | const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH"; |
3846 | DECL_NAME (new_parm) = create_tmp_var_name (prefix); | |
547f1802 | 3847 | DECL_ARTIFICIAL (new_parm) = 1; |
3848 | DECL_ARG_TYPE (new_parm) = ptype; | |
3849 | DECL_CONTEXT (new_parm) = fndecl; | |
3850 | TREE_USED (new_parm) = 1; | |
3851 | DECL_IGNORED_P (new_parm) = 1; | |
3852 | layout_decl (new_parm, 0); | |
3853 | ||
6bcfabf2 | 3854 | if (adj->op == IPA_PARM_OP_NEW) |
3855 | adj->base = NULL; | |
3856 | else | |
3857 | adj->base = parm; | |
3858 | adj->new_decl = new_parm; | |
547f1802 | 3859 | |
3860 | *link = new_parm; | |
1767a056 | 3861 | link = &DECL_CHAIN (new_parm); |
547f1802 | 3862 | } |
3863 | } | |
3864 | ||
3865 | *link = NULL_TREE; | |
3866 | ||
6bcfabf2 | 3867 | tree new_reversed = NULL; |
547f1802 | 3868 | if (care_for_types) |
3869 | { | |
3870 | new_reversed = nreverse (new_arg_types); | |
3871 | if (last_parm_void) | |
3872 | { | |
3873 | if (new_reversed) | |
3874 | TREE_CHAIN (new_arg_types) = void_list_node; | |
3875 | else | |
3876 | new_reversed = void_list_node; | |
3877 | } | |
3878 | } | |
3879 | ||
3880 | /* Use copy_node to preserve as much as possible from original type | |
3881 | (debug info, attribute lists etc.) | |
3882 | Exception is METHOD_TYPEs must have THIS argument. | |
3883 | When we are asked to remove it, we need to build new FUNCTION_TYPE | |
3884 | instead. */ | |
6bcfabf2 | 3885 | tree new_type = NULL; |
547f1802 | 3886 | if (TREE_CODE (orig_type) != METHOD_TYPE |
6bcfabf2 | 3887 | || (adjustments[0].op == IPA_PARM_OP_COPY |
f1f41a6c | 3888 | && adjustments[0].base_index == 0)) |
547f1802 | 3889 | { |
ae5fe283 | 3890 | new_type = build_distinct_type_copy (orig_type); |
547f1802 | 3891 | TYPE_ARG_TYPES (new_type) = new_reversed; |
3892 | } | |
3893 | else | |
3894 | { | |
3895 | new_type | |
3896 | = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type), | |
3897 | new_reversed)); | |
3898 | TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type); | |
3899 | DECL_VINDEX (fndecl) = NULL_TREE; | |
3900 | } | |
3901 | ||
fd8d648f | 3902 | /* When signature changes, we need to clear builtin info. */ |
3903 | if (DECL_BUILT_IN (fndecl)) | |
3904 | { | |
3905 | DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN; | |
3906 | DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0; | |
3907 | } | |
3908 | ||
547f1802 | 3909 | TREE_TYPE (fndecl) = new_type; |
d050bafd | 3910 | DECL_VIRTUAL_P (fndecl) = 0; |
fa19d637 | 3911 | DECL_LANG_SPECIFIC (fndecl) = NULL; |
f1f41a6c | 3912 | otypes.release (); |
3913 | oparms.release (); | |
547f1802 | 3914 | } |
3915 | ||
/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
   If this is a directly recursive call, CS must be NULL.  Otherwise it must
   contain the corresponding call graph edge.  */

void
ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
			   ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
  vec<tree> vargs;
  vec<tree, va_gc> **debug_args = NULL;
  gcall *new_stmt;
  gimple_stmt_iterator gsi, prev_gsi;
  tree callee_decl;
  int i, len;

  len = adjustments.length ();
  vargs.create (len);
  /* For a recursive call (CS == NULL) the callee is the current function's
     decl as recorded in the statement itself.  */
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
  /* Drop references generated by the old statement; they are re-recorded
     for all newly emitted statements at the end of this function.  */
  current_node->remove_stmt_references (stmt);

  gsi = gsi_for_stmt (stmt);
  /* Remember the statement before STMT so that everything inserted in
     between (temporaries, debug binds) can be walked afterwards.  */
  prev_gsi = gsi;
  gsi_prev (&prev_gsi);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  /* Argument is passed through unchanged.  */
	  tree arg = gimple_call_arg (stmt, adj->base_index);

	  vargs.quick_push (arg);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  tree expr, base, off;
	  location_t loc;
	  unsigned int deref_align = 0;
	  bool deref_base = false;

	  /* We create a new parameter out of the value of the old one, we can
	     do the following kind of transformations:

	     - A scalar passed by reference is converted to a scalar passed by
	       value.  (adj->by_ref is false and the type of the original
	       actual argument is a pointer to a scalar).

	     - A part of an aggregate is passed instead of the whole aggregate.
	       The part can be passed either by value or by reference, this is
	       determined by value of adj->by_ref.  Moreover, the code below
	       handles both situations when the original aggregate is passed by
	       value (its type is not a pointer) and when it is passed by
	       reference (it is a pointer to an aggregate).

	     When the new argument is passed by reference (adj->by_ref is true)
	     it must be a part of an aggregate and therefore we form it by
	     simply taking the address of a reference inside the original
	     aggregate.  */

	  gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
	  base = gimple_call_arg (stmt, adj->base_index);
	  loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
			      : EXPR_LOCATION (base);

	  if (TREE_CODE (base) != ADDR_EXPR
	      && POINTER_TYPE_P (TREE_TYPE (base)))
	    /* Plain pointer argument: the adjustment offset becomes the
	       MEM_REF offset directly.  */
	    off = build_int_cst (adj->alias_ptr_type,
				 adj->offset / BITS_PER_UNIT);
	  else
	    {
	      HOST_WIDE_INT base_offset;
	      tree prev_base;
	      bool addrof;

	      if (TREE_CODE (base) == ADDR_EXPR)
		{
		  base = TREE_OPERAND (base, 0);
		  addrof = true;
		}
	      else
		addrof = false;
	      prev_base = base;
	      base = get_addr_base_and_unit_offset (base, &base_offset);
	      /* Aggregate arguments can have non-invariant addresses.  */
	      if (!base)
		{
		  base = build_fold_addr_expr (prev_base);
		  off = build_int_cst (adj->alias_ptr_type,
				       adj->offset / BITS_PER_UNIT);
		}
	      else if (TREE_CODE (base) == MEM_REF)
		{
		  if (!addrof)
		    {
		      /* The original was already a dereference; remember its
			 alignment so the load below does not pessimize.  */
		      deref_base = true;
		      deref_align = TYPE_ALIGN (TREE_TYPE (base));
		    }
		  /* Fold the MEM_REF's own offset into the new one.  */
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
					 off);
		  base = TREE_OPERAND (base, 0);
		}
	      else
		{
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  base = build_fold_addr_expr (base);
		}
	    }

	  if (!adj->by_ref)
	    {
	      tree type = adj->type;
	      unsigned int align;
	      unsigned HOST_WIDE_INT misalign;

	      if (deref_base)
		{
		  align = deref_align;
		  misalign = 0;
		}
	      else
		{
		  get_pointer_alignment_1 (base, &align, &misalign);
		  if (TYPE_ALIGN (type) > align)
		    align = TYPE_ALIGN (type);
		}
	      /* Account for the constant offset when computing the actual
		 alignment of the accessed location.  */
	      misalign += (offset_int::from (off, SIGNED).to_short_addr ()
			   * BITS_PER_UNIT);
	      misalign = misalign & (align - 1);
	      if (misalign != 0)
		align = (misalign & -misalign);
	      if (align < TYPE_ALIGN (type))
		type = build_aligned_type (type, align);
	      base = force_gimple_operand_gsi (&gsi, base,
					       true, NULL, true, GSI_SAME_STMT);
	      expr = fold_build2_loc (loc, MEM_REF, type, base, off);
	      /* If expr is not a valid gimple call argument emit
	         a load into a temporary.  */
	      if (is_gimple_reg_type (TREE_TYPE (expr)))
		{
		  gimple tem = gimple_build_assign (NULL_TREE, expr);
		  if (gimple_in_ssa_p (cfun))
		    {
		      /* The load reads the same memory state as the call.  */
		      gimple_set_vuse (tem, gimple_vuse (stmt));
		      expr = make_ssa_name (TREE_TYPE (expr), tem);
		    }
		  else
		    expr = create_tmp_reg (TREE_TYPE (expr));
		  gimple_assign_set_lhs (tem, expr);
		  gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
		}
	    }
	  else
	    {
	      /* Pass the address of the selected piece of the aggregate.  */
	      expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
	      expr = build_fold_addr_expr (expr);
	      expr = force_gimple_operand_gsi (&gsi, expr,
					       true, NULL, true, GSI_SAME_STMT);
	    }
	  vargs.quick_push (expr);
	}
      /* For every removed or reduced argument, emit a debug bind so the
	 original parameter value remains visible to the debugger.  */
      if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
	{
	  unsigned int ix;
	  tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
	  gimple def_temp;

	  arg = gimple_call_arg (stmt, adj->base_index);
	  if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
	    {
	      if (!fold_convertible_p (TREE_TYPE (origin), arg))
		continue;
	      arg = fold_convert_loc (gimple_location (stmt),
				      TREE_TYPE (origin), arg);
	    }
	  if (debug_args == NULL)
	    debug_args = decl_debug_args_insert (callee_decl);
	  /* DEBUG_ARGS is a flat vector of (origin, debug-decl) pairs;
	     look for an existing entry for ORIGIN.  */
	  for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
	    if (ddecl == origin)
	      {
		ddecl = (**debug_args)[ix + 1];
		break;
	      }
	  if (ddecl == NULL)
	    {
	      ddecl = make_node (DEBUG_EXPR_DECL);
	      DECL_ARTIFICIAL (ddecl) = 1;
	      TREE_TYPE (ddecl) = TREE_TYPE (origin);
	      DECL_MODE (ddecl) = DECL_MODE (origin);

	      vec_safe_push (*debug_args, origin);
	      vec_safe_push (*debug_args, ddecl);
	    }
	  def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  vargs.release ();
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  /* Carry over location, block, static chain, flags and virtual operands
     from the original call.  */
  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  if (gimple_in_ssa_p (cfun))
    {
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      if (gimple_vdef (stmt))
	{
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cs->set_call_stmt (new_stmt);
  /* Walk backwards over the new call and everything inserted before it and
     re-record their references in the call graph.  */
  do
    {
      current_node->record_stmt_references (gsi_stmt (gsi));
      gsi_prev (&gsi);
    }
  while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
}
4163 | ||
/* If the expression *EXPR should be replaced by a reduction of a parameter, do
   so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
   specifies whether the function should care about type incompatibility the
   current and new expressions.  If it is false, the function will leave
   incompatibility issues to the caller.  Return true iff the expression
   was modified.  */

bool
ipa_modify_expr (tree *expr, bool convert,
		 ipa_parm_adjustment_vec adjustments)
{
  /* CAND describes the replacement of (a part of) *EXPR; the call may also
     retarget EXPR at an inner operand and set CONVERT.  */
  struct ipa_parm_adjustment *cand
    = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
  if (!cand)
    return false;

  tree src;
  if (cand->by_ref)
    /* The replacement is passed by reference, dereference it.  */
    src = build_simple_mem_ref (cand->new_decl);
  else
    src = cand->new_decl;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }

  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      /* Re-interpret the replacement in the type the context expects.  */
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}
4204 | ||
4205 | /* If T is an SSA_NAME, return NULL if it is not a default def or | |
4206 | return its base variable if it is. If IGNORE_DEFAULT_DEF is true, | |
4207 | the base variable is always returned, regardless if it is a default | |
4208 | def. Return T if it is not an SSA_NAME. */ | |
4209 | ||
4210 | static tree | |
4211 | get_ssa_base_param (tree t, bool ignore_default_def) | |
4212 | { | |
4213 | if (TREE_CODE (t) == SSA_NAME) | |
4214 | { | |
4215 | if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t)) | |
4216 | return SSA_NAME_VAR (t); | |
4217 | else | |
4218 | return NULL_TREE; | |
4219 | } | |
4220 | return t; | |
4221 | } | |
4222 | ||
/* Given an expression, return an adjustment entry specifying the
   transformation to be done on EXPR.  If no suitable adjustment entry
   was found, returns NULL.

   If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
   default def, otherwise bail on them.

   If CONVERT is non-NULL, this function will set *CONVERT if the
   expression provided is a component reference.  ADJUSTMENTS is the
   adjustments vector.  */

ipa_parm_adjustment *
ipa_get_adjustment_candidate (tree **expr, bool *convert,
			      ipa_parm_adjustment_vec adjustments,
			      bool ignore_default_def)
{
  /* Strip wrappers that only re-interpret bits; the caller must then
     view-convert the replacement, so flag that via *CONVERT.  */
  if (TREE_CODE (**expr) == BIT_FIELD_REF
      || TREE_CODE (**expr) == IMAGPART_EXPR
      || TREE_CODE (**expr) == REALPART_EXPR)
    {
      *expr = &TREE_OPERAND (**expr, 0);
      if (convert)
	*convert = true;
    }

  HOST_WIDE_INT offset, size, max_size;
  tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
  if (!base || size == -1 || max_size == -1)
    return NULL;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant offset into OFFSET (bits) and look
	 through to the address operand.  */
      offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }

  base = get_ssa_base_param (base, ignore_default_def);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return NULL;

  /* Find the adjustment describing this parameter (and, unless the
     parameter is removed altogether, this exact offset within it).  */
  struct ipa_parm_adjustment *cand = NULL;
  unsigned int len = adjustments.length ();
  for (unsigned i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj = &adjustments[i];

      if (adj->base == base
	  && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
	{
	  cand = adj;
	  break;
	}
    }

  /* Only genuine replacements are interesting to the caller; plain copies
     and removals need no expression rewriting here.  */
  if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
    return NULL;
  return cand;
}
4281 | ||
547f1802 | 4282 | /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */ |
4283 | ||
4284 | static bool | |
4285 | index_in_adjustments_multiple_times_p (int base_index, | |
4286 | ipa_parm_adjustment_vec adjustments) | |
4287 | { | |
f1f41a6c | 4288 | int i, len = adjustments.length (); |
547f1802 | 4289 | bool one = false; |
4290 | ||
4291 | for (i = 0; i < len; i++) | |
4292 | { | |
4293 | struct ipa_parm_adjustment *adj; | |
f1f41a6c | 4294 | adj = &adjustments[i]; |
547f1802 | 4295 | |
4296 | if (adj->base_index == base_index) | |
4297 | { | |
4298 | if (one) | |
4299 | return true; | |
4300 | else | |
4301 | one = true; | |
4302 | } | |
4303 | } | |
4304 | return false; | |
4305 | } | |
4306 | ||
4307 | ||
/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments in
   INNER and then by adjustments in OUTER.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
{
  int i, outlen = outer.length ();
  int inlen = inner.length ();
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  /* TMP holds the surviving inner adjustments; OUTER's base_index values
     are indices into this reduced vector.  */
  tmp.create (inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	removals++;
      else
	{
	  /* FIXME: Handling of new arguments are not implemented yet.  */
	  gcc_assert (n->op != IPA_PARM_OP_NEW);
	  tmp.quick_push (*n);
	}
    }

  adjustments.create (outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment r;
      struct ipa_parm_adjustment *out = &outer[i];
      /* The inner adjustment this outer one applies to.  */
      struct ipa_parm_adjustment *in = &tmp[out->base_index];

      memset (&r, 0, sizeof (r));
      gcc_assert (in->op != IPA_PARM_OP_REMOVE);
      if (out->op == IPA_PARM_OP_REMOVE)
	{
	  /* Propagate the removal only when this is the single use of the
	     original parameter; otherwise other pieces still need it.  */
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	    {
	      r.op = IPA_PARM_OP_REMOVE;
	      adjustments.quick_push (r);
	    }
	  continue;
	}
      else
	{
	  /* FIXME: Handling of new arguments are not implemented yet.  */
	  gcc_assert (out->op != IPA_PARM_OP_NEW);
	}

      r.base_index = in->base_index;
      r.type = out->type;

      /* FIXME: Create nonlocal value too.  */

      /* Compose the two offsets: a COPY contributes nothing, two genuine
	 reductions add up.  */
      if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
	r.op = IPA_PARM_OP_COPY;
      else if (in->op == IPA_PARM_OP_COPY)
	r.offset = out->offset;
      else if (out->op == IPA_PARM_OP_COPY)
	r.offset = in->offset;
      else
	r.offset = in->offset + out->offset;
      adjustments.quick_push (r);
    }

  /* Re-append the inner removals; they are unaffected by OUTER.  */
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	adjustments.quick_push (*n);
    }

  tmp.release ();
  return adjustments;
}
4388 | ||
/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
   friendly way, assuming they are meant to be applied to FNDECL.  */

void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
			    tree fndecl)
{
  int i, len = adjustments.length ();
  bool first = true;
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      /* Indent continuation lines to line up under the header.  */
      if (!first)
	fprintf (file, "                 ");
      else
	first = false;

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, parms[adj->base_index], 0);
      if (adj->base)
	{
	  fprintf (file, ", base: ");
	  print_generic_expr (file, adj->base, 0);
	}
      if (adj->new_decl)
	{
	  fprintf (file, ", new_decl: ");
	  print_generic_expr (file, adj->new_decl, 0);
	}
      if (adj->new_ssa_base)
	{
	  fprintf (file, ", new_ssa_base: ");
	  print_generic_expr (file, adj->new_ssa_base, 0);
	}

      if (adj->op == IPA_PARM_OP_COPY)
	fprintf (file, ", copy_param");
      else if (adj->op == IPA_PARM_OP_REMOVE)
	fprintf (file, ", remove_param");
      else
	fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
	fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  parms.release ();
}
4442 | ||
803a7988 | 4443 | /* Dump the AV linked list. */ |
4444 | ||
4445 | void | |
4446 | ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av) | |
4447 | { | |
4448 | bool comma = false; | |
4449 | fprintf (f, " Aggregate replacements:"); | |
4450 | for (; av; av = av->next) | |
4451 | { | |
4452 | fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "", | |
4453 | av->index, av->offset); | |
4454 | print_generic_expr (f, av->value, 0); | |
4455 | comma = true; | |
4456 | } | |
4457 | fprintf (f, "\n"); | |
4458 | } | |
4459 | ||
/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  /* NOTE: the field order here must be mirrored exactly by
     ipa_read_jump_function.  */
  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  /* Simple pass-through: no operand, but agg_preserved matters.  */
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else
	{
	  /* Arithmetic pass-through carries the second operand.  */
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  /* Aggregate jump-function items, preceded by their count and, when
     non-empty, the by_ref flag.  */
  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }

  /* Known-alignment info is optional; a single bit gates the two values.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, jump_func->alignment.known, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->alignment.known)
    {
      streamer_write_uhwi (ob, jump_func->alignment.align);
      streamer_write_uhwi (ob, jump_func->alignment.misalign);
    }
}
4528 | ||
/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  /* NOTE: this must read fields in exactly the order that
     ipa_write_jump_function emits them.  */
  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  /* Simple pass-through: formal id plus agg_preserved bit.  */
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else
	{
	  /* Arithmetic pass-through: operand tree precedes the formal id.  */
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
	break;
      }
    }

  /* Aggregate items: count, optional by_ref bit, then the items.  */
  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }

  /* Optional alignment info, gated by a single known bit.  */
  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool alignment_known = bp_unpack_value (&bp, 1);
  if (alignment_known)
    {
      jump_func->alignment.known = true;
      jump_func->alignment.align = streamer_read_uhwi (ib);
      jump_func->alignment.misalign = streamer_read_uhwi (ib);
    }
  else
    jump_func->alignment.known = false;
}
4604 | ||
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  /* NOTE: field order must match ipa_read_indirect_edge_info.  */
  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  /* The offset is only meaningful for aggregate or polymorphic calls;
     otherwise it must be zero and is not streamed.  */
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
4635 | ||
799c8711 | 4636 | /* Read in parts of cgraph_indirect_call_info corresponding to CS that are |
4637 | relevant to indirect inlining from IB. */ | |
00e1f01e | 4638 | |
4639 | static void | |
799c8711 | 4640 | ipa_read_indirect_edge_info (struct lto_input_block *ib, |
e33892d7 | 4641 | struct data_in *data_in, |
799c8711 | 4642 | struct cgraph_edge *cs) |
00e1f01e | 4643 | { |
799c8711 | 4644 | struct cgraph_indirect_call_info *ii = cs->indirect_info; |
30baba90 | 4645 | struct bitpack_d bp; |
00e1f01e | 4646 | |
7f385784 | 4647 | ii->param_index = (int) streamer_read_hwi (ib); |
7f385784 | 4648 | bp = streamer_read_bitpack (ib); |
30baba90 | 4649 | ii->polymorphic = bp_unpack_value (&bp, 1); |
0d491188 | 4650 | ii->agg_contents = bp_unpack_value (&bp, 1); |
2f6c1cf4 | 4651 | ii->member_ptr = bp_unpack_value (&bp, 1); |
0d491188 | 4652 | ii->by_ref = bp_unpack_value (&bp, 1); |
43aac8cb | 4653 | ii->vptr_changed = bp_unpack_value (&bp, 1); |
e33892d7 | 4654 | if (ii->agg_contents || ii->polymorphic) |
4655 | ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib); | |
4656 | else | |
4657 | ii->offset = 0; | |
6378ffb3 | 4658 | if (ii->polymorphic) |
4659 | { | |
7f385784 | 4660 | ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib); |
515cf651 | 4661 | ii->otr_type = stream_read_tree (ib, data_in); |
e33892d7 | 4662 | ii->context.stream_in (ib, data_in); |
6378ffb3 | 4663 | } |
00e1f01e | 4664 | } |
4665 | ||
/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  /* Identify the node by its symtab encoder reference.  */
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  /* Parameters may only be present when the node has been analyzed.  */
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      /* Encode the argument count doubled, with the low bit recording
	 whether polymorphic call contexts follow each jump function.  */
      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
      /* Indirect edges additionally carry their indirect-call info.  */
      ipa_write_indirect_edge_info (ob, e);
    }
}
4725 | ||
/* Stream in NODE info from IB.  The layout must mirror
   ipa_write_node_info exactly: parameter count, per-parameter move
   costs, a bitpack of "used" flags, per-parameter controlled-use
   counts, then per-edge jump functions for callees and indirect
   calls.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    info->descriptors[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  if (ipa_get_param_count (info) != 0)
    info->analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      /* The writer streams the argument count doubled, with the lowest
	 bit flagging whether a polymorphic call context follows each
	 jump function.  */
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (!count)
	continue;
      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (count)
	{
	  vec_safe_grow_cleared (args->jump_functions, count);
	  if (contexts_computed)
	    vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    {
	      ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				      data_in);
	      if (contexts_computed)
		ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
	    }
	}
      /* Indirect edges additionally carry indirect call info, streamed
	 even when the edge has no jump functions.  */
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
4794 | ||
4795 | /* Write jump functions for nodes in SET. */ | |
4796 | ||
4797 | void | |
eab36a5a | 4798 | ipa_prop_write_jump_functions (void) |
8867b500 | 4799 | { |
4800 | struct cgraph_node *node; | |
0a0b4924 | 4801 | struct output_block *ob; |
8867b500 | 4802 | unsigned int count = 0; |
eab36a5a | 4803 | lto_symtab_encoder_iterator lsei; |
4804 | lto_symtab_encoder_t encoder; | |
4805 | ||
2cc80ac3 | 4806 | if (!ipa_node_params_sum) |
0a0b4924 | 4807 | return; |
8867b500 | 4808 | |
0a0b4924 | 4809 | ob = create_output_block (LTO_section_jump_functions); |
eab36a5a | 4810 | encoder = ob->decl_state->symtab_node_encoder; |
afb0d513 | 4811 | ob->symbol = NULL; |
eab36a5a | 4812 | for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei); |
4813 | lsei_next_function_in_partition (&lsei)) | |
8867b500 | 4814 | { |
eab36a5a | 4815 | node = lsei_cgraph_node (lsei); |
415d1b9a | 4816 | if (node->has_gimple_body_p () |
91bf9d9a | 4817 | && IPA_NODE_REF (node) != NULL) |
8867b500 | 4818 | count++; |
4819 | } | |
4820 | ||
7f385784 | 4821 | streamer_write_uhwi (ob, count); |
8867b500 | 4822 | |
4823 | /* Process all of the functions. */ | |
eab36a5a | 4824 | for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei); |
4825 | lsei_next_function_in_partition (&lsei)) | |
8867b500 | 4826 | { |
eab36a5a | 4827 | node = lsei_cgraph_node (lsei); |
415d1b9a | 4828 | if (node->has_gimple_body_p () |
91bf9d9a | 4829 | && IPA_NODE_REF (node) != NULL) |
8867b500 | 4830 | ipa_write_node_info (ob, node); |
4831 | } | |
7f385784 | 4832 | streamer_write_char_stream (ob->main_stream, 0); |
8867b500 | 4833 | produce_asm (ob, NULL); |
4834 | destroy_output_block (ob); | |
4835 | } | |
4836 | ||
4837 | /* Read section in file FILE_DATA of length LEN with data DATA. */ | |
4838 | ||
4839 | static void | |
4840 | ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data, | |
4841 | size_t len) | |
4842 | { | |
4843 | const struct lto_function_header *header = | |
4844 | (const struct lto_function_header *) data; | |
949e5786 | 4845 | const int cfg_offset = sizeof (struct lto_function_header); |
4846 | const int main_offset = cfg_offset + header->cfg_size; | |
4847 | const int string_offset = main_offset + header->main_size; | |
8867b500 | 4848 | struct data_in *data_in; |
8867b500 | 4849 | unsigned int i; |
4850 | unsigned int count; | |
4851 | ||
472ca566 | 4852 | lto_input_block ib_main ((const char *) data + main_offset, |
2e971afd | 4853 | header->main_size, file_data->mode_table); |
8867b500 | 4854 | |
4855 | data_in = | |
4856 | lto_data_in_create (file_data, (const char *) data + string_offset, | |
1e094109 | 4857 | header->string_size, vNULL); |
7f385784 | 4858 | count = streamer_read_uhwi (&ib_main); |
8867b500 | 4859 | |
4860 | for (i = 0; i < count; i++) | |
4861 | { | |
4862 | unsigned int index; | |
4863 | struct cgraph_node *node; | |
70225339 | 4864 | lto_symtab_encoder_t encoder; |
8867b500 | 4865 | |
7f385784 | 4866 | index = streamer_read_uhwi (&ib_main); |
70225339 | 4867 | encoder = file_data->symtab_node_encoder; |
415d1b9a | 4868 | node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder, |
4869 | index)); | |
02774f2d | 4870 | gcc_assert (node->definition); |
8867b500 | 4871 | ipa_read_node_info (&ib_main, node, data_in); |
4872 | } | |
4873 | lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data, | |
4874 | len); | |
4875 | lto_data_in_delete (data_in); | |
4876 | } | |
4877 | ||
4878 | /* Read ipcp jump functions. */ | |
4879 | ||
4880 | void | |
4881 | ipa_prop_read_jump_functions (void) | |
4882 | { | |
4883 | struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data (); | |
4884 | struct lto_file_decl_data *file_data; | |
4885 | unsigned int j = 0; | |
4886 | ||
4887 | ipa_check_create_node_params (); | |
4888 | ipa_check_create_edge_args (); | |
4889 | ipa_register_cgraph_hooks (); | |
4890 | ||
4891 | while ((file_data = file_data_vec[j++])) | |
4892 | { | |
4893 | size_t len; | |
4894 | const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len); | |
4895 | ||
4896 | if (data) | |
4897 | ipa_prop_read_section (file_data, data, len); | |
4898 | } | |
4899 | } | |
4900 | ||
/* After merging units, we can get mismatch in argument counts.
   Also decl merging might've rendered parameter lists obsolete.
   Ensure the IPA summaries exist so later passes can rebuild or
   adjust the per-node and per-edge information.  */

void
ipa_update_after_lto_read (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}
803a7988 | 4911 | |
/* Stream out the IPA-CP transformation summary for NODE: its chain of
   aggregate replacement values followed by known parameter alignment
   information.  read_ipcp_transformation_info must mirror this layout
   exactly.  */

void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  /* Number of aggregate replacements, then each replacement as
     offset, parameter index, value tree and a by_ref bit.  */
  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  /* Alignment info: a count (zero when nothing is known), then for
     each parameter a "known" bit and, when set, its alignment and
     misalignment.  */
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->alignments) > 0)
    {
      count = ts->alignments->length ();

      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	{
	  ipa_alignment *parm_al = &(*ts->alignments)[i];

	  struct bitpack_d bp;
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_al->known, 1);
	  streamer_write_bitpack (&bp);
	  if (parm_al->known)
	    {
	      streamer_write_uhwi (ob, parm_al->align);
	      streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
					   parm_al->misalign);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);
}
4967 | ||
/* Stream in the aggregate value replacement chain for NODE from IB,
   followed by the parameter alignment info.  Mirrors
   write_ipcp_transformation_info.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  /* Rebuild the replacement chain; prepending reverses the stream
     order, which does not matter to consumers of the chain.  */
  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  /* Alignment info; a zero count means the writer had nothing known
     and streamed only the count.  */
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_grow_transformations_if_necessary ();

      ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
      vec_safe_grow_cleared (ts->alignments, count);

      for (i = 0; i < count; i++)
	{
	  ipa_alignment *parm_al;
	  parm_al = &(*ts->alignments)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_al->known = bp_unpack_value (&bp, 1);
	  if (parm_al->known)
	    {
	      parm_al->align = streamer_read_uhwi (ib);
	      parm_al->misalign
		= streamer_read_hwi_in_range (ib, "ipa-prop misalign",
					      0, parm_al->align);
	    }
	}
    }
}
5019 | ||
5020 | /* Write all aggregate replacement for nodes in set. */ | |
5021 | ||
5022 | void | |
ae7b7bc8 | 5023 | ipcp_write_transformation_summaries (void) |
803a7988 | 5024 | { |
5025 | struct cgraph_node *node; | |
5026 | struct output_block *ob; | |
5027 | unsigned int count = 0; | |
5028 | lto_symtab_encoder_iterator lsei; | |
5029 | lto_symtab_encoder_t encoder; | |
5030 | ||
803a7988 | 5031 | ob = create_output_block (LTO_section_ipcp_transform); |
5032 | encoder = ob->decl_state->symtab_node_encoder; | |
afb0d513 | 5033 | ob->symbol = NULL; |
803a7988 | 5034 | for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei); |
5035 | lsei_next_function_in_partition (&lsei)) | |
5036 | { | |
5037 | node = lsei_cgraph_node (lsei); | |
ae7b7bc8 | 5038 | if (node->has_gimple_body_p ()) |
803a7988 | 5039 | count++; |
5040 | } | |
5041 | ||
5042 | streamer_write_uhwi (ob, count); | |
5043 | ||
5044 | for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei); | |
5045 | lsei_next_function_in_partition (&lsei)) | |
5046 | { | |
5047 | node = lsei_cgraph_node (lsei); | |
ae7b7bc8 | 5048 | if (node->has_gimple_body_p ()) |
5049 | write_ipcp_transformation_info (ob, node); | |
803a7988 | 5050 | } |
5051 | streamer_write_char_stream (ob->main_stream, 0); | |
5052 | produce_asm (ob, NULL); | |
5053 | destroy_output_block (ob); | |
5054 | } | |
5055 | ||
5056 | /* Read replacements section in file FILE_DATA of length LEN with data | |
5057 | DATA. */ | |
5058 | ||
5059 | static void | |
5060 | read_replacements_section (struct lto_file_decl_data *file_data, | |
5061 | const char *data, | |
5062 | size_t len) | |
5063 | { | |
5064 | const struct lto_function_header *header = | |
5065 | (const struct lto_function_header *) data; | |
5066 | const int cfg_offset = sizeof (struct lto_function_header); | |
5067 | const int main_offset = cfg_offset + header->cfg_size; | |
5068 | const int string_offset = main_offset + header->main_size; | |
5069 | struct data_in *data_in; | |
803a7988 | 5070 | unsigned int i; |
5071 | unsigned int count; | |
5072 | ||
472ca566 | 5073 | lto_input_block ib_main ((const char *) data + main_offset, |
2e971afd | 5074 | header->main_size, file_data->mode_table); |
803a7988 | 5075 | |
5076 | data_in = lto_data_in_create (file_data, (const char *) data + string_offset, | |
1e094109 | 5077 | header->string_size, vNULL); |
803a7988 | 5078 | count = streamer_read_uhwi (&ib_main); |
5079 | ||
5080 | for (i = 0; i < count; i++) | |
5081 | { | |
5082 | unsigned int index; | |
5083 | struct cgraph_node *node; | |
5084 | lto_symtab_encoder_t encoder; | |
5085 | ||
5086 | index = streamer_read_uhwi (&ib_main); | |
5087 | encoder = file_data->symtab_node_encoder; | |
415d1b9a | 5088 | node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder, |
5089 | index)); | |
02774f2d | 5090 | gcc_assert (node->definition); |
ae7b7bc8 | 5091 | read_ipcp_transformation_info (&ib_main, node, data_in); |
803a7988 | 5092 | } |
5093 | lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data, | |
5094 | len); | |
5095 | lto_data_in_delete (data_in); | |
5096 | } | |
5097 | ||
5098 | /* Read IPA-CP aggregate replacements. */ | |
5099 | ||
5100 | void | |
ae7b7bc8 | 5101 | ipcp_read_transformation_summaries (void) |
803a7988 | 5102 | { |
5103 | struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data (); | |
5104 | struct lto_file_decl_data *file_data; | |
5105 | unsigned int j = 0; | |
5106 | ||
5107 | while ((file_data = file_data_vec[j++])) | |
5108 | { | |
5109 | size_t len; | |
5110 | const char *data = lto_get_section_data (file_data, | |
5111 | LTO_section_ipcp_transform, | |
5112 | NULL, &len); | |
5113 | if (data) | |
5114 | read_replacements_section (file_data, data, len); | |
5115 | } | |
5116 | } | |
5117 | ||
5118 | /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in | |
5119 | NODE. */ | |
5120 | ||
5121 | static void | |
5122 | adjust_agg_replacement_values (struct cgraph_node *node, | |
5123 | struct ipa_agg_replacement_value *aggval) | |
5124 | { | |
5125 | struct ipa_agg_replacement_value *v; | |
5126 | int i, c = 0, d = 0, *adj; | |
5127 | ||
5128 | if (!node->clone.combined_args_to_skip) | |
5129 | return; | |
5130 | ||
5131 | for (v = aggval; v; v = v->next) | |
5132 | { | |
5133 | gcc_assert (v->index >= 0); | |
5134 | if (c < v->index) | |
5135 | c = v->index; | |
5136 | } | |
5137 | c++; | |
5138 | ||
5139 | adj = XALLOCAVEC (int, c); | |
5140 | for (i = 0; i < c; i++) | |
5141 | if (bitmap_bit_p (node->clone.combined_args_to_skip, i)) | |
5142 | { | |
5143 | adj[i] = -1; | |
5144 | d++; | |
5145 | } | |
5146 | else | |
5147 | adj[i] = i - d; | |
5148 | ||
5149 | for (v = aggval; v; v = v->next) | |
5150 | v->index = adj[v->index]; | |
5151 | } | |
5152 | ||
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct func_body_info *fbi,
			 vec<ipa_param_descriptor> descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual void before_dom_children (basic_block);

private:
  /* Function-body analysis info used when matching loads against
     parameters.  */
  struct func_body_info *m_fbi;
  /* Descriptors of the formal parameters of the processed function.  */
  vec<ipa_param_descriptor> m_descriptors;
  /* Chain of aggregate replacement values to substitute for loads.  */
  struct ipa_agg_replacement_value *m_aggval;
  /* Out-flags set when any statement, respectively the CFG, has been
     modified.  */
  bool *m_something_changed, *m_cfg_changed;
};
5173 | ||
5174 | void | |
5175 | ipcp_modif_dom_walker::before_dom_children (basic_block bb) | |
5176 | { | |
5177 | gimple_stmt_iterator gsi; | |
5178 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
5179 | { | |
5180 | struct ipa_agg_replacement_value *v; | |
5181 | gimple stmt = gsi_stmt (gsi); | |
5182 | tree rhs, val, t; | |
5183 | HOST_WIDE_INT offset, size; | |
5184 | int index; | |
5185 | bool by_ref, vce; | |
5186 | ||
5187 | if (!gimple_assign_load_p (stmt)) | |
5188 | continue; | |
5189 | rhs = gimple_assign_rhs1 (stmt); | |
5190 | if (!is_gimple_reg_type (TREE_TYPE (rhs))) | |
5191 | continue; | |
803a7988 | 5192 | |
24430d08 | 5193 | vce = false; |
5194 | t = rhs; | |
5195 | while (handled_component_p (t)) | |
5196 | { | |
5197 | /* V_C_E can do things like convert an array of integers to one | |
5198 | bigger integer and similar things we do not handle below. */ | |
5199 | if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR) | |
5200 | { | |
5201 | vce = true; | |
5202 | break; | |
5203 | } | |
5204 | t = TREE_OPERAND (t, 0); | |
5205 | } | |
5206 | if (vce) | |
5207 | continue; | |
5208 | ||
5209 | if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index, | |
5210 | &offset, &size, &by_ref)) | |
5211 | continue; | |
5212 | for (v = m_aggval; v; v = v->next) | |
5213 | if (v->index == index | |
5214 | && v->offset == offset) | |
5215 | break; | |
5216 | if (!v | |
5217 | || v->by_ref != by_ref | |
5218 | || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size) | |
5219 | continue; | |
5220 | ||
5221 | gcc_checking_assert (is_gimple_ip_invariant (v->value)); | |
5222 | if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value))) | |
5223 | { | |
5224 | if (fold_convertible_p (TREE_TYPE (rhs), v->value)) | |
5225 | val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value); | |
5226 | else if (TYPE_SIZE (TREE_TYPE (rhs)) | |
5227 | == TYPE_SIZE (TREE_TYPE (v->value))) | |
5228 | val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value); | |
5229 | else | |
5230 | { | |
5231 | if (dump_file) | |
5232 | { | |
5233 | fprintf (dump_file, " const "); | |
5234 | print_generic_expr (dump_file, v->value, 0); | |
5235 | fprintf (dump_file, " can't be converted to type of "); | |
5236 | print_generic_expr (dump_file, rhs, 0); | |
5237 | fprintf (dump_file, "\n"); | |
5238 | } | |
5239 | continue; | |
5240 | } | |
5241 | } | |
5242 | else | |
5243 | val = v->value; | |
5244 | ||
5245 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
5246 | { | |
5247 | fprintf (dump_file, "Modifying stmt:\n "); | |
5248 | print_gimple_stmt (dump_file, stmt, 0, 0); | |
5249 | } | |
5250 | gimple_assign_set_rhs_from_tree (&gsi, val); | |
5251 | update_stmt (stmt); | |
5252 | ||
5253 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
5254 | { | |
5255 | fprintf (dump_file, "into:\n "); | |
5256 | print_gimple_stmt (dump_file, stmt, 0, 0); | |
5257 | fprintf (dump_file, "\n"); | |
5258 | } | |
5259 | ||
5260 | *m_something_changed = true; | |
5261 | if (maybe_clean_eh_stmt (stmt) | |
5262 | && gimple_purge_dead_eh_edges (gimple_bb (stmt))) | |
5263 | *m_cfg_changed = true; | |
5264 | } | |
5265 | ||
5266 | } | |
5267 | ||
/* Update alignment of formal parameters as described in
   ipcp_transformation_summary.  */

static void
ipcp_update_alignments (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->alignments) == 0)
    return;
  const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
  unsigned count = alignments.length ();

  /* I indexes parameters of the original function while PARM walks
     DECL_ARGUMENTS of NODE, which may be a clone with some parameters
     removed.  A skipped index therefore must not advance PARM: the
     continue below bypasses the next_parm update, so parm = next_parm
     at the loop head leaves PARM in place.  */
  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      /* Alignment can only be attached to a parameter's SSA default
	 definition, so skip parameters without one.  */
      if (!alignments[i].known || !is_gimple_reg (parm))
	continue;
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	fprintf (dump_file, "  Adjusting alignment of param %u to %u, "
		 "misalignment to %u\n", i, alignments[i].align,
		 alignments[i].misalign);

      struct ptr_info_def *pi = get_ptr_info (ddef);
      gcc_checking_assert (pi);
      unsigned old_align;
      unsigned old_misalign;
      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);

      /* Never weaken alignment info that is already at least as
	 strong.  */
      if (old_known
	  && old_align >= alignments[i].align)
	{
	  if (dump_file)
	    fprintf (dump_file, "    But the alignment was already %u.\n",
		     old_align);
	  continue;
	}
      set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
    }
}
5319 | ||
24430d08 | 5320 | /* IPCP transformation phase doing propagation of aggregate values. */ |
803a7988 | 5321 | |
5322 | unsigned int | |
5323 | ipcp_transform_function (struct cgraph_node *node) | |
5324 | { | |
b3e7c666 | 5325 | vec<ipa_param_descriptor> descriptors = vNULL; |
24430d08 | 5326 | struct func_body_info fbi; |
803a7988 | 5327 | struct ipa_agg_replacement_value *aggval; |
803a7988 | 5328 | int param_count; |
5329 | bool cfg_changed = false, something_changed = false; | |
5330 | ||
5331 | gcc_checking_assert (cfun); | |
5332 | gcc_checking_assert (current_function_decl); | |
5333 | ||
5334 | if (dump_file) | |
5335 | fprintf (dump_file, "Modification phase of node %s/%i\n", | |
f1c8b4d7 | 5336 | node->name (), node->order); |
803a7988 | 5337 | |
ae7b7bc8 | 5338 | ipcp_update_alignments (node); |
803a7988 | 5339 | aggval = ipa_get_agg_replacements_for_node (node); |
5340 | if (!aggval) | |
5341 | return 0; | |
02774f2d | 5342 | param_count = count_formal_params (node->decl); |
803a7988 | 5343 | if (param_count == 0) |
5344 | return 0; | |
5345 | adjust_agg_replacement_values (node, aggval); | |
5346 | if (dump_file) | |
5347 | ipa_dump_agg_replacement_values (dump_file, aggval); | |
803a7988 | 5348 | |
24430d08 | 5349 | fbi.node = node; |
5350 | fbi.info = NULL; | |
5351 | fbi.bb_infos = vNULL; | |
5352 | fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun)); | |
5353 | fbi.param_count = param_count; | |
5354 | fbi.aa_walked = 0; | |
803a7988 | 5355 | |
24430d08 | 5356 | descriptors.safe_grow_cleared (param_count); |
5357 | ipa_populate_param_decls (node, descriptors); | |
5358 | calculate_dominance_info (CDI_DOMINATORS); | |
5359 | ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed, | |
5360 | &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun)); | |
803a7988 | 5361 | |
24430d08 | 5362 | int i; |
5363 | struct ipa_bb_info *bi; | |
5364 | FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi) | |
5365 | free_ipa_bb_info (bi); | |
5366 | fbi.bb_infos.release (); | |
5367 | free_dominance_info (CDI_DOMINATORS); | |
ae7b7bc8 | 5368 | (*ipcp_transformations)[node->uid].agg_values = NULL; |
5369 | (*ipcp_transformations)[node->uid].alignments = NULL; | |
f1f41a6c | 5370 | descriptors.release (); |
803a7988 | 5371 | |
5372 | if (!something_changed) | |
5373 | return 0; | |
5374 | else if (cfg_changed) | |
5375 | return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg; | |
5376 | else | |
5377 | return TODO_update_ssa_only_virtuals; | |
5378 | } |