]>
Commit | Line | Data |
---|---|---|
518dc859 | 1 | /* Interprocedural analyses. |
7adcbafe | 2 | Copyright (C) 2005-2022 Free Software Foundation, Inc. |
518dc859 RL |
3 | |
4 | This file is part of GCC. | |
5 | ||
6 | GCC is free software; you can redistribute it and/or modify it under | |
7 | the terms of the GNU General Public License as published by the Free | |
9dcd6f09 | 8 | Software Foundation; either version 3, or (at your option) any later |
518dc859 RL |
9 | version. |
10 | ||
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
14 | for more details. | |
15 | ||
16 | You should have received a copy of the GNU General Public License | |
9dcd6f09 NC |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
518dc859 RL |
19 | |
20 | #include "config.h" | |
21 | #include "system.h" | |
22 | #include "coretypes.h" | |
c7131fb2 | 23 | #include "backend.h" |
957060b5 | 24 | #include "rtl.h" |
40e23961 | 25 | #include "tree.h" |
c7131fb2 | 26 | #include "gimple.h" |
957060b5 AM |
27 | #include "alloc-pool.h" |
28 | #include "tree-pass.h" | |
c7131fb2 | 29 | #include "ssa.h" |
957060b5 AM |
30 | #include "tree-streamer.h" |
31 | #include "cgraph.h" | |
32 | #include "diagnostic.h" | |
40e23961 | 33 | #include "fold-const.h" |
2fb9a547 AM |
34 | #include "gimple-fold.h" |
35 | #include "tree-eh.h" | |
36566b39 | 36 | #include "calls.h" |
d8a2d370 DN |
37 | #include "stor-layout.h" |
38 | #include "print-tree.h" | |
45b0be94 | 39 | #include "gimplify.h" |
5be5c238 | 40 | #include "gimple-iterator.h" |
18f429e2 | 41 | #include "gimplify-me.h" |
5be5c238 | 42 | #include "gimple-walk.h" |
dd912cb8 | 43 | #include "symbol-summary.h" |
518dc859 | 44 | #include "ipa-prop.h" |
442b4905 | 45 | #include "tree-cfg.h" |
442b4905 | 46 | #include "tree-dfa.h" |
771578a0 | 47 | #include "tree-inline.h" |
27d020cf | 48 | #include "ipa-fnsummary.h" |
cf835838 | 49 | #include "gimple-pretty-print.h" |
450ad0cd | 50 | #include "ipa-utils.h" |
2b5f0895 | 51 | #include "dbgcnt.h" |
8aab5218 | 52 | #include "domwalk.h" |
9b2b7279 | 53 | #include "builtins.h" |
95a2ed03 | 54 | #include "tree-cfgcleanup.h" |
c7b6a758 | 55 | #include "options.h" |
ae7a23a3 | 56 | #include "symtab-clones.h" |
6cef01c3 | 57 | #include "attr-fnspec.h" |
45f4e2b0 | 58 | #include "gimple-range.h" |
771578a0 | 59 | |
dd912cb8 ML |
60 | /* Function summary where the parameter infos are actually stored. */ |
61 | ipa_node_params_t *ipa_node_params_sum = NULL; | |
9d3e0adc ML |
62 | |
63 | function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL; | |
64 | ||
6fe906a3 MJ |
65 | /* Edge summary for IPA-CP edge information. */ |
66 | ipa_edge_args_sum_t *ipa_edge_args_sum; | |
771578a0 | 67 | |
86cd0334 MJ |
68 | /* Traits for a hash table for reusing already existing ipa_bits. */ |
69 | ||
70 | struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *> | |
71 | { | |
72 | typedef ipa_bits *value_type; | |
73 | typedef ipa_bits *compare_type; | |
74 | static hashval_t | |
75 | hash (const ipa_bits *p) | |
76 | { | |
77 | hashval_t t = (hashval_t) p->value.to_shwi (); | |
78 | return iterative_hash_host_wide_int (p->mask.to_shwi (), t); | |
79 | } | |
80 | static bool | |
81 | equal (const ipa_bits *a, const ipa_bits *b) | |
82 | { | |
83 | return a->value == b->value && a->mask == b->mask; | |
84 | } | |
7ca50de0 | 85 | static const bool empty_zero_p = true; |
86cd0334 MJ |
86 | static void |
87 | mark_empty (ipa_bits *&p) | |
88 | { | |
89 | p = NULL; | |
90 | } | |
91 | static bool | |
92 | is_empty (const ipa_bits *p) | |
93 | { | |
94 | return p == NULL; | |
95 | } | |
96 | static bool | |
97 | is_deleted (const ipa_bits *p) | |
98 | { | |
99 | return p == reinterpret_cast<const ipa_bits *> (1); | |
100 | } | |
101 | static void | |
102 | mark_deleted (ipa_bits *&p) | |
103 | { | |
104 | p = reinterpret_cast<ipa_bits *> (1); | |
105 | } | |
106 | }; | |
107 | ||
108 | /* Hash table for avoid repeated allocations of equal ipa_bits. */ | |
109 | static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table; | |
110 | ||
111 | /* Traits for a hash table for reusing value_ranges used for IPA. Note that | |
112 | the equiv bitmap is not hashed and is expected to be NULL. */ | |
113 | ||
028d81b1 | 114 | struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *> |
86cd0334 | 115 | { |
028d81b1 AH |
116 | typedef value_range *value_type; |
117 | typedef value_range *compare_type; | |
86cd0334 | 118 | static hashval_t |
028d81b1 | 119 | hash (const value_range *p) |
59b2c134 | 120 | { |
54994253 | 121 | inchash::hash hstate (p->kind ()); |
74ca1c01 ML |
122 | inchash::add_expr (p->min (), hstate); |
123 | inchash::add_expr (p->max (), hstate); | |
59b2c134 JJ |
124 | return hstate.end (); |
125 | } | |
86cd0334 | 126 | static bool |
028d81b1 | 127 | equal (const value_range *a, const value_range *b) |
86cd0334 | 128 | { |
a8662390 ML |
129 | return (a->equal_p (*b) |
130 | && types_compatible_p (a->type (), b->type ())); | |
86cd0334 | 131 | } |
7ca50de0 | 132 | static const bool empty_zero_p = true; |
86cd0334 | 133 | static void |
028d81b1 | 134 | mark_empty (value_range *&p) |
86cd0334 MJ |
135 | { |
136 | p = NULL; | |
137 | } | |
138 | static bool | |
028d81b1 | 139 | is_empty (const value_range *p) |
86cd0334 MJ |
140 | { |
141 | return p == NULL; | |
142 | } | |
143 | static bool | |
028d81b1 | 144 | is_deleted (const value_range *p) |
86cd0334 | 145 | { |
028d81b1 | 146 | return p == reinterpret_cast<const value_range *> (1); |
86cd0334 MJ |
147 | } |
148 | static void | |
028d81b1 | 149 | mark_deleted (value_range *&p) |
86cd0334 | 150 | { |
028d81b1 | 151 | p = reinterpret_cast<value_range *> (1); |
86cd0334 MJ |
152 | } |
153 | }; | |
154 | ||
155 | /* Hash table for avoid repeated allocations of equal value_ranges. */ | |
156 | static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table; | |
157 | ||
771578a0 | 158 | /* Holders of ipa cgraph hooks: */ |
40982661 | 159 | static struct cgraph_node_hook_list *function_insertion_hook_holder; |
518dc859 | 160 | |
4502fe8d MJ |
161 | /* Description of a reference to an IPA constant. */ |
162 | struct ipa_cst_ref_desc | |
163 | { | |
164 | /* Edge that corresponds to the statement which took the reference. */ | |
165 | struct cgraph_edge *cs; | |
166 | /* Linked list of duplicates created when call graph edges are cloned. */ | |
167 | struct ipa_cst_ref_desc *next_duplicate; | |
168 | /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value | |
169 | if out of control. */ | |
170 | int refcount; | |
171 | }; | |
172 | ||
173 | /* Allocation pool for reference descriptions. */ | |
174 | ||
fb0b2914 | 175 | static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool |
fcb87c50 | 176 | ("IPA-PROP ref descriptions"); |
4502fe8d | 177 | |
5fe8e757 MJ |
178 | /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated |
179 | with NODE should prevent us from analyzing it for the purposes of IPA-CP. */ | |
180 | ||
181 | static bool | |
182 | ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node) | |
183 | { | |
67348ccc | 184 | tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl); |
5fe8e757 MJ |
185 | |
186 | if (!fs_opts) | |
187 | return false; | |
2bf86c84 | 188 | return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp); |
5fe8e757 MJ |
189 | } |
190 | ||
be95e2b9 MJ |
191 | /* Return index of the formal whose tree is PTREE in function which corresponds |
192 | to INFO. */ | |
193 | ||
d044dd17 | 194 | static int |
f65f1ae3 MJ |
195 | ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors, |
196 | tree ptree) | |
518dc859 RL |
197 | { |
198 | int i, count; | |
199 | ||
f65f1ae3 | 200 | count = vec_safe_length (descriptors); |
518dc859 | 201 | for (i = 0; i < count; i++) |
f65f1ae3 | 202 | if ((*descriptors)[i].decl_or_type == ptree) |
518dc859 RL |
203 | return i; |
204 | ||
205 | return -1; | |
206 | } | |
207 | ||
d044dd17 MJ |
208 | /* Return index of the formal whose tree is PTREE in function which corresponds |
209 | to INFO. */ | |
210 | ||
211 | int | |
99b1c316 | 212 | ipa_get_param_decl_index (class ipa_node_params *info, tree ptree) |
d044dd17 MJ |
213 | { |
214 | return ipa_get_param_decl_index_1 (info->descriptors, ptree); | |
215 | } | |
216 | ||
217 | /* Populate the param_decl field in parameter DESCRIPTORS that correspond to | |
218 | NODE. */ | |
be95e2b9 | 219 | |
f8e2a1ed MJ |
220 | static void |
221 | ipa_populate_param_decls (struct cgraph_node *node, | |
f65f1ae3 | 222 | vec<ipa_param_descriptor, va_gc> &descriptors) |
518dc859 RL |
223 | { |
224 | tree fndecl; | |
225 | tree fnargs; | |
226 | tree parm; | |
227 | int param_num; | |
3e293154 | 228 | |
67348ccc | 229 | fndecl = node->decl; |
0e8853ee | 230 | gcc_assert (gimple_has_body_p (fndecl)); |
518dc859 RL |
231 | fnargs = DECL_ARGUMENTS (fndecl); |
232 | param_num = 0; | |
910ad8de | 233 | for (parm = fnargs; parm; parm = DECL_CHAIN (parm)) |
518dc859 | 234 | { |
209ca542 | 235 | descriptors[param_num].decl_or_type = parm; |
40a777e8 JH |
236 | unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true); |
237 | descriptors[param_num].move_cost = cost; | |
238 | /* Watch overflow, move_cost is a bitfield. */ | |
239 | gcc_checking_assert (cost == descriptors[param_num].move_cost); | |
518dc859 RL |
240 | param_num++; |
241 | } | |
242 | } | |
243 | ||
3f84bf08 MJ |
244 | /* Return how many formal parameters FNDECL has. */ |
245 | ||
fd29c024 | 246 | int |
310bc633 | 247 | count_formal_params (tree fndecl) |
3f84bf08 MJ |
248 | { |
249 | tree parm; | |
250 | int count = 0; | |
0e8853ee | 251 | gcc_assert (gimple_has_body_p (fndecl)); |
3f84bf08 | 252 | |
910ad8de | 253 | for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm)) |
3f84bf08 MJ |
254 | count++; |
255 | ||
256 | return count; | |
257 | } | |
258 | ||
0e8853ee JH |
259 | /* Return the declaration of Ith formal parameter of the function corresponding |
260 | to INFO. Note there is no setter function as this array is built just once | |
261 | using ipa_initialize_node_params. */ | |
262 | ||
263 | void | |
99b1c316 | 264 | ipa_dump_param (FILE *file, class ipa_node_params *info, int i) |
0e8853ee JH |
265 | { |
266 | fprintf (file, "param #%i", i); | |
f65f1ae3 | 267 | if ((*info->descriptors)[i].decl_or_type) |
0e8853ee JH |
268 | { |
269 | fprintf (file, " "); | |
ef6cb4c7 | 270 | print_generic_expr (file, (*info->descriptors)[i].decl_or_type); |
0e8853ee JH |
271 | } |
272 | } | |
273 | ||
159f01f8 MJ |
274 | /* If necessary, allocate vector of parameter descriptors in info of NODE. |
275 | Return true if they were allocated, false if not. */ | |
0e8853ee | 276 | |
159f01f8 | 277 | static bool |
0e8853ee JH |
278 | ipa_alloc_node_params (struct cgraph_node *node, int param_count) |
279 | { | |
a4a3cdd0 | 280 | ipa_node_params *info = ipa_node_params_sum->get_create (node); |
0e8853ee | 281 | |
f65f1ae3 | 282 | if (!info->descriptors && param_count) |
159f01f8 | 283 | { |
cb3874dc | 284 | vec_safe_grow_cleared (info->descriptors, param_count, true); |
159f01f8 MJ |
285 | return true; |
286 | } | |
287 | else | |
288 | return false; | |
0e8853ee JH |
289 | } |
290 | ||
f8e2a1ed MJ |
291 | /* Initialize the ipa_node_params structure associated with NODE by counting |
292 | the function parameters, creating the descriptors and populating their | |
293 | param_decls. */ | |
be95e2b9 | 294 | |
f8e2a1ed MJ |
295 | void |
296 | ipa_initialize_node_params (struct cgraph_node *node) | |
297 | { | |
a4a3cdd0 | 298 | ipa_node_params *info = ipa_node_params_sum->get_create (node); |
f8e2a1ed | 299 | |
159f01f8 MJ |
300 | if (!info->descriptors |
301 | && ipa_alloc_node_params (node, count_formal_params (node->decl))) | |
302 | ipa_populate_param_decls (node, *info->descriptors); | |
518dc859 RL |
303 | } |
304 | ||
749aa96d MJ |
305 | /* Print the jump functions associated with call graph edge CS to file F. */ |
306 | ||
307 | static void | |
308 | ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs) | |
309 | { | |
a4a3cdd0 MJ |
310 | ipa_edge_args *args = ipa_edge_args_sum->get (cs); |
311 | int count = ipa_get_cs_argument_count (args); | |
749aa96d | 312 | |
a4a3cdd0 | 313 | for (int i = 0; i < count; i++) |
749aa96d MJ |
314 | { |
315 | struct ipa_jump_func *jump_func; | |
316 | enum jump_func_type type; | |
317 | ||
a4a3cdd0 | 318 | jump_func = ipa_get_ith_jump_func (args, i); |
749aa96d MJ |
319 | type = jump_func->type; |
320 | ||
321 | fprintf (f, " param %d: ", i); | |
322 | if (type == IPA_JF_UNKNOWN) | |
323 | fprintf (f, "UNKNOWN\n"); | |
749aa96d MJ |
324 | else if (type == IPA_JF_CONST) |
325 | { | |
4502fe8d | 326 | tree val = jump_func->value.constant.value; |
749aa96d | 327 | fprintf (f, "CONST: "); |
ef6cb4c7 | 328 | print_generic_expr (f, val); |
749aa96d MJ |
329 | if (TREE_CODE (val) == ADDR_EXPR |
330 | && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL) | |
331 | { | |
332 | fprintf (f, " -> "); | |
ef6cb4c7 | 333 | print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0))); |
749aa96d MJ |
334 | } |
335 | fprintf (f, "\n"); | |
336 | } | |
749aa96d MJ |
337 | else if (type == IPA_JF_PASS_THROUGH) |
338 | { | |
339 | fprintf (f, "PASS THROUGH: "); | |
8b7773a4 | 340 | fprintf (f, "%d, op %s", |
749aa96d | 341 | jump_func->value.pass_through.formal_id, |
5806f481 | 342 | get_tree_code_name(jump_func->value.pass_through.operation)); |
749aa96d | 343 | if (jump_func->value.pass_through.operation != NOP_EXPR) |
8b7773a4 MJ |
344 | { |
345 | fprintf (f, " "); | |
ef6cb4c7 | 346 | print_generic_expr (f, jump_func->value.pass_through.operand); |
8b7773a4 MJ |
347 | } |
348 | if (jump_func->value.pass_through.agg_preserved) | |
349 | fprintf (f, ", agg_preserved"); | |
3ea6239f | 350 | fprintf (f, "\n"); |
749aa96d MJ |
351 | } |
352 | else if (type == IPA_JF_ANCESTOR) | |
353 | { | |
354 | fprintf (f, "ANCESTOR: "); | |
16998094 | 355 | fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC, |
749aa96d MJ |
356 | jump_func->value.ancestor.formal_id, |
357 | jump_func->value.ancestor.offset); | |
8b7773a4 MJ |
358 | if (jump_func->value.ancestor.agg_preserved) |
359 | fprintf (f, ", agg_preserved"); | |
7ea3a73c MJ |
360 | if (jump_func->value.ancestor.keep_null) |
361 | fprintf (f, ", keep_null"); | |
3ea6239f | 362 | fprintf (f, "\n"); |
749aa96d | 363 | } |
8b7773a4 MJ |
364 | |
365 | if (jump_func->agg.items) | |
366 | { | |
367 | struct ipa_agg_jf_item *item; | |
368 | int j; | |
369 | ||
370 | fprintf (f, " Aggregate passed by %s:\n", | |
371 | jump_func->agg.by_ref ? "reference" : "value"); | |
eb270950 | 372 | FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item) |
8b7773a4 MJ |
373 | { |
374 | fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ", | |
375 | item->offset); | |
eb270950 FX |
376 | fprintf (f, "type: "); |
377 | print_generic_expr (f, item->type); | |
378 | fprintf (f, ", "); | |
379 | if (item->jftype == IPA_JF_PASS_THROUGH) | |
380 | fprintf (f, "PASS THROUGH: %d,", | |
381 | item->value.pass_through.formal_id); | |
382 | else if (item->jftype == IPA_JF_LOAD_AGG) | |
383 | { | |
384 | fprintf (f, "LOAD AGG: %d", | |
385 | item->value.pass_through.formal_id); | |
386 | fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],", | |
387 | item->value.load_agg.offset, | |
388 | item->value.load_agg.by_ref ? "reference" | |
389 | : "value"); | |
390 | } | |
391 | ||
392 | if (item->jftype == IPA_JF_PASS_THROUGH | |
393 | || item->jftype == IPA_JF_LOAD_AGG) | |
394 | { | |
395 | fprintf (f, " op %s", | |
396 | get_tree_code_name (item->value.pass_through.operation)); | |
397 | if (item->value.pass_through.operation != NOP_EXPR) | |
398 | { | |
399 | fprintf (f, " "); | |
400 | print_generic_expr (f, item->value.pass_through.operand); | |
401 | } | |
402 | } | |
403 | else if (item->jftype == IPA_JF_CONST) | |
8b7773a4 | 404 | { |
eb270950 FX |
405 | fprintf (f, "CONST: "); |
406 | print_generic_expr (f, item->value.constant); | |
8b7773a4 | 407 | } |
eb270950 FX |
408 | else if (item->jftype == IPA_JF_UNKNOWN) |
409 | fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits", | |
410 | tree_to_uhwi (TYPE_SIZE (item->type))); | |
8b7773a4 MJ |
411 | fprintf (f, "\n"); |
412 | } | |
413 | } | |
44210a96 | 414 | |
99b1c316 | 415 | class ipa_polymorphic_call_context *ctx |
a4a3cdd0 | 416 | = ipa_get_ith_polymorhic_call_context (args, i); |
44210a96 MJ |
417 | if (ctx && !ctx->useless_p ()) |
418 | { | |
419 | fprintf (f, " Context: "); | |
420 | ctx->dump (dump_file); | |
421 | } | |
04be694e | 422 | |
86cd0334 | 423 | if (jump_func->bits) |
209ca542 | 424 | { |
86cd0334 MJ |
425 | fprintf (f, " value: "); |
426 | print_hex (jump_func->bits->value, f); | |
427 | fprintf (f, ", mask: "); | |
428 | print_hex (jump_func->bits->mask, f); | |
209ca542 PK |
429 | fprintf (f, "\n"); |
430 | } | |
431 | else | |
432 | fprintf (f, " Unknown bits\n"); | |
8bc5448f | 433 | |
86cd0334 | 434 | if (jump_func->m_vr) |
8bc5448f KV |
435 | { |
436 | fprintf (f, " VR "); | |
437 | fprintf (f, "%s[", | |
54994253 AH |
438 | (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : ""); |
439 | print_decs (wi::to_wide (jump_func->m_vr->min ()), f); | |
8bc5448f | 440 | fprintf (f, ", "); |
54994253 | 441 | print_decs (wi::to_wide (jump_func->m_vr->max ()), f); |
8bc5448f KV |
442 | fprintf (f, "]\n"); |
443 | } | |
444 | else | |
445 | fprintf (f, " Unknown VR\n"); | |
749aa96d MJ |
446 | } |
447 | } | |
448 | ||
449 | ||
be95e2b9 MJ |
450 | /* Print the jump functions of all arguments on all call graph edges going from |
451 | NODE to file F. */ | |
452 | ||
518dc859 | 453 | void |
3e293154 | 454 | ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node) |
518dc859 | 455 | { |
3e293154 | 456 | struct cgraph_edge *cs; |
518dc859 | 457 | |
464d0118 | 458 | fprintf (f, " Jump functions of caller %s:\n", node->dump_name ()); |
3e293154 MJ |
459 | for (cs = node->callees; cs; cs = cs->next_callee) |
460 | { | |
3e293154 | 461 | |
464d0118 ML |
462 | fprintf (f, " callsite %s -> %s : \n", |
463 | node->dump_name (), | |
464 | cs->callee->dump_name ()); | |
0302955a JH |
465 | if (!ipa_edge_args_info_available_for_edge_p (cs)) |
466 | fprintf (f, " no arg info\n"); | |
467 | else | |
468 | ipa_print_node_jump_functions_for_edge (f, cs); | |
749aa96d | 469 | } |
518dc859 | 470 | |
9de04252 | 471 | for (cs = node->indirect_calls; cs; cs = cs->next_callee) |
749aa96d | 472 | { |
99b1c316 | 473 | class cgraph_indirect_call_info *ii; |
3e293154 | 474 | |
9de04252 MJ |
475 | ii = cs->indirect_info; |
476 | if (ii->agg_contents) | |
c13bc3d9 | 477 | fprintf (f, " indirect %s callsite, calling param %i, " |
9de04252 | 478 | "offset " HOST_WIDE_INT_PRINT_DEC ", %s", |
c13bc3d9 | 479 | ii->member_ptr ? "member ptr" : "aggregate", |
9de04252 MJ |
480 | ii->param_index, ii->offset, |
481 | ii->by_ref ? "by reference" : "by_value"); | |
482 | else | |
85942f45 JH |
483 | fprintf (f, " indirect %s callsite, calling param %i, " |
484 | "offset " HOST_WIDE_INT_PRINT_DEC, | |
485 | ii->polymorphic ? "polymorphic" : "simple", ii->param_index, | |
486 | ii->offset); | |
9de04252 | 487 | |
749aa96d MJ |
488 | if (cs->call_stmt) |
489 | { | |
9de04252 | 490 | fprintf (f, ", for stmt "); |
749aa96d | 491 | print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM); |
3e293154 | 492 | } |
749aa96d | 493 | else |
9de04252 | 494 | fprintf (f, "\n"); |
ba392339 JH |
495 | if (ii->polymorphic) |
496 | ii->context.dump (f); | |
0302955a JH |
497 | if (!ipa_edge_args_info_available_for_edge_p (cs)) |
498 | fprintf (f, " no arg info\n"); | |
499 | else | |
500 | ipa_print_node_jump_functions_for_edge (f, cs); | |
3e293154 MJ |
501 | } |
502 | } | |
503 | ||
504 | /* Print ipa_jump_func data structures of all nodes in the call graph to F. */ | |
be95e2b9 | 505 | |
3e293154 MJ |
506 | void |
507 | ipa_print_all_jump_functions (FILE *f) | |
508 | { | |
509 | struct cgraph_node *node; | |
510 | ||
ca30a539 | 511 | fprintf (f, "\nJump functions:\n"); |
65c70e6b | 512 | FOR_EACH_FUNCTION (node) |
3e293154 MJ |
513 | { |
514 | ipa_print_node_jump_functions (f, node); | |
515 | } | |
516 | } | |
517 | ||
04be694e MJ |
518 | /* Set jfunc to be a know-really nothing jump function. */ |
519 | ||
520 | static void | |
521 | ipa_set_jf_unknown (struct ipa_jump_func *jfunc) | |
522 | { | |
523 | jfunc->type = IPA_JF_UNKNOWN; | |
04be694e MJ |
524 | } |
525 | ||
b8f6e610 MJ |
526 | /* Set JFUNC to be a copy of another jmp (to be used by jump function |
527 | combination code). The two functions will share their rdesc. */ | |
528 | ||
529 | static void | |
530 | ipa_set_jf_cst_copy (struct ipa_jump_func *dst, | |
531 | struct ipa_jump_func *src) | |
532 | ||
533 | { | |
534 | gcc_checking_assert (src->type == IPA_JF_CONST); | |
535 | dst->type = IPA_JF_CONST; | |
536 | dst->value.constant = src->value.constant; | |
537 | } | |
538 | ||
7b872d9e MJ |
539 | /* Set JFUNC to be a constant jmp function. */ |
540 | ||
541 | static void | |
4502fe8d MJ |
542 | ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant, |
543 | struct cgraph_edge *cs) | |
7b872d9e MJ |
544 | { |
545 | jfunc->type = IPA_JF_CONST; | |
4502fe8d MJ |
546 | jfunc->value.constant.value = unshare_expr_without_location (constant); |
547 | ||
548 | if (TREE_CODE (constant) == ADDR_EXPR | |
13586172 MJ |
549 | && (TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL |
550 | || (TREE_CODE (TREE_OPERAND (constant, 0)) == VAR_DECL | |
551 | && TREE_STATIC (TREE_OPERAND (constant, 0))))) | |
4502fe8d MJ |
552 | { |
553 | struct ipa_cst_ref_desc *rdesc; | |
4502fe8d | 554 | |
601f3293 | 555 | rdesc = ipa_refdesc_pool.allocate (); |
4502fe8d MJ |
556 | rdesc->cs = cs; |
557 | rdesc->next_duplicate = NULL; | |
558 | rdesc->refcount = 1; | |
559 | jfunc->value.constant.rdesc = rdesc; | |
560 | } | |
561 | else | |
562 | jfunc->value.constant.rdesc = NULL; | |
7b872d9e MJ |
563 | } |
564 | ||
565 | /* Set JFUNC to be a simple pass-through jump function. */ | |
566 | static void | |
8b7773a4 | 567 | ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id, |
3b97a5c7 | 568 | bool agg_preserved) |
7b872d9e MJ |
569 | { |
570 | jfunc->type = IPA_JF_PASS_THROUGH; | |
571 | jfunc->value.pass_through.operand = NULL_TREE; | |
572 | jfunc->value.pass_through.formal_id = formal_id; | |
573 | jfunc->value.pass_through.operation = NOP_EXPR; | |
8b7773a4 | 574 | jfunc->value.pass_through.agg_preserved = agg_preserved; |
7b872d9e MJ |
575 | } |
576 | ||
a2b4c188 KV |
577 | /* Set JFUNC to be an unary pass through jump function. */ |
578 | ||
579 | static void | |
580 | ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id, | |
581 | enum tree_code operation) | |
582 | { | |
583 | jfunc->type = IPA_JF_PASS_THROUGH; | |
584 | jfunc->value.pass_through.operand = NULL_TREE; | |
585 | jfunc->value.pass_through.formal_id = formal_id; | |
586 | jfunc->value.pass_through.operation = operation; | |
587 | jfunc->value.pass_through.agg_preserved = false; | |
588 | } | |
7b872d9e MJ |
589 | /* Set JFUNC to be an arithmetic pass through jump function. */ |
590 | ||
591 | static void | |
592 | ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id, | |
593 | tree operand, enum tree_code operation) | |
594 | { | |
595 | jfunc->type = IPA_JF_PASS_THROUGH; | |
d1f98542 | 596 | jfunc->value.pass_through.operand = unshare_expr_without_location (operand); |
7b872d9e MJ |
597 | jfunc->value.pass_through.formal_id = formal_id; |
598 | jfunc->value.pass_through.operation = operation; | |
8b7773a4 | 599 | jfunc->value.pass_through.agg_preserved = false; |
7b872d9e MJ |
600 | } |
601 | ||
602 | /* Set JFUNC to be an ancestor jump function. */ | |
603 | ||
604 | static void | |
605 | ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset, | |
7ea3a73c | 606 | int formal_id, bool agg_preserved, bool keep_null) |
7b872d9e MJ |
607 | { |
608 | jfunc->type = IPA_JF_ANCESTOR; | |
609 | jfunc->value.ancestor.formal_id = formal_id; | |
610 | jfunc->value.ancestor.offset = offset; | |
8b7773a4 | 611 | jfunc->value.ancestor.agg_preserved = agg_preserved; |
7ea3a73c | 612 | jfunc->value.ancestor.keep_null = keep_null; |
e248d83f MJ |
613 | } |
614 | ||
8aab5218 MJ |
615 | /* Get IPA BB information about the given BB. FBI is the context of analyzis |
616 | of this function body. */ | |
617 | ||
618 | static struct ipa_bb_info * | |
56b40062 | 619 | ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb) |
8aab5218 MJ |
620 | { |
621 | gcc_checking_assert (fbi); | |
622 | return &fbi->bb_infos[bb->index]; | |
623 | } | |
624 | ||
f65cf2b7 MJ |
625 | /* Structure to be passed in between detect_type_change and |
626 | check_stmt_for_type_change. */ | |
627 | ||
11478306 | 628 | struct prop_type_change_info |
f65cf2b7 | 629 | { |
290ebcb7 MJ |
630 | /* Offset into the object where there is the virtual method pointer we are |
631 | looking for. */ | |
632 | HOST_WIDE_INT offset; | |
633 | /* The declaration or SSA_NAME pointer of the base that we are checking for | |
634 | type change. */ | |
635 | tree object; | |
f65cf2b7 MJ |
636 | /* Set to true if dynamic type change has been detected. */ |
637 | bool type_maybe_changed; | |
638 | }; | |
639 | ||
640 | /* Return true if STMT can modify a virtual method table pointer. | |
641 | ||
642 | This function makes special assumptions about both constructors and | |
643 | destructors which are all the functions that are allowed to alter the VMT | |
644 | pointers. It assumes that destructors begin with assignment into all VMT | |
645 | pointers and that constructors essentially look in the following way: | |
646 | ||
647 | 1) The very first thing they do is that they call constructors of ancestor | |
648 | sub-objects that have them. | |
649 | ||
650 | 2) Then VMT pointers of this and all its ancestors is set to new values | |
651 | corresponding to the type corresponding to the constructor. | |
652 | ||
653 | 3) Only afterwards, other stuff such as constructor of member sub-objects | |
654 | and the code written by the user is run. Only this may include calling | |
655 | virtual functions, directly or indirectly. | |
656 | ||
657 | There is no way to call a constructor of an ancestor sub-object in any | |
658 | other way. | |
659 | ||
660 | This means that we do not have to care whether constructors get the correct | |
661 | type information because they will always change it (in fact, if we define | |
662 | the type to be given by the VMT pointer, it is undefined). | |
663 | ||
664 | The most important fact to derive from the above is that if, for some | |
665 | statement in the section 3, we try to detect whether the dynamic type has | |
666 | changed, we can safely ignore all calls as we examine the function body | |
667 | backwards until we reach statements in section 2 because these calls cannot | |
668 | be ancestor constructors or destructors (if the input is not bogus) and so | |
669 | do not change the dynamic type (this holds true only for automatically | |
670 | allocated objects but at the moment we devirtualize only these). We then | |
671 | must detect that statements in section 2 change the dynamic type and can try | |
672 | to derive the new type. That is enough and we can stop, we will never see | |
673 | the calls into constructors of sub-objects in this code. Therefore we can | |
674 | safely ignore all call statements that we traverse. | |
675 | */ | |
676 | ||
677 | static bool | |
355fe088 | 678 | stmt_may_be_vtbl_ptr_store (gimple *stmt) |
f65cf2b7 MJ |
679 | { |
680 | if (is_gimple_call (stmt)) | |
681 | return false; | |
70f633c5 JH |
682 | if (gimple_clobber_p (stmt)) |
683 | return false; | |
f65cf2b7 MJ |
684 | else if (is_gimple_assign (stmt)) |
685 | { | |
686 | tree lhs = gimple_assign_lhs (stmt); | |
687 | ||
0004f992 MJ |
688 | if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs))) |
689 | { | |
690 | if (flag_strict_aliasing | |
691 | && !POINTER_TYPE_P (TREE_TYPE (lhs))) | |
692 | return false; | |
693 | ||
694 | if (TREE_CODE (lhs) == COMPONENT_REF | |
695 | && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))) | |
f65cf2b7 | 696 | return false; |
450aa0ee | 697 | /* In the future we might want to use get_ref_base_and_extent to find |
0004f992 MJ |
698 | if there is a field corresponding to the offset and if so, proceed |
699 | almost like if it was a component ref. */ | |
700 | } | |
f65cf2b7 MJ |
701 | } |
702 | return true; | |
703 | } | |
704 | ||
3b97a5c7 MJ |
705 | /* Callback of walk_aliased_vdefs and a helper function for detect_type_change |
706 | to check whether a particular statement may modify the virtual table | |
707 | pointerIt stores its result into DATA, which points to a | |
11478306 | 708 | prop_type_change_info structure. */ |
f65cf2b7 MJ |
709 | |
710 | static bool | |
711 | check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data) | |
712 | { | |
355fe088 | 713 | gimple *stmt = SSA_NAME_DEF_STMT (vdef); |
11478306 | 714 | struct prop_type_change_info *tci = (struct prop_type_change_info *) data; |
f65cf2b7 MJ |
715 | |
716 | if (stmt_may_be_vtbl_ptr_store (stmt)) | |
717 | { | |
718 | tci->type_maybe_changed = true; | |
719 | return true; | |
720 | } | |
721 | else | |
722 | return false; | |
723 | } | |
724 | ||
058d0a90 JH |
725 | /* See if ARG is PARAM_DECl describing instance passed by pointer |
726 | or reference in FUNCTION. Return false if the dynamic type may change | |
727 | in between beggining of the function until CALL is invoked. | |
290ebcb7 | 728 | |
058d0a90 | 729 | Generally functions are not allowed to change type of such instances, |
67914693 | 730 | but they call destructors. We assume that methods cannot destroy the THIS |
058d0a90 JH |
731 | pointer. Also as a special cases, constructor and destructors may change |
732 | type of the THIS pointer. */ | |
733 | ||
static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within inlined constructor
     or destructor (ideally we would have way to check that the
     inline cdtor is actually working on ARG, but we don't have
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  /* Only a default-definition SSA name of a PARM_DECL can be analyzed
     further; anything else is conservatively assumed mutable.  */
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
290ebcb7 | 778 | |
06d65050 JH |
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
				       tree base, tree comp_type, gcall *call,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  /* NOTE(review): with an exhausted alias-walk budget this answers "no
     change" (false), while the unanalyzable cases above answer true —
     confirm this asymmetry is intentional in the callers.  */
  if (fbi->aa_walk_budget == 0)
    return false;

  /* Build a reference covering just the vtable pointer within ARG.  */
  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
			  &tci, NULL, NULL, fbi->aa_walk_budget);
  /* A negative result means the walk was aborted; drop the whole budget.  */
  if (walked >= 0)
    fbi->aa_walk_budget -= walked;
  else
    fbi->aa_walk_budget = 0;

  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  return true;
}
838 | ||
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
		    tree comp_type, gcall *call,
		    HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  /* If the object is reached through a parameter whose pointed-to type
     provably cannot change in this function, skip the expensive walk.  */
  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
						call, offset);
}
860 | ||
f65cf2b7 MJ |
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
			gcall *call)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  /* Wrap the pointer in a MEM_REF so the helper sees the object itself.  */
  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
						call, 0);
}
883 | ||
fdb0e1b4 MJ |
884 | /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the |
885 | boolean variable pointed to by DATA. */ | |
886 | ||
887 | static bool | |
888 | mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED, | |
889 | void *data) | |
890 | { | |
891 | bool *b = (bool *) data; | |
892 | *b = true; | |
893 | return true; | |
894 | } | |
895 | ||
8aab5218 MJ |
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  Returns NULL when no dominator carries a valid status.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  /* Walk up the immediate-dominator chain until a block with a valid
     status is found or the chain ends (get_immediate_dominator returns
     NULL at the root).  */
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
914 | ||
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  /* Lazily allocate the per-BB status vector, one slot per parameter.  */
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count, true);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      /* Seed from the nearest dominating status when one exists; otherwise
	 just mark this freshly-cleared entry valid.  */
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
943 | ||
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but do not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  /* A read-only parameter can never be modified.  */
  if (TREE_READONLY (base))
    return true;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  /* Cached negative answer, or no walk budget left.  */
  if (paa->parm_modified || fbi->aa_walk_budget == 0)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      /* Walk aborted: be conservative and exhaust the budget.  */
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  /* Cache the negative result.  (PAA cannot be NULL here; the extra check
     is defensive only.)  */
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
983 | ||
a2b4c188 KV |
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  /* Only accept the load if the parameter is provably unmodified up to
     STMT.  */
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
1010 | ||
8aab5218 MJ |
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  /* Cached negative answer, or no walk budget left.  */
  if (paa->ref_modified || fbi->aa_walk_budget == 0)
    return false;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      /* Walk aborted: be conservative and exhaust the budget.  */
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  /* Cache the negative result.  */
  if (modified)
    paa->ref_modified = true;
  return !modified;
}
1044 | ||
8aab5218 MJ |
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  /* Cached negative answer, or no walk budget left.  */
  if (paa->pt_modified || fbi->aa_walk_budget == 0)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      /* Walk aborted: be conservative and exhaust the budget.  */
      fbi->aa_walk_budget = 0;
      modified = true;
    }
  else
    fbi->aa_walk_budget -= walked;
  /* Cache the negative result.  */
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
1084 | ||
91bb9f80 MJ |
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it return
   false if it cannot prove that the value has not been modified before the
   load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will return true even
   if it cannot prove the value has not been modified, in that case it will
   store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, poly_int64 *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base)
    return false;

  /* Case 1: the aggregate is the parameter itself (passed by value).  */
  if (DECL_P (base))
    {
      /* NOTE(review): this inner INDEX intentionally shadows the outer one;
	 the outer variable is only used on the by-reference path below.  */
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  /* Case 2: the aggregate is pointed to by the parameter; only accept a
     plain dereference at offset zero.  */
  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	 void (*<T2e4>) (struct S *) D.1867;
	 struct S * p.1;

	 <bb 2>:
	 p.1_1 = p;
	 D.1867_2 = p.1_1->f;
	 D.1867_2 ();
	 gdp = &p;
	 */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      /* In strict mode (no GUARANTEED_UNMODIFIED out-parameter) an
	 unprovable load is a failure.  */
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
1179 | ||
eb270950 FX |
/* If STMT is an assignment that loads a value from a parameter declaration,
   or from an aggregate passed as the parameter either by value or reference,
   return the index of the parameter in ipa_node_params.  Otherwise return -1.

   FBI holds gathered information about the function.  INFO describes
   parameters of the function, STMT is the assignment statement.  If it is a
   memory load from an aggregate, *OFFSET_P is filled with offset within the
   aggregate, and *BY_REF_P specifies whether the aggregate is passed by
   reference.  */

static int
load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
				   class ipa_node_params *info,
				   gimple *stmt,
				   HOST_WIDE_INT *offset_p,
				   bool *by_ref_p)
{
  int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
  poly_int64 size;

  /* Load value from a parameter declaration.  */
  if (index >= 0)
    {
      /* -1 marks "whole parameter", not an aggregate offset.  */
      *offset_p = -1;
      return index;
    }

  if (!gimple_assign_load_p (stmt))
    return -1;

  tree rhs = gimple_assign_rhs1 (stmt);

  /* Skip memory reference containing VIEW_CONVERT_EXPR.  */
  for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
    if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
      return -1;

  /* Skip memory reference containing bit-field.  */
  if (TREE_CODE (rhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (rhs))
    return -1;

  if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
			       offset_p, &size, by_ref_p))
    return -1;

  gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
			 size));
  if (!*by_ref_p)
    {
      tree param_type = ipa_get_type (info, index);

      if (!param_type || !AGGREGATE_TYPE_P (param_type))
	return -1;
    }
  else if (TREE_THIS_VOLATILE (rhs))
    return -1;

  return index;
}
1240 | ||
c7b6a758 JH |
1241 | /* Walk pointer adjustemnts from OP (such as POINTER_PLUS and ADDR_EXPR) |
1242 | to find original pointer. Initialize RET to the pointer which results from | |
1243 | the walk. | |
1244 | If offset is known return true and initialize OFFSET_RET. */ | |
1245 | ||
1246 | bool | |
1247 | unadjusted_ptr_and_unit_offset (tree op, tree *ret, poly_int64 *offset_ret) | |
1248 | { | |
1249 | poly_int64 offset = 0; | |
1250 | bool offset_known = true; | |
1251 | int i; | |
1252 | ||
1253 | for (i = 0; i < param_ipa_jump_function_lookups; i++) | |
1254 | { | |
1255 | if (TREE_CODE (op) == ADDR_EXPR) | |
1256 | { | |
1257 | poly_int64 extra_offset = 0; | |
1258 | tree base = get_addr_base_and_unit_offset (TREE_OPERAND (op, 0), | |
1259 | &offset); | |
1260 | if (!base) | |
1261 | { | |
1262 | base = get_base_address (TREE_OPERAND (op, 0)); | |
1263 | if (TREE_CODE (base) != MEM_REF) | |
1264 | break; | |
1265 | offset_known = false; | |
1266 | } | |
1267 | else | |
1268 | { | |
1269 | if (TREE_CODE (base) != MEM_REF) | |
1270 | break; | |
1271 | offset += extra_offset; | |
1272 | } | |
1273 | op = TREE_OPERAND (base, 0); | |
1274 | if (mem_ref_offset (base).to_shwi (&extra_offset)) | |
1275 | offset += extra_offset; | |
1276 | else | |
1277 | offset_known = false; | |
1278 | } | |
1279 | else if (TREE_CODE (op) == SSA_NAME | |
1280 | && !SSA_NAME_IS_DEFAULT_DEF (op)) | |
1281 | { | |
1282 | gimple *pstmt = SSA_NAME_DEF_STMT (op); | |
1283 | ||
1284 | if (gimple_assign_single_p (pstmt)) | |
1285 | op = gimple_assign_rhs1 (pstmt); | |
1286 | else if (is_gimple_assign (pstmt) | |
1287 | && gimple_assign_rhs_code (pstmt) == POINTER_PLUS_EXPR) | |
1288 | { | |
1289 | poly_int64 extra_offset = 0; | |
1290 | if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt), | |
1291 | &extra_offset)) | |
1292 | offset += extra_offset; | |
1293 | else | |
1294 | offset_known = false; | |
1295 | op = gimple_assign_rhs1 (pstmt); | |
1296 | } | |
1297 | else | |
1298 | break; | |
1299 | } | |
1300 | else | |
1301 | break; | |
1302 | } | |
1303 | *ret = op; | |
1304 | *offset_ret = offset; | |
1305 | return offset_known; | |
1306 | } | |
1307 | ||
b258210c | 1308 | /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result |
fdb0e1b4 MJ |
1309 | of an assignment statement STMT, try to determine whether we are actually |
1310 | handling any of the following cases and construct an appropriate jump | |
1311 | function into JFUNC if so: | |
1312 | ||
1313 | 1) The passed value is loaded from a formal parameter which is not a gimple | |
1314 | register (most probably because it is addressable, the value has to be | |
1315 | scalar) and we can guarantee the value has not changed. This case can | |
1316 | therefore be described by a simple pass-through jump function. For example: | |
1317 | ||
1318 | foo (int a) | |
1319 | { | |
1320 | int a.0; | |
1321 | ||
1322 | a.0_2 = a; | |
1323 | bar (a.0_2); | |
1324 | ||
1325 | 2) The passed value can be described by a simple arithmetic pass-through | |
1326 | jump function. E.g. | |
1327 | ||
1328 | foo (int a) | |
1329 | { | |
1330 | int D.2064; | |
1331 | ||
1332 | D.2064_4 = a.1(D) + 4; | |
1333 | bar (D.2064_4); | |
1334 | ||
1335 | This case can also occur in combination of the previous one, e.g.: | |
1336 | ||
1337 | foo (int a, int z) | |
1338 | { | |
1339 | int a.0; | |
1340 | int D.2064; | |
1341 | ||
1342 | a.0_3 = a; | |
1343 | D.2064_4 = a.0_3 + 4; | |
1344 | foo (D.2064_4); | |
1345 | ||
1346 | 3) The passed value is an address of an object within another one (which | |
1347 | also passed by reference). Such situations are described by an ancestor | |
1348 | jump function and describe situations such as: | |
1349 | ||
1350 | B::foo() (struct B * const this) | |
1351 | { | |
1352 | struct A * D.1845; | |
1353 | ||
1354 | D.1845_2 = &this_1(D)->D.1748; | |
1355 | A::bar (D.1845_2); | |
1356 | ||
1357 | INFO is the structure describing individual parameters access different | |
1358 | stages of IPA optimizations. PARMS_AINFO contains the information that is | |
1359 | only needed for intraprocedural analysis. */ | |
685b0d13 MJ |
1360 | |
1361 | static void | |
56b40062 | 1362 | compute_complex_assign_jump_func (struct ipa_func_body_info *fbi, |
99b1c316 | 1363 | class ipa_node_params *info, |
b258210c | 1364 | struct ipa_jump_func *jfunc, |
355fe088 | 1365 | gcall *call, gimple *stmt, tree name, |
06d65050 | 1366 | tree param_type) |
685b0d13 | 1367 | { |
588db50c | 1368 | HOST_WIDE_INT offset, size; |
fdb0e1b4 | 1369 | tree op1, tc_ssa, base, ssa; |
ee45a32d | 1370 | bool reverse; |
685b0d13 | 1371 | int index; |
685b0d13 | 1372 | |
685b0d13 | 1373 | op1 = gimple_assign_rhs1 (stmt); |
685b0d13 | 1374 | |
fdb0e1b4 | 1375 | if (TREE_CODE (op1) == SSA_NAME) |
685b0d13 | 1376 | { |
fdb0e1b4 MJ |
1377 | if (SSA_NAME_IS_DEFAULT_DEF (op1)) |
1378 | index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1)); | |
1379 | else | |
bda2bc48 MJ |
1380 | index = load_from_unmodified_param (fbi, info->descriptors, |
1381 | SSA_NAME_DEF_STMT (op1)); | |
fdb0e1b4 MJ |
1382 | tc_ssa = op1; |
1383 | } | |
1384 | else | |
1385 | { | |
bda2bc48 | 1386 | index = load_from_unmodified_param (fbi, info->descriptors, stmt); |
fdb0e1b4 MJ |
1387 | tc_ssa = gimple_assign_lhs (stmt); |
1388 | } | |
1389 | ||
1390 | if (index >= 0) | |
1391 | { | |
a77af182 | 1392 | switch (gimple_assign_rhs_class (stmt)) |
8b7773a4 | 1393 | { |
a77af182 RB |
1394 | case GIMPLE_BINARY_RHS: |
1395 | { | |
1396 | tree op2 = gimple_assign_rhs2 (stmt); | |
1397 | if (!is_gimple_ip_invariant (op2) | |
1398 | || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) | |
1399 | != tcc_comparison) | |
1400 | && !useless_type_conversion_p (TREE_TYPE (name), | |
1401 | TREE_TYPE (op1)))) | |
1402 | return; | |
1403 | ||
1404 | ipa_set_jf_arith_pass_through (jfunc, index, op2, | |
1405 | gimple_assign_rhs_code (stmt)); | |
1406 | break; | |
1407 | } | |
1408 | case GIMPLE_SINGLE_RHS: | |
1409 | { | |
1410 | bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, | |
1411 | tc_ssa); | |
1412 | ipa_set_jf_simple_pass_through (jfunc, index, agg_p); | |
1413 | break; | |
1414 | } | |
1415 | case GIMPLE_UNARY_RHS: | |
b66113e9 | 1416 | if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))) |
a77af182 | 1417 | ipa_set_jf_unary_pass_through (jfunc, index, |
bda2bc48 | 1418 | gimple_assign_rhs_code (stmt)); |
a77af182 | 1419 | default:; |
8b7773a4 | 1420 | } |
685b0d13 MJ |
1421 | return; |
1422 | } | |
1423 | ||
1424 | if (TREE_CODE (op1) != ADDR_EXPR) | |
1425 | return; | |
1426 | op1 = TREE_OPERAND (op1, 0); | |
588db50c | 1427 | base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse); |
aca52e6f RS |
1428 | offset_int mem_offset; |
1429 | if (!base | |
1430 | || TREE_CODE (base) != MEM_REF | |
1431 | || !mem_ref_offset (base).is_constant (&mem_offset)) | |
685b0d13 | 1432 | return; |
aca52e6f | 1433 | offset += mem_offset.to_short_addr () * BITS_PER_UNIT; |
f65cf2b7 MJ |
1434 | ssa = TREE_OPERAND (base, 0); |
1435 | if (TREE_CODE (ssa) != SSA_NAME | |
1436 | || !SSA_NAME_IS_DEFAULT_DEF (ssa) | |
280fedf0 | 1437 | || offset < 0) |
685b0d13 MJ |
1438 | return; |
1439 | ||
b8f6e610 | 1440 | /* Dynamic types are changed in constructors and destructors. */ |
f65cf2b7 | 1441 | index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa)); |
06d65050 | 1442 | if (index >= 0 && param_type && POINTER_TYPE_P (param_type)) |
3b97a5c7 | 1443 | ipa_set_ancestor_jf (jfunc, offset, index, |
7ea3a73c MJ |
1444 | parm_ref_data_pass_through_p (fbi, index, call, ssa), |
1445 | false); | |
685b0d13 MJ |
1446 | } |
1447 | ||
40591473 MJ |
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  If case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  /* Strip handled components; *OFFSET receives their cumulative offset.  */
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  /* Fold the MEM_REF's own offset into the component offset.  */
  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
1491 | ||
685b0d13 | 1492 | |
b258210c MJ |
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    class ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  /* One PHI argument must be the literal zero; TMP is the other one.  */
  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  /* The predecessor must end with "if (parm != 0)".  */
  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  /* Both PHI predecessors must belong to the matched diamond.  */
  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm),
		       true);
}
1574 | ||
be95e2b9 MJ |
1575 | /* Inspect the given TYPE and return true iff it has the same structure (the |
1576 | same number of fields of the same types) as a C++ member pointer. If | |
1577 | METHOD_PTR and DELTA are non-NULL, store the trees representing the | |
1578 | corresponding fields there. */ | |
1579 | ||
3e293154 MJ |
1580 | static bool |
1581 | type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta) | |
1582 | { | |
1583 | tree fld; | |
1584 | ||
1585 | if (TREE_CODE (type) != RECORD_TYPE) | |
1586 | return false; | |
1587 | ||
1588 | fld = TYPE_FIELDS (type); | |
1589 | if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld)) | |
8b7773a4 | 1590 | || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE |
cc269bb6 | 1591 | || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld))) |
3e293154 MJ |
1592 | return false; |
1593 | ||
1594 | if (method_ptr) | |
1595 | *method_ptr = fld; | |
1596 | ||
910ad8de | 1597 | fld = DECL_CHAIN (fld); |
8b7773a4 | 1598 | if (!fld || INTEGRAL_TYPE_P (fld) |
cc269bb6 | 1599 | || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld))) |
3e293154 MJ |
1600 | return false; |
1601 | if (delta) | |
1602 | *delta = fld; | |
1603 | ||
910ad8de | 1604 | if (DECL_CHAIN (fld)) |
3e293154 MJ |
1605 | return false; |
1606 | ||
1607 | return true; | |
1608 | } | |
1609 | ||
61502ca8 | 1610 | /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement, |
eb270950 FX |
1611 | return the rhs of its defining statement, and this statement is stored in |
1612 | *RHS_STMT. Otherwise return RHS as it is. */ | |
7ec49257 MJ |
1613 | |
1614 | static inline tree | |
eb270950 | 1615 | get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt) |
7ec49257 MJ |
1616 | { |
1617 | while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs)) | |
1618 | { | |
355fe088 | 1619 | gimple *def_stmt = SSA_NAME_DEF_STMT (rhs); |
7ec49257 MJ |
1620 | |
1621 | if (gimple_assign_single_p (def_stmt)) | |
1622 | rhs = gimple_assign_rhs1 (def_stmt); | |
9961eb45 MJ |
1623 | else |
1624 | break; | |
eb270950 | 1625 | *rhs_stmt = def_stmt; |
7ec49257 MJ |
1626 | } |
1627 | return rhs; | |
1628 | } | |
1629 | ||
/* Simple linked list, describing contents of an aggregate before call.
   Lists built with add_to_agg_contents_list are kept sorted ascendingly
   by OFFSET.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size (in bits) of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;

  /* Type of the described part of the aggregate.  */
  tree type;

  /* Known constant value or jump function data describing contents.  */
  struct ipa_load_agg_data value;

  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
3e293154 | 1646 | |
eb270950 FX |
1647 | /* Add an aggregate content item into a linked list of |
1648 | ipa_known_agg_contents_list structure, in which all elements | |
1649 | are sorted ascendingly by offset. */ | |
0d48ee34 | 1650 | |
46771da5 FX |
1651 | static inline void |
1652 | add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist, | |
1653 | struct ipa_known_agg_contents_list *item) | |
0d48ee34 | 1654 | { |
46771da5 FX |
1655 | struct ipa_known_agg_contents_list *list = *plist; |
1656 | ||
1657 | for (; list; list = list->next) | |
0d48ee34 | 1658 | { |
46771da5 FX |
1659 | if (list->offset >= item->offset) |
1660 | break; | |
1661 | ||
1662 | plist = &list->next; | |
0d48ee34 MJ |
1663 | } |
1664 | ||
46771da5 FX |
1665 | item->next = list; |
1666 | *plist = item; | |
1667 | } | |
1668 | ||
eb270950 | 1669 | /* Check whether a given aggregate content is clobbered by certain element in |
46771da5 FX |
1670 | a linked list of ipa_known_agg_contents_list. */ |
1671 | ||
1672 | static inline bool | |
1673 | clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list, | |
1674 | struct ipa_known_agg_contents_list *item) | |
1675 | { | |
1676 | for (; list; list = list->next) | |
0d48ee34 | 1677 | { |
46771da5 FX |
1678 | if (list->offset >= item->offset) |
1679 | return list->offset < item->offset + item->size; | |
1680 | ||
1681 | if (list->offset + list->size > item->offset) | |
1682 | return true; | |
0d48ee34 | 1683 | } |
46771da5 FX |
1684 | |
1685 | return false; | |
0d48ee34 MJ |
1686 | } |
1687 | ||
/* Build aggregate jump function from LIST, assuming there are exactly
   VALUE_COUNT entries there and that offset of the passed argument
   is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int value_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_safe_reserve (jfunc->agg.items, value_count, true);
  for (; list; list = list->next)
    {
      struct ipa_agg_jf_item item;
      tree operand = list->value.pass_through.operand;

      if (list->value.pass_through.formal_id >= 0)
	{
	  /* Content value is derived from some formal parameter.  A
	     non-negative load offset means the value is loaded from an
	     aggregate pointed to or passed by that parameter.  */
	  if (list->value.offset >= 0)
	    item.jftype = IPA_JF_LOAD_AGG;
	  else
	    item.jftype = IPA_JF_PASS_THROUGH;

	  item.value.load_agg = list->value;
	  if (operand)
	    item.value.pass_through.operand
	      = unshare_expr_without_location (operand);
	}
      else if (operand)
	{
	  /* Content value is known constant.  */
	  item.jftype = IPA_JF_CONST;
	  item.value.constant = unshare_expr_without_location (operand);
	}
      else
	/* Neither a formal parameter nor a constant: nothing useful to
	   record for this part of the aggregate.  */
	continue;

      item.type = list->type;
      gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);

      /* Store the offset relative to the beginning of the passed argument;
	 it must be byte-aligned.  */
      item.offset = list->offset - arg_offset;
      gcc_assert ((item.offset % BITS_PER_UNIT) == 0);

      jfunc->agg.items->quick_push (item);
    }
}
1734 | ||
/* Given an assignment statement STMT, try to collect information into
   AGG_VALUE that will be used to construct jump function for RHS of the
   assignment, from which content value of an aggregate part comes.

   Besides constant and simple pass-through jump functions, also try to
   identify whether it matches the following pattern that can be described by
   a load-value-from-aggregate jump function, which is a derivative of simple
   pass-through jump function.

     foo (int *p)
     {
       ...

       *(q_5 + 4) = *(p_3(D) + 28) op 1;
       bar (q_5);
     }

   Here IPA_LOAD_AGG_DATA data structure is informative enough to describe
   constant, simple pass-through and load-value-from-aggregate.  If value
   is constant, it will be kept in field OPERAND, and field FORMAL_ID is
   set to -1.  For simple pass-through and load-value-from-aggregate, field
   FORMAL_ID specifies the related formal parameter index, and field
   OFFSET can be used to distinguish them, -1 means simple pass-through,
   otherwise means load-value-from-aggregate.  */

static void
analyze_agg_content_value (struct ipa_func_body_info *fbi,
			   struct ipa_load_agg_data *agg_value,
			   gimple *stmt)
{
  tree lhs = gimple_assign_lhs (stmt);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  enum tree_code code;
  int index = -1;

  /* Initialize jump function data for the aggregate part.  The defaults
     describe "nothing known": no operation, no formal, no load offset.  */
  memset (agg_value, 0, sizeof (*agg_value));
  agg_value->pass_through.operation = NOP_EXPR;
  agg_value->pass_through.formal_id = -1;
  agg_value->offset = -1;

  /* Bail out on stores we do not model: aggregate-typed values, volatile
     accesses and anything involving bit-fields.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs)) /* TODO: Support aggregate type.  */
      || TREE_THIS_VOLATILE (lhs)
      || TREE_CODE (lhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (lhs))
    return;

  /* Skip SSA copies.  */
  while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
    {
      if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
	break;

      stmt = SSA_NAME_DEF_STMT (rhs1);
      if (!is_gimple_assign (stmt))
	break;

      rhs1 = gimple_assign_rhs1 (stmt);
    }

  if (gphi *phi = dyn_cast<gphi *> (stmt))
    {
      /* Also special case like the following (a is a formal parameter):

	   _12 = *a_11(D).dim[0].stride;
	   ...
	   # iftmp.22_9 = PHI <_12(2), 1(3)>
	   ...
	   parm.6.dim[0].stride = iftmp.22_9;
	   ...
	   __x_MOD_foo (&parm.6, b_31(D));

	 The aggregate function describing parm.6.dim[0].stride is encoded as a
	 PASS-THROUGH jump function with ASSERT_EXPR operation with operand 1
	 (the constant from the PHI node).  */

      if (gimple_phi_num_args (phi) != 2)
	return;
      tree arg0 = gimple_phi_arg_def (phi, 0);
      tree arg1 = gimple_phi_arg_def (phi, 1);
      tree operand;

      /* Exactly one PHI argument must be an interprocedural invariant;
	 the other one is followed further.  */
      if (is_gimple_ip_invariant (arg1))
	{
	  operand = arg1;
	  rhs1 = arg0;
	}
      else if (is_gimple_ip_invariant (arg0))
	{
	  operand = arg0;
	  rhs1 = arg1;
	}
      else
	return;

      rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
      if (!is_gimple_assign (stmt))
	return;

      code = ASSERT_EXPR;
      agg_value->pass_through.operand = operand;
    }
  else if (is_gimple_assign (stmt))
    {
      code = gimple_assign_rhs_code (stmt);
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_SINGLE_RHS:
	  if (is_gimple_ip_invariant (rhs1))
	    {
	      /* A known constant; FORMAL_ID stays -1.  */
	      agg_value->pass_through.operand = rhs1;
	      return;
	    }
	  code = NOP_EXPR;
	  break;

	case GIMPLE_UNARY_RHS:
	  /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
	     (truth_not_expr is example), GIMPLE_BINARY_RHS does not imply
	     tcc_binary, this subtleness is somewhat misleading.

	     Since tcc_unary is widely used in IPA-CP code to check an operation
	     with one operand, here we only allow tc_unary operation to avoid
	     possible problem.  Then we can use (opclass == tc_unary) or not to
	     distinguish unary and binary.  */
	  if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
	    return;

	  rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
	  break;

	case GIMPLE_BINARY_RHS:
	  {
	    gimple *rhs1_stmt = stmt;
	    gimple *rhs2_stmt = stmt;
	    tree rhs2 = gimple_assign_rhs2 (stmt);

	    rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
	    rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);

	    /* One operand must be an invariant; the other is traced back.
	       If the invariant is on the left, normalize so that the traced
	       operand becomes RHS1 (swapping comparisons, or requiring the
	       operation to be commutative).  */
	    if (is_gimple_ip_invariant (rhs2))
	      {
		agg_value->pass_through.operand = rhs2;
		stmt = rhs1_stmt;
	      }
	    else if (is_gimple_ip_invariant (rhs1))
	      {
		if (TREE_CODE_CLASS (code) == tcc_comparison)
		  code = swap_tree_comparison (code);
		else if (!commutative_tree_code (code))
		  return;

		agg_value->pass_through.operand = rhs1;
		stmt = rhs2_stmt;
		rhs1 = rhs2;
	      }
	    else
	      return;

	    /* Except for comparisons, the result type must match the traced
	       operand's type for the pass-through to be meaningful.  */
	    if (TREE_CODE_CLASS (code) != tcc_comparison
		&& !useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (rhs1)))
	      return;
	  }
	  break;

	default:
	  return;
	}
    }
  else
    return;

  /* Determine where the traced value originates: either a load from an
     unmodified parameter/aggregate, or directly a formal parameter's
     default definition.  */
  if (TREE_CODE (rhs1) != SSA_NAME)
    index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
					       &agg_value->offset,
					       &agg_value->by_ref);
  else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
    index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));

  if (index >= 0)
    {
      /* A non-negative offset means load-from-aggregate; record the loaded
	 type as well.  */
      if (agg_value->offset >= 0)
	agg_value->type = TREE_TYPE (rhs1);
      agg_value->pass_through.formal_id = index;
      agg_value->pass_through.operation = code;
    }
  else
    /* No usable source: drop any operand recorded above so the entry
       describes nothing.  */
    agg_value->pass_through.operand = NULL_TREE;
}
1925 | ||
46771da5 FX |
1926 | /* If STMT is a memory store to the object whose address is BASE, extract |
1927 | information (offset, size, and value) into CONTENT, and return true, | |
1928 | otherwise we conservatively assume the whole object is modified with | |
1929 | unknown content, and return false. CHECK_REF means that access to object | |
1930 | is expected to be in form of MEM_REF expression. */ | |
1931 | ||
1932 | static bool | |
eb270950 FX |
1933 | extract_mem_content (struct ipa_func_body_info *fbi, |
1934 | gimple *stmt, tree base, bool check_ref, | |
46771da5 FX |
1935 | struct ipa_known_agg_contents_list *content) |
1936 | { | |
1937 | HOST_WIDE_INT lhs_offset, lhs_size; | |
46771da5 FX |
1938 | bool reverse; |
1939 | ||
eb270950 | 1940 | if (!is_gimple_assign (stmt)) |
46771da5 FX |
1941 | return false; |
1942 | ||
eb270950 FX |
1943 | tree lhs = gimple_assign_lhs (stmt); |
1944 | tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size, | |
1945 | &reverse); | |
46771da5 FX |
1946 | if (!lhs_base) |
1947 | return false; | |
1948 | ||
1949 | if (check_ref) | |
1950 | { | |
1951 | if (TREE_CODE (lhs_base) != MEM_REF | |
1952 | || TREE_OPERAND (lhs_base, 0) != base | |
1953 | || !integer_zerop (TREE_OPERAND (lhs_base, 1))) | |
1954 | return false; | |
1955 | } | |
1956 | else if (lhs_base != base) | |
1957 | return false; | |
1958 | ||
46771da5 | 1959 | content->offset = lhs_offset; |
eb270950 FX |
1960 | content->size = lhs_size; |
1961 | content->type = TREE_TYPE (lhs); | |
46771da5 FX |
1962 | content->next = NULL; |
1963 | ||
eb270950 | 1964 | analyze_agg_content_value (fbi, &content->value, stmt); |
46771da5 FX |
1965 | return true; |
1966 | } | |
1967 | ||
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in constants or values that are derived from caller's
   formal parameter in the way described by some kinds of jump functions.  FBI
   is the context of the caller function for interprocedural analysis.  ARG can
   either be an aggregate expression or a pointer to an aggregate.  ARG_TYPE is
   the type of the aggregate, JFUNC is the jump function for the aggregate.  */

static void
determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
				 gcall *call, tree arg,
				 tree arg_type,
				 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
  bitmap visited = NULL;
  int item_count = 0, value_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;
  int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);

  /* Zero items allowed means the analysis is disabled for this function.  */
  if (max_agg_items == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  /* Pointer in an SSA name: accesses are expected as MEM_REFs
	     through it; size comes from the pointed-to type.  */
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
	      || !POINTER_TYPE_P (TREE_TYPE (arg)))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  bool reverse;

	  /* Address of a declaration: track stores to the decl itself.  */
	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
						  &arg_size, &reverse);
	  if (!arg_base)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      bool reverse;

      /* Aggregate passed by value.  */
      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
					      &arg_size, &reverse);
      if (!arg_base)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage traverses virtual SSA web backwards starting from the call
     statement, only looks at individual dominating virtual operand (its
     definition dominates the call), as long as it is confident that content
     of the aggregate is affected by definition of the virtual operand, it
     builds a sorted linked list of ipa_agg_jf_list describing that.  */

  for (tree dom_vuse = gimple_vuse (call);
       dom_vuse && fbi->aa_walk_budget > 0;)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);

      if (gimple_code (stmt) == GIMPLE_PHI)
	{
	  /* Skip PHIs whose arms do not affect the tracked reference; the
	     walker charges its own work to the same budget.  */
	  dom_vuse = get_continuation_for_phi (stmt, &r, true,
					       fbi->aa_walk_budget,
					       &visited, false, NULL, NULL);
	  continue;
	}

      fbi->aa_walk_budget--;
      if (stmt_may_clobber_ref_p_1 (stmt, &r))
	{
	  struct ipa_known_agg_contents_list *content
	    = XALLOCA (struct ipa_known_agg_contents_list);

	  /* A clobbering statement we cannot describe ends the walk: the
	     aggregate contents before it are unknown.  */
	  if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
	    break;

	  /* Now we get a dominating virtual operand, and need to check
	     whether its value is clobbered any other dominating one.  */
	  if ((content->value.pass_through.formal_id >= 0
	       || content->value.pass_through.operand)
	      && !clobber_by_agg_contents_list_p (all_list, content))
	    {
	      struct ipa_known_agg_contents_list *copy
		= XALLOCA (struct ipa_known_agg_contents_list);

	      /* Add to the list consisting of only dominating virtual
		 operands, whose definitions can finally reach the call.  */
	      add_to_agg_contents_list (&list, (*copy = *content, copy));

	      if (++value_count == max_agg_items)
		break;
	    }

	  /* Add to the list consisting of all dominating virtual operands.  */
	  add_to_agg_contents_list (&all_list, content);

	  /* Cap the total number of stores examined, not just the useful
	     ones, at twice the item limit.  */
	  if (++item_count == 2 * max_agg_items)
	    break;
	}
      dom_vuse = gimple_vuse (stmt);
    }

  if (visited)
    BITMAP_FREE (visited);

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any meaningful items to begin with.  */

  if (value_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
    }
}
2116 | ||
46771da5 | 2117 | |
5d5f1e95 KV |
2118 | /* Return the Ith param type of callee associated with call graph |
2119 | edge E. */ | |
2120 | ||
2121 | tree | |
06d65050 JH |
2122 | ipa_get_callee_param_type (struct cgraph_edge *e, int i) |
2123 | { | |
2124 | int n; | |
2125 | tree type = (e->callee | |
67348ccc | 2126 | ? TREE_TYPE (e->callee->decl) |
06d65050 JH |
2127 | : gimple_call_fntype (e->call_stmt)); |
2128 | tree t = TYPE_ARG_TYPES (type); | |
2129 | ||
2130 | for (n = 0; n < i; n++) | |
2131 | { | |
2132 | if (!t) | |
2133 | break; | |
2134 | t = TREE_CHAIN (t); | |
2135 | } | |
2136 | if (t) | |
2137 | return TREE_VALUE (t); | |
2138 | if (!e->callee) | |
2139 | return NULL; | |
67348ccc | 2140 | t = DECL_ARGUMENTS (e->callee->decl); |
06d65050 JH |
2141 | for (n = 0; n < i; n++) |
2142 | { | |
2143 | if (!t) | |
2144 | return NULL; | |
2145 | t = TREE_CHAIN (t); | |
2146 | } | |
2147 | if (t) | |
2148 | return TREE_TYPE (t); | |
2149 | return NULL; | |
2150 | } | |
2151 | ||
86cd0334 MJ |
2152 | /* Return ipa_bits with VALUE and MASK values, which can be either a newly |
2153 | allocated structure or a previously existing one shared with other jump | |
2154 | functions and/or transformation summaries. */ | |
2155 | ||
2156 | ipa_bits * | |
2157 | ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask) | |
2158 | { | |
2159 | ipa_bits tmp; | |
2160 | tmp.value = value; | |
2161 | tmp.mask = mask; | |
2162 | ||
2163 | ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT); | |
2164 | if (*slot) | |
2165 | return *slot; | |
2166 | ||
2167 | ipa_bits *res = ggc_alloc<ipa_bits> (); | |
2168 | res->value = value; | |
2169 | res->mask = mask; | |
2170 | *slot = res; | |
2171 | ||
2172 | return res; | |
2173 | } | |
2174 | ||
/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK.  Use hash
   table in order to avoid creating multiple same ipa_bits structures.  */

static void
ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
		    const widest_int &mask)
{
  /* The returned structure may be shared with other jump functions.  */
  jf->bits = ipa_get_ipa_bits_for_value (value, mask);
}
2184 | ||
2185 | /* Return a pointer to a value_range just like *TMP, but either find it in | |
2186 | ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */ | |
2187 | ||
028d81b1 AH |
2188 | static value_range * |
2189 | ipa_get_value_range (value_range *tmp) | |
86cd0334 | 2190 | { |
028d81b1 | 2191 | value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT); |
86cd0334 MJ |
2192 | if (*slot) |
2193 | return *slot; | |
2194 | ||
4ba9fb0a | 2195 | value_range *vr = new (ggc_alloc<value_range> ()) value_range; |
86cd0334 MJ |
2196 | *vr = *tmp; |
2197 | *slot = vr; | |
2198 | ||
2199 | return vr; | |
2200 | } | |
2201 | ||
/* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
   equiv set.  Use hash table in order to avoid creating multiple same copies of
   value_ranges.  */

static value_range *
ipa_get_value_range (enum value_range_kind kind, tree min, tree max)
{
  /* Build a temporary on the stack and let the hash-consing overload decide
     whether an equivalent GC-allocated range already exists.  */
  value_range tmp (min, max, kind);
  return ipa_get_value_range (&tmp);
}
2212 | ||
/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
   a NULL equiv bitmap.  Use hash table in order to avoid creating multiple
   same value_range structures.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
		  tree min, tree max)
{
  /* The stored range may be shared with other jump functions.  */
  jf->m_vr = ipa_get_value_range (type, min, max);
}
2223 | ||
/* Assign to JF a pointer to a value_range just like TMP but either fetch a
   copy from ipa_vr_hash_table or allocate a new on in GC memory.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
{
  /* The stored range may be shared with other jump functions.  */
  jf->m_vr = ipa_get_value_range (tmp);
}
2232 | ||
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  ipa_node_params *info = ipa_node_params_sum->get (cs->caller);
  ipa_edge_args *args = ipa_edge_args_sum->get_create (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;
  value_range vr;

  /* Nothing to do for argument-less calls or if jump functions have already
     been computed for this edge.  */
  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num, true);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);

  /* Leave the vectors allocated but empty for internal calls and for callers
     whose options forbid the analysis.  */
  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      /* For pointer arguments, try to determine a polymorphic call context
	 usable for devirtualization.  */
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  class ipa_polymorphic_call_context context (cs->caller->decl,
						      arg, cs->call_stmt,
						      &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
				    &fbi->aa_walk_budget);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      /* Record a value range for the argument: non-NULL-ness for pointers,
	 a numeric range (converted to the parameter type) otherwise.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  bool addr_nonzero = false;
	  bool strict_overflow = false;

	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_range_query (cfun)->range_of_expr (vr, arg)
	      && vr.nonzero_p ())
	    addr_nonzero = true;
	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
	    addr_nonzero = true;

	  if (addr_nonzero)
	    {
	      /* Encode "non-NULL" as the anti-range ~[0, 0].  */
	      tree z = build_int_cst (TREE_TYPE (arg), 0);
	      ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}
      else
	{
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_range_query (cfun)->range_of_expr (vr, arg)
	      && !vr.undefined_p ())
	    {
	      value_range resvr;
	      /* Convert the range to the formal parameter's type; only
		 record it if the conversion yields something useful.  */
	      range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
				     &vr, TREE_TYPE (arg));
	      if (!resvr.undefined_p () && !resvr.varying_p ())
		ipa_set_jfunc_vr (jfunc, &resvr);
	      else
		gcc_assert (!jfunc->m_vr);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}

      /* Record known bits: nonzero-bit info for integral SSA names, the
	 exact value for integer constants, alignment info for pointers.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  if (TREE_CODE (arg) == SSA_NAME)
	    ipa_set_jfunc_bits (jfunc, 0,
				widest_int::from (get_nonzero_bits (arg),
						  TYPE_SIGN (TREE_TYPE (arg))));
	  else
	    ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
	}
      else if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align;

	  get_pointer_alignment_1 (arg, &align, &bitpos);
	  widest_int mask = wi::bit_and_not
	    (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
	     align / BITS_PER_UNIT - 1);
	  widest_int value = bitpos / BITS_PER_UNIT;
	  ipa_set_jfunc_bits (jfunc, value, mask);
	}
      else
	gcc_assert (!jfunc->bits);

      /* Now classify the argument itself: constant, aggregate PARM_DECL
	 passed by value, or an SSA name to trace further.  */
      if (is_gimple_ip_invariant (arg)
	  || (VAR_P (arg)
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >=0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      /* The argument is a formal parameter passed unchanged.  */
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      /* Try to describe the argument as an arithmetic derivation of
		 a formal parameter (assignment) or as an ancestor within an
		 object (PHI of &obj->field against NULL).  */
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is pointer, we cannot use its type to determine the type of aggregate
	 passed (because type conversions are ignored in gimple).  Usually we can
	 safely get type from function declaration, but in case of K&R prototypes or
	 variadic functions we can try our luck with type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we may better
	 work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
    }
  /* Drop the context vector again if no argument produced a useful one.  */
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
2407 | ||
749aa96d | 2408 | /* Compute jump functions for all edges - both direct and indirect - outgoing |
8aab5218 | 2409 | from BB. */ |
749aa96d | 2410 | |
062c604f | 2411 | static void |
56b40062 | 2412 | ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb) |
749aa96d | 2413 | { |
8aab5218 MJ |
2414 | struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb); |
2415 | int i; | |
749aa96d MJ |
2416 | struct cgraph_edge *cs; |
2417 | ||
8aab5218 | 2418 | FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs) |
749aa96d | 2419 | { |
8aab5218 | 2420 | struct cgraph_node *callee = cs->callee; |
749aa96d | 2421 | |
8aab5218 MJ |
2422 | if (callee) |
2423 | { | |
d7caa129 | 2424 | callee = callee->ultimate_alias_target (); |
8aab5218 MJ |
2425 | /* We do not need to bother analyzing calls to unknown functions |
2426 | unless they may become known during lto/whopr. */ | |
6cef01c3 JH |
2427 | if (!callee->definition && !flag_lto |
2428 | && !gimple_call_fnspec (cs->call_stmt).known_p ()) | |
8aab5218 MJ |
2429 | continue; |
2430 | } | |
2431 | ipa_compute_jump_functions_for_edge (fbi, cs); | |
2432 | } | |
749aa96d MJ |
2433 | } |
2434 | ||
8b7773a4 MJ |
2435 | /* If STMT looks like a statement loading a value from a member pointer formal |
2436 | parameter, return that parameter and store the offset of the field to | |
2437 | *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still | |
2438 | might be clobbered). If USE_DELTA, then we look for a use of the delta | |
2439 | field rather than the pfn. */ | |
be95e2b9 | 2440 | |
3e293154 | 2441 | static tree |
355fe088 | 2442 | ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta, |
8b7773a4 | 2443 | HOST_WIDE_INT *offset_p) |
3e293154 | 2444 | { |
8b7773a4 MJ |
2445 | tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field; |
2446 | ||
2447 | if (!gimple_assign_single_p (stmt)) | |
2448 | return NULL_TREE; | |
3e293154 | 2449 | |
8b7773a4 | 2450 | rhs = gimple_assign_rhs1 (stmt); |
ae788515 EB |
2451 | if (TREE_CODE (rhs) == COMPONENT_REF) |
2452 | { | |
2453 | ref_field = TREE_OPERAND (rhs, 1); | |
2454 | rhs = TREE_OPERAND (rhs, 0); | |
2455 | } | |
2456 | else | |
2457 | ref_field = NULL_TREE; | |
d242d063 | 2458 | if (TREE_CODE (rhs) != MEM_REF) |
3e293154 | 2459 | return NULL_TREE; |
3e293154 | 2460 | rec = TREE_OPERAND (rhs, 0); |
d242d063 MJ |
2461 | if (TREE_CODE (rec) != ADDR_EXPR) |
2462 | return NULL_TREE; | |
2463 | rec = TREE_OPERAND (rec, 0); | |
3e293154 | 2464 | if (TREE_CODE (rec) != PARM_DECL |
6f7b8b70 | 2465 | || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field)) |
3e293154 | 2466 | return NULL_TREE; |
d242d063 | 2467 | ref_offset = TREE_OPERAND (rhs, 1); |
ae788515 | 2468 | |
8b7773a4 MJ |
2469 | if (use_delta) |
2470 | fld = delta_field; | |
2471 | else | |
2472 | fld = ptr_field; | |
2473 | if (offset_p) | |
2474 | *offset_p = int_bit_position (fld); | |
2475 | ||
ae788515 EB |
2476 | if (ref_field) |
2477 | { | |
2478 | if (integer_nonzerop (ref_offset)) | |
2479 | return NULL_TREE; | |
ae788515 EB |
2480 | return ref_field == fld ? rec : NULL_TREE; |
2481 | } | |
3e293154 | 2482 | else |
8b7773a4 MJ |
2483 | return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec |
2484 | : NULL_TREE; | |
3e293154 MJ |
2485 | } |
2486 | ||
2487 | /* Returns true iff T is an SSA_NAME defined by a statement. */ | |
be95e2b9 | 2488 | |
3e293154 MJ |
2489 | static bool |
2490 | ipa_is_ssa_with_stmt_def (tree t) | |
2491 | { | |
2492 | if (TREE_CODE (t) == SSA_NAME | |
2493 | && !SSA_NAME_IS_DEFAULT_DEF (t)) | |
2494 | return true; | |
2495 | else | |
2496 | return false; | |
2497 | } | |
2498 | ||
40591473 MJ |
2499 | /* Find the indirect call graph edge corresponding to STMT and mark it as a |
2500 | call to a parameter number PARAM_INDEX. NODE is the caller. Return the | |
40a777e8 JH |
2501 | indirect call graph edge. |
2502 | If POLYMORPHIC is true record is as a destination of polymorphic call. */ | |
be95e2b9 | 2503 | |
40591473 | 2504 | static struct cgraph_edge * |
538dd0b7 | 2505 | ipa_note_param_call (struct cgraph_node *node, int param_index, |
40a777e8 | 2506 | gcall *stmt, bool polymorphic) |
3e293154 | 2507 | { |
e33c6cd6 | 2508 | struct cgraph_edge *cs; |
3e293154 | 2509 | |
d52f5295 | 2510 | cs = node->get_edge (stmt); |
b258210c | 2511 | cs->indirect_info->param_index = param_index; |
8b7773a4 | 2512 | cs->indirect_info->agg_contents = 0; |
c13bc3d9 | 2513 | cs->indirect_info->member_ptr = 0; |
91bb9f80 | 2514 | cs->indirect_info->guaranteed_unmodified = 0; |
a4a3cdd0 MJ |
2515 | ipa_node_params *info = ipa_node_params_sum->get (node); |
2516 | ipa_set_param_used_by_indirect_call (info, param_index, true); | |
40a777e8 | 2517 | if (cs->indirect_info->polymorphic || polymorphic) |
a4a3cdd0 | 2518 | ipa_set_param_used_by_polymorphic_call (info, param_index, true); |
40591473 | 2519 | return cs; |
3e293154 MJ |
2520 | } |
2521 | ||
e33c6cd6 | 2522 | /* Analyze the CALL and examine uses of formal parameters of the caller NODE |
c419671c | 2523 | (described by INFO). PARMS_AINFO is a pointer to a vector containing |
062c604f MJ |
2524 | intermediate information about each formal parameter. Currently it checks |
2525 | whether the call calls a pointer that is a formal parameter and if so, the | |
2526 | parameter is marked with the called flag and an indirect call graph edge | |
2527 | describing the call is created. This is very simple for ordinary pointers | |
2528 | represented in SSA but not-so-nice when it comes to member pointers. The | |
2529 | ugly part of this function does nothing more than trying to match the | |
2530 | pattern of such a call. An example of such a pattern is the gimple dump | |
2531 | below, the call is on the last line: | |
3e293154 | 2532 | |
ae788515 EB |
2533 | <bb 2>: |
2534 | f$__delta_5 = f.__delta; | |
2535 | f$__pfn_24 = f.__pfn; | |
2536 | ||
2537 | or | |
3e293154 | 2538 | <bb 2>: |
d242d063 MJ |
2539 | f$__delta_5 = MEM[(struct *)&f]; |
2540 | f$__pfn_24 = MEM[(struct *)&f + 4B]; | |
8aa29647 | 2541 | |
ae788515 | 2542 | and a few lines below: |
8aa29647 MJ |
2543 | |
2544 | <bb 5> | |
3e293154 MJ |
2545 | D.2496_3 = (int) f$__pfn_24; |
2546 | D.2497_4 = D.2496_3 & 1; | |
2547 | if (D.2497_4 != 0) | |
2548 | goto <bb 3>; | |
2549 | else | |
2550 | goto <bb 4>; | |
2551 | ||
8aa29647 | 2552 | <bb 6>: |
3e293154 MJ |
2553 | D.2500_7 = (unsigned int) f$__delta_5; |
2554 | D.2501_8 = &S + D.2500_7; | |
2555 | D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8; | |
2556 | D.2503_10 = *D.2502_9; | |
2557 | D.2504_12 = f$__pfn_24 + -1; | |
2558 | D.2505_13 = (unsigned int) D.2504_12; | |
2559 | D.2506_14 = D.2503_10 + D.2505_13; | |
2560 | D.2507_15 = *D.2506_14; | |
2561 | iftmp.11_16 = (String:: *) D.2507_15; | |
2562 | ||
8aa29647 | 2563 | <bb 7>: |
3e293154 MJ |
2564 | # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)> |
2565 | D.2500_19 = (unsigned int) f$__delta_5; | |
2566 | D.2508_20 = &S + D.2500_19; | |
2567 | D.2493_21 = iftmp.11_1 (D.2508_20, 4); | |
2568 | ||
2569 | Such patterns are results of simple calls to a member pointer: | |
2570 | ||
2571 | int doprinting (int (MyString::* f)(int) const) | |
2572 | { | |
2573 | MyString S ("somestring"); | |
2574 | ||
2575 | return (S.*f)(4); | |
2576 | } | |
8b7773a4 MJ |
2577 | |
2578 | Moreover, the function also looks for called pointers loaded from aggregates | |
2579 | passed by value or reference. */ | |
3e293154 MJ |
2580 | |
2581 | static void | |
56b40062 | 2582 | ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call, |
8aab5218 | 2583 | tree target) |
3e293154 | 2584 | { |
99b1c316 | 2585 | class ipa_node_params *info = fbi->info; |
8b7773a4 MJ |
2586 | HOST_WIDE_INT offset; |
2587 | bool by_ref; | |
3e293154 | 2588 | |
3e293154 MJ |
2589 | if (SSA_NAME_IS_DEFAULT_DEF (target)) |
2590 | { | |
b258210c | 2591 | tree var = SSA_NAME_VAR (target); |
8aab5218 | 2592 | int index = ipa_get_param_decl_index (info, var); |
3e293154 | 2593 | if (index >= 0) |
40a777e8 | 2594 | ipa_note_param_call (fbi->node, index, call, false); |
3e293154 MJ |
2595 | return; |
2596 | } | |
2597 | ||
8aab5218 | 2598 | int index; |
355fe088 | 2599 | gimple *def = SSA_NAME_DEF_STMT (target); |
91bb9f80 | 2600 | bool guaranteed_unmodified; |
8b7773a4 | 2601 | if (gimple_assign_single_p (def) |
ff302741 PB |
2602 | && ipa_load_from_parm_agg (fbi, info->descriptors, def, |
2603 | gimple_assign_rhs1 (def), &index, &offset, | |
91bb9f80 | 2604 | NULL, &by_ref, &guaranteed_unmodified)) |
8b7773a4 | 2605 | { |
40a777e8 JH |
2606 | struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, |
2607 | call, false); | |
8b7773a4 MJ |
2608 | cs->indirect_info->offset = offset; |
2609 | cs->indirect_info->agg_contents = 1; | |
2610 | cs->indirect_info->by_ref = by_ref; | |
91bb9f80 | 2611 | cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified; |
8b7773a4 MJ |
2612 | return; |
2613 | } | |
2614 | ||
3e293154 MJ |
2615 | /* Now we need to try to match the complex pattern of calling a member |
2616 | pointer. */ | |
8b7773a4 MJ |
2617 | if (gimple_code (def) != GIMPLE_PHI |
2618 | || gimple_phi_num_args (def) != 2 | |
2619 | || !POINTER_TYPE_P (TREE_TYPE (target)) | |
3e293154 MJ |
2620 | || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE) |
2621 | return; | |
2622 | ||
3e293154 MJ |
2623 | /* First, we need to check whether one of these is a load from a member |
2624 | pointer that is a parameter to this function. */ | |
8aab5218 MJ |
2625 | tree n1 = PHI_ARG_DEF (def, 0); |
2626 | tree n2 = PHI_ARG_DEF (def, 1); | |
1fc8feb5 | 2627 | if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2)) |
3e293154 | 2628 | return; |
355fe088 TS |
2629 | gimple *d1 = SSA_NAME_DEF_STMT (n1); |
2630 | gimple *d2 = SSA_NAME_DEF_STMT (n2); | |
3e293154 | 2631 | |
8aab5218 MJ |
2632 | tree rec; |
2633 | basic_block bb, virt_bb; | |
2634 | basic_block join = gimple_bb (def); | |
8b7773a4 | 2635 | if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset))) |
3e293154 | 2636 | { |
8b7773a4 | 2637 | if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL)) |
3e293154 MJ |
2638 | return; |
2639 | ||
8aa29647 | 2640 | bb = EDGE_PRED (join, 0)->src; |
726a989a | 2641 | virt_bb = gimple_bb (d2); |
3e293154 | 2642 | } |
8b7773a4 | 2643 | else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset))) |
3e293154 | 2644 | { |
8aa29647 | 2645 | bb = EDGE_PRED (join, 1)->src; |
726a989a | 2646 | virt_bb = gimple_bb (d1); |
3e293154 MJ |
2647 | } |
2648 | else | |
2649 | return; | |
2650 | ||
2651 | /* Second, we need to check that the basic blocks are laid out in the way | |
2652 | corresponding to the pattern. */ | |
2653 | ||
3e293154 MJ |
2654 | if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb) |
2655 | || single_pred (virt_bb) != bb | |
2656 | || single_succ (virt_bb) != join) | |
2657 | return; | |
2658 | ||
2659 | /* Third, let's see that the branching is done depending on the least | |
2660 | significant bit of the pfn. */ | |
2661 | ||
355fe088 | 2662 | gimple *branch = last_stmt (bb); |
8aa29647 | 2663 | if (!branch || gimple_code (branch) != GIMPLE_COND) |
3e293154 MJ |
2664 | return; |
2665 | ||
12430896 RG |
2666 | if ((gimple_cond_code (branch) != NE_EXPR |
2667 | && gimple_cond_code (branch) != EQ_EXPR) | |
726a989a | 2668 | || !integer_zerop (gimple_cond_rhs (branch))) |
3e293154 | 2669 | return; |
3e293154 | 2670 | |
8aab5218 | 2671 | tree cond = gimple_cond_lhs (branch); |
3e293154 MJ |
2672 | if (!ipa_is_ssa_with_stmt_def (cond)) |
2673 | return; | |
2674 | ||
726a989a | 2675 | def = SSA_NAME_DEF_STMT (cond); |
8b75fc9b | 2676 | if (!is_gimple_assign (def) |
726a989a RB |
2677 | || gimple_assign_rhs_code (def) != BIT_AND_EXPR |
2678 | || !integer_onep (gimple_assign_rhs2 (def))) | |
3e293154 | 2679 | return; |
726a989a RB |
2680 | |
2681 | cond = gimple_assign_rhs1 (def); | |
3e293154 MJ |
2682 | if (!ipa_is_ssa_with_stmt_def (cond)) |
2683 | return; | |
2684 | ||
726a989a | 2685 | def = SSA_NAME_DEF_STMT (cond); |
3e293154 | 2686 | |
8b75fc9b MJ |
2687 | if (is_gimple_assign (def) |
2688 | && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def))) | |
3e293154 | 2689 | { |
726a989a | 2690 | cond = gimple_assign_rhs1 (def); |
3e293154 MJ |
2691 | if (!ipa_is_ssa_with_stmt_def (cond)) |
2692 | return; | |
726a989a | 2693 | def = SSA_NAME_DEF_STMT (cond); |
3e293154 MJ |
2694 | } |
2695 | ||
8aab5218 | 2696 | tree rec2; |
6f7b8b70 RE |
2697 | rec2 = ipa_get_stmt_member_ptr_load_param (def, |
2698 | (TARGET_PTRMEMFUNC_VBIT_LOCATION | |
8b7773a4 MJ |
2699 | == ptrmemfunc_vbit_in_delta), |
2700 | NULL); | |
3e293154 MJ |
2701 | if (rec != rec2) |
2702 | return; | |
2703 | ||
2704 | index = ipa_get_param_decl_index (info, rec); | |
8b7773a4 | 2705 | if (index >= 0 |
8aab5218 | 2706 | && parm_preserved_before_stmt_p (fbi, index, call, rec)) |
8b7773a4 | 2707 | { |
40a777e8 JH |
2708 | struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, |
2709 | call, false); | |
8b7773a4 MJ |
2710 | cs->indirect_info->offset = offset; |
2711 | cs->indirect_info->agg_contents = 1; | |
c13bc3d9 | 2712 | cs->indirect_info->member_ptr = 1; |
91bb9f80 | 2713 | cs->indirect_info->guaranteed_unmodified = 1; |
8b7773a4 | 2714 | } |
3e293154 MJ |
2715 | |
2716 | return; | |
2717 | } | |
2718 | ||
b258210c MJ |
2719 | /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the |
2720 | object referenced in the expression is a formal parameter of the caller | |
8aab5218 MJ |
2721 | FBI->node (described by FBI->info), create a call note for the |
2722 | statement. */ | |
b258210c MJ |
2723 | |
2724 | static void | |
56b40062 | 2725 | ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi, |
538dd0b7 | 2726 | gcall *call, tree target) |
b258210c MJ |
2727 | { |
2728 | tree obj = OBJ_TYPE_REF_OBJECT (target); | |
b258210c | 2729 | int index; |
40591473 | 2730 | HOST_WIDE_INT anc_offset; |
b258210c | 2731 | |
05842ff5 MJ |
2732 | if (!flag_devirtualize) |
2733 | return; | |
2734 | ||
40591473 | 2735 | if (TREE_CODE (obj) != SSA_NAME) |
b258210c MJ |
2736 | return; |
2737 | ||
99b1c316 | 2738 | class ipa_node_params *info = fbi->info; |
40591473 MJ |
2739 | if (SSA_NAME_IS_DEFAULT_DEF (obj)) |
2740 | { | |
2741 | if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL) | |
2742 | return; | |
b258210c | 2743 | |
40591473 MJ |
2744 | anc_offset = 0; |
2745 | index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj)); | |
2746 | gcc_assert (index >= 0); | |
c628d1c3 | 2747 | if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target), |
c199f329 | 2748 | call)) |
40591473 MJ |
2749 | return; |
2750 | } | |
2751 | else | |
2752 | { | |
355fe088 | 2753 | gimple *stmt = SSA_NAME_DEF_STMT (obj); |
40591473 MJ |
2754 | tree expr; |
2755 | ||
2756 | expr = get_ancestor_addr_info (stmt, &obj, &anc_offset); | |
2757 | if (!expr) | |
2758 | return; | |
2759 | index = ipa_get_param_decl_index (info, | |
2760 | SSA_NAME_VAR (TREE_OPERAND (expr, 0))); | |
2761 | gcc_assert (index >= 0); | |
c628d1c3 | 2762 | if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target), |
c199f329 | 2763 | call, anc_offset)) |
40591473 MJ |
2764 | return; |
2765 | } | |
2766 | ||
40a777e8 JH |
2767 | struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, |
2768 | call, true); | |
99b1c316 | 2769 | class cgraph_indirect_call_info *ii = cs->indirect_info; |
8b7773a4 | 2770 | ii->offset = anc_offset; |
ae7e9ddd | 2771 | ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target)); |
c49bdb2e | 2772 | ii->otr_type = obj_type_ref_class (target); |
40591473 | 2773 | ii->polymorphic = 1; |
b258210c MJ |
2774 | } |
2775 | ||
2776 | /* Analyze a call statement CALL whether and how it utilizes formal parameters | |
c419671c | 2777 | of the caller (described by INFO). PARMS_AINFO is a pointer to a vector |
062c604f | 2778 | containing intermediate information about each formal parameter. */ |
b258210c MJ |
2779 | |
2780 | static void | |
56b40062 | 2781 | ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call) |
b258210c MJ |
2782 | { |
2783 | tree target = gimple_call_fn (call); | |
b786d31f JH |
2784 | |
2785 | if (!target | |
2786 | || (TREE_CODE (target) != SSA_NAME | |
2787 | && !virtual_method_call_p (target))) | |
2788 | return; | |
b258210c | 2789 | |
7d0aa05b | 2790 | struct cgraph_edge *cs = fbi->node->get_edge (call); |
b786d31f JH |
2791 | /* If we previously turned the call into a direct call, there is |
2792 | no need to analyze. */ | |
b786d31f | 2793 | if (cs && !cs->indirect_unknown_callee) |
25583c4f | 2794 | return; |
7d0aa05b | 2795 | |
a5b58b28 | 2796 | if (cs->indirect_info->polymorphic && flag_devirtualize) |
7d0aa05b | 2797 | { |
7d0aa05b JH |
2798 | tree instance; |
2799 | tree target = gimple_call_fn (call); | |
6f8091fc JH |
2800 | ipa_polymorphic_call_context context (current_function_decl, |
2801 | target, call, &instance); | |
7d0aa05b | 2802 | |
ba392339 JH |
2803 | gcc_checking_assert (cs->indirect_info->otr_type |
2804 | == obj_type_ref_class (target)); | |
2805 | gcc_checking_assert (cs->indirect_info->otr_token | |
2806 | == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target))); | |
7d0aa05b | 2807 | |
29c43c83 JH |
2808 | cs->indirect_info->vptr_changed |
2809 | = !context.get_dynamic_type (instance, | |
2810 | OBJ_TYPE_REF_OBJECT (target), | |
c628d1c3 MJ |
2811 | obj_type_ref_class (target), call, |
2812 | &fbi->aa_walk_budget); | |
0127c169 | 2813 | cs->indirect_info->context = context; |
7d0aa05b JH |
2814 | } |
2815 | ||
b258210c | 2816 | if (TREE_CODE (target) == SSA_NAME) |
8aab5218 | 2817 | ipa_analyze_indirect_call_uses (fbi, call, target); |
1d5755ef | 2818 | else if (virtual_method_call_p (target)) |
8aab5218 | 2819 | ipa_analyze_virtual_call_uses (fbi, call, target); |
b258210c MJ |
2820 | } |
2821 | ||
2822 | ||
e33c6cd6 | 2823 | /* Analyze the call statement STMT with respect to formal parameters (described |
8aab5218 MJ |
2824 | in INFO) of caller given by FBI->NODE. Currently it only checks whether |
2825 | formal parameters are called. */ | |
be95e2b9 | 2826 | |
3e293154 | 2827 | static void |
355fe088 | 2828 | ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt) |
3e293154 | 2829 | { |
726a989a | 2830 | if (is_gimple_call (stmt)) |
538dd0b7 | 2831 | ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt)); |
062c604f MJ |
2832 | } |
2833 | ||
2834 | /* Callback of walk_stmt_load_store_addr_ops for the visit_load. | |
2835 | If OP is a parameter declaration, mark it as used in the info structure | |
2836 | passed in DATA. */ | |
2837 | ||
2838 | static bool | |
355fe088 | 2839 | visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data) |
062c604f | 2840 | { |
99b1c316 | 2841 | class ipa_node_params *info = (class ipa_node_params *) data; |
062c604f MJ |
2842 | |
2843 | op = get_base_address (op); | |
2844 | if (op | |
2845 | && TREE_CODE (op) == PARM_DECL) | |
2846 | { | |
2847 | int index = ipa_get_param_decl_index (info, op); | |
2848 | gcc_assert (index >= 0); | |
310bc633 | 2849 | ipa_set_param_used (info, index, true); |
062c604f MJ |
2850 | } |
2851 | ||
2852 | return false; | |
3e293154 MJ |
2853 | } |
2854 | ||
8aab5218 MJ |
2855 | /* Scan the statements in BB and inspect the uses of formal parameters. Store |
2856 | the findings in various structures of the associated ipa_node_params | |
2857 | structure, such as parameter flags, notes etc. FBI holds various data about | |
2858 | the function being analyzed. */ | |
be95e2b9 | 2859 | |
062c604f | 2860 | static void |
56b40062 | 2861 | ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb) |
3e293154 | 2862 | { |
726a989a | 2863 | gimple_stmt_iterator gsi; |
8aab5218 MJ |
2864 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
2865 | { | |
355fe088 | 2866 | gimple *stmt = gsi_stmt (gsi); |
3e293154 | 2867 | |
8aab5218 MJ |
2868 | if (is_gimple_debug (stmt)) |
2869 | continue; | |
3e293154 | 2870 | |
8aab5218 MJ |
2871 | ipa_analyze_stmt_uses (fbi, stmt); |
2872 | walk_stmt_load_store_addr_ops (stmt, fbi->info, | |
2873 | visit_ref_for_mod_analysis, | |
2874 | visit_ref_for_mod_analysis, | |
2875 | visit_ref_for_mod_analysis); | |
5fe8e757 | 2876 | } |
8aab5218 MJ |
2877 | for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
2878 | walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info, | |
2879 | visit_ref_for_mod_analysis, | |
2880 | visit_ref_for_mod_analysis, | |
2881 | visit_ref_for_mod_analysis); | |
2882 | } | |
2883 | ||
13586172 MJ |
2884 | /* Return true EXPR is a load from a dereference of SSA_NAME NAME. */ |
2885 | ||
2886 | static bool | |
2887 | load_from_dereferenced_name (tree expr, tree name) | |
2888 | { | |
2889 | tree base = get_base_address (expr); | |
2890 | return (TREE_CODE (base) == MEM_REF | |
2891 | && TREE_OPERAND (base, 0) == name); | |
2892 | } | |
2893 | ||
8aab5218 MJ |
2894 | /* Calculate controlled uses of parameters of NODE. */ |
2895 | ||
2896 | static void | |
2897 | ipa_analyze_controlled_uses (struct cgraph_node *node) | |
2898 | { | |
a4a3cdd0 | 2899 | ipa_node_params *info = ipa_node_params_sum->get (node); |
5fe8e757 | 2900 | |
8aab5218 | 2901 | for (int i = 0; i < ipa_get_param_count (info); i++) |
062c604f MJ |
2902 | { |
2903 | tree parm = ipa_get_param (info, i); | |
13586172 MJ |
2904 | int call_uses = 0; |
2905 | bool load_dereferenced = false; | |
4502fe8d | 2906 | |
062c604f MJ |
2907 | /* For SSA regs see if parameter is used. For non-SSA we compute |
2908 | the flag during modification analysis. */ | |
4502fe8d MJ |
2909 | if (is_gimple_reg (parm)) |
2910 | { | |
67348ccc | 2911 | tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), |
4502fe8d MJ |
2912 | parm); |
2913 | if (ddef && !has_zero_uses (ddef)) | |
2914 | { | |
2915 | imm_use_iterator imm_iter; | |
13586172 | 2916 | gimple *stmt; |
4502fe8d MJ |
2917 | |
2918 | ipa_set_param_used (info, i, true); | |
13586172 MJ |
2919 | FOR_EACH_IMM_USE_STMT (stmt, imm_iter, ddef) |
2920 | { | |
2921 | if (is_gimple_debug (stmt)) | |
2922 | continue; | |
2923 | ||
2924 | int all_stmt_uses = 0; | |
2925 | use_operand_p use_p; | |
2926 | FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter) | |
2927 | all_stmt_uses++; | |
2928 | ||
2929 | if (is_gimple_call (stmt)) | |
2930 | { | |
2931 | if (gimple_call_internal_p (stmt)) | |
2932 | { | |
2933 | call_uses = IPA_UNDESCRIBED_USE; | |
2934 | break; | |
2935 | } | |
2936 | int recognized_stmt_uses; | |
2937 | if (gimple_call_fn (stmt) == ddef) | |
2938 | recognized_stmt_uses = 1; | |
2939 | else | |
2940 | recognized_stmt_uses = 0; | |
2941 | unsigned arg_count = gimple_call_num_args (stmt); | |
2942 | for (unsigned i = 0; i < arg_count; i++) | |
2943 | { | |
2944 | tree arg = gimple_call_arg (stmt, i); | |
2945 | if (arg == ddef) | |
2946 | recognized_stmt_uses++; | |
2947 | else if (load_from_dereferenced_name (arg, ddef)) | |
2948 | { | |
2949 | load_dereferenced = true; | |
2950 | recognized_stmt_uses++; | |
2951 | } | |
2952 | } | |
2953 | ||
2954 | if (recognized_stmt_uses != all_stmt_uses) | |
2955 | { | |
2956 | call_uses = IPA_UNDESCRIBED_USE; | |
2957 | break; | |
2958 | } | |
2959 | if (call_uses >= 0) | |
2960 | call_uses += all_stmt_uses; | |
2961 | } | |
2962 | else if (gimple_assign_single_p (stmt)) | |
2963 | { | |
2964 | tree rhs = gimple_assign_rhs1 (stmt); | |
2965 | if (all_stmt_uses != 1 | |
2966 | || !load_from_dereferenced_name (rhs, ddef)) | |
2967 | { | |
2968 | call_uses = IPA_UNDESCRIBED_USE; | |
2969 | break; | |
2970 | } | |
2971 | load_dereferenced = true; | |
2972 | } | |
2973 | else | |
2974 | { | |
2975 | call_uses = IPA_UNDESCRIBED_USE; | |
2976 | break; | |
2977 | } | |
2978 | } | |
4502fe8d MJ |
2979 | } |
2980 | else | |
13586172 | 2981 | call_uses = 0; |
4502fe8d MJ |
2982 | } |
2983 | else | |
13586172 MJ |
2984 | call_uses = IPA_UNDESCRIBED_USE; |
2985 | ipa_set_controlled_uses (info, i, call_uses); | |
2986 | ipa_set_param_load_dereferenced (info, i, load_dereferenced); | |
062c604f | 2987 | } |
8aab5218 | 2988 | } |
062c604f | 2989 | |
8aab5218 | 2990 | /* Free stuff in BI. */ |
062c604f | 2991 | |
8aab5218 MJ |
2992 | static void |
2993 | free_ipa_bb_info (struct ipa_bb_info *bi) | |
2994 | { | |
2995 | bi->cg_edges.release (); | |
2996 | bi->param_aa_statuses.release (); | |
3e293154 MJ |
2997 | } |
2998 | ||
8aab5218 | 2999 | /* Dominator walker driving the analysis. */ |
2c9561b5 | 3000 | |
8aab5218 | 3001 | class analysis_dom_walker : public dom_walker |
2c9561b5 | 3002 | { |
8aab5218 | 3003 | public: |
56b40062 | 3004 | analysis_dom_walker (struct ipa_func_body_info *fbi) |
8aab5218 | 3005 | : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {} |
2c9561b5 | 3006 | |
3daacdcd | 3007 | virtual edge before_dom_children (basic_block); |
8aab5218 MJ |
3008 | |
3009 | private: | |
56b40062 | 3010 | struct ipa_func_body_info *m_fbi; |
8aab5218 MJ |
3011 | }; |
3012 | ||
3daacdcd | 3013 | edge |
8aab5218 MJ |
3014 | analysis_dom_walker::before_dom_children (basic_block bb) |
3015 | { | |
3016 | ipa_analyze_params_uses_in_bb (m_fbi, bb); | |
3017 | ipa_compute_jump_functions_for_bb (m_fbi, bb); | |
3daacdcd | 3018 | return NULL; |
2c9561b5 MJ |
3019 | } |
3020 | ||
c3431191 ML |
3021 | /* Release body info FBI. */ |
3022 | ||
3023 | void | |
3024 | ipa_release_body_info (struct ipa_func_body_info *fbi) | |
3025 | { | |
3026 | int i; | |
3027 | struct ipa_bb_info *bi; | |
3028 | ||
3029 | FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi) | |
3030 | free_ipa_bb_info (bi); | |
3031 | fbi->bb_infos.release (); | |
3032 | } | |
3033 | ||
026c3cfd | 3034 | /* Initialize the array describing properties of formal parameters |
dd5a833e MS |
3035 | of NODE, analyze their uses and compute jump functions associated |
3036 | with actual arguments of calls from within NODE. */ | |
062c604f MJ |
3037 | |
3038 | void | |
3039 | ipa_analyze_node (struct cgraph_node *node) | |
3040 | { | |
56b40062 | 3041 | struct ipa_func_body_info fbi; |
99b1c316 | 3042 | class ipa_node_params *info; |
062c604f | 3043 | |
57dbdc5a MJ |
3044 | ipa_check_create_node_params (); |
3045 | ipa_check_create_edge_args (); | |
a4a3cdd0 | 3046 | info = ipa_node_params_sum->get_create (node); |
8aab5218 MJ |
3047 | |
3048 | if (info->analysis_done) | |
3049 | return; | |
3050 | info->analysis_done = 1; | |
3051 | ||
3052 | if (ipa_func_spec_opts_forbid_analysis_p (node)) | |
3053 | { | |
3054 | for (int i = 0; i < ipa_get_param_count (info); i++) | |
3055 | { | |
3056 | ipa_set_param_used (info, i, true); | |
3057 | ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE); | |
3058 | } | |
3059 | return; | |
3060 | } | |
3061 | ||
3062 | struct function *func = DECL_STRUCT_FUNCTION (node->decl); | |
3063 | push_cfun (func); | |
3064 | calculate_dominance_info (CDI_DOMINATORS); | |
062c604f | 3065 | ipa_initialize_node_params (node); |
8aab5218 | 3066 | ipa_analyze_controlled_uses (node); |
062c604f | 3067 | |
8aab5218 | 3068 | fbi.node = node; |
a4a3cdd0 | 3069 | fbi.info = info; |
8aab5218 | 3070 | fbi.bb_infos = vNULL; |
cb3874dc | 3071 | fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true); |
8aab5218 | 3072 | fbi.param_count = ipa_get_param_count (info); |
fdfd7f53 | 3073 | fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps); |
062c604f | 3074 | |
8aab5218 MJ |
3075 | for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee) |
3076 | { | |
3077 | ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt)); | |
3078 | bi->cg_edges.safe_push (cs); | |
3079 | } | |
062c604f | 3080 | |
8aab5218 MJ |
3081 | for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee) |
3082 | { | |
3083 | ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt)); | |
3084 | bi->cg_edges.safe_push (cs); | |
3085 | } | |
3086 | ||
3087 | analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun)); | |
3088 | ||
c3431191 | 3089 | ipa_release_body_info (&fbi); |
8aab5218 | 3090 | free_dominance_info (CDI_DOMINATORS); |
f65cf2b7 | 3091 | pop_cfun (); |
062c604f | 3092 | } |
062c604f | 3093 | |
be95e2b9 | 3094 | /* Update the jump functions associated with call graph edge E when the call |
3e293154 | 3095 | graph edge CS is being inlined, assuming that E->caller is already (possibly |
b258210c | 3096 | indirectly) inlined into CS->callee and that E has not been inlined. */ |
be95e2b9 | 3097 | |
3e293154 MJ |
3098 | static void |
3099 | update_jump_functions_after_inlining (struct cgraph_edge *cs, | |
3100 | struct cgraph_edge *e) | |
3101 | { | |
a4a3cdd0 MJ |
3102 | ipa_edge_args *top = ipa_edge_args_sum->get (cs); |
3103 | ipa_edge_args *args = ipa_edge_args_sum->get (e); | |
a33c028e JH |
3104 | if (!args) |
3105 | return; | |
3e293154 MJ |
3106 | int count = ipa_get_cs_argument_count (args); |
3107 | int i; | |
3108 | ||
3109 | for (i = 0; i < count; i++) | |
3110 | { | |
b258210c | 3111 | struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i); |
99b1c316 | 3112 | class ipa_polymorphic_call_context *dst_ctx |
5ce97055 | 3113 | = ipa_get_ith_polymorhic_call_context (args, i); |
3e293154 | 3114 | |
eb270950 FX |
3115 | if (dst->agg.items) |
3116 | { | |
3117 | struct ipa_agg_jf_item *item; | |
3118 | int j; | |
3119 | ||
3120 | FOR_EACH_VEC_ELT (*dst->agg.items, j, item) | |
3121 | { | |
3122 | int dst_fid; | |
3123 | struct ipa_jump_func *src; | |
3124 | ||
3125 | if (item->jftype != IPA_JF_PASS_THROUGH | |
3126 | && item->jftype != IPA_JF_LOAD_AGG) | |
3127 | continue; | |
3128 | ||
3129 | dst_fid = item->value.pass_through.formal_id; | |
1c3c3f45 | 3130 | if (!top || dst_fid >= ipa_get_cs_argument_count (top)) |
eb270950 FX |
3131 | { |
3132 | item->jftype = IPA_JF_UNKNOWN; | |
3133 | continue; | |
3134 | } | |
3135 | ||
3136 | item->value.pass_through.formal_id = -1; | |
3137 | src = ipa_get_ith_jump_func (top, dst_fid); | |
3138 | if (src->type == IPA_JF_CONST) | |
3139 | { | |
3140 | if (item->jftype == IPA_JF_PASS_THROUGH | |
3141 | && item->value.pass_through.operation == NOP_EXPR) | |
3142 | { | |
3143 | item->jftype = IPA_JF_CONST; | |
3144 | item->value.constant = src->value.constant.value; | |
3145 | continue; | |
3146 | } | |
3147 | } | |
3148 | else if (src->type == IPA_JF_PASS_THROUGH | |
3149 | && src->value.pass_through.operation == NOP_EXPR) | |
3150 | { | |
3151 | if (item->jftype == IPA_JF_PASS_THROUGH | |
3152 | || !item->value.load_agg.by_ref | |
3153 | || src->value.pass_through.agg_preserved) | |
3154 | item->value.pass_through.formal_id | |
3155 | = src->value.pass_through.formal_id; | |
3156 | } | |
3157 | else if (src->type == IPA_JF_ANCESTOR) | |
3158 | { | |
3159 | if (item->jftype == IPA_JF_PASS_THROUGH) | |
3160 | { | |
3161 | if (!src->value.ancestor.offset) | |
3162 | item->value.pass_through.formal_id | |
3163 | = src->value.ancestor.formal_id; | |
3164 | } | |
3165 | else if (src->value.ancestor.agg_preserved) | |
3166 | { | |
3167 | gcc_checking_assert (item->value.load_agg.by_ref); | |
3168 | ||
3169 | item->value.pass_through.formal_id | |
3170 | = src->value.ancestor.formal_id; | |
3171 | item->value.load_agg.offset | |
3172 | += src->value.ancestor.offset; | |
3173 | } | |
3174 | } | |
3175 | ||
3176 | if (item->value.pass_through.formal_id < 0) | |
3177 | item->jftype = IPA_JF_UNKNOWN; | |
3178 | } | |
3179 | } | |
3180 | ||
1c3c3f45 FX |
3181 | if (!top) |
3182 | { | |
3183 | ipa_set_jf_unknown (dst); | |
3184 | continue; | |
3185 | } | |
3186 | ||
685b0d13 MJ |
3187 | if (dst->type == IPA_JF_ANCESTOR) |
3188 | { | |
b258210c | 3189 | struct ipa_jump_func *src; |
8b7773a4 | 3190 | int dst_fid = dst->value.ancestor.formal_id; |
99b1c316 | 3191 | class ipa_polymorphic_call_context *src_ctx |
5ce97055 | 3192 | = ipa_get_ith_polymorhic_call_context (top, dst_fid); |
685b0d13 | 3193 | |
b258210c MJ |
3194 | /* Variable number of arguments can cause havoc if we try to access |
3195 | one that does not exist in the inlined edge. So make sure we | |
3196 | don't. */ | |
8b7773a4 | 3197 | if (dst_fid >= ipa_get_cs_argument_count (top)) |
b258210c | 3198 | { |
04be694e | 3199 | ipa_set_jf_unknown (dst); |
b258210c MJ |
3200 | continue; |
3201 | } | |
3202 | ||
8b7773a4 MJ |
3203 | src = ipa_get_ith_jump_func (top, dst_fid); |
3204 | ||
5ce97055 JH |
3205 | if (src_ctx && !src_ctx->useless_p ()) |
3206 | { | |
99b1c316 | 3207 | class ipa_polymorphic_call_context ctx = *src_ctx; |
5ce97055 JH |
3208 | |
3209 | /* TODO: Make type preserved safe WRT contexts. */ | |
44210a96 | 3210 | if (!ipa_get_jf_ancestor_type_preserved (dst)) |
f9bb202b | 3211 | ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor); |
5ce97055 JH |
3212 | ctx.offset_by (dst->value.ancestor.offset); |
3213 | if (!ctx.useless_p ()) | |
3214 | { | |
a7d1f3fe ML |
3215 | if (!dst_ctx) |
3216 | { | |
3217 | vec_safe_grow_cleared (args->polymorphic_call_contexts, | |
cb3874dc | 3218 | count, true); |
a7d1f3fe ML |
3219 | dst_ctx = ipa_get_ith_polymorhic_call_context (args, i); |
3220 | } | |
3221 | ||
3222 | dst_ctx->combine_with (ctx); | |
5ce97055 JH |
3223 | } |
3224 | } | |
3225 | ||
eb270950 FX |
3226 | /* Parameter and argument in ancestor jump function must be pointer |
3227 | type, which means access to aggregate must be by-reference. */ | |
3228 | gcc_assert (!src->agg.items || src->agg.by_ref); | |
3229 | ||
3230 | if (src->agg.items && dst->value.ancestor.agg_preserved) | |
8b7773a4 MJ |
3231 | { |
3232 | struct ipa_agg_jf_item *item; | |
3233 | int j; | |
3234 | ||
3235 | /* Currently we do not produce clobber aggregate jump functions, | |
3236 | replace with merging when we do. */ | |
3237 | gcc_assert (!dst->agg.items); | |
3238 | ||
9771b263 | 3239 | dst->agg.items = vec_safe_copy (src->agg.items); |
8b7773a4 | 3240 | dst->agg.by_ref = src->agg.by_ref; |
9771b263 | 3241 | FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item) |
8b7773a4 MJ |
3242 | item->offset -= dst->value.ancestor.offset; |
3243 | } | |
3244 | ||
3b97a5c7 MJ |
3245 | if (src->type == IPA_JF_PASS_THROUGH |
3246 | && src->value.pass_through.operation == NOP_EXPR) | |
8b7773a4 MJ |
3247 | { |
3248 | dst->value.ancestor.formal_id = src->value.pass_through.formal_id; | |
3249 | dst->value.ancestor.agg_preserved &= | |
3250 | src->value.pass_through.agg_preserved; | |
3251 | } | |
b258210c MJ |
3252 | else if (src->type == IPA_JF_ANCESTOR) |
3253 | { | |
3254 | dst->value.ancestor.formal_id = src->value.ancestor.formal_id; | |
3255 | dst->value.ancestor.offset += src->value.ancestor.offset; | |
8b7773a4 MJ |
3256 | dst->value.ancestor.agg_preserved &= |
3257 | src->value.ancestor.agg_preserved; | |
7ea3a73c | 3258 | dst->value.ancestor.keep_null |= src->value.ancestor.keep_null; |
b258210c MJ |
3259 | } |
3260 | else | |
04be694e | 3261 | ipa_set_jf_unknown (dst); |
b258210c MJ |
3262 | } |
3263 | else if (dst->type == IPA_JF_PASS_THROUGH) | |
3e293154 | 3264 | { |
b258210c MJ |
3265 | struct ipa_jump_func *src; |
3266 | /* We must check range due to calls with variable number of arguments | |
3267 | and we cannot combine jump functions with operations. */ | |
3268 | if (dst->value.pass_through.operation == NOP_EXPR | |
5a0236f8 | 3269 | && (top && dst->value.pass_through.formal_id |
b258210c MJ |
3270 | < ipa_get_cs_argument_count (top))) |
3271 | { | |
8b7773a4 MJ |
3272 | int dst_fid = dst->value.pass_through.formal_id; |
3273 | src = ipa_get_ith_jump_func (top, dst_fid); | |
b8f6e610 | 3274 | bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst); |
99b1c316 | 3275 | class ipa_polymorphic_call_context *src_ctx |
5ce97055 | 3276 | = ipa_get_ith_polymorhic_call_context (top, dst_fid); |
8b7773a4 | 3277 | |
5ce97055 JH |
3278 | if (src_ctx && !src_ctx->useless_p ()) |
3279 | { | |
99b1c316 | 3280 | class ipa_polymorphic_call_context ctx = *src_ctx; |
5ce97055 JH |
3281 | |
3282 | /* TODO: Make type preserved safe WRT contexts. */ | |
44210a96 | 3283 | if (!ipa_get_jf_pass_through_type_preserved (dst)) |
f9bb202b | 3284 | ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor); |
5ce97055 JH |
3285 | if (!ctx.useless_p ()) |
3286 | { | |
3287 | if (!dst_ctx) | |
3288 | { | |
3289 | vec_safe_grow_cleared (args->polymorphic_call_contexts, | |
cb3874dc | 3290 | count, true); |
5ce97055 JH |
3291 | dst_ctx = ipa_get_ith_polymorhic_call_context (args, i); |
3292 | } | |
3293 | dst_ctx->combine_with (ctx); | |
3294 | } | |
3295 | } | |
b8f6e610 MJ |
3296 | switch (src->type) |
3297 | { | |
3298 | case IPA_JF_UNKNOWN: | |
04be694e | 3299 | ipa_set_jf_unknown (dst); |
b8f6e610 | 3300 | break; |
b8f6e610 MJ |
3301 | case IPA_JF_CONST: |
3302 | ipa_set_jf_cst_copy (dst, src); | |
3303 | break; | |
3304 | ||
3305 | case IPA_JF_PASS_THROUGH: | |
3306 | { | |
3307 | int formal_id = ipa_get_jf_pass_through_formal_id (src); | |
3308 | enum tree_code operation; | |
3309 | operation = ipa_get_jf_pass_through_operation (src); | |
3310 | ||
3311 | if (operation == NOP_EXPR) | |
3312 | { | |
3b97a5c7 | 3313 | bool agg_p; |
b8f6e610 MJ |
3314 | agg_p = dst_agg_p |
3315 | && ipa_get_jf_pass_through_agg_preserved (src); | |
3b97a5c7 | 3316 | ipa_set_jf_simple_pass_through (dst, formal_id, agg_p); |
b8f6e610 | 3317 | } |
a2b4c188 KV |
3318 | else if (TREE_CODE_CLASS (operation) == tcc_unary) |
3319 | ipa_set_jf_unary_pass_through (dst, formal_id, operation); | |
b8f6e610 MJ |
3320 | else |
3321 | { | |
3322 | tree operand = ipa_get_jf_pass_through_operand (src); | |
3323 | ipa_set_jf_arith_pass_through (dst, formal_id, operand, | |
3324 | operation); | |
3325 | } | |
3326 | break; | |
3327 | } | |
3328 | case IPA_JF_ANCESTOR: | |
3329 | { | |
3b97a5c7 | 3330 | bool agg_p; |
b8f6e610 MJ |
3331 | agg_p = dst_agg_p |
3332 | && ipa_get_jf_ancestor_agg_preserved (src); | |
b8f6e610 MJ |
3333 | ipa_set_ancestor_jf (dst, |
3334 | ipa_get_jf_ancestor_offset (src), | |
b8f6e610 | 3335 | ipa_get_jf_ancestor_formal_id (src), |
7ea3a73c MJ |
3336 | agg_p, |
3337 | ipa_get_jf_ancestor_keep_null (src)); | |
b8f6e610 MJ |
3338 | break; |
3339 | } | |
3340 | default: | |
3341 | gcc_unreachable (); | |
3342 | } | |
8b7773a4 MJ |
3343 | |
3344 | if (src->agg.items | |
b8f6e610 | 3345 | && (dst_agg_p || !src->agg.by_ref)) |
8b7773a4 MJ |
3346 | { |
3347 | /* Currently we do not produce clobber aggregate jump | |
3348 | functions, replace with merging when we do. */ | |
3349 | gcc_assert (!dst->agg.items); | |
3350 | ||
3351 | dst->agg.by_ref = src->agg.by_ref; | |
9771b263 | 3352 | dst->agg.items = vec_safe_copy (src->agg.items); |
8b7773a4 | 3353 | } |
b258210c MJ |
3354 | } |
3355 | else | |
04be694e | 3356 | ipa_set_jf_unknown (dst); |
3e293154 | 3357 | } |
b258210c MJ |
3358 | } |
3359 | } | |
3360 | ||
5ce97055 JH |
3361 | /* If TARGET is an addr_expr of a function declaration, make it the |
3362 | (SPECULATIVE)destination of an indirect edge IE and return the edge. | |
3363 | Otherwise, return NULL. */ | |
b258210c | 3364 | |
3949c4a7 | 3365 | struct cgraph_edge * |
5ce97055 JH |
3366 | ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target, |
3367 | bool speculative) | |
b258210c MJ |
3368 | { |
3369 | struct cgraph_node *callee; | |
48b1474e | 3370 | bool unreachable = false; |
b258210c | 3371 | |
ceeffab0 MJ |
3372 | if (TREE_CODE (target) == ADDR_EXPR) |
3373 | target = TREE_OPERAND (target, 0); | |
b258210c | 3374 | if (TREE_CODE (target) != FUNCTION_DECL) |
a0a7b611 JH |
3375 | { |
3376 | target = canonicalize_constructor_val (target, NULL); | |
3377 | if (!target || TREE_CODE (target) != FUNCTION_DECL) | |
3378 | { | |
db66bf68 JH |
3379 | /* Member pointer call that goes through a VMT lookup. */ |
3380 | if (ie->indirect_info->member_ptr | |
3381 | /* Or if target is not an invariant expression and we do not | |
3382 | know if it will evaulate to function at runtime. | |
3383 | This can happen when folding through &VAR, where &VAR | |
3384 | is IP invariant, but VAR itself is not. | |
3385 | ||
3386 | TODO: Revisit this when GCC 5 is branched. It seems that | |
3387 | member_ptr check is not needed and that we may try to fold | |
3388 | the expression and see if VAR is readonly. */ | |
3389 | || !is_gimple_ip_invariant (target)) | |
3390 | { | |
3391 | if (dump_enabled_p ()) | |
3392 | { | |
4f5b9c80 | 3393 | dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt, |
464d0118 ML |
3394 | "discovered direct call non-invariant %s\n", |
3395 | ie->caller->dump_name ()); | |
db66bf68 JH |
3396 | } |
3397 | return NULL; | |
3398 | } | |
3399 | ||
c13bc3d9 | 3400 | |
2b5f0895 XDL |
3401 | if (dump_enabled_p ()) |
3402 | { | |
4f5b9c80 | 3403 | dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt, |
464d0118 | 3404 | "discovered direct call to non-function in %s, " |
807b7d62 | 3405 | "making it __builtin_unreachable\n", |
464d0118 | 3406 | ie->caller->dump_name ()); |
2b5f0895 | 3407 | } |
3c9e6fca | 3408 | |
48b1474e | 3409 | target = builtin_decl_implicit (BUILT_IN_UNREACHABLE); |
d52f5295 | 3410 | callee = cgraph_node::get_create (target); |
48b1474e | 3411 | unreachable = true; |
a0a7b611 | 3412 | } |
48b1474e | 3413 | else |
d52f5295 | 3414 | callee = cgraph_node::get (target); |
a0a7b611 | 3415 | } |
48b1474e | 3416 | else |
d52f5295 | 3417 | callee = cgraph_node::get (target); |
a0a7b611 JH |
3418 | |
3419 | /* Because may-edges are not explicitely represented and vtable may be external, | |
3420 | we may create the first reference to the object in the unit. */ | |
a62bfab5 | 3421 | if (!callee || callee->inlined_to) |
a0a7b611 | 3422 | { |
a0a7b611 JH |
3423 | |
3424 | /* We are better to ensure we can refer to it. | |
3425 | In the case of static functions we are out of luck, since we already | |
3426 | removed its body. In the case of public functions we may or may | |
3427 | not introduce the reference. */ | |
3428 | if (!canonicalize_constructor_val (target, NULL) | |
3429 | || !TREE_PUBLIC (target)) | |
3430 | { | |
3431 | if (dump_file) | |
3432 | fprintf (dump_file, "ipa-prop: Discovered call to a known target " | |
845bb366 | 3433 | "(%s -> %s) but cannot refer to it. Giving up.\n", |
464d0118 ML |
3434 | ie->caller->dump_name (), |
3435 | ie->callee->dump_name ()); | |
a0a7b611 JH |
3436 | return NULL; |
3437 | } | |
d52f5295 | 3438 | callee = cgraph_node::get_create (target); |
a0a7b611 | 3439 | } |
2b5f0895 | 3440 | |
0127c169 JH |
3441 | /* If the edge is already speculated. */ |
3442 | if (speculative && ie->speculative) | |
3443 | { | |
845bb366 | 3444 | if (dump_file) |
0127c169 | 3445 | { |
845bb366 JH |
3446 | cgraph_edge *e2 = ie->speculative_call_for_target (callee); |
3447 | if (!e2) | |
3448 | { | |
3449 | if (dump_file) | |
3450 | fprintf (dump_file, "ipa-prop: Discovered call to a " | |
3451 | "speculative target (%s -> %s) but the call is " | |
3452 | "already speculated to different target. " | |
3453 | "Giving up.\n", | |
3454 | ie->caller->dump_name (), callee->dump_name ()); | |
3455 | } | |
3456 | else | |
3457 | { | |
3458 | if (dump_file) | |
3459 | fprintf (dump_file, | |
3460 | "ipa-prop: Discovered call to a speculative target " | |
3461 | "(%s -> %s) this agree with previous speculation.\n", | |
3462 | ie->caller->dump_name (), callee->dump_name ()); | |
3463 | } | |
0127c169 JH |
3464 | } |
3465 | return NULL; | |
3466 | } | |
3467 | ||
2b5f0895 XDL |
3468 | if (!dbg_cnt (devirt)) |
3469 | return NULL; | |
3470 | ||
1dbee8c9 | 3471 | ipa_check_create_node_params (); |
ceeffab0 | 3472 | |
67914693 | 3473 | /* We cannot make edges to inline clones. It is bug that someone removed |
81fa35bd | 3474 | the cgraph node too early. */ |
a62bfab5 | 3475 | gcc_assert (!callee->inlined_to); |
17afc0fe | 3476 | |
48b1474e | 3477 | if (dump_file && !unreachable) |
b258210c | 3478 | { |
5ce97055 | 3479 | fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target " |
464d0118 | 3480 | "(%s -> %s), for stmt ", |
b258210c | 3481 | ie->indirect_info->polymorphic ? "a virtual" : "an indirect", |
5ce97055 | 3482 | speculative ? "speculative" : "known", |
464d0118 ML |
3483 | ie->caller->dump_name (), |
3484 | callee->dump_name ()); | |
b258210c MJ |
3485 | if (ie->call_stmt) |
3486 | print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM); | |
3487 | else | |
3488 | fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid); | |
042ae7d2 | 3489 | } |
2b5f0895 XDL |
3490 | if (dump_enabled_p ()) |
3491 | { | |
4f5b9c80 | 3492 | dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt, |
807b7d62 | 3493 | "converting indirect call in %s to direct call to %s\n", |
3629ff8a | 3494 | ie->caller->dump_name (), callee->dump_name ()); |
2b5f0895 | 3495 | } |
5ce97055 | 3496 | if (!speculative) |
d8d5aef1 JH |
3497 | { |
3498 | struct cgraph_edge *orig = ie; | |
27c5a177 | 3499 | ie = cgraph_edge::make_direct (ie, callee); |
d8d5aef1 JH |
3500 | /* If we resolved speculative edge the cost is already up to date |
3501 | for direct call (adjusted by inline_edge_duplication_hook). */ | |
3502 | if (ie == orig) | |
3503 | { | |
56f62793 | 3504 | ipa_call_summary *es = ipa_call_summaries->get (ie); |
d8d5aef1 JH |
3505 | es->call_stmt_size -= (eni_size_weights.indirect_call_cost |
3506 | - eni_size_weights.call_cost); | |
3507 | es->call_stmt_time -= (eni_time_weights.indirect_call_cost | |
3508 | - eni_time_weights.call_cost); | |
3509 | } | |
3510 | } | |
5ce97055 JH |
3511 | else |
3512 | { | |
3513 | if (!callee->can_be_discarded_p ()) | |
3514 | { | |
3515 | cgraph_node *alias; | |
3516 | alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ()); | |
3517 | if (alias) | |
3518 | callee = alias; | |
3519 | } | |
d8d5aef1 | 3520 | /* make_speculative will update ie's cost to direct call cost. */ |
5ce97055 | 3521 | ie = ie->make_speculative |
1bad9c18 | 3522 | (callee, ie->count.apply_scale (8, 10)); |
5ce97055 | 3523 | } |
749aa96d | 3524 | |
b258210c | 3525 | return ie; |
3e293154 MJ |
3526 | } |
3527 | ||
91bb9f80 MJ |
3528 | /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in |
3529 | CONSTRUCTOR and return it. Return NULL if the search fails for some | |
3530 | reason. */ | |
3531 | ||
3532 | static tree | |
3533 | find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset) | |
3534 | { | |
3535 | tree type = TREE_TYPE (constructor); | |
3536 | if (TREE_CODE (type) != ARRAY_TYPE | |
3537 | && TREE_CODE (type) != RECORD_TYPE) | |
3538 | return NULL; | |
3539 | ||
3540 | unsigned ix; | |
3541 | tree index, val; | |
3542 | FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val) | |
3543 | { | |
3544 | HOST_WIDE_INT elt_offset; | |
3545 | if (TREE_CODE (type) == ARRAY_TYPE) | |
3546 | { | |
3547 | offset_int off; | |
3548 | tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type)); | |
3549 | gcc_assert (TREE_CODE (unit_size) == INTEGER_CST); | |
3550 | ||
3551 | if (index) | |
3552 | { | |
db9bbdec RB |
3553 | if (TREE_CODE (index) == RANGE_EXPR) |
3554 | off = wi::to_offset (TREE_OPERAND (index, 0)); | |
3555 | else | |
3556 | off = wi::to_offset (index); | |
91bb9f80 MJ |
3557 | if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type))) |
3558 | { | |
3559 | tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type)); | |
3560 | gcc_assert (TREE_CODE (unit_size) == INTEGER_CST); | |
3561 | off = wi::sext (off - wi::to_offset (low_bound), | |
3562 | TYPE_PRECISION (TREE_TYPE (index))); | |
3563 | } | |
3564 | off *= wi::to_offset (unit_size); | |
db9bbdec RB |
3565 | /* ??? Handle more than just the first index of a |
3566 | RANGE_EXPR. */ | |
91bb9f80 MJ |
3567 | } |
3568 | else | |
3569 | off = wi::to_offset (unit_size) * ix; | |
3570 | ||
3571 | off = wi::lshift (off, LOG2_BITS_PER_UNIT); | |
3572 | if (!wi::fits_shwi_p (off) || wi::neg_p (off)) | |
3573 | continue; | |
3574 | elt_offset = off.to_shwi (); | |
3575 | } | |
3576 | else if (TREE_CODE (type) == RECORD_TYPE) | |
3577 | { | |
3578 | gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL); | |
3579 | if (DECL_BIT_FIELD (index)) | |
3580 | continue; | |
3581 | elt_offset = int_bit_position (index); | |
3582 | } | |
3583 | else | |
3584 | gcc_unreachable (); | |
3585 | ||
3586 | if (elt_offset > req_offset) | |
3587 | return NULL; | |
3588 | ||
3589 | if (TREE_CODE (val) == CONSTRUCTOR) | |
3590 | return find_constructor_constant_at_offset (val, | |
3591 | req_offset - elt_offset); | |
3592 | ||
3593 | if (elt_offset == req_offset | |
3594 | && is_gimple_reg_type (TREE_TYPE (val)) | |
3595 | && is_gimple_ip_invariant (val)) | |
3596 | return val; | |
3597 | } | |
3598 | return NULL; | |
3599 | } | |
3600 | ||
3601 | /* Check whether SCALAR could be used to look up an aggregate interprocedural | |
3602 | invariant from a static constructor and if so, return it. Otherwise return | |
3603 | NULL. */ | |
3604 | ||
3605 | static tree | |
3606 | ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref) | |
3607 | { | |
3608 | if (by_ref) | |
3609 | { | |
3610 | if (TREE_CODE (scalar) != ADDR_EXPR) | |
3611 | return NULL; | |
3612 | scalar = TREE_OPERAND (scalar, 0); | |
3613 | } | |
3614 | ||
8813a647 | 3615 | if (!VAR_P (scalar) |
91bb9f80 MJ |
3616 | || !is_global_var (scalar) |
3617 | || !TREE_READONLY (scalar) | |
3618 | || !DECL_INITIAL (scalar) | |
3619 | || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR) | |
3620 | return NULL; | |
3621 | ||
3622 | return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset); | |
3623 | } | |
3624 | ||
eb270950 FX |
3625 | /* Retrieve value from AGG, a set of known offset/value for an aggregate or |
3626 | static initializer of SCALAR (which can be NULL) for the given OFFSET or | |
3627 | return NULL if there is none. BY_REF specifies whether the value has to be | |
3628 | passed by reference or by value. If FROM_GLOBAL_CONSTANT is non-NULL, then | |
3629 | the boolean it points to is set to true if the value comes from an | |
3630 | initializer of a constant. */ | |
8b7773a4 MJ |
3631 | |
3632 | tree | |
00dcc88a | 3633 | ipa_find_agg_cst_for_param (const ipa_agg_value_set *agg, tree scalar, |
91bb9f80 MJ |
3634 | HOST_WIDE_INT offset, bool by_ref, |
3635 | bool *from_global_constant) | |
8b7773a4 | 3636 | { |
eb270950 | 3637 | struct ipa_agg_value *item; |
8b7773a4 MJ |
3638 | int i; |
3639 | ||
91bb9f80 MJ |
3640 | if (scalar) |
3641 | { | |
3642 | tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref); | |
3643 | if (res) | |
3644 | { | |
3645 | if (from_global_constant) | |
3646 | *from_global_constant = true; | |
3647 | return res; | |
3648 | } | |
3649 | } | |
3650 | ||
3651 | if (!agg | |
3652 | || by_ref != agg->by_ref) | |
8b7773a4 MJ |
3653 | return NULL; |
3654 | ||
eb270950 | 3655 | FOR_EACH_VEC_ELT (agg->items, i, item) |
2c9561b5 MJ |
3656 | if (item->offset == offset) |
3657 | { | |
3658 | /* Currently we do not have clobber values, return NULL for them once | |
3659 | we do. */ | |
3660 | gcc_checking_assert (is_gimple_ip_invariant (item->value)); | |
91bb9f80 MJ |
3661 | if (from_global_constant) |
3662 | *from_global_constant = false; | |
2c9561b5 MJ |
3663 | return item->value; |
3664 | } | |
8b7773a4 MJ |
3665 | return NULL; |
3666 | } | |
3667 | ||
4502fe8d | 3668 | /* Remove a reference to SYMBOL from the list of references of a node given by |
568cda29 MJ |
3669 | reference description RDESC. Return true if the reference has been |
3670 | successfully found and removed. */ | |
4502fe8d | 3671 | |
568cda29 | 3672 | static bool |
5e20cdc9 | 3673 | remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc) |
4502fe8d MJ |
3674 | { |
3675 | struct ipa_ref *to_del; | |
3676 | struct cgraph_edge *origin; | |
3677 | ||
3678 | origin = rdesc->cs; | |
a854f856 MJ |
3679 | if (!origin) |
3680 | return false; | |
d122681a ML |
3681 | to_del = origin->caller->find_reference (symbol, origin->call_stmt, |
3682 | origin->lto_stmt_uid); | |
568cda29 MJ |
3683 | if (!to_del) |
3684 | return false; | |
3685 | ||
d122681a | 3686 | to_del->remove_reference (); |
4502fe8d | 3687 | if (dump_file) |
464d0118 | 3688 | fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n", |
3629ff8a | 3689 | origin->caller->dump_name (), symbol->dump_name ()); |
568cda29 | 3690 | return true; |
4502fe8d MJ |
3691 | } |
3692 | ||
3693 | /* If JFUNC has a reference description with refcount different from | |
3694 | IPA_UNDESCRIBED_USE, return the reference description, otherwise return | |
3695 | NULL. JFUNC must be a constant jump function. */ | |
3696 | ||
3697 | static struct ipa_cst_ref_desc * | |
3698 | jfunc_rdesc_usable (struct ipa_jump_func *jfunc) | |
3699 | { | |
3700 | struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc); | |
3701 | if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE) | |
3702 | return rdesc; | |
3703 | else | |
3704 | return NULL; | |
3705 | } | |
3706 | ||
568cda29 MJ |
3707 | /* If the value of constant jump function JFUNC is an address of a function |
3708 | declaration, return the associated call graph node. Otherwise return | |
3709 | NULL. */ | |
3710 | ||
13586172 MJ |
3711 | static symtab_node * |
3712 | symtab_node_for_jfunc (struct ipa_jump_func *jfunc) | |
568cda29 MJ |
3713 | { |
3714 | gcc_checking_assert (jfunc->type == IPA_JF_CONST); | |
3715 | tree cst = ipa_get_jf_constant (jfunc); | |
3716 | if (TREE_CODE (cst) != ADDR_EXPR | |
13586172 MJ |
3717 | || (TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL |
3718 | && TREE_CODE (TREE_OPERAND (cst, 0)) != VAR_DECL)) | |
568cda29 MJ |
3719 | return NULL; |
3720 | ||
13586172 | 3721 | return symtab_node::get (TREE_OPERAND (cst, 0)); |
568cda29 MJ |
3722 | } |
3723 | ||
3724 | ||
3725 | /* If JFUNC is a constant jump function with a usable rdesc, decrement its | |
3726 | refcount and if it hits zero, remove reference to SYMBOL from the caller of | |
3727 | the edge specified in the rdesc. Return false if either the symbol or the | |
3728 | reference could not be found, otherwise return true. */ | |
3729 | ||
3730 | static bool | |
3731 | try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc) | |
3732 | { | |
3733 | struct ipa_cst_ref_desc *rdesc; | |
3734 | if (jfunc->type == IPA_JF_CONST | |
3735 | && (rdesc = jfunc_rdesc_usable (jfunc)) | |
3736 | && --rdesc->refcount == 0) | |
3737 | { | |
13586172 | 3738 | symtab_node *symbol = symtab_node_for_jfunc (jfunc); |
568cda29 MJ |
3739 | if (!symbol) |
3740 | return false; | |
3741 | ||
3742 | return remove_described_reference (symbol, rdesc); | |
3743 | } | |
3744 | return true; | |
3745 | } | |
3746 | ||
b258210c MJ |
3747 | /* Try to find a destination for indirect edge IE that corresponds to a simple |
3748 | call or a call of a member function pointer and where the destination is a | |
e5cf5e11 PK |
3749 | pointer formal parameter described by jump function JFUNC. TARGET_TYPE is |
3750 | the type of the parameter to which the result of JFUNC is passed. If it can | |
3751 | be determined, return the newly direct edge, otherwise return NULL. | |
eb270950 FX |
3752 | NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices are |
3753 | relative to. */ | |
be95e2b9 | 3754 | |
b258210c MJ |
3755 | static struct cgraph_edge * |
3756 | try_make_edge_direct_simple_call (struct cgraph_edge *ie, | |
e5cf5e11 | 3757 | struct ipa_jump_func *jfunc, tree target_type, |
eb270950 | 3758 | struct cgraph_node *new_root, |
99b1c316 | 3759 | class ipa_node_params *new_root_info) |
b258210c | 3760 | { |
4502fe8d | 3761 | struct cgraph_edge *cs; |
b258210c | 3762 | tree target; |
042ae7d2 | 3763 | bool agg_contents = ie->indirect_info->agg_contents; |
e5cf5e11 | 3764 | tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type); |
91bb9f80 MJ |
3765 | if (agg_contents) |
3766 | { | |
3767 | bool from_global_constant; | |
eb270950 FX |
3768 | ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info, |
3769 | new_root, | |
3770 | &jfunc->agg); | |
3771 | target = ipa_find_agg_cst_for_param (&agg, scalar, | |
91bb9f80 MJ |
3772 | ie->indirect_info->offset, |
3773 | ie->indirect_info->by_ref, | |
3774 | &from_global_constant); | |
eb270950 | 3775 | agg.release (); |
91bb9f80 MJ |
3776 | if (target |
3777 | && !from_global_constant | |
3778 | && !ie->indirect_info->guaranteed_unmodified) | |
3779 | return NULL; | |
3780 | } | |
b258210c | 3781 | else |
91bb9f80 | 3782 | target = scalar; |
d250540a MJ |
3783 | if (!target) |
3784 | return NULL; | |
4502fe8d MJ |
3785 | cs = ipa_make_edge_direct_to_target (ie, target); |
3786 | ||
a12cd2db | 3787 | if (cs && !agg_contents) |
568cda29 MJ |
3788 | { |
3789 | bool ok; | |
3790 | gcc_checking_assert (cs->callee | |
ae6d0907 MJ |
3791 | && (cs != ie |
3792 | || jfunc->type != IPA_JF_CONST | |
13586172 MJ |
3793 | || !symtab_node_for_jfunc (jfunc) |
3794 | || cs->callee == symtab_node_for_jfunc (jfunc))); | |
568cda29 MJ |
3795 | ok = try_decrement_rdesc_refcount (jfunc); |
3796 | gcc_checking_assert (ok); | |
3797 | } | |
4502fe8d MJ |
3798 | |
3799 | return cs; | |
b258210c MJ |
3800 | } |
3801 | ||
bec81025 MJ |
3802 | /* Return the target to be used in cases of impossible devirtualization. IE |
3803 | and target (the latter can be NULL) are dumped when dumping is enabled. */ | |
3804 | ||
72972c22 MJ |
3805 | tree |
3806 | ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target) | |
bec81025 MJ |
3807 | { |
3808 | if (dump_file) | |
3809 | { | |
3810 | if (target) | |
3811 | fprintf (dump_file, | |
464d0118 ML |
3812 | "Type inconsistent devirtualization: %s->%s\n", |
3813 | ie->caller->dump_name (), | |
bec81025 MJ |
3814 | IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target))); |
3815 | else | |
3816 | fprintf (dump_file, | |
464d0118 ML |
3817 | "No devirtualization target in %s\n", |
3818 | ie->caller->dump_name ()); | |
bec81025 MJ |
3819 | } |
3820 | tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE); | |
d52f5295 | 3821 | cgraph_node::get_create (new_target); |
bec81025 MJ |
3822 | return new_target; |
3823 | } | |
3824 | ||
d250540a MJ |
3825 | /* Try to find a destination for indirect edge IE that corresponds to a virtual |
3826 | call based on a formal parameter which is described by jump function JFUNC | |
3827 | and if it can be determined, make it direct and return the direct edge. | |
44210a96 | 3828 | Otherwise, return NULL. CTX describes the polymorphic context that the |
eb270950 FX |
3829 | parameter the call is based on brings along with it. NEW_ROOT and |
3830 | NEW_ROOT_INFO is the node and its info that JFUNC lattices are relative | |
3831 | to. */ | |
b258210c MJ |
3832 | |
3833 | static struct cgraph_edge * | |
3834 | try_make_edge_direct_virtual_call (struct cgraph_edge *ie, | |
d250540a | 3835 | struct ipa_jump_func *jfunc, |
eb270950 FX |
3836 | class ipa_polymorphic_call_context ctx, |
3837 | struct cgraph_node *new_root, | |
3838 | class ipa_node_params *new_root_info) | |
3e293154 | 3839 | { |
44210a96 | 3840 | tree target = NULL; |
5ce97055 | 3841 | bool speculative = false; |
85942f45 | 3842 | |
2bf86c84 | 3843 | if (!opt_for_fn (ie->caller->decl, flag_devirtualize)) |
85942f45 | 3844 | return NULL; |
b258210c | 3845 | |
44210a96 | 3846 | gcc_assert (!ie->indirect_info->by_ref); |
5ce97055 JH |
3847 | |
3848 | /* Try to do lookup via known virtual table pointer value. */ | |
2bf86c84 JH |
3849 | if (!ie->indirect_info->vptr_changed |
3850 | || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)) | |
85942f45 | 3851 | { |
9de2f554 JH |
3852 | tree vtable; |
3853 | unsigned HOST_WIDE_INT offset; | |
91bb9f80 MJ |
3854 | tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc) |
3855 | : NULL; | |
eb270950 FX |
3856 | ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info, |
3857 | new_root, | |
3858 | &jfunc->agg); | |
3859 | tree t = ipa_find_agg_cst_for_param (&agg, scalar, | |
85942f45 JH |
3860 | ie->indirect_info->offset, |
3861 | true); | |
eb270950 | 3862 | agg.release (); |
9de2f554 JH |
3863 | if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset)) |
3864 | { | |
2994ab20 | 3865 | bool can_refer; |
0127c169 | 3866 | t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token, |
2994ab20 JH |
3867 | vtable, offset, &can_refer); |
3868 | if (can_refer) | |
9de2f554 | 3869 | { |
2994ab20 | 3870 | if (!t |
cb1180d5 | 3871 | || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE) |
9de2f554 | 3872 | || !possible_polymorphic_call_target_p |
0127c169 JH |
3873 | (ie, cgraph_node::get (t))) |
3874 | { | |
33c3b6be | 3875 | /* Do not speculate builtin_unreachable, it is stupid! */ |
0127c169 JH |
3876 | if (!ie->indirect_info->vptr_changed) |
3877 | target = ipa_impossible_devirt_target (ie, target); | |
2994ab20 JH |
3878 | else |
3879 | target = NULL; | |
0127c169 JH |
3880 | } |
3881 | else | |
3882 | { | |
3883 | target = t; | |
3884 | speculative = ie->indirect_info->vptr_changed; | |
3885 | } | |
9de2f554 JH |
3886 | } |
3887 | } | |
85942f45 JH |
3888 | } |
3889 | ||
44210a96 MJ |
3890 | ipa_polymorphic_call_context ie_context (ie); |
3891 | vec <cgraph_node *>targets; | |
3892 | bool final; | |
d250540a | 3893 | |
44210a96 MJ |
3894 | ctx.offset_by (ie->indirect_info->offset); |
3895 | if (ie->indirect_info->vptr_changed) | |
3896 | ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor, | |
3897 | ie->indirect_info->otr_type); | |
3898 | ctx.combine_with (ie_context, ie->indirect_info->otr_type); | |
3899 | targets = possible_polymorphic_call_targets | |
3900 | (ie->indirect_info->otr_type, | |
3901 | ie->indirect_info->otr_token, | |
3902 | ctx, &final); | |
3903 | if (final && targets.length () <= 1) | |
5ce97055 | 3904 | { |
33c3b6be | 3905 | speculative = false; |
44210a96 MJ |
3906 | if (targets.length () == 1) |
3907 | target = targets[0]->decl; | |
3908 | else | |
3909 | target = ipa_impossible_devirt_target (ie, NULL_TREE); | |
5ce97055 | 3910 | } |
2bf86c84 | 3911 | else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively) |
44210a96 | 3912 | && !ie->speculative && ie->maybe_hot_p ()) |
5bccb77a | 3913 | { |
44210a96 MJ |
3914 | cgraph_node *n; |
3915 | n = try_speculative_devirtualization (ie->indirect_info->otr_type, | |
3916 | ie->indirect_info->otr_token, | |
3917 | ie->indirect_info->context); | |
3918 | if (n) | |
5ce97055 | 3919 | { |
44210a96 MJ |
3920 | target = n->decl; |
3921 | speculative = true; | |
5ce97055 | 3922 | } |
5bccb77a | 3923 | } |
b258210c MJ |
3924 | |
3925 | if (target) | |
450ad0cd | 3926 | { |
44210a96 MJ |
3927 | if (!possible_polymorphic_call_target_p |
3928 | (ie, cgraph_node::get_create (target))) | |
0127c169 | 3929 | { |
29c43c83 | 3930 | if (speculative) |
0127c169 JH |
3931 | return NULL; |
3932 | target = ipa_impossible_devirt_target (ie, target); | |
3933 | } | |
5ce97055 | 3934 | return ipa_make_edge_direct_to_target (ie, target, speculative); |
450ad0cd | 3935 | } |
b258210c MJ |
3936 | else |
3937 | return NULL; | |
3e293154 MJ |
3938 | } |
3939 | ||
3940 | /* Update the param called notes associated with NODE when CS is being inlined, | |
3941 | assuming NODE is (potentially indirectly) inlined into CS->callee. | |
3942 | Moreover, if the callee is discovered to be constant, create a new cgraph | |
e56f5f3e | 3943 | edge for it. Newly discovered indirect edges will be added to *NEW_EDGES, |
f8e2a1ed | 3944 | unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */ |
be95e2b9 | 3945 | |
f8e2a1ed | 3946 | static bool |
e33c6cd6 MJ |
3947 | update_indirect_edges_after_inlining (struct cgraph_edge *cs, |
3948 | struct cgraph_node *node, | |
d52f5295 | 3949 | vec<cgraph_edge *> *new_edges) |
3e293154 | 3950 | { |
99b1c316 | 3951 | class ipa_edge_args *top; |
b258210c | 3952 | struct cgraph_edge *ie, *next_ie, *new_direct_edge; |
eb270950 | 3953 | struct cgraph_node *new_root; |
99b1c316 | 3954 | class ipa_node_params *new_root_info, *inlined_node_info; |
f8e2a1ed | 3955 | bool res = false; |
3e293154 | 3956 | |
e33c6cd6 | 3957 | ipa_check_create_edge_args (); |
a4a3cdd0 | 3958 | top = ipa_edge_args_sum->get (cs); |
eb270950 FX |
3959 | new_root = cs->caller->inlined_to |
3960 | ? cs->caller->inlined_to : cs->caller; | |
a4a3cdd0 MJ |
3961 | new_root_info = ipa_node_params_sum->get (new_root); |
3962 | inlined_node_info = ipa_node_params_sum->get (cs->callee->function_symbol ()); | |
e33c6cd6 MJ |
3963 | |
3964 | for (ie = node->indirect_calls; ie; ie = next_ie) | |
3e293154 | 3965 | { |
99b1c316 | 3966 | class cgraph_indirect_call_info *ici = ie->indirect_info; |
3e293154 | 3967 | struct ipa_jump_func *jfunc; |
8b7773a4 | 3968 | int param_index; |
3e293154 | 3969 | |
e33c6cd6 | 3970 | next_ie = ie->next_callee; |
3e293154 | 3971 | |
5f902d76 JH |
3972 | if (ici->param_index == -1) |
3973 | continue; | |
e33c6cd6 | 3974 | |
3e293154 | 3975 | /* We must check range due to calls with variable number of arguments: */ |
5a0236f8 | 3976 | if (!top || ici->param_index >= ipa_get_cs_argument_count (top)) |
3e293154 | 3977 | { |
5ee53a06 | 3978 | ici->param_index = -1; |
3e293154 MJ |
3979 | continue; |
3980 | } | |
3981 | ||
8b7773a4 MJ |
3982 | param_index = ici->param_index; |
3983 | jfunc = ipa_get_ith_jump_func (top, param_index); | |
5ee53a06 | 3984 | |
e089e433 | 3985 | auto_vec<cgraph_node *, 4> spec_targets; |
3ff29913 | 3986 | if (ie->speculative) |
e089e433 MJ |
3987 | for (cgraph_edge *direct = ie->first_speculative_call_target (); |
3988 | direct; | |
3989 | direct = direct->next_speculative_call_target ()) | |
3990 | spec_targets.safe_push (direct->callee); | |
3ff29913 | 3991 | |
2bf86c84 | 3992 | if (!opt_for_fn (node->decl, flag_indirect_inlining)) |
36b72910 JH |
3993 | new_direct_edge = NULL; |
3994 | else if (ici->polymorphic) | |
5ce97055 | 3995 | { |
44210a96 MJ |
3996 | ipa_polymorphic_call_context ctx; |
3997 | ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc); | |
eb270950 FX |
3998 | new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx, |
3999 | new_root, | |
4000 | new_root_info); | |
5ce97055 | 4001 | } |
b258210c | 4002 | else |
e5cf5e11 PK |
4003 | { |
4004 | tree target_type = ipa_get_type (inlined_node_info, param_index); | |
4005 | new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc, | |
4006 | target_type, | |
eb270950 | 4007 | new_root, |
e5cf5e11 PK |
4008 | new_root_info); |
4009 | } | |
4010 | ||
042ae7d2 | 4011 | /* If speculation was removed, then we need to do nothing. */ |
3ff29913 | 4012 | if (new_direct_edge && new_direct_edge != ie |
e089e433 | 4013 | && spec_targets.contains (new_direct_edge->callee)) |
042ae7d2 JH |
4014 | { |
4015 | new_direct_edge->indirect_inlining_edge = 1; | |
042ae7d2 | 4016 | res = true; |
73d098df JH |
4017 | if (!new_direct_edge->speculative) |
4018 | continue; | |
042ae7d2 JH |
4019 | } |
4020 | else if (new_direct_edge) | |
685b0d13 | 4021 | { |
b258210c MJ |
4022 | new_direct_edge->indirect_inlining_edge = 1; |
4023 | if (new_edges) | |
4024 | { | |
9771b263 | 4025 | new_edges->safe_push (new_direct_edge); |
b258210c MJ |
4026 | res = true; |
4027 | } | |
3ff29913 JH |
4028 | /* If speculative edge was introduced we still need to update |
4029 | call info of the indirect edge. */ | |
4030 | if (!new_direct_edge->speculative) | |
4031 | continue; | |
685b0d13 | 4032 | } |
3ff29913 JH |
4033 | if (jfunc->type == IPA_JF_PASS_THROUGH |
4034 | && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR) | |
36b72910 | 4035 | { |
d0502276 JH |
4036 | if (ici->agg_contents |
4037 | && !ipa_get_jf_pass_through_agg_preserved (jfunc) | |
4038 | && !ici->polymorphic) | |
36b72910 JH |
4039 | ici->param_index = -1; |
4040 | else | |
d0502276 JH |
4041 | { |
4042 | ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc); | |
4043 | if (ici->polymorphic | |
4044 | && !ipa_get_jf_pass_through_type_preserved (jfunc)) | |
4045 | ici->vptr_changed = true; | |
40a777e8 JH |
4046 | ipa_set_param_used_by_indirect_call (new_root_info, |
4047 | ici->param_index, true); | |
4048 | if (ici->polymorphic) | |
4049 | ipa_set_param_used_by_polymorphic_call (new_root_info, | |
4050 | ici->param_index, true); | |
d0502276 | 4051 | } |
36b72910 JH |
4052 | } |
4053 | else if (jfunc->type == IPA_JF_ANCESTOR) | |
4054 | { | |
d0502276 JH |
4055 | if (ici->agg_contents |
4056 | && !ipa_get_jf_ancestor_agg_preserved (jfunc) | |
4057 | && !ici->polymorphic) | |
36b72910 JH |
4058 | ici->param_index = -1; |
4059 | else | |
4060 | { | |
4061 | ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc); | |
4062 | ici->offset += ipa_get_jf_ancestor_offset (jfunc); | |
d0502276 JH |
4063 | if (ici->polymorphic |
4064 | && !ipa_get_jf_ancestor_type_preserved (jfunc)) | |
4065 | ici->vptr_changed = true; | |
b914768c JH |
4066 | ipa_set_param_used_by_indirect_call (new_root_info, |
4067 | ici->param_index, true); | |
4068 | if (ici->polymorphic) | |
4069 | ipa_set_param_used_by_polymorphic_call (new_root_info, | |
4070 | ici->param_index, true); | |
36b72910 JH |
4071 | } |
4072 | } | |
4073 | else | |
4074 | /* Either we can find a destination for this edge now or never. */ | |
4075 | ici->param_index = -1; | |
3e293154 | 4076 | } |
e33c6cd6 | 4077 | |
f8e2a1ed | 4078 | return res; |
3e293154 MJ |
4079 | } |
4080 | ||
4081 | /* Recursively traverse subtree of NODE (including node) made of inlined | |
4082 | cgraph_edges when CS has been inlined and invoke | |
e33c6cd6 | 4083 | update_indirect_edges_after_inlining on all nodes and |
3e293154 MJ |
4084 | update_jump_functions_after_inlining on all non-inlined edges that lead out |
4085 | of this subtree. Newly discovered indirect edges will be added to | |
f8e2a1ed MJ |
4086 | *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were |
4087 | created. */ | |
be95e2b9 | 4088 | |
f8e2a1ed | 4089 | static bool |
3e293154 MJ |
4090 | propagate_info_to_inlined_callees (struct cgraph_edge *cs, |
4091 | struct cgraph_node *node, | |
d52f5295 | 4092 | vec<cgraph_edge *> *new_edges) |
3e293154 MJ |
4093 | { |
4094 | struct cgraph_edge *e; | |
f8e2a1ed | 4095 | bool res; |
3e293154 | 4096 | |
e33c6cd6 | 4097 | res = update_indirect_edges_after_inlining (cs, node, new_edges); |
3e293154 MJ |
4098 | |
4099 | for (e = node->callees; e; e = e->next_callee) | |
4100 | if (!e->inline_failed) | |
f8e2a1ed | 4101 | res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges); |
3e293154 MJ |
4102 | else |
4103 | update_jump_functions_after_inlining (cs, e); | |
5ee53a06 JH |
4104 | for (e = node->indirect_calls; e; e = e->next_callee) |
4105 | update_jump_functions_after_inlining (cs, e); | |
f8e2a1ed MJ |
4106 | |
4107 | return res; | |
3e293154 MJ |
4108 | } |
4109 | ||
4502fe8d MJ |
4110 | /* Combine two controlled uses counts as done during inlining. */ |
4111 | ||
4112 | static int | |
4113 | combine_controlled_uses_counters (int c, int d) | |
4114 | { | |
4115 | if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE) | |
4116 | return IPA_UNDESCRIBED_USE; | |
4117 | else | |
4118 | return c + d - 1; | |
4119 | } | |
4120 | ||
4121 | /* Propagate number of controlled users from CS->caleee to the new root of the | |
4122 | tree of inlined nodes. */ | |
4123 | ||
4124 | static void | |
4125 | propagate_controlled_uses (struct cgraph_edge *cs) | |
4126 | { | |
a4a3cdd0 | 4127 | ipa_edge_args *args = ipa_edge_args_sum->get (cs); |
a33c028e JH |
4128 | if (!args) |
4129 | return; | |
a62bfab5 ML |
4130 | struct cgraph_node *new_root = cs->caller->inlined_to |
4131 | ? cs->caller->inlined_to : cs->caller; | |
a4a3cdd0 MJ |
4132 | ipa_node_params *new_root_info = ipa_node_params_sum->get (new_root); |
4133 | ipa_node_params *old_root_info = ipa_node_params_sum->get (cs->callee); | |
4502fe8d MJ |
4134 | int count, i; |
4135 | ||
6cf67b62 JH |
4136 | if (!old_root_info) |
4137 | return; | |
4138 | ||
4502fe8d MJ |
4139 | count = MIN (ipa_get_cs_argument_count (args), |
4140 | ipa_get_param_count (old_root_info)); | |
4141 | for (i = 0; i < count; i++) | |
4142 | { | |
4143 | struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i); | |
4144 | struct ipa_cst_ref_desc *rdesc; | |
4145 | ||
4146 | if (jf->type == IPA_JF_PASS_THROUGH) | |
4147 | { | |
4148 | int src_idx, c, d; | |
4149 | src_idx = ipa_get_jf_pass_through_formal_id (jf); | |
4150 | c = ipa_get_controlled_uses (new_root_info, src_idx); | |
4151 | d = ipa_get_controlled_uses (old_root_info, i); | |
4152 | ||
4153 | gcc_checking_assert (ipa_get_jf_pass_through_operation (jf) | |
4154 | == NOP_EXPR || c == IPA_UNDESCRIBED_USE); | |
4155 | c = combine_controlled_uses_counters (c, d); | |
4156 | ipa_set_controlled_uses (new_root_info, src_idx, c); | |
13586172 MJ |
4157 | bool lderef = true; |
4158 | if (c != IPA_UNDESCRIBED_USE) | |
4159 | { | |
4160 | lderef = (ipa_get_param_load_dereferenced (new_root_info, src_idx) | |
4161 | || ipa_get_param_load_dereferenced (old_root_info, i)); | |
4162 | ipa_set_param_load_dereferenced (new_root_info, src_idx, lderef); | |
4163 | } | |
4164 | ||
4165 | if (c == 0 && !lderef && new_root_info->ipcp_orig_node) | |
4502fe8d MJ |
4166 | { |
4167 | struct cgraph_node *n; | |
4168 | struct ipa_ref *ref; | |
44210a96 | 4169 | tree t = new_root_info->known_csts[src_idx]; |
4502fe8d MJ |
4170 | |
4171 | if (t && TREE_CODE (t) == ADDR_EXPR | |
4172 | && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL | |
d52f5295 | 4173 | && (n = cgraph_node::get (TREE_OPERAND (t, 0))) |
d122681a | 4174 | && (ref = new_root->find_reference (n, NULL, 0))) |
4502fe8d MJ |
4175 | { |
4176 | if (dump_file) | |
4177 | fprintf (dump_file, "ipa-prop: Removing cloning-created " | |
464d0118 ML |
4178 | "reference from %s to %s.\n", |
4179 | new_root->dump_name (), | |
4180 | n->dump_name ()); | |
d122681a | 4181 | ref->remove_reference (); |
4502fe8d MJ |
4182 | } |
4183 | } | |
4184 | } | |
4185 | else if (jf->type == IPA_JF_CONST | |
4186 | && (rdesc = jfunc_rdesc_usable (jf))) | |
4187 | { | |
4188 | int d = ipa_get_controlled_uses (old_root_info, i); | |
4189 | int c = rdesc->refcount; | |
4190 | rdesc->refcount = combine_controlled_uses_counters (c, d); | |
f6d65e80 MJ |
4191 | if (rdesc->refcount != IPA_UNDESCRIBED_USE |
4192 | && ipa_get_param_load_dereferenced (old_root_info, i)) | |
4193 | { | |
4194 | tree cst = ipa_get_jf_constant (jf); | |
4195 | gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR | |
4196 | && (TREE_CODE (TREE_OPERAND (cst, 0)) | |
4197 | == VAR_DECL)); | |
4198 | symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0)); | |
4199 | new_root->create_reference (n, IPA_REF_LOAD, NULL); | |
4200 | if (dump_file) | |
4201 | fprintf (dump_file, "ipa-prop: Address IPA constant will reach " | |
4202 | "a load so adding LOAD reference from %s to %s.\n", | |
4203 | new_root->dump_name (), n->dump_name ()); | |
4204 | } | |
4502fe8d MJ |
4205 | if (rdesc->refcount == 0) |
4206 | { | |
4207 | tree cst = ipa_get_jf_constant (jf); | |
4502fe8d | 4208 | gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR |
13586172 MJ |
4209 | && ((TREE_CODE (TREE_OPERAND (cst, 0)) |
4210 | == FUNCTION_DECL) | |
4211 | || (TREE_CODE (TREE_OPERAND (cst, 0)) | |
4212 | == VAR_DECL))); | |
4213 | ||
4214 | symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0)); | |
4502fe8d MJ |
4215 | if (n) |
4216 | { | |
f6d65e80 MJ |
4217 | remove_described_reference (n, rdesc); |
4218 | cgraph_node *clone = cs->caller; | |
a62bfab5 | 4219 | while (clone->inlined_to |
6cf67b62 JH |
4220 | && clone->ipcp_clone |
4221 | && clone != rdesc->cs->caller) | |
4502fe8d MJ |
4222 | { |
4223 | struct ipa_ref *ref; | |
d122681a | 4224 | ref = clone->find_reference (n, NULL, 0); |
4502fe8d MJ |
4225 | if (ref) |
4226 | { | |
4227 | if (dump_file) | |
4228 | fprintf (dump_file, "ipa-prop: Removing " | |
4229 | "cloning-created reference " | |
464d0118 ML |
4230 | "from %s to %s.\n", |
4231 | clone->dump_name (), | |
4232 | n->dump_name ()); | |
d122681a | 4233 | ref->remove_reference (); |
4502fe8d MJ |
4234 | } |
4235 | clone = clone->callers->caller; | |
4236 | } | |
4237 | } | |
4238 | } | |
4239 | } | |
4240 | } | |
4241 | ||
4242 | for (i = ipa_get_param_count (old_root_info); | |
4243 | i < ipa_get_cs_argument_count (args); | |
4244 | i++) | |
4245 | { | |
4246 | struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i); | |
4247 | ||
4248 | if (jf->type == IPA_JF_CONST) | |
4249 | { | |
4250 | struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf); | |
4251 | if (rdesc) | |
4252 | rdesc->refcount = IPA_UNDESCRIBED_USE; | |
4253 | } | |
4254 | else if (jf->type == IPA_JF_PASS_THROUGH) | |
4255 | ipa_set_controlled_uses (new_root_info, | |
4256 | jf->value.pass_through.formal_id, | |
4257 | IPA_UNDESCRIBED_USE); | |
4258 | } | |
4259 | } | |
4260 | ||
3e293154 MJ |
4261 | /* Update jump functions and call note functions on inlining the call site CS. |
4262 | CS is expected to lead to a node already cloned by | |
4263 | cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to | |
f8e2a1ed MJ |
4264 | *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were + |
4265 | created. */ | |
be95e2b9 | 4266 | |
f8e2a1ed | 4267 | bool |
3e293154 | 4268 | ipa_propagate_indirect_call_infos (struct cgraph_edge *cs, |
d52f5295 | 4269 | vec<cgraph_edge *> *new_edges) |
3e293154 | 4270 | { |
5ee53a06 | 4271 | bool changed; |
f8e2a1ed MJ |
4272 | /* Do nothing if the preparation phase has not been carried out yet |
4273 | (i.e. during early inlining). */ | |
dd912cb8 | 4274 | if (!ipa_node_params_sum) |
f8e2a1ed | 4275 | return false; |
6fe906a3 | 4276 | gcc_assert (ipa_edge_args_sum); |
f8e2a1ed | 4277 | |
4502fe8d | 4278 | propagate_controlled_uses (cs); |
5ee53a06 | 4279 | changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges); |
6cf67b62 | 4280 | ipa_node_params_sum->remove (cs->callee); |
5ee53a06 | 4281 | |
a4a3cdd0 | 4282 | ipa_edge_args *args = ipa_edge_args_sum->get (cs); |
d40e2362 JH |
4283 | if (args) |
4284 | { | |
4285 | bool ok = true; | |
4286 | if (args->jump_functions) | |
4287 | { | |
4288 | struct ipa_jump_func *jf; | |
4289 | int i; | |
4290 | FOR_EACH_VEC_ELT (*args->jump_functions, i, jf) | |
4291 | if (jf->type == IPA_JF_CONST | |
4292 | && ipa_get_jf_constant_rdesc (jf)) | |
4293 | { | |
4294 | ok = false; | |
4295 | break; | |
4296 | } | |
4297 | } | |
4298 | if (ok) | |
4299 | ipa_edge_args_sum->remove (cs); | |
4300 | } | |
98aad294 JH |
4301 | if (ipcp_transformation_sum) |
4302 | ipcp_transformation_sum->remove (cs->callee); | |
d40e2362 | 4303 | |
5ee53a06 | 4304 | return changed; |
518dc859 RL |
4305 | } |
4306 | ||
86cd0334 MJ |
4307 | /* Ensure that array of edge arguments infos is big enough to accommodate a |
4308 | structure for all edges and reallocates it if not. Also, allocate | |
4309 | associated hash tables is they do not already exist. */ | |
4310 | ||
4311 | void | |
4312 | ipa_check_create_edge_args (void) | |
4313 | { | |
6fe906a3 MJ |
4314 | if (!ipa_edge_args_sum) |
4315 | ipa_edge_args_sum | |
78cd68c0 | 4316 | = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ()) |
6fe906a3 | 4317 | ipa_edge_args_sum_t (symtab, true)); |
86cd0334 MJ |
4318 | if (!ipa_bits_hash_table) |
4319 | ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37); | |
4320 | if (!ipa_vr_hash_table) | |
4321 | ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37); | |
4322 | } | |
4323 | ||
771578a0 | 4324 | /* Free all ipa_edge structures. */ |
be95e2b9 | 4325 | |
518dc859 | 4326 | void |
771578a0 | 4327 | ipa_free_all_edge_args (void) |
518dc859 | 4328 | { |
6fe906a3 | 4329 | if (!ipa_edge_args_sum) |
9771b263 DN |
4330 | return; |
4331 | ||
ddf628e4 | 4332 | ggc_delete (ipa_edge_args_sum); |
6fe906a3 | 4333 | ipa_edge_args_sum = NULL; |
518dc859 RL |
4334 | } |
4335 | ||
771578a0 | 4336 | /* Free all ipa_node_params structures. */ |
be95e2b9 | 4337 | |
518dc859 | 4338 | void |
771578a0 | 4339 | ipa_free_all_node_params (void) |
518dc859 | 4340 | { |
bc2fcccd JH |
4341 | if (ipa_node_params_sum) |
4342 | ggc_delete (ipa_node_params_sum); | |
dd912cb8 | 4343 | ipa_node_params_sum = NULL; |
771578a0 MJ |
4344 | } |
4345 | ||
9d3e0adc | 4346 | /* Initialize IPA CP transformation summary and also allocate any necessary hash |
86cd0334 | 4347 | tables if they do not already exist. */ |
04be694e MJ |
4348 | |
4349 | void | |
9d3e0adc | 4350 | ipcp_transformation_initialize (void) |
04be694e | 4351 | { |
86cd0334 MJ |
4352 | if (!ipa_bits_hash_table) |
4353 | ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37); | |
4354 | if (!ipa_vr_hash_table) | |
4355 | ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37); | |
9d3e0adc | 4356 | if (ipcp_transformation_sum == NULL) |
40e67ab8 JH |
4357 | { |
4358 | ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab); | |
4359 | ipcp_transformation_sum->disable_insertion_hook (); | |
4360 | } | |
04be694e MJ |
4361 | } |
4362 | ||
12e088ba AC |
4363 | /* Release the IPA CP transformation summary. */ |
4364 | ||
4365 | void | |
4366 | ipcp_free_transformation_sum (void) | |
4367 | { | |
4368 | if (!ipcp_transformation_sum) | |
4369 | return; | |
4370 | ||
78cd68c0 ML |
4371 | ipcp_transformation_sum->~function_summary<ipcp_transformation *> (); |
4372 | ggc_free (ipcp_transformation_sum); | |
12e088ba AC |
4373 | ipcp_transformation_sum = NULL; |
4374 | } | |
4375 | ||
2c9561b5 MJ |
4376 | /* Set the aggregate replacements of NODE to be AGGVALS. */ |
4377 | ||
4378 | void | |
4379 | ipa_set_node_agg_value_chain (struct cgraph_node *node, | |
4380 | struct ipa_agg_replacement_value *aggvals) | |
4381 | { | |
9d3e0adc ML |
4382 | ipcp_transformation_initialize (); |
4383 | ipcp_transformation *s = ipcp_transformation_sum->get_create (node); | |
4384 | s->agg_values = aggvals; | |
2c9561b5 MJ |
4385 | } |
4386 | ||
e53b6e56 | 4387 | /* Hook that is called by cgraph.cc when an edge is removed. Adjust reference |
6fe906a3 | 4388 | count data structures accordingly. */ |
be95e2b9 | 4389 | |
6fe906a3 MJ |
4390 | void |
4391 | ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args) | |
771578a0 | 4392 | { |
568cda29 MJ |
4393 | if (args->jump_functions) |
4394 | { | |
4395 | struct ipa_jump_func *jf; | |
4396 | int i; | |
4397 | FOR_EACH_VEC_ELT (*args->jump_functions, i, jf) | |
a854f856 MJ |
4398 | { |
4399 | struct ipa_cst_ref_desc *rdesc; | |
4400 | try_decrement_rdesc_refcount (jf); | |
4401 | if (jf->type == IPA_JF_CONST | |
4402 | && (rdesc = ipa_get_jf_constant_rdesc (jf)) | |
4403 | && rdesc->cs == cs) | |
4404 | rdesc->cs = NULL; | |
4405 | } | |
568cda29 | 4406 | } |
518dc859 RL |
4407 | } |
4408 | ||
6fe906a3 MJ |
4409 | /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust |
4410 | reference count data strucutres accordingly. */ | |
be95e2b9 | 4411 | |
6fe906a3 MJ |
4412 | void |
4413 | ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst, | |
4414 | ipa_edge_args *old_args, ipa_edge_args *new_args) | |
771578a0 | 4415 | { |
8b7773a4 | 4416 | unsigned int i; |
771578a0 | 4417 | |
9771b263 | 4418 | new_args->jump_functions = vec_safe_copy (old_args->jump_functions); |
5ce97055 JH |
4419 | if (old_args->polymorphic_call_contexts) |
4420 | new_args->polymorphic_call_contexts | |
4421 | = vec_safe_copy (old_args->polymorphic_call_contexts); | |
8b7773a4 | 4422 | |
9771b263 | 4423 | for (i = 0; i < vec_safe_length (old_args->jump_functions); i++) |
4502fe8d MJ |
4424 | { |
4425 | struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i); | |
4426 | struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i); | |
4427 | ||
4428 | dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items); | |
4429 | ||
4430 | if (src_jf->type == IPA_JF_CONST) | |
4431 | { | |
4432 | struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf); | |
4433 | ||
4434 | if (!src_rdesc) | |
4435 | dst_jf->value.constant.rdesc = NULL; | |
568cda29 MJ |
4436 | else if (src->caller == dst->caller) |
4437 | { | |
1a7d452c MJ |
4438 | /* Creation of a speculative edge. If the source edge is the one |
4439 | grabbing a reference, we must create a new (duplicate) | |
4440 | reference description. Otherwise they refer to the same | |
4441 | description corresponding to a reference taken in a function | |
4442 | src->caller is inlined to. In that case we just must | |
4443 | increment the refcount. */ | |
4444 | if (src_rdesc->cs == src) | |
4445 | { | |
4446 | symtab_node *n = symtab_node_for_jfunc (src_jf); | |
4447 | gcc_checking_assert (n); | |
4448 | ipa_ref *ref | |
4449 | = src->caller->find_reference (n, src->call_stmt, | |
4450 | src->lto_stmt_uid); | |
4451 | gcc_checking_assert (ref); | |
4452 | dst->caller->clone_reference (ref, ref->stmt); | |
4453 | ||
4454 | ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate (); | |
4455 | dst_rdesc->cs = dst; | |
4456 | dst_rdesc->refcount = src_rdesc->refcount; | |
4457 | dst_rdesc->next_duplicate = NULL; | |
4458 | dst_jf->value.constant.rdesc = dst_rdesc; | |
4459 | } | |
4460 | else | |
4461 | { | |
4462 | src_rdesc->refcount++; | |
4463 | dst_jf->value.constant.rdesc = src_rdesc; | |
4464 | } | |
568cda29 | 4465 | } |
4502fe8d MJ |
4466 | else if (src_rdesc->cs == src) |
4467 | { | |
601f3293 | 4468 | struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate (); |
4502fe8d | 4469 | dst_rdesc->cs = dst; |
4502fe8d | 4470 | dst_rdesc->refcount = src_rdesc->refcount; |
2fd0985c MJ |
4471 | dst_rdesc->next_duplicate = src_rdesc->next_duplicate; |
4472 | src_rdesc->next_duplicate = dst_rdesc; | |
4502fe8d MJ |
4473 | dst_jf->value.constant.rdesc = dst_rdesc; |
4474 | } | |
4475 | else | |
4476 | { | |
4477 | struct ipa_cst_ref_desc *dst_rdesc; | |
4478 | /* This can happen during inlining, when a JFUNC can refer to a | |
4479 | reference taken in a function up in the tree of inline clones. | |
4480 | We need to find the duplicate that refers to our tree of | |
4481 | inline clones. */ | |
4482 | ||
a62bfab5 | 4483 | gcc_assert (dst->caller->inlined_to); |
4502fe8d MJ |
4484 | for (dst_rdesc = src_rdesc->next_duplicate; |
4485 | dst_rdesc; | |
4486 | dst_rdesc = dst_rdesc->next_duplicate) | |
2fd0985c MJ |
4487 | { |
4488 | struct cgraph_node *top; | |
a62bfab5 ML |
4489 | top = dst_rdesc->cs->caller->inlined_to |
4490 | ? dst_rdesc->cs->caller->inlined_to | |
2fd0985c | 4491 | : dst_rdesc->cs->caller; |
a62bfab5 | 4492 | if (dst->caller->inlined_to == top) |
2fd0985c MJ |
4493 | break; |
4494 | } | |
44a60244 | 4495 | gcc_assert (dst_rdesc); |
4502fe8d MJ |
4496 | dst_jf->value.constant.rdesc = dst_rdesc; |
4497 | } | |
4498 | } | |
6fe45955 MJ |
4499 | else if (dst_jf->type == IPA_JF_PASS_THROUGH |
4500 | && src->caller == dst->caller) | |
4501 | { | |
a62bfab5 ML |
4502 | struct cgraph_node *inline_root = dst->caller->inlined_to |
4503 | ? dst->caller->inlined_to : dst->caller; | |
a4a3cdd0 | 4504 | ipa_node_params *root_info = ipa_node_params_sum->get (inline_root); |
6fe45955 MJ |
4505 | int idx = ipa_get_jf_pass_through_formal_id (dst_jf); |
4506 | ||
4507 | int c = ipa_get_controlled_uses (root_info, idx); | |
4508 | if (c != IPA_UNDESCRIBED_USE) | |
4509 | { | |
4510 | c++; | |
4511 | ipa_set_controlled_uses (root_info, idx, c); | |
4512 | } | |
4513 | } | |
4502fe8d | 4514 | } |
771578a0 MJ |
4515 | } |
4516 | ||
dd912cb8 | 4517 | /* Analyze newly added function into callgraph. */ |
be95e2b9 | 4518 | |
771578a0 | 4519 | static void |
dd912cb8 | 4520 | ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED) |
771578a0 | 4521 | { |
dd912cb8 ML |
4522 | if (node->has_gimple_body_p ()) |
4523 | ipa_analyze_node (node); | |
4524 | } | |
771578a0 | 4525 | |
dd912cb8 ML |
4526 | /* Hook that is called by summary when a node is duplicated. */ |
4527 | ||
4528 | void | |
4529 | ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst, | |
4530 | ipa_node_params *old_info, | |
4531 | ipa_node_params *new_info) | |
4532 | { | |
4533 | ipa_agg_replacement_value *old_av, *new_av; | |
771578a0 | 4534 | |
f65f1ae3 | 4535 | new_info->descriptors = vec_safe_copy (old_info->descriptors); |
310bc633 | 4536 | new_info->lattices = NULL; |
771578a0 | 4537 | new_info->ipcp_orig_node = old_info->ipcp_orig_node; |
f65f1ae3 MJ |
4538 | new_info->known_csts = old_info->known_csts.copy (); |
4539 | new_info->known_contexts = old_info->known_contexts.copy (); | |
3949c4a7 | 4540 | |
8aab5218 | 4541 | new_info->analysis_done = old_info->analysis_done; |
3949c4a7 | 4542 | new_info->node_enqueued = old_info->node_enqueued; |
7e729474 | 4543 | new_info->versionable = old_info->versionable; |
2c9561b5 MJ |
4544 | |
4545 | old_av = ipa_get_agg_replacements_for_node (src); | |
04be694e | 4546 | if (old_av) |
2c9561b5 | 4547 | { |
04be694e MJ |
4548 | new_av = NULL; |
4549 | while (old_av) | |
4550 | { | |
4551 | struct ipa_agg_replacement_value *v; | |
2c9561b5 | 4552 | |
04be694e MJ |
4553 | v = ggc_alloc<ipa_agg_replacement_value> (); |
4554 | memcpy (v, old_av, sizeof (*v)); | |
4555 | v->next = new_av; | |
4556 | new_av = v; | |
4557 | old_av = old_av->next; | |
4558 | } | |
4559 | ipa_set_node_agg_value_chain (dst, new_av); | |
4560 | } | |
98aad294 | 4561 | } |
04be694e | 4562 | |
98aad294 | 4563 | /* Duplication of ipcp transformation summaries. */ |
86cd0334 | 4564 | |
98aad294 JH |
4565 | void |
4566 | ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst, | |
4567 | ipcp_transformation *src_trans, | |
4568 | ipcp_transformation *dst_trans) | |
4569 | { | |
4570 | /* Avoid redundant work of duplicating vectors we will never use. */ | |
4571 | if (dst->inlined_to) | |
4572 | return; | |
4573 | dst_trans->bits = vec_safe_copy (src_trans->bits); | |
4574 | dst_trans->m_vr = vec_safe_copy (src_trans->m_vr); | |
4575 | ipa_agg_replacement_value *agg = src_trans->agg_values, | |
4576 | **aggptr = &dst_trans->agg_values; | |
4577 | while (agg) | |
4578 | { | |
4579 | *aggptr = ggc_alloc<ipa_agg_replacement_value> (); | |
4580 | **aggptr = *agg; | |
4581 | agg = agg->next; | |
4582 | aggptr = &(*aggptr)->next; | |
2c9561b5 | 4583 | } |
771578a0 MJ |
4584 | } |
4585 | ||
4586 | /* Register our cgraph hooks if they are not already there. */ | |
be95e2b9 | 4587 | |
518dc859 | 4588 | void |
771578a0 | 4589 | ipa_register_cgraph_hooks (void) |
518dc859 | 4590 | { |
dd912cb8 | 4591 | ipa_check_create_node_params (); |
6fe906a3 | 4592 | ipa_check_create_edge_args (); |
dd912cb8 | 4593 | |
dd912cb8 | 4594 | function_insertion_hook_holder = |
3dafb85c | 4595 | symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL); |
771578a0 | 4596 | } |
518dc859 | 4597 | |
771578a0 | 4598 | /* Unregister our cgraph hooks if they are not already there. */ |
be95e2b9 | 4599 | |
771578a0 MJ |
4600 | static void |
4601 | ipa_unregister_cgraph_hooks (void) | |
4602 | { | |
bc2fcccd JH |
4603 | if (function_insertion_hook_holder) |
4604 | symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder); | |
40982661 | 4605 | function_insertion_hook_holder = NULL; |
771578a0 MJ |
4606 | } |
4607 | ||
4608 | /* Free all ipa_node_params and all ipa_edge_args structures if they are no | |
4609 | longer needed after ipa-cp. */ | |
be95e2b9 | 4610 | |
771578a0 | 4611 | void |
e33c6cd6 | 4612 | ipa_free_all_structures_after_ipa_cp (void) |
3e293154 | 4613 | { |
2bf86c84 | 4614 | if (!optimize && !in_lto_p) |
3e293154 MJ |
4615 | { |
4616 | ipa_free_all_edge_args (); | |
4617 | ipa_free_all_node_params (); | |
2651e637 ML |
4618 | ipcp_sources_pool.release (); |
4619 | ipcp_cst_values_pool.release (); | |
4620 | ipcp_poly_ctx_values_pool.release (); | |
4621 | ipcp_agg_lattice_pool.release (); | |
3e293154 | 4622 | ipa_unregister_cgraph_hooks (); |
601f3293 | 4623 | ipa_refdesc_pool.release (); |
3e293154 MJ |
4624 | } |
4625 | } | |
4626 | ||
4627 | /* Free all ipa_node_params and all ipa_edge_args structures if they are no | |
4628 | longer needed after indirect inlining. */ | |
be95e2b9 | 4629 | |
3e293154 | 4630 | void |
e33c6cd6 | 4631 | ipa_free_all_structures_after_iinln (void) |
771578a0 MJ |
4632 | { |
4633 | ipa_free_all_edge_args (); | |
4634 | ipa_free_all_node_params (); | |
4635 | ipa_unregister_cgraph_hooks (); | |
2651e637 ML |
4636 | ipcp_sources_pool.release (); |
4637 | ipcp_cst_values_pool.release (); | |
4638 | ipcp_poly_ctx_values_pool.release (); | |
4639 | ipcp_agg_lattice_pool.release (); | |
601f3293 | 4640 | ipa_refdesc_pool.release (); |
518dc859 RL |
4641 | } |
4642 | ||
dcd416e3 | 4643 | /* Print ipa_tree_map data structures of all functions in the |
518dc859 | 4644 | callgraph to F. */ |
be95e2b9 | 4645 | |
518dc859 | 4646 | void |
2c9561b5 | 4647 | ipa_print_node_params (FILE *f, struct cgraph_node *node) |
518dc859 RL |
4648 | { |
4649 | int i, count; | |
99b1c316 | 4650 | class ipa_node_params *info; |
518dc859 | 4651 | |
67348ccc | 4652 | if (!node->definition) |
3e293154 | 4653 | return; |
a4a3cdd0 | 4654 | info = ipa_node_params_sum->get (node); |
464d0118 | 4655 | fprintf (f, " function %s parameter descriptors:\n", node->dump_name ()); |
0302955a JH |
4656 | if (!info) |
4657 | { | |
4658 | fprintf (f, " no params return\n"); | |
4659 | return; | |
4660 | } | |
3e293154 MJ |
4661 | count = ipa_get_param_count (info); |
4662 | for (i = 0; i < count; i++) | |
518dc859 | 4663 | { |
4502fe8d MJ |
4664 | int c; |
4665 | ||
a4e33812 | 4666 | fprintf (f, " "); |
e067bd43 | 4667 | ipa_dump_param (f, info, i); |
339f49ec JH |
4668 | if (ipa_is_param_used (info, i)) |
4669 | fprintf (f, " used"); | |
40a777e8 JH |
4670 | if (ipa_is_param_used_by_ipa_predicates (info, i)) |
4671 | fprintf (f, " used_by_ipa_predicates"); | |
4672 | if (ipa_is_param_used_by_indirect_call (info, i)) | |
4673 | fprintf (f, " used_by_indirect_call"); | |
4674 | if (ipa_is_param_used_by_polymorphic_call (info, i)) | |
4675 | fprintf (f, " used_by_polymorphic_call"); | |
4502fe8d MJ |
4676 | c = ipa_get_controlled_uses (info, i); |
4677 | if (c == IPA_UNDESCRIBED_USE) | |
4678 | fprintf (f, " undescribed_use"); | |
4679 | else | |
13586172 MJ |
4680 | fprintf (f, " controlled_uses=%i %s", c, |
4681 | ipa_get_param_load_dereferenced (info, i) | |
4682 | ? "(load_dereferenced)" : ""); | |
3e293154 | 4683 | fprintf (f, "\n"); |
518dc859 RL |
4684 | } |
4685 | } | |
dcd416e3 | 4686 | |
ca30a539 | 4687 | /* Print ipa_tree_map data structures of all functions in the |
3e293154 | 4688 | callgraph to F. */ |
be95e2b9 | 4689 | |
3e293154 | 4690 | void |
ca30a539 | 4691 | ipa_print_all_params (FILE * f) |
3e293154 MJ |
4692 | { |
4693 | struct cgraph_node *node; | |
4694 | ||
ca30a539 | 4695 | fprintf (f, "\nFunction parameters:\n"); |
65c70e6b | 4696 | FOR_EACH_FUNCTION (node) |
ca30a539 | 4697 | ipa_print_node_params (f, node); |
3e293154 | 4698 | } |
3f84bf08 | 4699 | |
2c9561b5 MJ |
4700 | /* Dump the AV linked list. */ |
4701 | ||
4702 | void | |
4703 | ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av) | |
4704 | { | |
4705 | bool comma = false; | |
4706 | fprintf (f, " Aggregate replacements:"); | |
4707 | for (; av; av = av->next) | |
4708 | { | |
4709 | fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "", | |
4710 | av->index, av->offset); | |
ef6cb4c7 | 4711 | print_generic_expr (f, av->value); |
2c9561b5 MJ |
4712 | comma = true; |
4713 | } | |
4714 | fprintf (f, "\n"); | |
4715 | } | |
4716 | ||
fb3f88cc JH |
4717 | /* Stream out jump function JUMP_FUNC to OB. */ |
4718 | ||
4719 | static void | |
4720 | ipa_write_jump_function (struct output_block *ob, | |
4721 | struct ipa_jump_func *jump_func) | |
4722 | { | |
8b7773a4 MJ |
4723 | struct ipa_agg_jf_item *item; |
4724 | struct bitpack_d bp; | |
4725 | int i, count; | |
f714ecf5 | 4726 | int flag = 0; |
fb3f88cc | 4727 | |
f714ecf5 JH |
4728 | /* ADDR_EXPRs are very comon IP invariants; save some streamer data |
4729 | as well as WPA memory by handling them specially. */ | |
4730 | if (jump_func->type == IPA_JF_CONST | |
4731 | && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR) | |
4732 | flag = 1; | |
4733 | ||
4734 | streamer_write_uhwi (ob, jump_func->type * 2 + flag); | |
fb3f88cc JH |
4735 | switch (jump_func->type) |
4736 | { | |
4737 | case IPA_JF_UNKNOWN: | |
4738 | break; | |
4739 | case IPA_JF_CONST: | |
5368224f | 4740 | gcc_assert ( |
4502fe8d | 4741 | EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION); |
f714ecf5 JH |
4742 | stream_write_tree (ob, |
4743 | flag | |
4744 | ? TREE_OPERAND (jump_func->value.constant.value, 0) | |
4745 | : jump_func->value.constant.value, true); | |
fb3f88cc JH |
4746 | break; |
4747 | case IPA_JF_PASS_THROUGH: | |
412288f1 | 4748 | streamer_write_uhwi (ob, jump_func->value.pass_through.operation); |
4a53743e MJ |
4749 | if (jump_func->value.pass_through.operation == NOP_EXPR) |
4750 | { | |
4751 | streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id); | |
4752 | bp = bitpack_create (ob->main_stream); | |
4753 | bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1); | |
4754 | streamer_write_bitpack (&bp); | |
4755 | } | |
a2b4c188 KV |
4756 | else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation) |
4757 | == tcc_unary) | |
4758 | streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id); | |
4a53743e MJ |
4759 | else |
4760 | { | |
4761 | stream_write_tree (ob, jump_func->value.pass_through.operand, true); | |
4762 | streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id); | |
4763 | } | |
fb3f88cc JH |
4764 | break; |
4765 | case IPA_JF_ANCESTOR: | |
412288f1 | 4766 | streamer_write_uhwi (ob, jump_func->value.ancestor.offset); |
412288f1 | 4767 | streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id); |
8b7773a4 MJ |
4768 | bp = bitpack_create (ob->main_stream); |
4769 | bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1); | |
7ea3a73c | 4770 | bp_pack_value (&bp, jump_func->value.ancestor.keep_null, 1); |
8b7773a4 | 4771 | streamer_write_bitpack (&bp); |
fb3f88cc | 4772 | break; |
eb270950 FX |
4773 | default: |
4774 | fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream"); | |
8b7773a4 MJ |
4775 | } |
4776 | ||
9771b263 | 4777 | count = vec_safe_length (jump_func->agg.items); |
8b7773a4 MJ |
4778 | streamer_write_uhwi (ob, count); |
4779 | if (count) | |
4780 | { | |
4781 | bp = bitpack_create (ob->main_stream); | |
4782 | bp_pack_value (&bp, jump_func->agg.by_ref, 1); | |
4783 | streamer_write_bitpack (&bp); | |
4784 | } | |
4785 | ||
9771b263 | 4786 | FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item) |
8b7773a4 | 4787 | { |
eb270950 | 4788 | stream_write_tree (ob, item->type, true); |
8b7773a4 | 4789 | streamer_write_uhwi (ob, item->offset); |
eb270950 FX |
4790 | streamer_write_uhwi (ob, item->jftype); |
4791 | switch (item->jftype) | |
4792 | { | |
4793 | case IPA_JF_UNKNOWN: | |
4794 | break; | |
4795 | case IPA_JF_CONST: | |
4796 | stream_write_tree (ob, item->value.constant, true); | |
4797 | break; | |
4798 | case IPA_JF_PASS_THROUGH: | |
4799 | case IPA_JF_LOAD_AGG: | |
4800 | streamer_write_uhwi (ob, item->value.pass_through.operation); | |
4801 | streamer_write_uhwi (ob, item->value.pass_through.formal_id); | |
4802 | if (TREE_CODE_CLASS (item->value.pass_through.operation) | |
4803 | != tcc_unary) | |
4804 | stream_write_tree (ob, item->value.pass_through.operand, true); | |
4805 | if (item->jftype == IPA_JF_LOAD_AGG) | |
4806 | { | |
4807 | stream_write_tree (ob, item->value.load_agg.type, true); | |
4808 | streamer_write_uhwi (ob, item->value.load_agg.offset); | |
4809 | bp = bitpack_create (ob->main_stream); | |
4810 | bp_pack_value (&bp, item->value.load_agg.by_ref, 1); | |
4811 | streamer_write_bitpack (&bp); | |
4812 | } | |
4813 | break; | |
4814 | default: | |
4815 | fatal_error (UNKNOWN_LOCATION, | |
4816 | "invalid jump function in LTO stream"); | |
4817 | } | |
fb3f88cc | 4818 | } |
04be694e | 4819 | |
209ca542 | 4820 | bp = bitpack_create (ob->main_stream); |
86cd0334 | 4821 | bp_pack_value (&bp, !!jump_func->bits, 1); |
209ca542 | 4822 | streamer_write_bitpack (&bp); |
86cd0334 | 4823 | if (jump_func->bits) |
209ca542 | 4824 | { |
86cd0334 MJ |
4825 | streamer_write_widest_int (ob, jump_func->bits->value); |
4826 | streamer_write_widest_int (ob, jump_func->bits->mask); | |
a5e14a42 | 4827 | } |
86cd0334 | 4828 | bp_pack_value (&bp, !!jump_func->m_vr, 1); |
8bc5448f | 4829 | streamer_write_bitpack (&bp); |
86cd0334 | 4830 | if (jump_func->m_vr) |
8bc5448f KV |
4831 | { |
4832 | streamer_write_enum (ob->main_stream, value_rang_type, | |
54994253 AH |
4833 | VR_LAST, jump_func->m_vr->kind ()); |
4834 | stream_write_tree (ob, jump_func->m_vr->min (), true); | |
4835 | stream_write_tree (ob, jump_func->m_vr->max (), true); | |
8bc5448f | 4836 | } |
fb3f88cc JH |
4837 | } |
4838 | ||
4839 | /* Read in jump function JUMP_FUNC from IB. */ | |
4840 | ||
4841 | static void | |
99b1c316 | 4842 | ipa_read_jump_function (class lto_input_block *ib, |
fb3f88cc | 4843 | struct ipa_jump_func *jump_func, |
4502fe8d | 4844 | struct cgraph_edge *cs, |
99b1c316 | 4845 | class data_in *data_in, |
f714ecf5 | 4846 | bool prevails) |
fb3f88cc | 4847 | { |
4a53743e MJ |
4848 | enum jump_func_type jftype; |
4849 | enum tree_code operation; | |
8b7773a4 | 4850 | int i, count; |
f714ecf5 JH |
4851 | int val = streamer_read_uhwi (ib); |
4852 | bool flag = val & 1; | |
fb3f88cc | 4853 | |
f714ecf5 | 4854 | jftype = (enum jump_func_type) (val / 2); |
4a53743e | 4855 | switch (jftype) |
fb3f88cc JH |
4856 | { |
4857 | case IPA_JF_UNKNOWN: | |
04be694e | 4858 | ipa_set_jf_unknown (jump_func); |
fb3f88cc JH |
4859 | break; |
4860 | case IPA_JF_CONST: | |
f714ecf5 JH |
4861 | { |
4862 | tree t = stream_read_tree (ib, data_in); | |
4863 | if (flag && prevails) | |
7a2090b0 | 4864 | t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t); |
f714ecf5 JH |
4865 | ipa_set_jf_constant (jump_func, t, cs); |
4866 | } | |
fb3f88cc JH |
4867 | break; |
4868 | case IPA_JF_PASS_THROUGH: | |
4a53743e MJ |
4869 | operation = (enum tree_code) streamer_read_uhwi (ib); |
4870 | if (operation == NOP_EXPR) | |
4871 | { | |
4872 | int formal_id = streamer_read_uhwi (ib); | |
4873 | struct bitpack_d bp = streamer_read_bitpack (ib); | |
4874 | bool agg_preserved = bp_unpack_value (&bp, 1); | |
3b97a5c7 | 4875 | ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved); |
4a53743e | 4876 | } |
a2b4c188 KV |
4877 | else if (TREE_CODE_CLASS (operation) == tcc_unary) |
4878 | { | |
4879 | int formal_id = streamer_read_uhwi (ib); | |
4880 | ipa_set_jf_unary_pass_through (jump_func, formal_id, operation); | |
4881 | } | |
4a53743e MJ |
4882 | else |
4883 | { | |
4884 | tree operand = stream_read_tree (ib, data_in); | |
4885 | int formal_id = streamer_read_uhwi (ib); | |
4886 | ipa_set_jf_arith_pass_through (jump_func, formal_id, operand, | |
4887 | operation); | |
4888 | } | |
fb3f88cc JH |
4889 | break; |
4890 | case IPA_JF_ANCESTOR: | |
4a53743e MJ |
4891 | { |
4892 | HOST_WIDE_INT offset = streamer_read_uhwi (ib); | |
4a53743e MJ |
4893 | int formal_id = streamer_read_uhwi (ib); |
4894 | struct bitpack_d bp = streamer_read_bitpack (ib); | |
4895 | bool agg_preserved = bp_unpack_value (&bp, 1); | |
7ea3a73c MJ |
4896 | bool keep_null = bp_unpack_value (&bp, 1); |
4897 | ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved, | |
4898 | keep_null); | |
4a53743e MJ |
4899 | break; |
4900 | } | |
f714ecf5 JH |
4901 | default: |
4902 | fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream"); | |
8b7773a4 MJ |
4903 | } |
4904 | ||
4905 | count = streamer_read_uhwi (ib); | |
f714ecf5 | 4906 | if (prevails) |
7ee0681e JH |
4907 | { |
4908 | jump_func->agg.items = NULL; | |
4909 | vec_safe_reserve (jump_func->agg.items, count, true); | |
4910 | } | |
8b7773a4 MJ |
4911 | if (count) |
4912 | { | |
4a53743e | 4913 | struct bitpack_d bp = streamer_read_bitpack (ib); |
8b7773a4 MJ |
4914 | jump_func->agg.by_ref = bp_unpack_value (&bp, 1); |
4915 | } | |
4916 | for (i = 0; i < count; i++) | |
4917 | { | |
f32682ca | 4918 | struct ipa_agg_jf_item item; |
eb270950 | 4919 | item.type = stream_read_tree (ib, data_in); |
f32682ca | 4920 | item.offset = streamer_read_uhwi (ib); |
eb270950 FX |
4921 | item.jftype = (enum jump_func_type) streamer_read_uhwi (ib); |
4922 | ||
4923 | switch (item.jftype) | |
4924 | { | |
4925 | case IPA_JF_UNKNOWN: | |
4926 | break; | |
4927 | case IPA_JF_CONST: | |
4928 | item.value.constant = stream_read_tree (ib, data_in); | |
4929 | break; | |
4930 | case IPA_JF_PASS_THROUGH: | |
4931 | case IPA_JF_LOAD_AGG: | |
4932 | operation = (enum tree_code) streamer_read_uhwi (ib); | |
4933 | item.value.pass_through.operation = operation; | |
4934 | item.value.pass_through.formal_id = streamer_read_uhwi (ib); | |
4935 | if (TREE_CODE_CLASS (operation) == tcc_unary) | |
4936 | item.value.pass_through.operand = NULL_TREE; | |
4937 | else | |
4938 | item.value.pass_through.operand = stream_read_tree (ib, data_in); | |
4939 | if (item.jftype == IPA_JF_LOAD_AGG) | |
4940 | { | |
4941 | struct bitpack_d bp; | |
4942 | item.value.load_agg.type = stream_read_tree (ib, data_in); | |
4943 | item.value.load_agg.offset = streamer_read_uhwi (ib); | |
4944 | bp = streamer_read_bitpack (ib); | |
4945 | item.value.load_agg.by_ref = bp_unpack_value (&bp, 1); | |
4946 | } | |
4947 | break; | |
4948 | default: | |
4949 | fatal_error (UNKNOWN_LOCATION, | |
4950 | "invalid jump function in LTO stream"); | |
4951 | } | |
f714ecf5 JH |
4952 | if (prevails) |
4953 | jump_func->agg.items->quick_push (item); | |
fb3f88cc | 4954 | } |
04be694e MJ |
4955 | |
4956 | struct bitpack_d bp = streamer_read_bitpack (ib); | |
209ca542 PK |
4957 | bool bits_known = bp_unpack_value (&bp, 1); |
4958 | if (bits_known) | |
4959 | { | |
86cd0334 MJ |
4960 | widest_int value = streamer_read_widest_int (ib); |
4961 | widest_int mask = streamer_read_widest_int (ib); | |
f714ecf5 JH |
4962 | if (prevails) |
4963 | ipa_set_jfunc_bits (jump_func, value, mask); | |
209ca542 PK |
4964 | } |
4965 | else | |
86cd0334 | 4966 | jump_func->bits = NULL; |
8bc5448f KV |
4967 | |
4968 | struct bitpack_d vr_bp = streamer_read_bitpack (ib); | |
4969 | bool vr_known = bp_unpack_value (&vr_bp, 1); | |
4970 | if (vr_known) | |
4971 | { | |
54994253 | 4972 | enum value_range_kind type = streamer_read_enum (ib, value_range_kind, |
86cd0334 MJ |
4973 | VR_LAST); |
4974 | tree min = stream_read_tree (ib, data_in); | |
4975 | tree max = stream_read_tree (ib, data_in); | |
f714ecf5 JH |
4976 | if (prevails) |
4977 | ipa_set_jfunc_vr (jump_func, type, min, max); | |
8bc5448f KV |
4978 | } |
4979 | else | |
86cd0334 | 4980 | jump_func->m_vr = NULL; |
fb3f88cc JH |
4981 | } |
4982 | ||
e33c6cd6 MJ |
4983 | /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are |
4984 | relevant to indirect inlining to OB. */ | |
661e7330 MJ |
4985 | |
4986 | static void | |
e33c6cd6 MJ |
4987 | ipa_write_indirect_edge_info (struct output_block *ob, |
4988 | struct cgraph_edge *cs) | |
661e7330 | 4989 | { |
99b1c316 | 4990 | class cgraph_indirect_call_info *ii = cs->indirect_info; |
2465dcc2 | 4991 | struct bitpack_d bp; |
e33c6cd6 | 4992 | |
412288f1 | 4993 | streamer_write_hwi (ob, ii->param_index); |
2465dcc2 RG |
4994 | bp = bitpack_create (ob->main_stream); |
4995 | bp_pack_value (&bp, ii->polymorphic, 1); | |
8b7773a4 | 4996 | bp_pack_value (&bp, ii->agg_contents, 1); |
c13bc3d9 | 4997 | bp_pack_value (&bp, ii->member_ptr, 1); |
8b7773a4 | 4998 | bp_pack_value (&bp, ii->by_ref, 1); |
91bb9f80 | 4999 | bp_pack_value (&bp, ii->guaranteed_unmodified, 1); |
0127c169 | 5000 | bp_pack_value (&bp, ii->vptr_changed, 1); |
412288f1 | 5001 | streamer_write_bitpack (&bp); |
ba392339 JH |
5002 | if (ii->agg_contents || ii->polymorphic) |
5003 | streamer_write_hwi (ob, ii->offset); | |
5004 | else | |
5005 | gcc_assert (ii->offset == 0); | |
b258210c MJ |
5006 | |
5007 | if (ii->polymorphic) | |
5008 | { | |
412288f1 | 5009 | streamer_write_hwi (ob, ii->otr_token); |
b9393656 | 5010 | stream_write_tree (ob, ii->otr_type, true); |
ba392339 | 5011 | ii->context.stream_out (ob); |
b258210c | 5012 | } |
661e7330 MJ |
5013 | } |
5014 | ||
e33c6cd6 MJ |
5015 | /* Read in parts of cgraph_indirect_call_info corresponding to CS that are |
5016 | relevant to indirect inlining from IB. */ | |
661e7330 MJ |
5017 | |
5018 | static void | |
99b1c316 MS |
5019 | ipa_read_indirect_edge_info (class lto_input_block *ib, |
5020 | class data_in *data_in, | |
40a777e8 JH |
5021 | struct cgraph_edge *cs, |
5022 | class ipa_node_params *info) | |
661e7330 | 5023 | { |
99b1c316 | 5024 | class cgraph_indirect_call_info *ii = cs->indirect_info; |
2465dcc2 | 5025 | struct bitpack_d bp; |
661e7330 | 5026 | |
412288f1 | 5027 | ii->param_index = (int) streamer_read_hwi (ib); |
412288f1 | 5028 | bp = streamer_read_bitpack (ib); |
2465dcc2 | 5029 | ii->polymorphic = bp_unpack_value (&bp, 1); |
8b7773a4 | 5030 | ii->agg_contents = bp_unpack_value (&bp, 1); |
c13bc3d9 | 5031 | ii->member_ptr = bp_unpack_value (&bp, 1); |
8b7773a4 | 5032 | ii->by_ref = bp_unpack_value (&bp, 1); |
91bb9f80 | 5033 | ii->guaranteed_unmodified = bp_unpack_value (&bp, 1); |
0127c169 | 5034 | ii->vptr_changed = bp_unpack_value (&bp, 1); |
ba392339 JH |
5035 | if (ii->agg_contents || ii->polymorphic) |
5036 | ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib); | |
5037 | else | |
5038 | ii->offset = 0; | |
b258210c MJ |
5039 | if (ii->polymorphic) |
5040 | { | |
412288f1 | 5041 | ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib); |
b9393656 | 5042 | ii->otr_type = stream_read_tree (ib, data_in); |
ba392339 | 5043 | ii->context.stream_in (ib, data_in); |
b258210c | 5044 | } |
40a777e8 JH |
5045 | if (info && ii->param_index >= 0) |
5046 | { | |
5047 | if (ii->polymorphic) | |
5048 | ipa_set_param_used_by_polymorphic_call (info, | |
5049 | ii->param_index , true); | |
5050 | ipa_set_param_used_by_indirect_call (info, | |
5051 | ii->param_index, true); | |
5052 | } | |
661e7330 MJ |
5053 | } |
5054 | ||
fb3f88cc JH |
5055 | /* Stream out NODE info to OB. */ |
5056 | ||
5057 | static void | |
5058 | ipa_write_node_info (struct output_block *ob, struct cgraph_node *node) | |
5059 | { | |
5060 | int node_ref; | |
7380e6ef | 5061 | lto_symtab_encoder_t encoder; |
a4a3cdd0 | 5062 | ipa_node_params *info = ipa_node_params_sum->get (node); |
fb3f88cc JH |
5063 | int j; |
5064 | struct cgraph_edge *e; | |
2465dcc2 | 5065 | struct bitpack_d bp; |
fb3f88cc | 5066 | |
7380e6ef | 5067 | encoder = ob->decl_state->symtab_node_encoder; |
67348ccc | 5068 | node_ref = lto_symtab_encoder_encode (encoder, node); |
412288f1 | 5069 | streamer_write_uhwi (ob, node_ref); |
fb3f88cc | 5070 | |
0e8853ee JH |
5071 | streamer_write_uhwi (ob, ipa_get_param_count (info)); |
5072 | for (j = 0; j < ipa_get_param_count (info); j++) | |
5073 | streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j)); | |
2465dcc2 | 5074 | bp = bitpack_create (ob->main_stream); |
8aab5218 | 5075 | gcc_assert (info->analysis_done |
661e7330 | 5076 | || ipa_get_param_count (info) == 0); |
fb3f88cc JH |
5077 | gcc_assert (!info->node_enqueued); |
5078 | gcc_assert (!info->ipcp_orig_node); | |
5079 | for (j = 0; j < ipa_get_param_count (info); j++) | |
13586172 MJ |
5080 | { |
5081 | /* TODO: We could just not stream the bit in the undescribed case. */ | |
5082 | bool d = (ipa_get_controlled_uses (info, j) != IPA_UNDESCRIBED_USE) | |
5083 | ? ipa_get_param_load_dereferenced (info, j) : true; | |
5084 | bp_pack_value (&bp, d, 1); | |
5085 | bp_pack_value (&bp, ipa_is_param_used (info, j), 1); | |
5086 | } | |
412288f1 | 5087 | streamer_write_bitpack (&bp); |
4502fe8d | 5088 | for (j = 0; j < ipa_get_param_count (info); j++) |
a5e14a42 MJ |
5089 | { |
5090 | streamer_write_hwi (ob, ipa_get_controlled_uses (info, j)); | |
5091 | stream_write_tree (ob, ipa_get_type (info, j), true); | |
5092 | } | |
fb3f88cc JH |
5093 | for (e = node->callees; e; e = e->next_callee) |
5094 | { | |
a4a3cdd0 | 5095 | ipa_edge_args *args = ipa_edge_args_sum->get (e); |
fb3f88cc | 5096 | |
a33c028e JH |
5097 | if (!args) |
5098 | { | |
5099 | streamer_write_uhwi (ob, 0); | |
5100 | continue; | |
5101 | } | |
5102 | ||
5ce97055 JH |
5103 | streamer_write_uhwi (ob, |
5104 | ipa_get_cs_argument_count (args) * 2 | |
5105 | + (args->polymorphic_call_contexts != NULL)); | |
fb3f88cc | 5106 | for (j = 0; j < ipa_get_cs_argument_count (args); j++) |
5ce97055 JH |
5107 | { |
5108 | ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j)); | |
5109 | if (args->polymorphic_call_contexts != NULL) | |
5110 | ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob); | |
5111 | } | |
fb3f88cc | 5112 | } |
e33c6cd6 | 5113 | for (e = node->indirect_calls; e; e = e->next_callee) |
c8246dbe | 5114 | { |
a4a3cdd0 | 5115 | ipa_edge_args *args = ipa_edge_args_sum->get (e); |
a33c028e JH |
5116 | if (!args) |
5117 | streamer_write_uhwi (ob, 0); | |
5118 | else | |
5ce97055 | 5119 | { |
a33c028e JH |
5120 | streamer_write_uhwi (ob, |
5121 | ipa_get_cs_argument_count (args) * 2 | |
5122 | + (args->polymorphic_call_contexts != NULL)); | |
5123 | for (j = 0; j < ipa_get_cs_argument_count (args); j++) | |
5124 | { | |
5125 | ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j)); | |
5126 | if (args->polymorphic_call_contexts != NULL) | |
5127 | ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob); | |
5128 | } | |
5ce97055 | 5129 | } |
c8246dbe JH |
5130 | ipa_write_indirect_edge_info (ob, e); |
5131 | } | |
fb3f88cc JH |
5132 | } |
5133 | ||
f714ecf5 | 5134 | /* Stream in edge E from IB. */ |
7b377b22 | 5135 | |
f714ecf5 | 5136 | static void |
99b1c316 MS |
5137 | ipa_read_edge_info (class lto_input_block *ib, |
5138 | class data_in *data_in, | |
f714ecf5 | 5139 | struct cgraph_edge *e, bool prevails) |
7b377b22 | 5140 | { |
f714ecf5 JH |
5141 | int count = streamer_read_uhwi (ib); |
5142 | bool contexts_computed = count & 1; | |
5143 | ||
5144 | count /= 2; | |
5145 | if (!count) | |
5146 | return; | |
6cef01c3 JH |
5147 | if (prevails |
5148 | && (e->possibly_call_in_translation_unit_p () | |
5149 | /* Also stream in jump functions to builtins in hope that they | |
5150 | will get fnspecs. */ | |
5151 | || fndecl_built_in_p (e->callee->decl, BUILT_IN_NORMAL))) | |
f714ecf5 | 5152 | { |
a4a3cdd0 | 5153 | ipa_edge_args *args = ipa_edge_args_sum->get_create (e); |
cb3874dc | 5154 | vec_safe_grow_cleared (args->jump_functions, count, true); |
f714ecf5 | 5155 | if (contexts_computed) |
cb3874dc | 5156 | vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true); |
f714ecf5 JH |
5157 | for (int k = 0; k < count; k++) |
5158 | { | |
5159 | ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e, | |
5160 | data_in, prevails); | |
5161 | if (contexts_computed) | |
5162 | ipa_get_ith_polymorhic_call_context (args, k)->stream_in | |
5163 | (ib, data_in); | |
5164 | } | |
5165 | } | |
5166 | else | |
5167 | { | |
5168 | for (int k = 0; k < count; k++) | |
5169 | { | |
5170 | struct ipa_jump_func dummy; | |
5171 | ipa_read_jump_function (ib, &dummy, e, | |
5172 | data_in, prevails); | |
5173 | if (contexts_computed) | |
5174 | { | |
99b1c316 | 5175 | class ipa_polymorphic_call_context ctx; |
f714ecf5 JH |
5176 | ctx.stream_in (ib, data_in); |
5177 | } | |
5178 | } | |
5179 | } | |
7b377b22 JH |
5180 | } |
5181 | ||
61502ca8 | 5182 | /* Stream in NODE info from IB. */ |
fb3f88cc JH |
5183 | |
5184 | static void | |
99b1c316 MS |
5185 | ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node, |
5186 | class data_in *data_in) | |
fb3f88cc | 5187 | { |
fb3f88cc JH |
5188 | int k; |
5189 | struct cgraph_edge *e; | |
2465dcc2 | 5190 | struct bitpack_d bp; |
f714ecf5 | 5191 | bool prevails = node->prevailing_p (); |
a4a3cdd0 MJ |
5192 | ipa_node_params *info |
5193 | = prevails ? ipa_node_params_sum->get_create (node) : NULL; | |
fb3f88cc | 5194 | |
f714ecf5 JH |
5195 | int param_count = streamer_read_uhwi (ib); |
5196 | if (prevails) | |
5197 | { | |
5198 | ipa_alloc_node_params (node, param_count); | |
5199 | for (k = 0; k < param_count; k++) | |
5200 | (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib); | |
5201 | if (ipa_get_param_count (info) != 0) | |
5202 | info->analysis_done = true; | |
5203 | info->node_enqueued = false; | |
5204 | } | |
5205 | else | |
5206 | for (k = 0; k < param_count; k++) | |
5207 | streamer_read_uhwi (ib); | |
a5e14a42 | 5208 | |
412288f1 | 5209 | bp = streamer_read_bitpack (ib); |
f714ecf5 | 5210 | for (k = 0; k < param_count; k++) |
a5e14a42 | 5211 | { |
13586172 | 5212 | bool load_dereferenced = bp_unpack_value (&bp, 1); |
f714ecf5 JH |
5213 | bool used = bp_unpack_value (&bp, 1); |
5214 | ||
5215 | if (prevails) | |
13586172 MJ |
5216 | { |
5217 | ipa_set_param_load_dereferenced (info, k, load_dereferenced); | |
5218 | ipa_set_param_used (info, k, used); | |
5219 | } | |
a5e14a42 | 5220 | } |
f714ecf5 | 5221 | for (k = 0; k < param_count; k++) |
fb3f88cc | 5222 | { |
f714ecf5 JH |
5223 | int nuses = streamer_read_hwi (ib); |
5224 | tree type = stream_read_tree (ib, data_in); | |
fb3f88cc | 5225 | |
f714ecf5 | 5226 | if (prevails) |
5ce97055 | 5227 | { |
f714ecf5 JH |
5228 | ipa_set_controlled_uses (info, k, nuses); |
5229 | (*info->descriptors)[k].decl_or_type = type; | |
5ce97055 | 5230 | } |
fb3f88cc | 5231 | } |
f714ecf5 JH |
5232 | for (e = node->callees; e; e = e->next_callee) |
5233 | ipa_read_edge_info (ib, data_in, e, prevails); | |
e33c6cd6 | 5234 | for (e = node->indirect_calls; e; e = e->next_callee) |
c8246dbe | 5235 | { |
f714ecf5 | 5236 | ipa_read_edge_info (ib, data_in, e, prevails); |
40a777e8 | 5237 | ipa_read_indirect_edge_info (ib, data_in, e, info); |
c8246dbe | 5238 | } |
fb3f88cc JH |
5239 | } |
5240 | ||
5241 | /* Write jump functions for nodes in SET. */ | |
5242 | ||
5243 | void | |
f27c1867 | 5244 | ipa_prop_write_jump_functions (void) |
fb3f88cc | 5245 | { |
93536c97 | 5246 | struct output_block *ob; |
fb3f88cc | 5247 | unsigned int count = 0; |
f27c1867 JH |
5248 | lto_symtab_encoder_iterator lsei; |
5249 | lto_symtab_encoder_t encoder; | |
5250 | ||
6fe906a3 | 5251 | if (!ipa_node_params_sum || !ipa_edge_args_sum) |
93536c97 | 5252 | return; |
fb3f88cc | 5253 | |
93536c97 | 5254 | ob = create_output_block (LTO_section_jump_functions); |
f27c1867 | 5255 | encoder = ob->decl_state->symtab_node_encoder; |
0b83e688 | 5256 | ob->symbol = NULL; |
f27c1867 JH |
5257 | for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei); |
5258 | lsei_next_function_in_partition (&lsei)) | |
fb3f88cc | 5259 | { |
a4a3cdd0 | 5260 | cgraph_node *node = lsei_cgraph_node (lsei); |
d52f5295 | 5261 | if (node->has_gimple_body_p () |
a4a3cdd0 | 5262 | && ipa_node_params_sum->get (node) != NULL) |
fb3f88cc JH |
5263 | count++; |
5264 | } | |
5265 | ||
412288f1 | 5266 | streamer_write_uhwi (ob, count); |
fb3f88cc JH |
5267 | |
5268 | /* Process all of the functions. */ | |
f27c1867 JH |
5269 | for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei); |
5270 | lsei_next_function_in_partition (&lsei)) | |
fb3f88cc | 5271 | { |
a4a3cdd0 | 5272 | cgraph_node *node = lsei_cgraph_node (lsei); |
d52f5295 | 5273 | if (node->has_gimple_body_p () |
a4a3cdd0 | 5274 | && ipa_node_params_sum->get (node) != NULL) |
fb3f88cc JH |
5275 | ipa_write_node_info (ob, node); |
5276 | } | |
412288f1 | 5277 | streamer_write_char_stream (ob->main_stream, 0); |
fb3f88cc JH |
5278 | produce_asm (ob, NULL); |
5279 | destroy_output_block (ob); | |
5280 | } | |
5281 | ||
5282 | /* Read section in file FILE_DATA of length LEN with data DATA. */ | |
5283 | ||
5284 | static void | |
5285 | ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data, | |
5286 | size_t len) | |
5287 | { | |
5288 | const struct lto_function_header *header = | |
5289 | (const struct lto_function_header *) data; | |
4ad9a9de EB |
5290 | const int cfg_offset = sizeof (struct lto_function_header); |
5291 | const int main_offset = cfg_offset + header->cfg_size; | |
5292 | const int string_offset = main_offset + header->main_size; | |
99b1c316 | 5293 | class data_in *data_in; |
fb3f88cc JH |
5294 | unsigned int i; |
5295 | unsigned int count; | |
5296 | ||
207c68cd | 5297 | lto_input_block ib_main ((const char *) data + main_offset, |
db847fa8 | 5298 | header->main_size, file_data->mode_table); |
fb3f88cc JH |
5299 | |
5300 | data_in = | |
5301 | lto_data_in_create (file_data, (const char *) data + string_offset, | |
6e1aa848 | 5302 | header->string_size, vNULL); |
412288f1 | 5303 | count = streamer_read_uhwi (&ib_main); |
fb3f88cc JH |
5304 | |
5305 | for (i = 0; i < count; i++) | |
5306 | { | |
5307 | unsigned int index; | |
5308 | struct cgraph_node *node; | |
7380e6ef | 5309 | lto_symtab_encoder_t encoder; |
fb3f88cc | 5310 | |
412288f1 | 5311 | index = streamer_read_uhwi (&ib_main); |
7380e6ef | 5312 | encoder = file_data->symtab_node_encoder; |
d52f5295 ML |
5313 | node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder, |
5314 | index)); | |
67348ccc | 5315 | gcc_assert (node->definition); |
fb3f88cc JH |
5316 | ipa_read_node_info (&ib_main, node, data_in); |
5317 | } | |
5318 | lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data, | |
5319 | len); | |
5320 | lto_data_in_delete (data_in); | |
5321 | } | |
5322 | ||
5323 | /* Read ipcp jump functions. */ | |
5324 | ||
5325 | void | |
5326 | ipa_prop_read_jump_functions (void) | |
5327 | { | |
5328 | struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data (); | |
5329 | struct lto_file_decl_data *file_data; | |
5330 | unsigned int j = 0; | |
5331 | ||
5332 | ipa_check_create_node_params (); | |
5333 | ipa_check_create_edge_args (); | |
5334 | ipa_register_cgraph_hooks (); | |
5335 | ||
5336 | while ((file_data = file_data_vec[j++])) | |
5337 | { | |
5338 | size_t len; | |
3c56d8d8 ML |
5339 | const char *data |
5340 | = lto_get_summary_section_data (file_data, LTO_section_jump_functions, | |
5341 | &len); | |
fb3f88cc JH |
5342 | if (data) |
5343 | ipa_prop_read_section (file_data, data, len); | |
5344 | } | |
5345 | } | |
5346 | ||
2c9561b5 | 5347 | void |
04be694e | 5348 | write_ipcp_transformation_info (output_block *ob, cgraph_node *node) |
2c9561b5 MJ |
5349 | { |
5350 | int node_ref; | |
5351 | unsigned int count = 0; | |
5352 | lto_symtab_encoder_t encoder; | |
5353 | struct ipa_agg_replacement_value *aggvals, *av; | |
5354 | ||
5355 | aggvals = ipa_get_agg_replacements_for_node (node); | |
5356 | encoder = ob->decl_state->symtab_node_encoder; | |
67348ccc | 5357 | node_ref = lto_symtab_encoder_encode (encoder, node); |
2c9561b5 MJ |
5358 | streamer_write_uhwi (ob, node_ref); |
5359 | ||
5360 | for (av = aggvals; av; av = av->next) | |
5361 | count++; | |
5362 | streamer_write_uhwi (ob, count); | |
5363 | ||
5364 | for (av = aggvals; av; av = av->next) | |
5365 | { | |
7b920a9a MJ |
5366 | struct bitpack_d bp; |
5367 | ||
2c9561b5 MJ |
5368 | streamer_write_uhwi (ob, av->offset); |
5369 | streamer_write_uhwi (ob, av->index); | |
5370 | stream_write_tree (ob, av->value, true); | |
7b920a9a MJ |
5371 | |
5372 | bp = bitpack_create (ob->main_stream); | |
5373 | bp_pack_value (&bp, av->by_ref, 1); | |
5374 | streamer_write_bitpack (&bp); | |
2c9561b5 | 5375 | } |
04be694e | 5376 | |
9d3e0adc | 5377 | ipcp_transformation *ts = ipcp_get_transformation_summary (node); |
8bc5448f KV |
5378 | if (ts && vec_safe_length (ts->m_vr) > 0) |
5379 | { | |
5380 | count = ts->m_vr->length (); | |
5381 | streamer_write_uhwi (ob, count); | |
5382 | for (unsigned i = 0; i < count; ++i) | |
5383 | { | |
5384 | struct bitpack_d bp; | |
5385 | ipa_vr *parm_vr = &(*ts->m_vr)[i]; | |
5386 | bp = bitpack_create (ob->main_stream); | |
5387 | bp_pack_value (&bp, parm_vr->known, 1); | |
5388 | streamer_write_bitpack (&bp); | |
5389 | if (parm_vr->known) | |
5390 | { | |
5391 | streamer_write_enum (ob->main_stream, value_rang_type, | |
5392 | VR_LAST, parm_vr->type); | |
5393 | streamer_write_wide_int (ob, parm_vr->min); | |
5394 | streamer_write_wide_int (ob, parm_vr->max); | |
5395 | } | |
5396 | } | |
5397 | } | |
5398 | else | |
5399 | streamer_write_uhwi (ob, 0); | |
5400 | ||
209ca542 PK |
5401 | if (ts && vec_safe_length (ts->bits) > 0) |
5402 | { | |
5403 | count = ts->bits->length (); | |
5404 | streamer_write_uhwi (ob, count); | |
5405 | ||
5406 | for (unsigned i = 0; i < count; ++i) | |
5407 | { | |
86cd0334 | 5408 | const ipa_bits *bits_jfunc = (*ts->bits)[i]; |
209ca542 | 5409 | struct bitpack_d bp = bitpack_create (ob->main_stream); |
86cd0334 | 5410 | bp_pack_value (&bp, !!bits_jfunc, 1); |
209ca542 | 5411 | streamer_write_bitpack (&bp); |
86cd0334 | 5412 | if (bits_jfunc) |
209ca542 | 5413 | { |
86cd0334 MJ |
5414 | streamer_write_widest_int (ob, bits_jfunc->value); |
5415 | streamer_write_widest_int (ob, bits_jfunc->mask); | |
209ca542 PK |
5416 | } |
5417 | } | |
5418 | } | |
5419 | else | |
5420 | streamer_write_uhwi (ob, 0); | |
2c9561b5 MJ |
5421 | } |
5422 | ||
/* Stream in the aggregate value replacement chain for NODE from IB.
   DATA_IN provides access to the string/tree tables of the section.

   The layout read here must exactly mirror what
   write_ipcp_transformation_info emits: first the chain of aggregate
   replacement values, then the vector of parameter value ranges, then the
   vector of known bits.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  /* Part 1: aggregate replacement values.  Each record is
     (offset, index, value-tree, by_ref bit); records are pushed onto the
     head of the chain, so the in-memory order is the reverse of the
     on-disk order.  */
  count = streamer_read_uhwi (ib);
  for (i = 0; i <count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      /* GC-allocated because the chain lives on NODE across passes.  */
      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  /* Part 2: per-parameter value ranges.  A zero count means the writer
     had none for this node.  */
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->m_vr, count, true);
      for (i = 0; i < count; i++)
	{
	  ipa_vr *parm_vr;
	  parm_vr = &(*ts->m_vr)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_vr->known = bp_unpack_value (&bp, 1);
	  /* min/max/type are only present in the stream when the range
	     was known at write time.  */
	  if (parm_vr->known)
	    {
	      parm_vr->type = streamer_read_enum (ib, value_range_kind,
						  VR_LAST);
	      parm_vr->min = streamer_read_wide_int (ib);
	      parm_vr->max = streamer_read_wide_int (ib);
	    }
	}
    }

  /* Part 3: per-parameter known bits (value/mask pairs).  Entries whose
     "known" bit is clear stay NULL in the grown-cleared vector.  */
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->bits, count, true);

      for (i = 0; i < count; i++)
	{
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool known = bp_unpack_value (&bp, 1);
	  if (known)
	    {
	      const widest_int value = streamer_read_widest_int (ib);
	      const widest_int mask = streamer_read_widest_int (ib);
	      /* Deduplicated via the ipa_bits hash table.  */
	      ipa_bits *bits
		= ipa_get_ipa_bits_for_value (value, mask);
	      (*ts->bits)[i] = bits;
	    }
	}
    }
}
5493 | ||
5494 | /* Write all aggregate replacement for nodes in set. */ | |
5495 | ||
5496 | void | |
04be694e | 5497 | ipcp_write_transformation_summaries (void) |
2c9561b5 MJ |
5498 | { |
5499 | struct cgraph_node *node; | |
5500 | struct output_block *ob; | |
5501 | unsigned int count = 0; | |
5502 | lto_symtab_encoder_iterator lsei; | |
5503 | lto_symtab_encoder_t encoder; | |
5504 | ||
2c9561b5 MJ |
5505 | ob = create_output_block (LTO_section_ipcp_transform); |
5506 | encoder = ob->decl_state->symtab_node_encoder; | |
0b83e688 | 5507 | ob->symbol = NULL; |
2c9561b5 MJ |
5508 | for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei); |
5509 | lsei_next_function_in_partition (&lsei)) | |
5510 | { | |
5511 | node = lsei_cgraph_node (lsei); | |
04be694e | 5512 | if (node->has_gimple_body_p ()) |
2c9561b5 MJ |
5513 | count++; |
5514 | } | |
5515 | ||
5516 | streamer_write_uhwi (ob, count); | |
5517 | ||
5518 | for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei); | |
5519 | lsei_next_function_in_partition (&lsei)) | |
5520 | { | |
5521 | node = lsei_cgraph_node (lsei); | |
04be694e MJ |
5522 | if (node->has_gimple_body_p ()) |
5523 | write_ipcp_transformation_info (ob, node); | |
2c9561b5 MJ |
5524 | } |
5525 | streamer_write_char_stream (ob->main_stream, 0); | |
5526 | produce_asm (ob, NULL); | |
5527 | destroy_output_block (ob); | |
5528 | } | |
5529 | ||
5530 | /* Read replacements section in file FILE_DATA of length LEN with data | |
5531 | DATA. */ | |
5532 | ||
5533 | static void | |
5534 | read_replacements_section (struct lto_file_decl_data *file_data, | |
5535 | const char *data, | |
5536 | size_t len) | |
5537 | { | |
5538 | const struct lto_function_header *header = | |
5539 | (const struct lto_function_header *) data; | |
5540 | const int cfg_offset = sizeof (struct lto_function_header); | |
5541 | const int main_offset = cfg_offset + header->cfg_size; | |
5542 | const int string_offset = main_offset + header->main_size; | |
99b1c316 | 5543 | class data_in *data_in; |
2c9561b5 MJ |
5544 | unsigned int i; |
5545 | unsigned int count; | |
5546 | ||
207c68cd | 5547 | lto_input_block ib_main ((const char *) data + main_offset, |
db847fa8 | 5548 | header->main_size, file_data->mode_table); |
2c9561b5 MJ |
5549 | |
5550 | data_in = lto_data_in_create (file_data, (const char *) data + string_offset, | |
6e1aa848 | 5551 | header->string_size, vNULL); |
2c9561b5 MJ |
5552 | count = streamer_read_uhwi (&ib_main); |
5553 | ||
5554 | for (i = 0; i < count; i++) | |
5555 | { | |
5556 | unsigned int index; | |
5557 | struct cgraph_node *node; | |
5558 | lto_symtab_encoder_t encoder; | |
5559 | ||
5560 | index = streamer_read_uhwi (&ib_main); | |
5561 | encoder = file_data->symtab_node_encoder; | |
d52f5295 ML |
5562 | node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder, |
5563 | index)); | |
67348ccc | 5564 | gcc_assert (node->definition); |
04be694e | 5565 | read_ipcp_transformation_info (&ib_main, node, data_in); |
2c9561b5 MJ |
5566 | } |
5567 | lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data, | |
5568 | len); | |
5569 | lto_data_in_delete (data_in); | |
5570 | } | |
5571 | ||
5572 | /* Read IPA-CP aggregate replacements. */ | |
5573 | ||
5574 | void | |
04be694e | 5575 | ipcp_read_transformation_summaries (void) |
2c9561b5 MJ |
5576 | { |
5577 | struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data (); | |
5578 | struct lto_file_decl_data *file_data; | |
5579 | unsigned int j = 0; | |
5580 | ||
5581 | while ((file_data = file_data_vec[j++])) | |
5582 | { | |
5583 | size_t len; | |
3c56d8d8 ML |
5584 | const char *data |
5585 | = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform, | |
5586 | &len); | |
2c9561b5 MJ |
5587 | if (data) |
5588 | read_replacements_section (file_data, data, len); | |
5589 | } | |
5590 | } | |
5591 | ||
5592 | /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in | |
5bc4cb04 MJ |
5593 | NODE but also if any parameter was IPA-SRAed into a scalar go ahead with |
5594 | substitution of the default_definitions of that new param with the | |
5595 | appropriate constant. | |
2c9561b5 | 5596 | |
5bc4cb04 MJ |
5597 | Return two bools. the first it true if at least one item in AGGVAL still |
5598 | exists and function body walk should go ahead. The second is true if any | |
5599 | values were already substituted for scalarized parameters and update_cfg | |
5600 | shuld be run after replace_uses_by. */ | |
5601 | ||
5602 | static std::pair<bool, bool> | |
5603 | adjust_agg_replacement_values (cgraph_node *node, | |
5604 | ipa_agg_replacement_value *aggval, | |
5605 | const vec<ipa_param_descriptor, va_gc> | |
5606 | &descriptors) | |
2c9561b5 MJ |
5607 | { |
5608 | struct ipa_agg_replacement_value *v; | |
ae7a23a3 | 5609 | clone_info *cinfo = clone_info::get (node); |
ae7a23a3 | 5610 | if (!cinfo || !cinfo->param_adjustments) |
5bc4cb04 | 5611 | return std::pair<bool, bool> (true, false); |
2c9561b5 | 5612 | |
5bc4cb04 MJ |
5613 | bool anything_left = false; |
5614 | bool done_replacement = false; | |
2c9561b5 MJ |
5615 | for (v = aggval; v; v = v->next) |
5616 | { | |
ff6686d2 | 5617 | gcc_checking_assert (v->index >= 0); |
2c9561b5 | 5618 | |
5bc4cb04 MJ |
5619 | unsigned unit_offset = v->offset / BITS_PER_UNIT; |
5620 | tree cst_type = TREE_TYPE (v->value); | |
5621 | int split_idx; | |
5622 | int new_idx | |
5623 | = cinfo->param_adjustments->get_updated_index_or_split (v->index, | |
5624 | unit_offset, | |
5625 | cst_type, | |
5626 | &split_idx); | |
5627 | v->index = new_idx; | |
5628 | if (new_idx >= 0) | |
5629 | anything_left = true; | |
5630 | else if (split_idx >= 0) | |
5631 | { | |
5632 | tree parm = ipa_get_param (descriptors, split_idx); | |
5633 | tree ddef = ssa_default_def (cfun, parm); | |
5634 | if (ddef) | |
5635 | { | |
5636 | replace_uses_by (ddef, v->value); | |
5637 | done_replacement = true; | |
5638 | } | |
5639 | } | |
ff6686d2 | 5640 | } |
5bc4cb04 | 5641 | return std::pair<bool, bool> (anything_left, done_replacement); |
2c9561b5 MJ |
5642 | } |
5643 | ||
8aab5218 MJ |
5644 | /* Dominator walker driving the ipcp modification phase. */ |
5645 | ||
5646 | class ipcp_modif_dom_walker : public dom_walker | |
5647 | { | |
5648 | public: | |
56b40062 | 5649 | ipcp_modif_dom_walker (struct ipa_func_body_info *fbi, |
f65f1ae3 | 5650 | vec<ipa_param_descriptor, va_gc> *descs, |
8aab5218 | 5651 | struct ipa_agg_replacement_value *av, |
8ddce3f7 | 5652 | bool *sc) |
8aab5218 | 5653 | : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs), |
8ddce3f7 | 5654 | m_aggval (av), m_something_changed (sc) {} |
8aab5218 | 5655 | |
3daacdcd | 5656 | virtual edge before_dom_children (basic_block); |
8ddce3f7 RB |
5657 | bool cleanup_eh () |
5658 | { return gimple_purge_all_dead_eh_edges (m_need_eh_cleanup); } | |
8aab5218 MJ |
5659 | |
5660 | private: | |
56b40062 | 5661 | struct ipa_func_body_info *m_fbi; |
f65f1ae3 | 5662 | vec<ipa_param_descriptor, va_gc> *m_descriptors; |
8aab5218 | 5663 | struct ipa_agg_replacement_value *m_aggval; |
8ddce3f7 RB |
5664 | bool *m_something_changed; |
5665 | auto_bitmap m_need_eh_cleanup; | |
8aab5218 MJ |
5666 | }; |
5667 | ||
3daacdcd | 5668 | edge |
8aab5218 MJ |
5669 | ipcp_modif_dom_walker::before_dom_children (basic_block bb) |
5670 | { | |
5671 | gimple_stmt_iterator gsi; | |
5672 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
5673 | { | |
5674 | struct ipa_agg_replacement_value *v; | |
355fe088 | 5675 | gimple *stmt = gsi_stmt (gsi); |
8aab5218 | 5676 | tree rhs, val, t; |
86003645 RS |
5677 | HOST_WIDE_INT offset; |
5678 | poly_int64 size; | |
8aab5218 MJ |
5679 | int index; |
5680 | bool by_ref, vce; | |
5681 | ||
5682 | if (!gimple_assign_load_p (stmt)) | |
5683 | continue; | |
5684 | rhs = gimple_assign_rhs1 (stmt); | |
5685 | if (!is_gimple_reg_type (TREE_TYPE (rhs))) | |
5686 | continue; | |
2c9561b5 | 5687 | |
8aab5218 MJ |
5688 | vce = false; |
5689 | t = rhs; | |
5690 | while (handled_component_p (t)) | |
5691 | { | |
5692 | /* V_C_E can do things like convert an array of integers to one | |
5693 | bigger integer and similar things we do not handle below. */ | |
b66113e9 | 5694 | if (TREE_CODE (t) == VIEW_CONVERT_EXPR) |
8aab5218 MJ |
5695 | { |
5696 | vce = true; | |
5697 | break; | |
5698 | } | |
5699 | t = TREE_OPERAND (t, 0); | |
5700 | } | |
5701 | if (vce) | |
5702 | continue; | |
5703 | ||
ff302741 PB |
5704 | if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index, |
5705 | &offset, &size, &by_ref)) | |
8aab5218 MJ |
5706 | continue; |
5707 | for (v = m_aggval; v; v = v->next) | |
5708 | if (v->index == index | |
5709 | && v->offset == offset) | |
5710 | break; | |
5711 | if (!v | |
5712 | || v->by_ref != by_ref | |
86003645 RS |
5713 | || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))), |
5714 | size)) | |
8aab5218 MJ |
5715 | continue; |
5716 | ||
5717 | gcc_checking_assert (is_gimple_ip_invariant (v->value)); | |
5718 | if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value))) | |
5719 | { | |
5720 | if (fold_convertible_p (TREE_TYPE (rhs), v->value)) | |
5721 | val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value); | |
5722 | else if (TYPE_SIZE (TREE_TYPE (rhs)) | |
5723 | == TYPE_SIZE (TREE_TYPE (v->value))) | |
5724 | val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value); | |
5725 | else | |
5726 | { | |
5727 | if (dump_file) | |
5728 | { | |
5729 | fprintf (dump_file, " const "); | |
ef6cb4c7 | 5730 | print_generic_expr (dump_file, v->value); |
8aab5218 | 5731 | fprintf (dump_file, " can't be converted to type of "); |
ef6cb4c7 | 5732 | print_generic_expr (dump_file, rhs); |
8aab5218 MJ |
5733 | fprintf (dump_file, "\n"); |
5734 | } | |
5735 | continue; | |
5736 | } | |
5737 | } | |
5738 | else | |
5739 | val = v->value; | |
5740 | ||
5741 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
5742 | { | |
5743 | fprintf (dump_file, "Modifying stmt:\n "); | |
ef6cb4c7 | 5744 | print_gimple_stmt (dump_file, stmt, 0); |
8aab5218 MJ |
5745 | } |
5746 | gimple_assign_set_rhs_from_tree (&gsi, val); | |
5747 | update_stmt (stmt); | |
5748 | ||
5749 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
5750 | { | |
5751 | fprintf (dump_file, "into:\n "); | |
ef6cb4c7 | 5752 | print_gimple_stmt (dump_file, stmt, 0); |
8aab5218 MJ |
5753 | fprintf (dump_file, "\n"); |
5754 | } | |
5755 | ||
5756 | *m_something_changed = true; | |
8ddce3f7 RB |
5757 | if (maybe_clean_eh_stmt (stmt)) |
5758 | bitmap_set_bit (m_need_eh_cleanup, bb->index); | |
8aab5218 | 5759 | } |
3daacdcd | 5760 | return NULL; |
8aab5218 MJ |
5761 | } |
5762 | ||
c7ac9a0c JH |
5763 | /* Return true if we have recorded VALUE and MASK about PARM. |
5764 | Set VALUE and MASk accordingly. */ | |
5765 | ||
5766 | bool | |
5767 | ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask) | |
5768 | { | |
5769 | cgraph_node *cnode = cgraph_node::get (current_function_decl); | |
5770 | ipcp_transformation *ts = ipcp_get_transformation_summary (cnode); | |
5771 | if (!ts || vec_safe_length (ts->bits) == 0) | |
5772 | return false; | |
5773 | ||
5774 | int i = 0; | |
5775 | for (tree p = DECL_ARGUMENTS (current_function_decl); | |
5776 | p != parm; p = DECL_CHAIN (p)) | |
5777 | { | |
5778 | i++; | |
5779 | /* Ignore static chain. */ | |
5780 | if (!p) | |
5781 | return false; | |
5782 | } | |
5783 | ||
ae7a23a3 JH |
5784 | clone_info *cinfo = clone_info::get (cnode); |
5785 | if (cinfo && cinfo->param_adjustments) | |
c7ac9a0c | 5786 | { |
ae7a23a3 | 5787 | i = cinfo->param_adjustments->get_original_index (i); |
c7ac9a0c JH |
5788 | if (i < 0) |
5789 | return false; | |
5790 | } | |
5791 | ||
5792 | vec<ipa_bits *, va_gc> &bits = *ts->bits; | |
5793 | if (!bits[i]) | |
5794 | return false; | |
5795 | *mask = bits[i]->mask; | |
5796 | *value = wide_int_to_tree (TREE_TYPE (parm), bits[i]->value); | |
5797 | return true; | |
5798 | } | |
5799 | ||

/* Update bits info of formal parameters as described in
   ipcp_transformation: for integral parameters record the known nonzero
   bits on the default-def SSA name, for pointer parameters derive and
   record alignment information.  */

static void
ipcp_update_bits (struct cgraph_node *node)
{
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);

  if (!ts || vec_safe_length (ts->bits) == 0)
    return;
  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();
  if (!count)
    return;

  /* If NODE is a clone with adjusted parameters, build the map from the
     summary's (original) parameter indices to the clone's indices.  */
  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  clone_info *cinfo = clone_info::get (node);
  if (cinfo && cinfo->param_adjustments)
    {
      cinfo->param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      if (need_remapping)
	{
	  /* Skip parameters that were removed in the clone.  */
	  if (i >= new_indices.length ())
	    continue;
	  int idx = new_indices[i];
	  if (idx < 0)
	    continue;
	  parm = parm_decls[idx];
	}
      else
	parm = parm_decls[i];
      gcc_checking_assert (parm);


      /* Only integral or pointer SSA parameters with recorded bits are
	 interesting.  */
      if (!bits[i]
	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
	       || POINTER_TYPE_P (TREE_TYPE (parm)))
	  || !is_gimple_reg (parm))
	continue;

      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
	  print_hex (bits[i]->mask, dump_file);
	  fprintf (dump_file, "\n");
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	{
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

	  /* Bits set in mask are unknown, bits set in value are known
	     ones; their union bounds the possibly-nonzero bits.  */
	  wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
				  | wide_int::from (bits[i]->value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
	  /* Pointer case: the lowest set bit of the mask gives the known
	     alignment, the value's low bits give the misalignment.  */
	  unsigned tem = bits[i]->mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);

	      /* Never weaken alignment info that is already stronger.  */
	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n", old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
				 old_misalign, misalign);
		    }
		  continue;
		}

	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}
5910 | ||
523fe5b6 AH |
5911 | bool |
5912 | ipa_vr::nonzero_p (tree expr_type) const | |
5913 | { | |
5914 | if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0)) | |
5915 | return true; | |
5916 | ||
5917 | unsigned prec = TYPE_PRECISION (expr_type); | |
5918 | return (type == VR_RANGE | |
19849d15 | 5919 | && TYPE_UNSIGNED (expr_type) |
523fe5b6 AH |
5920 | && wi::eq_p (min, wi::one (prec)) |
5921 | && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type)))); | |
5922 | } | |
5923 | ||
8bc5448f | 5924 | /* Update value range of formal parameters as described in |
9d3e0adc | 5925 | ipcp_transformation. */ |
8bc5448f KV |
5926 | |
5927 | static void | |
5928 | ipcp_update_vr (struct cgraph_node *node) | |
5929 | { | |
9d3e0adc | 5930 | ipcp_transformation *ts = ipcp_get_transformation_summary (node); |
8bc5448f KV |
5931 | if (!ts || vec_safe_length (ts->m_vr) == 0) |
5932 | return; | |
5933 | const vec<ipa_vr, va_gc> &vr = *ts->m_vr; | |
5934 | unsigned count = vr.length (); | |
ff6686d2 MJ |
5935 | if (!count) |
5936 | return; | |
8bc5448f | 5937 | |
ff6686d2 MJ |
5938 | auto_vec<int, 16> new_indices; |
5939 | bool need_remapping = false; | |
ae7a23a3 JH |
5940 | clone_info *cinfo = clone_info::get (node); |
5941 | if (cinfo && cinfo->param_adjustments) | |
8bc5448f | 5942 | { |
ae7a23a3 | 5943 | cinfo->param_adjustments->get_updated_indices (&new_indices); |
ff6686d2 MJ |
5944 | need_remapping = true; |
5945 | } | |
5946 | auto_vec <tree, 16> parm_decls; | |
5947 | push_function_arg_decls (&parm_decls, node->decl); | |
5948 | ||
5949 | for (unsigned i = 0; i < count; ++i) | |
5950 | { | |
5951 | tree parm; | |
5952 | int remapped_idx; | |
5953 | if (need_remapping) | |
5954 | { | |
5955 | if (i >= new_indices.length ()) | |
5956 | continue; | |
5957 | remapped_idx = new_indices[i]; | |
5958 | if (remapped_idx < 0) | |
5959 | continue; | |
5960 | } | |
5961 | else | |
5962 | remapped_idx = i; | |
5963 | ||
5964 | parm = parm_decls[remapped_idx]; | |
5965 | ||
8bc5448f | 5966 | gcc_checking_assert (parm); |
8bc5448f KV |
5967 | tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm); |
5968 | ||
5969 | if (!ddef || !is_gimple_reg (parm)) | |
5970 | continue; | |
5971 | ||
5972 | if (vr[i].known | |
8bc5448f KV |
5973 | && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE)) |
5974 | { | |
5975 | tree type = TREE_TYPE (ddef); | |
5976 | unsigned prec = TYPE_PRECISION (type); | |
718625ad KV |
5977 | if (INTEGRAL_TYPE_P (TREE_TYPE (ddef))) |
5978 | { | |
5979 | if (dump_file) | |
5980 | { | |
ff6686d2 MJ |
5981 | fprintf (dump_file, "Setting value range of param %u " |
5982 | "(now %i) ", i, remapped_idx); | |
718625ad KV |
5983 | fprintf (dump_file, "%s[", |
5984 | (vr[i].type == VR_ANTI_RANGE) ? "~" : ""); | |
5985 | print_decs (vr[i].min, dump_file); | |
5986 | fprintf (dump_file, ", "); | |
5987 | print_decs (vr[i].max, dump_file); | |
5988 | fprintf (dump_file, "]\n"); | |
5989 | } | |
6ccc4356 AH |
5990 | value_range v (type, |
5991 | wide_int_storage::from (vr[i].min, prec, | |
5992 | TYPE_SIGN (type)), | |
5993 | wide_int_storage::from (vr[i].max, prec, | |
5994 | TYPE_SIGN (type)), | |
5995 | vr[i].type); | |
5996 | set_range_info (ddef, v); | |
718625ad KV |
5997 | } |
5998 | else if (POINTER_TYPE_P (TREE_TYPE (ddef)) | |
523fe5b6 | 5999 | && vr[i].nonzero_p (TREE_TYPE (ddef))) |
8bc5448f | 6000 | { |
718625ad KV |
6001 | if (dump_file) |
6002 | fprintf (dump_file, "Setting nonnull for %u\n", i); | |
6003 | set_ptr_nonnull (ddef); | |
8bc5448f | 6004 | } |
8bc5448f KV |
6005 | } |
6006 | } | |
6007 | } | |
6008 | ||
/* IPCP transformation phase doing propagation of aggregate values.
   Applies the recorded IPA-CP results to NODE's body: updates known bits
   and value ranges of parameters, then walks the body replacing loads
   from known-constant aggregate parameter pieces.  Returns a TODO set
   for the pass manager (TODO_update_ssa_only_virtuals if memory accesses
   were modified, 0 otherwise).  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
	     node->dump_name ());

  /* Propagate scalar information first; it needs no body walk.  */
  ipcp_update_bits (node);
  ipcp_update_vr (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  vec_safe_grow_cleared (descriptors, param_count, true);
  ipa_populate_param_decls (node, *descriptors);
  /* rr.first: any aggregate replacement survived the clone's parameter
     adjustments; rr.second: uses were already substituted and the CFG may
     need cleaning.  */
  std::pair<bool, bool> rr
    = adjust_agg_replacement_values (node, aggval, *descriptors);
  bool cfg_changed = rr.second;
  if (!rr.first)
    {
      vec_free (descriptors);
      if (dump_file)
	fprintf (dump_file, "  All affected aggregate parameters were either "
		 "removed or converted into scalars, phase done.\n");
      if (cfg_changed)
	delete_unreachable_blocks_update_callgraph (node, false);
      return 0;
    }
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  /* Set up the function-body analysis context for the dominator walk.  */
  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
  fbi.param_count = param_count;
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  bool modified_mem_access = false;
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker walker (&fbi, descriptors, aggval, &modified_mem_access);
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  free_dominance_info (CDI_DOMINATORS);
  /* Purging dead EH edges may also change the CFG.  */
  cfg_changed |= walker.cleanup_eh ();

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();

  /* The summary has been consumed; clear it so it is not applied again.  */
  ipcp_transformation *s = ipcp_transformation_sum->get (node);
  s->agg_values = NULL;
  s->bits = NULL;
  s->m_vr = NULL;

  vec_free (descriptors);
  if (cfg_changed)
    delete_unreachable_blocks_update_callgraph (node, false);

  return modified_mem_access ? TODO_update_ssa_only_virtuals : 0;
}
86cd0334 | 6083 | |
ac6f2e59 | 6084 | |
eb270950 | 6085 | /* Return true if OTHER describes same agg value. */ |
ac6f2e59 | 6086 | bool |
eb270950 | 6087 | ipa_agg_value::equal_to (const ipa_agg_value &other) |
ac6f2e59 JH |
6088 | { |
6089 | return offset == other.offset | |
6090 | && operand_equal_p (value, other.value, 0); | |
6091 | } | |
9d5af1db MJ |
6092 | |
6093 | /* Destructor also removing individual aggregate values. */ | |
6094 | ||
6095 | ipa_auto_call_arg_values::~ipa_auto_call_arg_values () | |
6096 | { | |
6097 | ipa_release_agg_values (m_known_aggs, false); | |
6098 | } | |
6099 | ||
6100 | ||
6101 | ||
86cd0334 | 6102 | #include "gt-ipa-prop.h" |